code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
from mysql import connector
def get_db_connection():
    """Return a MySQL connection to the SMARTHOUSE database.

    If connecting fails because the database does not exist yet
    (``DatabaseError``), a server-level connection is opened, the
    SMARTHOUSE schema is created, and the original connection attempt is
    retried once via recursion.

    Returns:
        mysql.connector connection bound to the SMARTHOUSE database.

    Raises:
        mysql.connector.Error: if the server is unreachable or the
            CREATE DATABASE statement itself fails.
    """
    # NOTE(review): credentials are hard-coded; move to config/env vars.
    try:
        return connector.connect(host="server_database_1", user="root", password="password1234", database="SMARTHOUSE")
    except connector.errors.DatabaseError:
        # Schema is missing: connect without selecting a database,
        # create it, then retry. Close the bootstrap cursor/connection
        # so we do not leak a server connection on every cold start.
        connection = connector.connect(host="server_database_1", user="root", password="password1234")
        try:
            cursor = connection.cursor()
            try:
                cursor.execute("CREATE DATABASE SMARTHOUSE")
            finally:
                cursor.close()
        finally:
            connection.close()
        return get_db_connection()
|
normal
|
{
"blob_id": "6cb97e6f3c7ba312ec1458fd51635508a16f70dd",
"index": 2957,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_db_connection():\n try:\n return connector.connect(host='server_database_1', user='root',\n password='password1234', database='SMARTHOUSE')\n except connector.errors.DatabaseError:\n connection = connector.connect(host='server_database_1', user=\n 'root', password='password1234')\n cursor = connection.cursor()\n cursor.execute('CREATE DATABASE SMARTHOUSE')\n return get_db_connection()\n",
"step-3": "from mysql import connector\n\n\ndef get_db_connection():\n try:\n return connector.connect(host='server_database_1', user='root',\n password='password1234', database='SMARTHOUSE')\n except connector.errors.DatabaseError:\n connection = connector.connect(host='server_database_1', user=\n 'root', password='password1234')\n cursor = connection.cursor()\n cursor.execute('CREATE DATABASE SMARTHOUSE')\n return get_db_connection()\n",
"step-4": "from mysql import connector\n\n\ndef get_db_connection():\n try:\n return connector.connect(host=\"server_database_1\", user=\"root\", password=\"password1234\", database=\"SMARTHOUSE\")\n except connector.errors.DatabaseError:\n connection = connector.connect(host=\"server_database_1\", user=\"root\", password=\"password1234\")\n cursor = connection.cursor()\n cursor.execute(\"CREATE DATABASE SMARTHOUSE\")\n return get_db_connection()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# Ben Suay, RAIL
# May 2013
# Worcester Polytechnic Institute
#
# http://openrave.org/docs/latest_stable/command_line_tools/
# openrave-robot.py /your/path/to/your.robot.xml --info=joints
# On that page you can find more examples on how to use openrave-robot.py.
from openravepy import *
import sys
if not __openravepy_build_doc__:
from openravepy import *
from numpy import *
import numpy
import time
from rodrigues import *
from TransformMatrix import *
from str2num import *
from TSR import *
from math import *
from copy import *
import os # for file operations
from RaveCBiRRT import *
from base_wheel_turning import *
class HuboPlusWheelTurning( BaseWheelTurning ):
    """Plan and execute a wheel-turning motion for the HuboPlus robot.

    Uses the CBiRRT OpenRAVE plugin to plan constrained full-body
    trajectories through four phases:
    initconfig --> startik (grasp) --> goalik (wheel turned) -->
    startik --> initconfig, then hands the result to self.Playback().

    Python 2 / OpenRAVE script; relies on members set up by
    BaseWheelTurning (self.env, self.robotid, self.crankid, ...).
    """

    def __init__(self,
                 HuboModelPath = '../../openHubo/huboplus/rlhuboplus.robot.xml',
                 WheelModelPath = '../../../drc_common/models/driving_wheel.robot.xml' ):
        """Load the robot and driving-wheel models and set UI defaults.

        :param HuboModelPath: path to the rlhuboplus robot XML model
        :param WheelModelPath: path to the driving wheel robot XML model
        """
        BaseWheelTurning.__init__( self, HuboModelPath, WheelModelPath )

        # Set those variables to show or hide the interface
        # Do it using the member functions
        self.StopAtKeyStrokes = False
        self.ShowUserInterface = False
        self.ViewerStarted = False

        # Right Hand Joints
        # Open - Closed Values
        self.rhanddofs = range(27,42)
        self.rhandclosevals = [0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0, 0, 1.2]
        self.rhandopenvals = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08]

        # Left Hand Joints
        self.lhanddofs = range(42,57)
        self.lhandclosevals = [0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0, 0, 1.2]
        self.lhandopenvals = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08]

    def SetRobotConfiguration(self,jointValues):
        """Set the robot's full-body pose from a joint-name -> value dict.

        :param jointValues: dict mapping Hubo joint names ('HPY', 'RHY',
            ..., 'HNP') to joint values.  Finger DOFs 27-56 are zeroed.

        NOTE(review): index 4 is filled from jointValues['HPY'] again
        (same as index 0) - looks like it should be 'RHP'; confirm
        against the robot XML joint ordering before changing.
        """
        print "SetRobotConfiguration"
        values = []
        values.append( jointValues['HPY'] ) # 0
        values.append( jointValues['RHY'] ) # 1
        values.append( jointValues['LHY'] ) # 2
        values.append( jointValues['RHR'] ) # 3
        values.append( jointValues['HPY'] ) # 4
        values.append( jointValues['LHR'] ) # 5
        values.append( jointValues['LHP'] ) # 6
        values.append( jointValues['RKP'] ) # 7
        values.append( jointValues['LKP'] ) # 8
        values.append( jointValues['RAP'] ) # 9
        values.append( jointValues['LAP'] ) # 10
        values.append( jointValues['RAR'] ) # 11
        values.append( jointValues['LAR'] ) # 12
        values.append( jointValues['RSP'] ) # 13
        values.append( jointValues['LSP'] ) # 14
        values.append( jointValues['RSR'] ) # 15
        values.append( jointValues['LSR'] ) # 16
        values.append( jointValues['RSY'] ) # 17
        values.append( jointValues['LSY'] ) # 18
        values.append( jointValues['REP'] ) # 19
        values.append( jointValues['LEP'] ) # 20
        values.append( jointValues['RWY'] ) # 21
        values.append( jointValues['LWY'] ) # 22
        values.append( jointValues['RWP'] ) # 23
        values.append( jointValues['LWP'] ) # 24
        values.append( jointValues['HNR'] ) # 25
        values.append( jointValues['HNP'] ) # 26

        # Finger joints are not driven from jointValues; zero them all.
        for i in range(27,57):
            values.append(0)

# values.append( jointValues['rightIndexKnuckle2'] ) # 27
# values.append( jointValues['rightIndexKnuckle3'] ) # 28
# values.append( jointValues['rightIndexKnuckle1'] ) # 29
# values.append( jointValues['rightMiddleKnuckle2'] ) # 30
# values.append( jointValues['rightMiddleKnuckle3'] ) # 31
# values.append( jointValues['rightMiddleKnuckle1'] ) # 32
# values.append( jointValues['rightRingKnuckle2'] ) # 33
# values.append( jointValues['rightRingKnuckle3'] ) # 34
# values.append( jointValues['rightRingKnuckle1'] ) # 35
# values.append( jointValues['rightPinkyKnuckle2'] ) # 36
# values.append( jointValues['rightPinkyKnuckle3'] ) # 37
# values.append( jointValues['rightPinkyKnuckle1'] ) # 38
# values.append( jointValues['rightThumbKnuckle2'] ) # 39
# values.append( jointValues['rightThumbKnuckle3'] ) # 40
# values.append( jointValues['rightThumbKnuckle1'] ) # 41
# values.append( jointValues['leftIndexKnuckle2'] ) # 42
# values.append( jointValues['leftIndexKnuckle3'] ) # 43
# values.append( jointValues['leftIndexKnuckle1'] ) # 44
# values.append( jointValues['leftMiddleKnuckle2'] ) # 45
# values.append( jointValues['leftMiddleKnuckle3'] ) # 46
# values.append( jointValues['leftMiddleKnuckle1'] ) # 47
# values.append( jointValues['leftRingKnuckle2'] ) # 48
# values.append( jointValues['leftRingKnuckle3'] ) # 49
# values.append( jointValues['leftRingKnuckle1'] ) # 50
# values.append( jointValues['leftPinkyKnuckle2'] ) # 51
# values.append( jointValues['leftPinkyKnuckle3'] ) # 52
# values.append( jointValues['leftPinkyKnuckle1'] ) # 53
# values.append( jointValues['leftThumbKnuckle2'] ) # 54
# values.append( jointValues['leftThumbKnuckle3'] ) # 55
# values.append( jointValues['leftThumbKnuckle1'] ) # 56
        self.robotid.SetDOFValues( values )

    def Run(self):
        """Plan and play the complete wheel-turning sequence.

        Returns the result of self.Playback() on success, or an empty
        list [] if any planning call, trajectory playback, or file
        rename fails along the way.
        """
        self.RemoveFiles()

        # This is a list of handles of the objects that are
        # drawn on the screen in OpenRAVE Qt-Viewer.
        # Keep appending to the end, and pop() if you want to delete.
        handles = []

        normalsmoothingitrs = 150;
        fastsmoothingitrs = 20;

        self.StartViewerAndSetWheelPos( handles )

        # Wheel Joint Index
        crankjointind = 0
        # Set the wheel joints back to 0 for replanning
        self.crankid.SetDOFValues([0],[crankjointind])
        self.crankid.GetController().Reset(0)

        manips = self.robotid.GetManipulators()
        crankmanip = self.crankid.GetManipulators()

        # Problem instances for the CBiRRT plugin, one per kinbody.
        try:
            cbirrtHubo = RaveCBiRRT(self.env,'rlhuboplus')
            cbirrtWheel = RaveCBiRRT(self.env,'crank')
        except openrave_exception, e:
            print e
            return []

        # Keep Active Joint Indices
        # Note that 0 is the driving wheel
        #activedofs = [0]
        activedofs = []
        for m in manips:
            # print m.GetArmIndices()
            activedofs.extend(m.GetArmIndices())

        # Sort Active Joint Indices
        activedofs.sort()
        #print activedofs

        # Set Elbows and Thumbs Joint Values
        self.robotid.SetDOFValues([-0.95,-0.95,1,1],[19,20,41,56])
        self.robotid.SetActiveDOFs(activedofs)

        # Current configuration of the robot is its initial configuration
        initconfig = self.robotid.GetActiveDOFValues()

        print "robot init config : "
        print initconfig

        # List of Robot Links
        links = self.robotid.GetLinks()

        # List of Wheel (Crank Links)
        cranklinks = self.crankid.GetLinks()

        # End Effector Transforms
        Tee = []
        for i in range(len(manips)):
            # Returns End Effector Transform in World Coordinates
            Tlink = manips[i].GetEndEffectorTransform()
            Tee.append(Tlink)

        # Get Transformation Matrix for the Wheel
        # Note that crank's links are not rotated
        # If you want use the wheel's end effector's transformation
        # matrix (which is 23 degrees tilted) then see
        # CTee matrix below.
        #
        # crank has two links:
        # 0) pole - the blue cylinder in the model, and,
        # 1) crank - the driving wheel itself.
        jointtm = cranklinks[0].GetTransform()
        # handles.append(misc.DrawAxes(env,matrix(jointtm),1))

        # We can also get the transformation matrix
        # with the following command as a string
        jointtm_str = cbirrtHubo.solve('GetJointTransform name crank jointind '+str(crankjointind))
        # And then we can convert the string to a 1x12 array
        jointtm_str = jointtm_str.replace(" ",",")
        # NOTE(review): eval on plugin output; trusted local plugin, but
        # a literal parser would be safer if this ever takes user input.
        jointtm_num = eval('['+jointtm_str+']')

        # In this script we will use jointtm.
        # jointtm_str and jointtm_num are given as example.

        # Crank Transform End Effector in World Coordinates
        # This is the transformation matrix of the end effector
        # named "dummy" in the xml file.
        # Note that dummy is tilted 23 degress around its X-Axis
        CTee = crankmanip[0].GetEndEffectorTransform()

        tilt_angle_deg = acos(dot(linalg.inv(CTee),jointtm)[1,1])*180/pi
        tilt_angle_rad = acos(dot(linalg.inv(CTee),jointtm)[1,1])

        # Center of Gravity Target
        cogtarg = [-0.05, 0.085, 0]
        #if self.ShowUserInterface :
        #cogtm = MakeTransform(rodrigues([0,0,0]),transpose(matrix(cogtarg)))
        #handles.append(misc.DrawAxes(self.env,cogtm,1))

        # polyscale: changes the scale of the support polygon
        # polytrans: shifts the support polygon around
        footlinknames = ' Body_RAR Body_LAR polyscale 0.5 0.5 0 polytrans -0.015 0 0 '
        #footlinknames = ' Body_RAR Body_LAR polyscale 0.7 0.5 0 polytrans -0.015 0 0 '
        #footlinknames = ' Body_RAR Body_LAR polyscale 1.0 1.0 0 polytrans 0 0 0 '

        # What is this?
        handrot = rodrigues([0,-pi/2,0])

        # Translation Offset from the wheel center for the hands
        transoffset = [0, 0.15, 0];

        # Figure out where to put the left hand on the wheel
        temp = dot(CTee, MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0]))))
        temp = dot(temp, MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0]))))

        # Left Hand Pose in World Coordinates
        T0_LH1 = dot(temp, MakeTransform(rodrigues([0,0,0]),transpose(matrix([0,0.15,0]))))

        # Uncomment if you want to see where T0_LH1 is
        # handles.append(misc.DrawAxes(env,matrix(T0_LH1),1))

        # Figure out where to put the right hand on the wheel
        temp = dot(CTee, MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0]))))
        temp = dot(temp, MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0]))))
        # Right Hand Pose in World Coordinates
        T0_RH1 = dot(temp, MakeTransform(rodrigues([0,0,0]),transpose(matrix([0,-0.15,0]))))

        # Uncomment if you want to see where T0_RH1 is
        # handles.append(misc.DrawAxes(env,matrix(T0_RH1),1))

        # Define Task Space Region strings
        # Left Hand
        TSRString1 = SerializeTSR(0,'NULL',T0_LH1,eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))

        # Right Hand
        TSRString2 = SerializeTSR(1,'NULL',T0_RH1,eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))

        # Left Foot
        TSRString3 = SerializeTSR(2,'NULL',Tee[2],eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))

        # Head
        # Grasp transform in Head coordinates
        Tw0_eH = eye(4)
        # How much freedom do we want to give to the Head
        # [x,x,y,y,z,z,R,R,P,P,Y,Y]
        Bw0H = matrix([0,0,-0.1,0.1,-0.1,0.01,0,0,0,0,0,0])
        TSRString4 = SerializeTSR(4,'NULL',Tee[4],Tw0_eH,Bw0H)

        # We defined Task Space Regions. Now let's concatenate them.
        TSRChainStringGrasping = SerializeTSRChain(0,1,0,1,TSRString1,'NULL',[])+' '+SerializeTSRChain(0,1,0,1,TSRString2,'NULL',[])+' '+SerializeTSRChain(0,1,1,1,TSRString3,'NULL',[])+' '+SerializeTSRChain(0,1,1,1,TSRString4,'NULL',[])

        if( self.StopAtKeyStrokes ):
            print "Press Enter to plan initconfig --> startik"
            sys.stdin.readline()

        # Get a trajectory from initial configuration to grasp configuration
        with self.robotid:
            try:
                answer = cbirrtHubo.solve('RunCBiRRT psample 0.2 supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' '+TSRChainStringGrasping)
                print "RunCBiRRT answer: ",str(answer)
            except openrave_exception, e:
                print "Cannot send command RunCBiRRT: "
                print e
                return []

        # CBiRRT always writes cmovetraj.txt; rename it per planning phase.
        try:
            os.rename("cmovetraj.txt","movetraj0.txt")
        except OSError, e:
            # No file cmovetraj
            print e
            return []

        # The following is the same as commented out try-except section
        traj = RaveCreateTrajectory(self.env,'').deserialize(open('movetraj0.txt','r').read())
        self.robotid.GetController().SetPath(traj)
        self.robotid.WaitForController(0)
        self.robotid.GetController().Reset(0)
        # Reset(0) releases the controller, otherwise after calling
        # SetPath the robot controller actively holds the trajectory's final joint values

        # Instead of 4 lines above, we could use the following block
        # to play the trajectory
        #
        # try:
        #     answer= cbirrtHubo.solve('traj movetraj0.txt');
        #     robotid.WaitForController(0)
        #     sys.stdin.readline()
        #     # debug
        #     print "traj call answer: ",str(answer)
        # except openrave_exception, e:
        #     print e

        # Get the current configuration of the robot
        # and assign it to startik (start of the wheel
        # rotation path).
        startik = self.robotid.GetActiveDOFValues()

        # Left Hand's index is less than the right hand.
        # Hence it is evaluated first by the CBiRRT Module.
        # That's why We need to define the right hand's
        # transform relative to the wheel (ask Dmitry Berenson
        # about this for more information).
        temp1 = MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0])))
        temp2 = MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0])))
        # Rotate the wheel's transform to a suitable pose
        # for the Left Hand
        # T0_w0L stands for:
        # left hand's transform on wheel in world coordinates
        T0_w0L = dot(dot(CTee,temp1),temp2)
        # This is what's happening:
        #
        # Tw0L_0 = linalg.inv(T0_w0L)
        # Tw0L_LH1 = Tw0L_0*T0_LH1
        #
        # Left hand's transform in wheel's coordinates
        Tw0L_LH1 = dot(linalg.inv(T0_w0L),T0_LH1)
        # Transform of the left hand's end effector in wheel's coords.
        # Required by CBiRRT
        Tw0_eL = Tw0L_LH1
        # How much freedom do we want to give to the left hand
        Bw0L = matrix([0,0,0,0,0,0,0,pi,0,0,0,0])

        # Right Hand's transforms:
        T0_crankcrank = self.crankid.GetManipulators()[0].GetTransform()
        T0_w0R = MakeTransform(rodrigues([tilt_angle_rad,0,0]),transpose(matrix([0,0,0])))
        # End effector transform in wheel coordinates
        Tw0_eR = dot(linalg.inv(T0_crankcrank),T0_RH1)

        #handles.append(misc.DrawAxes(env,matrix(Tw0_eR),1))

        # How much freedom? (note: in frame of crank)
        Bw0R = matrix([0,0,0,0,0,0,0,0,0,0,0,0])

        # Head's transforms:
        T0_w0H = Tee[4]
        Tw0_eH = eye(4);
        Bw0H = matrix([-0.05,0.05,-0.1,0.1,-100,100,-pi,pi,-pi,pi,-pi,pi])

        # Define Task Space Regions
        # Left Hand
        TSRString1 = SerializeTSR(0,'NULL',T0_w0L,Tw0_eL,Bw0L)
        # Right Hand
        TSRString2 = SerializeTSR(1,'crank crank',T0_w0R,Tw0_eR,Bw0R)
        # Left Foot
        TSRString3 = SerializeTSR(2,'NULL',Tee[2],eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))
        # Head
        TSRString4 = SerializeTSR(4,'NULL',T0_w0H,Tw0_eH,Bw0H)

        TSRChainStringFootOnly = SerializeTSRChain(0,0,1,1,TSRString3,'NULL',[])

        TSRChainStringFootandHead = TSRChainStringFootOnly+' '+SerializeTSRChain(0,0,1,1,TSRString4,'NULL',[])

        TSRChainStringTurning = SerializeTSRChain(0,0,1,1,TSRString1,'crank',matrix([crankjointind]))+' '+SerializeTSRChain(0,0,1,1,TSRString2,'NULL',[])+' '+TSRChainStringFootandHead

        # Calculate hand transforms after rotating the wheel (they will help us find the goalik):
        # How much do we want to rotate the wheel?
        crank_rot = pi/6.5

        # Which joint do we want the CBiRRT to mimic the TSR for?
        TSRChainMimicDOF = 1

        # Create the transform for the wheel that we would like to reach to
        Tcrank_rot = MakeTransform(rodrigues([crank_rot,0,0]),transpose(matrix([0,0,0])))

        # What is this?
        temp = MakeTransform(rodrigues([0,0,crank_rot]),transpose(matrix([0,0,0])))

        # Rotate the left hand's transform on the wheel in world transform "crank_rot" radians around it's Z-Axis
        T0_cranknew = dot(T0_w0L,Tcrank_rot)

        # Where will the left hand go after turning the wheel?
        # This is what's happening:
        #
        # Tcranknew_LH2 = dot(Tw0L_0,T0_LH1) --> Left hand in wheel's coordinate
        # T0_LH2 = dot(T0_cranknew,Tcranknew_LH2) --> Left hand rotated around wheel's origin
        T0_LH2 = dot(T0_cranknew,dot(linalg.inv(T0_w0L),T0_LH1))

        # Uncomment to see T0_LH2
        # handles.append(misc.DrawAxes(env,matrix(T0_LH2),1))

        # Where will the right hand go after turning the wheel?
        T0_RH2 = dot(T0_crankcrank,dot(temp,dot(linalg.inv(T0_crankcrank),T0_RH1)))

        # Uncomment to see T0_RH2
        # handles.append(misc.DrawAxes(env,matrix(T0_RH2),1))

        # Serialize targets as whitespace-separated strings for DoGeneralIK.
        arg1 = str(cogtarg).strip("[]").replace(', ',' ')
        arg2 = trans_to_str(T0_LH2)
        arg3 = trans_to_str(T0_RH2)
        arg4 = trans_to_str(Tee[2])

        # print arg1
        # print arg2
        # print arg3
        # print arg4

        if( self.StopAtKeyStrokes ):
            print "Press Enter to find a goalIK"
            sys.stdin.readline()

        self.crankid.SetDOFValues([crank_rot],[crankjointind])

        goalik = cbirrtHubo.solve('DoGeneralIK exec supportlinks 2 '+footlinknames+' movecog '+arg1+' nummanips 3 maniptm 0 '+arg2+' maniptm 1 '+arg3+' maniptm 2 '+arg4)

        # print "goalIK"
        # print goalik

        self.robotid.SetActiveDOFValues(str2num(goalik))
        self.crankid.SetDOFValues([crank_rot],[crankjointind])

        if( self.StopAtKeyStrokes ):
            print "Press Enter to go to startik"
            sys.stdin.readline()

        # Get a trajectory from goalik to grasp configuration
        goaljoints = deepcopy(goalik)
        # Append one zero per mimicked DOF before converting to numbers.
        for i in range(TSRChainMimicDOF):
            goaljoints += ' 0'

        goaljoints = str2num(goaljoints)

        self.robotid.SetActiveDOFValues(startik)
        time.sleep(0.5)

        self.robotid.SetDOFValues(self.rhandclosevals,self.rhanddofs)
        self.robotid.SetDOFValues(self.lhandclosevals,self.lhanddofs)
        # Close hands to start "turning" the wheel
        self.crankid.SetDOFValues([0],[crankjointind])
        time.sleep(0.5)

        if( self.StopAtKeyStrokes ):
            print "Press Enter to plan startik --> goalik (DMITRY!!!)"
            sys.stdin.readline()

        print self.robotid.GetActiveDOFValues()
        print TSRChainStringTurning

        try:
            answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(fastsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringTurning)
            print "RunCBiRRT answer: ",str(answer)
        except openrave_exception, e:
            print "Cannot send command RunCBiRRT: "
            print e
            return []

        try:
            os.rename("cmovetraj.txt","movetraj1.txt")
        except OSError, e:
            # No file cmovetraj
            print e
            return []

        # The following is the same as commented out try-except section
        # traj = RaveCreateTrajectory(env,'').deserialize(open('movetraj1.txt','r').read())
        # robotid.GetController().SetPath(traj)
        # crankid.GetController().SetPath(traj)
        # robotid.WaitForController(0)
        # crankid.WaitForController(0)
        # robotid.GetController().Reset(0)
        # crankid.GetController().Reset(0)

        # Both robot and crank play movetraj1 so the wheel follows the hands.
        try:
            answer= cbirrtHubo.solve('traj movetraj1.txt');
            answer= cbirrtWheel.solve('traj movetraj1.txt');
            self.robotid.WaitForController(0)
            # debug
            print "traj call answer: ",str(answer)
        except openrave_exception, e:
            print e
            return []

        self.robotid.GetController().Reset(0)

        self.robotid.SetDOFValues(self.rhandopenvals,self.rhanddofs)
        self.robotid.SetDOFValues(self.lhandopenvals,self.lhanddofs)
        self.robotid.SetActiveDOFValues(str2num(goalik))
        time.sleep(2)

        if( self.StopAtKeyStrokes ):
            print "Press Enter to plan goalik --> startik "
            sys.stdin.readline()

        goaljoints = startik

        print self.robotid.GetActiveDOFValues()
        print TSRChainStringFootandHead

        try:
            answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringFootandHead)
            print "RunCBiRRT answer: ",str(answer)
        except openrave_exception, e:
            print "Cannot send command RunCBiRRT: "
            print e
            return []

        try:
            os.rename("cmovetraj.txt","movetraj2.txt")
        except OSError, e:
            # No file cmovetraj
            print e
            return []

        try:
            answer= cbirrtHubo.solve('traj movetraj2.txt');
            self.robotid.WaitForController(0)
            # debug
            print "traj call answer: ",str(answer)
        except openrave_exception, e:
            print e
            return []

        self.robotid.GetController().Reset(0)
        #self.robotid.SetDOFValues(rhandclosevals,rhanddofs)
        #self.robotid.SetDOFValues(lhandclosevals,lhanddofs)
        self.robotid.SetActiveDOFValues(startik)
        time.sleep(1)

        if( self.StopAtKeyStrokes ):
            print "Press Enter to plan startik --> initconfig "
            sys.stdin.readline()

        goaljoints = initconfig
        print goaljoints

        try:
            answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringFootandHead)
            print "RunCBiRRT answer: ",str(answer)
        except openrave_exception, e:
            print "Cannot send command RunCBiRRT: "
            print e
            return []

        try:
            os.rename("cmovetraj.txt","movetraj3.txt")
        except OSError, e:
            # No file cmovetraj
            print e
            return []

        try:
            answer= cbirrtHubo.solve('traj movetraj3.txt');
            self.robotid.WaitForController(0)
            # debug
            print "traj call answer: ",str(answer)
        except openrave_exception, e:
            print e
            return []

        self.robotid.GetController().Reset(0)
        return self.Playback()
if __name__ == "__main__":
    # Stand-alone entry point: build the planner, enable the viewer,
    # run the full wheel-turning pipeline without keystroke pauses,
    # then shut OpenRAVE down.
    wheel_turner = HuboPlusWheelTurning()
    wheel_turner.SetViewer(True)
    wheel_turner.SetStopKeyStrokes(False)
    wheel_turner.Run()
    wheel_turner.KillOpenrave()
|
normal
|
{
"blob_id": "6ad939ab541562efdaacb8b56865e76d1745176a",
"index": 2494,
"step-1": "#!/usr/bin/env python\n# Ben Suay, RAIL\n# May 2013\n# Worcester Polytechnic Institute\n#\n\n# http://openrave.org/docs/latest_stable/command_line_tools/\n# openrave-robot.py /your/path/to/your.robot.xml --info=joints\n# On that page you can find more examples on how to use openrave-robot.py.\n\nfrom openravepy import *\nimport sys\nif not __openravepy_build_doc__:\n from openravepy import *\n from numpy import *\n import numpy\nimport time\nfrom rodrigues import *\nfrom TransformMatrix import *\nfrom str2num import *\nfrom TSR import *\nfrom math import *\nfrom copy import *\nimport os # for file operations\nfrom RaveCBiRRT import *\nfrom base_wheel_turning import *\n\nclass HuboPlusWheelTurning( BaseWheelTurning ):\n\n def __init__(self,\n HuboModelPath = '../../openHubo/huboplus/rlhuboplus.robot.xml',\n WheelModelPath = '../../../drc_common/models/driving_wheel.robot.xml' ):\n\n BaseWheelTurning.__init__( self, HuboModelPath, WheelModelPath )\n\n # Set those variables to show or hide the interface\n # Do it using the member functions\n self.StopAtKeyStrokes = False\n self.ShowUserInterface = False\n self.ViewerStarted = False\n\n\t# Right Hand Joints \n # Open - Closed Values\n self.rhanddofs = range(27,42)\n self.rhandclosevals = [0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0, 0, 1.2]\n self.rhandopenvals = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08]\n\n # Left Hand Joints\n self.lhanddofs = range(42,57)\n self.lhandclosevals = [0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0.439, 0.683, 0.497, 0, 0, 1.2]\n self.lhandopenvals = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08]\n\n def SetRobotConfiguration(self,jointValues):\n print \"SetRobotConfiguration\"\n values = []\n values.append( jointValues['HPY'] ) # 0\n values.append( jointValues['RHY'] ) # 1\n values.append( jointValues['LHY'] ) # 2\n values.append( jointValues['RHR'] ) # 3\n values.append( jointValues['HPY'] ) # 4\n 
values.append( jointValues['LHR'] ) # 5\n values.append( jointValues['LHP'] ) # 6\n values.append( jointValues['RKP'] ) # 7\n values.append( jointValues['LKP'] ) # 8\n values.append( jointValues['RAP'] ) # 9\n values.append( jointValues['LAP'] ) # 10\n values.append( jointValues['RAR'] ) # 11\n values.append( jointValues['LAR'] ) # 12\n values.append( jointValues['RSP'] ) # 13 \n values.append( jointValues['LSP'] ) # 14 \n values.append( jointValues['RSR'] ) # 15\n values.append( jointValues['LSR'] ) # 16\n values.append( jointValues['RSY'] ) # 17 \n values.append( jointValues['LSY'] ) # 18\n values.append( jointValues['REP'] ) # 19\n values.append( jointValues['LEP'] ) # 20\n values.append( jointValues['RWY'] ) # 21\n values.append( jointValues['LWY'] ) # 22\n values.append( jointValues['RWP'] ) # 23\n values.append( jointValues['LWP'] ) # 24\n values.append( jointValues['HNR'] ) # 25\n values.append( jointValues['HNP'] ) # 26\n\n for i in range(27,57):\n values.append(0)\n\n# values.append( jointValues['rightIndexKnuckle2'] ) # 27\n# values.append( jointValues['rightIndexKnuckle3'] ) # 28\n# values.append( jointValues['rightIndexKnuckle1'] ) # 29\n# values.append( jointValues['rightMiddleKnuckle2'] ) # 30\n# values.append( jointValues['rightMiddleKnuckle3'] ) # 31\n# values.append( jointValues['rightMiddleKnuckle1'] ) # 32\n# values.append( jointValues['rightRingKnuckle2'] ) # 33\n# values.append( jointValues['rightRingKnuckle3'] ) # 34\n# values.append( jointValues['rightRingKnuckle1'] ) # 35\n# values.append( jointValues['rightPinkyKnuckle2'] ) # 36\n# values.append( jointValues['rightPinkyKnuckle3'] ) # 37\n# values.append( jointValues['rightPinkyKnuckle1'] ) # 38\n# values.append( jointValues['rightThumbKnuckle2'] ) # 39\n# values.append( jointValues['rightThumbKnuckle3'] ) # 40\n# values.append( jointValues['rightThumbKnuckle1'] ) # 41\n# values.append( jointValues['leftIndexKnuckle2'] ) # 42\n# values.append( jointValues['leftIndexKnuckle3'] ) # 43\n# 
values.append( jointValues['leftIndexKnuckle1'] ) # 44\n# values.append( jointValues['leftMiddleKnuckle2'] ) # 45\n# values.append( jointValues['leftMiddleKnuckle3'] ) # 46\n# values.append( jointValues['leftMiddleKnuckle1'] ) # 47\n# values.append( jointValues['leftRingKnuckle2'] ) # 48\n# values.append( jointValues['leftRingKnuckle3'] ) # 49\n# values.append( jointValues['leftRingKnuckle1'] ) # 50\n# values.append( jointValues['leftPinkyKnuckle2'] ) # 51\n# values.append( jointValues['leftPinkyKnuckle3'] ) # 52\n# values.append( jointValues['leftPinkyKnuckle1'] ) # 53\n# values.append( jointValues['leftThumbKnuckle2'] ) # 54\n# values.append( jointValues['leftThumbKnuckle3'] ) # 55\n# values.append( jointValues['leftThumbKnuckle1'] ) # 56\n self.robotid.SetDOFValues( values )\n \n def Run(self):\n \n self.RemoveFiles()\n\n # This is a list of handles of the objects that are\n # drawn on the screen in OpenRAVE Qt-Viewer.\n # Keep appending to the end, and pop() if you want to delete.\n handles = [] \n\n normalsmoothingitrs = 150;\n fastsmoothingitrs = 20;\n\n self.StartViewerAndSetWheelPos( handles )\n\n # Wheel Joint Index \n crankjointind = 0\n # Set the wheel joints back to 0 for replanning\n self.crankid.SetDOFValues([0],[crankjointind])\n self.crankid.GetController().Reset(0)\n\n manips = self.robotid.GetManipulators()\n crankmanip = self.crankid.GetManipulators()\n \n try:\n cbirrtHubo = RaveCBiRRT(self.env,'rlhuboplus')\n cbirrtWheel = RaveCBiRRT(self.env,'crank')\n except openrave_exception, e:\n print e\n return []\n\n # Keep Active Joint Indices\n # Note that 0 is the driving wheel\n #activedofs = [0]\n activedofs = []\n for m in manips:\n # print m.GetArmIndices()\n activedofs.extend(m.GetArmIndices())\n\n # Sort Active Joint Indices\n activedofs.sort()\n #print activedofs\n\n # Set Elbows and Thumbs Joint Values\n self.robotid.SetDOFValues([-0.95,-0.95,1,1],[19,20,41,56]) \n self.robotid.SetActiveDOFs(activedofs)\n\n # Current configuration of the 
robot is its initial configuration\n initconfig = self.robotid.GetActiveDOFValues()\n\n print \"robot init config : \"\n print initconfig\n\n # List of Robot Links\n links = self.robotid.GetLinks()\n \n # List of Wheel (Crank Links)\n cranklinks = self.crankid.GetLinks()\n \n # End Effector Transforms\n Tee = []\n for i in range(len(manips)):\n # Returns End Effector Transform in World Coordinates\n Tlink = manips[i].GetEndEffectorTransform()\n Tee.append(Tlink)\n\n \n # Get Transformation Matrix for the Wheel\n # Note that crank's links are not rotated\n # If you want use the wheel's end effector's transformation\n # matrix (which is 23 degrees tilted) then see\n # CTee matrix below.\n #\n # crank has two links: \n # 0) pole - the blue cylinder in the model, and, \n # 1) crank - the driving wheel itself.\n jointtm = cranklinks[0].GetTransform()\n # handles.append(misc.DrawAxes(env,matrix(jointtm),1))\n \n\n # We can also get the transformation matrix\n # with the following command as a string\n jointtm_str = cbirrtHubo.solve('GetJointTransform name crank jointind '+str(crankjointind))\n # And then we can convert the string to a 1x12 array\n jointtm_str = jointtm_str.replace(\" \",\",\")\n jointtm_num = eval('['+jointtm_str+']')\n\n # In this script we will use jointtm.\n # jointtm_str and jointtm_num are given as example.\n \n # Crank Transform End Effector in World Coordinates\n # This is the transformation matrix of the end effector \n # named \"dummy\" in the xml file.\n # Note that dummy is tilted 23 degress around its X-Axis\n CTee = crankmanip[0].GetEndEffectorTransform()\n\n tilt_angle_deg = acos(dot(linalg.inv(CTee),jointtm)[1,1])*180/pi\n tilt_angle_rad = acos(dot(linalg.inv(CTee),jointtm)[1,1]) \n\n # Center of Gravity Target\n cogtarg = [-0.05, 0.085, 0]\n #if self.ShowUserInterface :\n #cogtm = MakeTransform(rodrigues([0,0,0]),transpose(matrix(cogtarg)))\n #handles.append(misc.DrawAxes(self.env,cogtm,1))\n\n # polyscale: changes the scale of the 
support polygon\n # polytrans: shifts the support polygon around\n footlinknames = ' Body_RAR Body_LAR polyscale 0.5 0.5 0 polytrans -0.015 0 0 '\n #footlinknames = ' Body_RAR Body_LAR polyscale 0.7 0.5 0 polytrans -0.015 0 0 '\n #footlinknames = ' Body_RAR Body_LAR polyscale 1.0 1.0 0 polytrans 0 0 0 '\n\n # What is this?\n handrot = rodrigues([0,-pi/2,0])\n \n # Translation Offset from the wheel center for the hands\n transoffset = [0, 0.15, 0];\n \n # Figure out where to put the left hand on the wheel\n temp = dot(CTee, MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0]))))\n temp = dot(temp, MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0]))))\n \n # Left Hand Pose in World Coordinates\n T0_LH1 = dot(temp, MakeTransform(rodrigues([0,0,0]),transpose(matrix([0,0.15,0]))))\n\n # Uncomment if you want to see where T0_LH1 is \n # handles.append(misc.DrawAxes(env,matrix(T0_LH1),1))\n\n # Figure out where to put the right hand on the wheel\n temp = dot(CTee, MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0]))))\n temp = dot(temp, MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0]))))\n # Right Hand Pose in World Coordinates\n T0_RH1 = dot(temp, MakeTransform(rodrigues([0,0,0]),transpose(matrix([0,-0.15,0]))))\n \n # Uncomment if you want to see where T0_RH1 is \n # handles.append(misc.DrawAxes(env,matrix(T0_RH1),1))\n \n # Define Task Space Region strings\n # Left Hand\n TSRString1 = SerializeTSR(0,'NULL',T0_LH1,eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))\n\n # Right Hand\n TSRString2 = SerializeTSR(1,'NULL',T0_RH1,eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))\n \n # Left Foot\n TSRString3 = SerializeTSR(2,'NULL',Tee[2],eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))\n\n # Head\n # Grasp transform in Head coordinates\n Tw0_eH = eye(4) \n # How much freedom do we want to give to the Head\n # [x,x,y,y,z,z,R,R,P,P,Y,Y]\n Bw0H = matrix([0,0,-0.1,0.1,-0.1,0.01,0,0,0,0,0,0])\n TSRString4 = SerializeTSR(4,'NULL',Tee[4],Tw0_eH,Bw0H)\n\n # We 
defined Task Space Regions. Now let's concatenate them.\n TSRChainStringGrasping = SerializeTSRChain(0,1,0,1,TSRString1,'NULL',[])+' '+SerializeTSRChain(0,1,0,1,TSRString2,'NULL',[])+' '+SerializeTSRChain(0,1,1,1,TSRString3,'NULL',[])+' '+SerializeTSRChain(0,1,1,1,TSRString4,'NULL',[])\n \n\n if( self.StopAtKeyStrokes ):\n print \"Press Enter to plan initconfig --> startik\"\n sys.stdin.readline()\n \n # Get a trajectory from initial configuration to grasp configuration\n with self.robotid:\n try:\n answer = cbirrtHubo.solve('RunCBiRRT psample 0.2 supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' '+TSRChainStringGrasping)\n print \"RunCBiRRT answer: \",str(answer)\n except openrave_exception, e:\n print \"Cannot send command RunCBiRRT: \"\n print e\n return []\n\n try:\n os.rename(\"cmovetraj.txt\",\"movetraj0.txt\")\n except OSError, e:\n # No file cmovetraj\n print e\n return []\n\n # The following is the same as commented out try-except section\n traj = RaveCreateTrajectory(self.env,'').deserialize(open('movetraj0.txt','r').read()) \n self.robotid.GetController().SetPath(traj) \n self.robotid.WaitForController(0)\n self.robotid.GetController().Reset(0) \n # Reset(0) releases the controller, otherwise after calling \n # SetPath the robot controller actively holds the trajectory's final joint values\n \n # Instead of 4 lines above, we could use the following block\n # to play the trajectory\n #\n # try:\n # answer= cbirrtHubo.solve('traj movetraj0.txt');\n # robotid.WaitForController(0)\n # sys.stdin.readline()\n # # debug\n # print \"traj call answer: \",str(answer)\n # except openrave_exception, e:\n # print e\n \n \n # Get the current configuration of the robot\n # and assign it to startik (start of the wheel\n # rotation path).\n startik = self.robotid.GetActiveDOFValues()\n \n # Left Hand's index is less than the right hand.\n # Hence it is evaluated first by the CBiRRT Module.\n # That's why We need to define the right hand's \n # 
transform relative to the wheel (ask Dmitry Berenson\n # about this for more information).\n temp1 = MakeTransform(rodrigues([-pi/2,0,0]),transpose(matrix([0,0,0])))\n temp2 = MakeTransform(rodrigues([0,0,-pi/2]),transpose(matrix([0,0,0])))\n # Rotate the wheel's transform to a suitable pose\n # for the Left Hand\n # T0_w0L stands for: \n # left hand's transform on wheel in world coordinates\n T0_w0L = dot(dot(CTee,temp1),temp2)\n # This is what's happening: \n #\n # Tw0L_0 = linalg.inv(T0_w0L)\n # Tw0L_LH1 = Tw0L_0*T0_LH1\n #\n # Left hand's transform in wheel's coordinates\n Tw0L_LH1 = dot(linalg.inv(T0_w0L),T0_LH1)\n # Transform of the left hand's end effector in wheel's coords.\n # Required by CBiRRT\n Tw0_eL = Tw0L_LH1\n # How much freedom do we want to give to the left hand\n Bw0L = matrix([0,0,0,0,0,0,0,pi,0,0,0,0])\n\n # Right Hand's transforms:\n T0_crankcrank = self.crankid.GetManipulators()[0].GetTransform()\n T0_w0R = MakeTransform(rodrigues([tilt_angle_rad,0,0]),transpose(matrix([0,0,0])))\n # End effector transform in wheel coordinates\n Tw0_eR = dot(linalg.inv(T0_crankcrank),T0_RH1)\n\n #handles.append(misc.DrawAxes(env,matrix(Tw0_eR),1))\n\n # How much freedom? 
(note: in frame of crank)\n Bw0R = matrix([0,0,0,0,0,0,0,0,0,0,0,0])\n\n # Head's transforms:\n T0_w0H = Tee[4]\n Tw0_eH = eye(4);\n Bw0H = matrix([-0.05,0.05,-0.1,0.1,-100,100,-pi,pi,-pi,pi,-pi,pi])\n \n \n # Define Task Space Regions\n # Left Hand\n TSRString1 = SerializeTSR(0,'NULL',T0_w0L,Tw0_eL,Bw0L)\n # Right Hand\n TSRString2 = SerializeTSR(1,'crank crank',T0_w0R,Tw0_eR,Bw0R)\n # Left Foot\n TSRString3 = SerializeTSR(2,'NULL',Tee[2],eye(4),matrix([0,0,0,0,0,0,0,0,0,0,0,0]))\n # Head\n TSRString4 = SerializeTSR(4,'NULL',T0_w0H,Tw0_eH,Bw0H)\n \n TSRChainStringFootOnly = SerializeTSRChain(0,0,1,1,TSRString3,'NULL',[])\n\n TSRChainStringFootandHead = TSRChainStringFootOnly+' '+SerializeTSRChain(0,0,1,1,TSRString4,'NULL',[])\n\n TSRChainStringTurning = SerializeTSRChain(0,0,1,1,TSRString1,'crank',matrix([crankjointind]))+' '+SerializeTSRChain(0,0,1,1,TSRString2,'NULL',[])+' '+TSRChainStringFootandHead\n \n # Calculate hand transforms after rotating the wheel (they will help us find the goalik):\n # How much do we want to rotate the wheel?\n crank_rot = pi/6.5\n \n # Which joint do we want the CBiRRT to mimic the TSR for?\n TSRChainMimicDOF = 1\n \n # Create the transform for the wheel that we would like to reach to\n Tcrank_rot = MakeTransform(rodrigues([crank_rot,0,0]),transpose(matrix([0,0,0])))\n \n # What is this?\n temp = MakeTransform(rodrigues([0,0,crank_rot]),transpose(matrix([0,0,0])))\n \n # Rotate the left hand's transform on the wheel in world transform \"crank_rot\" radians around it's Z-Axis\n T0_cranknew = dot(T0_w0L,Tcrank_rot)\n \n # Where will the left hand go after turning the wheel?\n # This is what's happening:\n #\n # Tcranknew_LH2 = dot(Tw0L_0,T0_LH1) --> Left hand in wheel's coordinate\n # T0_LH2 = dot(T0_cranknew,Tcranknew_LH2) --> Left hand rotated around wheel's origin\n T0_LH2 = dot(T0_cranknew,dot(linalg.inv(T0_w0L),T0_LH1))\n\n # Uncomment to see T0_LH2\n # handles.append(misc.DrawAxes(env,matrix(T0_LH2),1))\n \n # Where will the 
right hand go after turning the wheel?\n T0_RH2 = dot(T0_crankcrank,dot(temp,dot(linalg.inv(T0_crankcrank),T0_RH1)))\n\n # Uncomment to see T0_RH2\n # handles.append(misc.DrawAxes(env,matrix(T0_RH2),1))\n\n arg1 = str(cogtarg).strip(\"[]\").replace(', ',' ')\n arg2 = trans_to_str(T0_LH2)\n arg3 = trans_to_str(T0_RH2)\n arg4 = trans_to_str(Tee[2])\n\n # print arg1\n # print arg2\n # print arg3\n # print arg4\n\n if( self.StopAtKeyStrokes ):\n print \"Press Enter to find a goalIK\"\n sys.stdin.readline()\n\n self.crankid.SetDOFValues([crank_rot],[crankjointind])\n\n goalik = cbirrtHubo.solve('DoGeneralIK exec supportlinks 2 '+footlinknames+' movecog '+arg1+' nummanips 3 maniptm 0 '+arg2+' maniptm 1 '+arg3+' maniptm 2 '+arg4)\n \n # print \"goalIK\"\n # print goalik\n\n self.robotid.SetActiveDOFValues(str2num(goalik))\n self.crankid.SetDOFValues([crank_rot],[crankjointind])\n \n if( self.StopAtKeyStrokes ):\n print \"Press Enter to go to startik\"\n sys.stdin.readline()\n\n # Get a trajectory from goalik to grasp configuration\n goaljoints = deepcopy(goalik)\n for i in range(TSRChainMimicDOF):\n goaljoints += ' 0'\n\n goaljoints = str2num(goaljoints)\n\n self.robotid.SetActiveDOFValues(startik)\n time.sleep(0.5)\n self.robotid.SetDOFValues(self.rhandclosevals,self.rhanddofs)\n self.robotid.SetDOFValues(self.lhandclosevals,self.lhanddofs)\n # Close hands to start \"turning\" the wheel\n self.crankid.SetDOFValues([0],[crankjointind])\n time.sleep(0.5)\n \n if( self.StopAtKeyStrokes ):\n print \"Press Enter to plan startik --> goalik (DMITRY!!!)\"\n sys.stdin.readline()\n\n print self.robotid.GetActiveDOFValues()\n print TSRChainStringTurning\n\n try:\n answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(fastsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringTurning)\n print \"RunCBiRRT answer: \",str(answer)\n except openrave_exception, e:\n print \"Cannot send command 
RunCBiRRT: \"\n print e\n return []\n \n\n try:\n os.rename(\"cmovetraj.txt\",\"movetraj1.txt\")\n except OSError, e:\n # No file cmovetraj\n print e\n return []\n\n # The following is the same as commented out try-except section\n # traj = RaveCreateTrajectory(env,'').deserialize(open('movetraj1.txt','r').read()) \n # robotid.GetController().SetPath(traj) \n # crankid.GetController().SetPath(traj)\n # robotid.WaitForController(0)\n # crankid.WaitForController(0)\n # robotid.GetController().Reset(0)\n # crankid.GetController().Reset(0)\n \n try:\n answer= cbirrtHubo.solve('traj movetraj1.txt');\n answer= cbirrtWheel.solve('traj movetraj1.txt');\n self.robotid.WaitForController(0)\n # debug\n print \"traj call answer: \",str(answer)\n except openrave_exception, e:\n print e\n return []\n\n self.robotid.GetController().Reset(0)\n self.robotid.SetDOFValues(self.rhandopenvals,self.rhanddofs)\n self.robotid.SetDOFValues(self.lhandopenvals,self.lhanddofs)\n self.robotid.SetActiveDOFValues(str2num(goalik))\n\n time.sleep(2)\n\n if( self.StopAtKeyStrokes ):\n print \"Press Enter to plan goalik --> startik \"\n sys.stdin.readline()\n\n \n\n goaljoints = startik\n\n print self.robotid.GetActiveDOFValues()\n print TSRChainStringFootandHead\n try:\n answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringFootandHead)\n print \"RunCBiRRT answer: \",str(answer)\n except openrave_exception, e:\n print \"Cannot send command RunCBiRRT: \"\n print e\n return []\n\n try:\n os.rename(\"cmovetraj.txt\",\"movetraj2.txt\")\n except OSError, e:\n # No file cmovetraj\n print e\n return []\n\n try:\n answer= cbirrtHubo.solve('traj movetraj2.txt');\n self.robotid.WaitForController(0)\n # debug\n print \"traj call answer: \",str(answer)\n except openrave_exception, e:\n print e\n return []\n \n self.robotid.GetController().Reset(0)\n 
#self.robotid.SetDOFValues(rhandclosevals,rhanddofs)\n #self.robotid.SetDOFValues(lhandclosevals,lhanddofs)\n\n self.robotid.SetActiveDOFValues(startik)\n time.sleep(1)\n\n if( self.StopAtKeyStrokes ):\n print \"Press Enter to plan startik --> initconfig \"\n sys.stdin.readline()\n\n goaljoints = initconfig\n print goaljoints\n try:\n answer = cbirrtHubo.solve('RunCBiRRT supportlinks 2 '+footlinknames+' smoothingitrs '+str(normalsmoothingitrs)+' jointgoals '+str(len(goaljoints))+' '+Serialize1DMatrix(matrix(goaljoints))+' '+TSRChainStringFootandHead)\n print \"RunCBiRRT answer: \",str(answer)\n except openrave_exception, e:\n print \"Cannot send command RunCBiRRT: \"\n print e\n return []\n\n try:\n os.rename(\"cmovetraj.txt\",\"movetraj3.txt\")\n except OSError, e:\n # No file cmovetraj\n print e\n return []\n\n try:\n answer= cbirrtHubo.solve('traj movetraj3.txt');\n self.robotid.WaitForController(0)\n # debug\n print \"traj call answer: \",str(answer)\n except openrave_exception, e:\n print e\n return []\n\n self.robotid.GetController().Reset(0)\n \n return self.Playback()\n\n\nif __name__ == \"__main__\":\n planner = HuboPlusWheelTurning()\n planner.SetViewer(True)\n planner.SetStopKeyStrokes(False)\n planner.Run()\n planner.KillOpenrave()\n\n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import datetime
a = datetime.datetime.now()
while True:
print("""\
Welcome to HMS
1. Are you want enter data
2. Are you want see record
3. exit
""")
option = int(input("enter your option"))
print(option)
if option == 1:
print("""\
Select client name
1. Add Exercise
2. Add Dite
4. exit
""")
option1 =int(input("enter your option"))
if option1 == 1:
print("""\
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option2 =int(input("enter your option"))
option2 = option2-1
name = list[option2]
dec = input("enter the exercise name")
f = open(name,"a")
decs = dec+'--'+str(a)+'\n'
f.write(decs)
f.close()
print('successfuly data enter')
elif option1 == 2:
print("""\
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option2 =int(input("enter your option"))
option2 = option2-1
name = list[option2]
dec = input("enter the dite")
f = open(name,"a")
decs = dec+'--'+str(a)+'\n'
f.write(decs)
f.close()
print('successfuly data enter')
else:
break
elif option == 2:
print("""\
select name whose record you want see
1. Aditya
2. harsh
3. shivam
4. exit
""")
list=['Aditya', 'harsh','shivam']
option3 =int(input("enter your option"))
option3 = option3-1
name = list[option3]
f = open(name,"rt")
content =f.read()
print(content)
else:
break
|
normal
|
{
"blob_id": "5c5a0fd67a6d6e805b77ddfddfe959335daa3bad",
"index": 6383,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-3": "<mask token>\na = datetime.datetime.now()\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-4": "import datetime\na = datetime.datetime.now()\nwhile True:\n print(\n \"\"\" Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\"\n )\n option = int(input('enter your option'))\n print(option)\n if option == 1:\n print(\n \"\"\" Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\"\n )\n option1 = int(input('enter your option'))\n if option1 == 1:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the exercise name')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\n \"\"\" 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option2 = int(input('enter your option'))\n option2 = option2 - 1\n name = list[option2]\n dec = input('enter the dite')\n f = open(name, 'a')\n decs = dec + '--' + str(a) + '\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n else:\n break\n elif option == 2:\n print(\n \"\"\" select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\"\n )\n list = ['Aditya', 'harsh', 'shivam']\n option3 = int(input('enter your option'))\n option3 = option3 - 1\n name = list[option3]\n f = open(name, 'rt')\n content = f.read()\n print(content)\n else:\n break\n",
"step-5": "import datetime\na = datetime.datetime.now()\nwhile True:\n print(\"\"\"\\\n Welcome to HMS\n 1. Are you want enter data\n 2. Are you want see record\n 3. exit\n \"\"\")\n option = int(input(\"enter your option\"))\n print(option)\n if option == 1:\n\n print(\"\"\"\\\n Select client name\n 1. Add Exercise\n 2. Add Dite\n 4. exit\n \"\"\")\n option1 =int(input(\"enter your option\"))\n if option1 == 1:\n print(\"\"\"\\\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option2 =int(input(\"enter your option\"))\n option2 = option2-1\n name = list[option2]\n dec = input(\"enter the exercise name\")\n f = open(name,\"a\")\n decs = dec+'--'+str(a)+'\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter')\n elif option1 == 2:\n print(\"\"\"\\\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option2 =int(input(\"enter your option\"))\n option2 = option2-1\n name = list[option2]\n dec = input(\"enter the dite\")\n f = open(name,\"a\")\n decs = dec+'--'+str(a)+'\\n'\n f.write(decs)\n f.close()\n print('successfuly data enter') \n else:\n break \n elif option == 2:\n print(\"\"\"\\\n select name whose record you want see\n 1. Aditya\n 2. harsh\n 3. shivam\n 4. exit\n \"\"\")\n list=['Aditya', 'harsh','shivam']\n option3 =int(input(\"enter your option\"))\n option3 = option3-1\n name = list[option3]\n f = open(name,\"rt\")\n content =f.read() \n print(content) \n else:\n break\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def prepareTransformAttributes(attributes):
dtype = attributes.get('type')
dtype = '{}'.format(dtype)
min = attributes.get('min')
max = attributes.get('max')
ftype = attributes.get('ftype')
maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0
minO = np.iinfo(np.dtype(dtype)).min
maxO = np.iinfo(np.dtype(dtype)).max
f = maxV / (max - min)
return min, max, minO, maxO, f, dtype, ftype
<|reserved_special_token_0|>
def setT(attributes, value, isChar=False, variable=None):
ftype = attributes['ftype']
if ftype == 'M':
value = value.astype('datetime64[ms]').astype('f8')
if ftype == 'S1' and not isChar:
value = stringtochar(np.array(value).astype('S{}'.format(variable.
shape[1])))
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(
attributes)
value = np.clip(value, min, max)
value = (value - min) * f - np.abs(minO)
value = np.rint(value)
value = np.clip(value, minO, maxO)
value = value.astype(dtype)
return value
class NpEncoder(json.JSONEncoder):
"""
Encoder to change numpy type to python type.
This is used for creating JSON object.
"""
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
<|reserved_special_token_0|>
def getVariablesG(groups):
obj = {}
for gname in groups:
for vname in groups[gname]['variables']:
if vname in obj:
obj[vname].append(gname)
else:
obj[vname] = [gname]
for vname in obj:
obj[vname] = sorted(list(set(obj[vname])))
return obj
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def prepareTransformAttributes(attributes):
dtype = attributes.get('type')
dtype = '{}'.format(dtype)
min = attributes.get('min')
max = attributes.get('max')
ftype = attributes.get('ftype')
maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0
minO = np.iinfo(np.dtype(dtype)).min
maxO = np.iinfo(np.dtype(dtype)).max
f = maxV / (max - min)
return min, max, minO, maxO, f, dtype, ftype
<|reserved_special_token_0|>
def setT(attributes, value, isChar=False, variable=None):
ftype = attributes['ftype']
if ftype == 'M':
value = value.astype('datetime64[ms]').astype('f8')
if ftype == 'S1' and not isChar:
value = stringtochar(np.array(value).astype('S{}'.format(variable.
shape[1])))
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(
attributes)
value = np.clip(value, min, max)
value = (value - min) * f - np.abs(minO)
value = np.rint(value)
value = np.clip(value, minO, maxO)
value = value.astype(dtype)
return value
class NpEncoder(json.JSONEncoder):
"""
Encoder to change numpy type to python type.
This is used for creating JSON object.
"""
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
def getDimensionsV(variables, dimensionsV={}):
for vname in variables:
for dname in variables[vname]['dimensions']:
if dname in dimensionsV:
dimensionsV[dname].append(vname)
else:
dimensionsV[dname] = [vname]
return dimensionsV
def getVariablesG(groups):
obj = {}
for gname in groups:
for vname in groups[gname]['variables']:
if vname in obj:
obj[vname].append(gname)
else:
obj[vname] = [gname]
for vname in obj:
obj[vname] = sorted(list(set(obj[vname])))
return obj
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def is_json(myjson):
try:
json_object = json.loads(myjson)
except:
return False
return True
def getType(type):
t = np.dtype(type).char
if t == 'S':
return 'S1'
if t == 'U':
return 'U1'
return t
def getType3(type):
t = np.dtype(type).char
if t == 'B':
return 'b'
if t == 'H':
return 'h'
if t == 'I':
return 'i'
if t == 'l':
return 'i'
if t == 'S':
return 'S1'
if t == 'U':
return 'U1'
return t
def prepareTransformAttributes(attributes):
dtype = attributes.get('type')
dtype = '{}'.format(dtype)
min = attributes.get('min')
max = attributes.get('max')
ftype = attributes.get('ftype')
maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0
minO = np.iinfo(np.dtype(dtype)).min
maxO = np.iinfo(np.dtype(dtype)).max
f = maxV / (max - min)
return min, max, minO, maxO, f, dtype, ftype
def getT(attributes, value, isChar=False):
ftype = attributes['ftype']
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, ftype = prepareTransformAttributes(
attributes)
value = ((value + np.abs(minO)) / f + min).astype(ftype)
if ftype == 'M':
value = value.astype('datetime64[ms]')
if ftype == 'S1':
value = value.astype('S1') if isChar else chartostring(value.astype
('S1'))
return value
def setT(attributes, value, isChar=False, variable=None):
ftype = attributes['ftype']
if ftype == 'M':
value = value.astype('datetime64[ms]').astype('f8')
if ftype == 'S1' and not isChar:
value = stringtochar(np.array(value).astype('S{}'.format(variable.
shape[1])))
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(
attributes)
value = np.clip(value, min, max)
value = (value - min) * f - np.abs(minO)
value = np.rint(value)
value = np.clip(value, minO, maxO)
value = value.astype(dtype)
return value
class NpEncoder(json.JSONEncoder):
"""
Encoder to change numpy type to python type.
This is used for creating JSON object.
"""
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
def getDimensionsV(variables, dimensionsV={}):
for vname in variables:
for dname in variables[vname]['dimensions']:
if dname in dimensionsV:
dimensionsV[dname].append(vname)
else:
dimensionsV[dname] = [vname]
return dimensionsV
def getVariablesG(groups):
obj = {}
for gname in groups:
for vname in groups[gname]['variables']:
if vname in obj:
obj[vname].append(gname)
else:
obj[vname] = [gname]
for vname in obj:
obj[vname] = sorted(list(set(obj[vname])))
return obj
<|reserved_special_token_1|>
import numpy as np
import json
from netCDF4 import Dataset, stringtochar, chartostring, Variable, Group
def is_json(myjson):
try:
json_object = json.loads(myjson)
except:
return False
return True
def getType(type):
t = np.dtype(type).char
if t == 'S':
return 'S1'
if t == 'U':
return 'U1'
return t
def getType3(type):
t = np.dtype(type).char
if t == 'B':
return 'b'
if t == 'H':
return 'h'
if t == 'I':
return 'i'
if t == 'l':
return 'i'
if t == 'S':
return 'S1'
if t == 'U':
return 'U1'
return t
def prepareTransformAttributes(attributes):
dtype = attributes.get('type')
dtype = '{}'.format(dtype)
min = attributes.get('min')
max = attributes.get('max')
ftype = attributes.get('ftype')
maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0
minO = np.iinfo(np.dtype(dtype)).min
maxO = np.iinfo(np.dtype(dtype)).max
f = maxV / (max - min)
return min, max, minO, maxO, f, dtype, ftype
def getT(attributes, value, isChar=False):
ftype = attributes['ftype']
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, ftype = prepareTransformAttributes(
attributes)
value = ((value + np.abs(minO)) / f + min).astype(ftype)
if ftype == 'M':
value = value.astype('datetime64[ms]')
if ftype == 'S1':
value = value.astype('S1') if isChar else chartostring(value.astype
('S1'))
return value
def setT(attributes, value, isChar=False, variable=None):
ftype = attributes['ftype']
if ftype == 'M':
value = value.astype('datetime64[ms]').astype('f8')
if ftype == 'S1' and not isChar:
value = stringtochar(np.array(value).astype('S{}'.format(variable.
shape[1])))
if 'min' in attributes and 'max' in attributes and attributes['type'
] != attributes['ftype']:
min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(
attributes)
value = np.clip(value, min, max)
value = (value - min) * f - np.abs(minO)
value = np.rint(value)
value = np.clip(value, minO, maxO)
value = value.astype(dtype)
return value
class NpEncoder(json.JSONEncoder):
"""
Encoder to change numpy type to python type.
This is used for creating JSON object.
"""
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
def getDimensionsV(variables, dimensionsV={}):
for vname in variables:
for dname in variables[vname]['dimensions']:
if dname in dimensionsV:
dimensionsV[dname].append(vname)
else:
dimensionsV[dname] = [vname]
return dimensionsV
def getVariablesG(groups):
obj = {}
for gname in groups:
for vname in groups[gname]['variables']:
if vname in obj:
obj[vname].append(gname)
else:
obj[vname] = [gname]
for vname in obj:
obj[vname] = sorted(list(set(obj[vname])))
return obj
<|reserved_special_token_1|>
import numpy as np
import json
from netCDF4 import Dataset,stringtochar,chartostring,Variable,Group
def is_json(myjson):
try:
json_object = json.loads(myjson)
except:
return False
return True
def getType(type):
t=np.dtype(type).char
if t=="S":return 'S1'
if t=="U":return 'U1'
return t
def getType3(type):
t=np.dtype(type).char
if t=="B":return 'b'
if t=="H":return 'h'
if t=="I":return 'i'
if t=="l":return 'i'
if t=="S":return 'S1'
if t=="U":return 'U1'
return t
def prepareTransformAttributes(attributes):
dtype=attributes.get("type")
dtype="{}".format(dtype)
min=attributes.get("min")
max=attributes.get("max")
ftype=attributes.get("ftype")
maxV=np.power(2.0,np.iinfo(np.dtype(dtype)).bits)-1.0
minO=np.iinfo(np.dtype(dtype)).min
maxO=np.iinfo(np.dtype(dtype)).max
f=maxV/(max-min)
return min,max,minO,maxO,f,dtype,ftype
def getT(attributes,value,isChar=False):
ftype=attributes['ftype']
if "min" in attributes and "max" in attributes and attributes['type']!=attributes['ftype'] :
min,max,minO,maxO,f,dtype,ftype=prepareTransformAttributes(attributes)
value=(((value+np.abs(minO))/f)+min).astype(ftype)
if ftype=="M":value=value.astype('datetime64[ms]')
if ftype=="S1":value=value.astype("S1") if isChar else chartostring(value.astype("S1"))
return value
def setT(attributes,value,isChar=False,variable=None):
ftype=attributes['ftype']
if ftype=="M":value=value.astype("datetime64[ms]").astype("f8")
if ftype=='S1' and not isChar:value=stringtochar(np.array(value).astype("S{}".format(variable.shape[1])))
if "min" in attributes and "max" in attributes and attributes['type']!=attributes['ftype'] :
min,max,minO,maxO,f,dtype,x=prepareTransformAttributes(attributes)
value=np.clip(value, min, max)
value=(value-min)*f-np.abs(minO)
# value=np.rint(np.nextafter(value, value+1))
value=np.rint(value)
value=np.clip(value, minO, maxO)
value=value.astype(dtype)
return value
class NpEncoder(json.JSONEncoder):
"""
Encoder to change numpy type to python type.
This is used for creating JSON object.
"""
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NpEncoder, self).default(obj)
def getDimensionsV(variables,dimensionsV={}):
for vname in variables:
for dname in variables[vname]['dimensions']:
if dname in dimensionsV:dimensionsV[dname].append(vname)
else:dimensionsV[dname]=[vname]
return dimensionsV
def getVariablesG(groups):
    """Return a mapping of variable name -> sorted, de-duplicated list of
    the group names that contain it."""
    mapping = {}
    for gname in groups:
        for vname in groups[gname]['variables']:
            mapping.setdefault(vname, []).append(gname)
    return {vname: sorted(set(names)) for vname, names in mapping.items()}
|
flexible
|
{
"blob_id": "57490e56833154d3ed3a18b5bf7bc4db32a50d69",
"index": 2979,
"step-1": "<mask token>\n\n\ndef prepareTransformAttributes(attributes):\n dtype = attributes.get('type')\n dtype = '{}'.format(dtype)\n min = attributes.get('min')\n max = attributes.get('max')\n ftype = attributes.get('ftype')\n maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0\n minO = np.iinfo(np.dtype(dtype)).min\n maxO = np.iinfo(np.dtype(dtype)).max\n f = maxV / (max - min)\n return min, max, minO, maxO, f, dtype, ftype\n\n\n<mask token>\n\n\ndef setT(attributes, value, isChar=False, variable=None):\n ftype = attributes['ftype']\n if ftype == 'M':\n value = value.astype('datetime64[ms]').astype('f8')\n if ftype == 'S1' and not isChar:\n value = stringtochar(np.array(value).astype('S{}'.format(variable.\n shape[1])))\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(\n attributes)\n value = np.clip(value, min, max)\n value = (value - min) * f - np.abs(minO)\n value = np.rint(value)\n value = np.clip(value, minO, maxO)\n value = value.astype(dtype)\n return value\n\n\nclass NpEncoder(json.JSONEncoder):\n \"\"\" \n Encoder to change numpy type to python type.\n This is used for creating JSON object.\n \"\"\"\n\n def default(self, obj):\n if isinstance(obj, np.integer):\n return int(obj)\n elif isinstance(obj, np.floating):\n return float(obj)\n elif isinstance(obj, np.ndarray):\n return obj.tolist()\n else:\n return super(NpEncoder, self).default(obj)\n\n\n<mask token>\n\n\ndef getVariablesG(groups):\n obj = {}\n for gname in groups:\n for vname in groups[gname]['variables']:\n if vname in obj:\n obj[vname].append(gname)\n else:\n obj[vname] = [gname]\n for vname in obj:\n obj[vname] = sorted(list(set(obj[vname])))\n return obj\n",
"step-2": "<mask token>\n\n\ndef prepareTransformAttributes(attributes):\n dtype = attributes.get('type')\n dtype = '{}'.format(dtype)\n min = attributes.get('min')\n max = attributes.get('max')\n ftype = attributes.get('ftype')\n maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0\n minO = np.iinfo(np.dtype(dtype)).min\n maxO = np.iinfo(np.dtype(dtype)).max\n f = maxV / (max - min)\n return min, max, minO, maxO, f, dtype, ftype\n\n\n<mask token>\n\n\ndef setT(attributes, value, isChar=False, variable=None):\n ftype = attributes['ftype']\n if ftype == 'M':\n value = value.astype('datetime64[ms]').astype('f8')\n if ftype == 'S1' and not isChar:\n value = stringtochar(np.array(value).astype('S{}'.format(variable.\n shape[1])))\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(\n attributes)\n value = np.clip(value, min, max)\n value = (value - min) * f - np.abs(minO)\n value = np.rint(value)\n value = np.clip(value, minO, maxO)\n value = value.astype(dtype)\n return value\n\n\nclass NpEncoder(json.JSONEncoder):\n \"\"\" \n Encoder to change numpy type to python type.\n This is used for creating JSON object.\n \"\"\"\n\n def default(self, obj):\n if isinstance(obj, np.integer):\n return int(obj)\n elif isinstance(obj, np.floating):\n return float(obj)\n elif isinstance(obj, np.ndarray):\n return obj.tolist()\n else:\n return super(NpEncoder, self).default(obj)\n\n\ndef getDimensionsV(variables, dimensionsV={}):\n for vname in variables:\n for dname in variables[vname]['dimensions']:\n if dname in dimensionsV:\n dimensionsV[dname].append(vname)\n else:\n dimensionsV[dname] = [vname]\n return dimensionsV\n\n\ndef getVariablesG(groups):\n obj = {}\n for gname in groups:\n for vname in groups[gname]['variables']:\n if vname in obj:\n obj[vname].append(gname)\n else:\n obj[vname] = [gname]\n for vname in obj:\n obj[vname] = 
sorted(list(set(obj[vname])))\n return obj\n",
"step-3": "<mask token>\n\n\ndef is_json(myjson):\n try:\n json_object = json.loads(myjson)\n except:\n return False\n return True\n\n\ndef getType(type):\n t = np.dtype(type).char\n if t == 'S':\n return 'S1'\n if t == 'U':\n return 'U1'\n return t\n\n\ndef getType3(type):\n t = np.dtype(type).char\n if t == 'B':\n return 'b'\n if t == 'H':\n return 'h'\n if t == 'I':\n return 'i'\n if t == 'l':\n return 'i'\n if t == 'S':\n return 'S1'\n if t == 'U':\n return 'U1'\n return t\n\n\ndef prepareTransformAttributes(attributes):\n dtype = attributes.get('type')\n dtype = '{}'.format(dtype)\n min = attributes.get('min')\n max = attributes.get('max')\n ftype = attributes.get('ftype')\n maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0\n minO = np.iinfo(np.dtype(dtype)).min\n maxO = np.iinfo(np.dtype(dtype)).max\n f = maxV / (max - min)\n return min, max, minO, maxO, f, dtype, ftype\n\n\ndef getT(attributes, value, isChar=False):\n ftype = attributes['ftype']\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, ftype = prepareTransformAttributes(\n attributes)\n value = ((value + np.abs(minO)) / f + min).astype(ftype)\n if ftype == 'M':\n value = value.astype('datetime64[ms]')\n if ftype == 'S1':\n value = value.astype('S1') if isChar else chartostring(value.astype\n ('S1'))\n return value\n\n\ndef setT(attributes, value, isChar=False, variable=None):\n ftype = attributes['ftype']\n if ftype == 'M':\n value = value.astype('datetime64[ms]').astype('f8')\n if ftype == 'S1' and not isChar:\n value = stringtochar(np.array(value).astype('S{}'.format(variable.\n shape[1])))\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(\n attributes)\n value = np.clip(value, min, max)\n value = (value - min) * f - np.abs(minO)\n value = np.rint(value)\n value = np.clip(value, minO, maxO)\n 
value = value.astype(dtype)\n return value\n\n\nclass NpEncoder(json.JSONEncoder):\n \"\"\" \n Encoder to change numpy type to python type.\n This is used for creating JSON object.\n \"\"\"\n\n def default(self, obj):\n if isinstance(obj, np.integer):\n return int(obj)\n elif isinstance(obj, np.floating):\n return float(obj)\n elif isinstance(obj, np.ndarray):\n return obj.tolist()\n else:\n return super(NpEncoder, self).default(obj)\n\n\ndef getDimensionsV(variables, dimensionsV={}):\n for vname in variables:\n for dname in variables[vname]['dimensions']:\n if dname in dimensionsV:\n dimensionsV[dname].append(vname)\n else:\n dimensionsV[dname] = [vname]\n return dimensionsV\n\n\ndef getVariablesG(groups):\n obj = {}\n for gname in groups:\n for vname in groups[gname]['variables']:\n if vname in obj:\n obj[vname].append(gname)\n else:\n obj[vname] = [gname]\n for vname in obj:\n obj[vname] = sorted(list(set(obj[vname])))\n return obj\n",
"step-4": "import numpy as np\nimport json\nfrom netCDF4 import Dataset, stringtochar, chartostring, Variable, Group\n\n\ndef is_json(myjson):\n try:\n json_object = json.loads(myjson)\n except:\n return False\n return True\n\n\ndef getType(type):\n t = np.dtype(type).char\n if t == 'S':\n return 'S1'\n if t == 'U':\n return 'U1'\n return t\n\n\ndef getType3(type):\n t = np.dtype(type).char\n if t == 'B':\n return 'b'\n if t == 'H':\n return 'h'\n if t == 'I':\n return 'i'\n if t == 'l':\n return 'i'\n if t == 'S':\n return 'S1'\n if t == 'U':\n return 'U1'\n return t\n\n\ndef prepareTransformAttributes(attributes):\n dtype = attributes.get('type')\n dtype = '{}'.format(dtype)\n min = attributes.get('min')\n max = attributes.get('max')\n ftype = attributes.get('ftype')\n maxV = np.power(2.0, np.iinfo(np.dtype(dtype)).bits) - 1.0\n minO = np.iinfo(np.dtype(dtype)).min\n maxO = np.iinfo(np.dtype(dtype)).max\n f = maxV / (max - min)\n return min, max, minO, maxO, f, dtype, ftype\n\n\ndef getT(attributes, value, isChar=False):\n ftype = attributes['ftype']\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, ftype = prepareTransformAttributes(\n attributes)\n value = ((value + np.abs(minO)) / f + min).astype(ftype)\n if ftype == 'M':\n value = value.astype('datetime64[ms]')\n if ftype == 'S1':\n value = value.astype('S1') if isChar else chartostring(value.astype\n ('S1'))\n return value\n\n\ndef setT(attributes, value, isChar=False, variable=None):\n ftype = attributes['ftype']\n if ftype == 'M':\n value = value.astype('datetime64[ms]').astype('f8')\n if ftype == 'S1' and not isChar:\n value = stringtochar(np.array(value).astype('S{}'.format(variable.\n shape[1])))\n if 'min' in attributes and 'max' in attributes and attributes['type'\n ] != attributes['ftype']:\n min, max, minO, maxO, f, dtype, x = prepareTransformAttributes(\n attributes)\n value = np.clip(value, min, max)\n value = 
(value - min) * f - np.abs(minO)\n value = np.rint(value)\n value = np.clip(value, minO, maxO)\n value = value.astype(dtype)\n return value\n\n\nclass NpEncoder(json.JSONEncoder):\n \"\"\" \n Encoder to change numpy type to python type.\n This is used for creating JSON object.\n \"\"\"\n\n def default(self, obj):\n if isinstance(obj, np.integer):\n return int(obj)\n elif isinstance(obj, np.floating):\n return float(obj)\n elif isinstance(obj, np.ndarray):\n return obj.tolist()\n else:\n return super(NpEncoder, self).default(obj)\n\n\ndef getDimensionsV(variables, dimensionsV={}):\n for vname in variables:\n for dname in variables[vname]['dimensions']:\n if dname in dimensionsV:\n dimensionsV[dname].append(vname)\n else:\n dimensionsV[dname] = [vname]\n return dimensionsV\n\n\ndef getVariablesG(groups):\n obj = {}\n for gname in groups:\n for vname in groups[gname]['variables']:\n if vname in obj:\n obj[vname].append(gname)\n else:\n obj[vname] = [gname]\n for vname in obj:\n obj[vname] = sorted(list(set(obj[vname])))\n return obj\n",
"step-5": "import numpy as np\nimport json\nfrom netCDF4 import Dataset,stringtochar,chartostring,Variable,Group\n\ndef is_json(myjson):\n try:\n json_object = json.loads(myjson)\n except:\n return False\n return True\n\ndef getType(type):\n t=np.dtype(type).char\n if t==\"S\":return 'S1'\n if t==\"U\":return 'U1'\n return t\n \ndef getType3(type):\n \n t=np.dtype(type).char\n if t==\"B\":return 'b'\n if t==\"H\":return 'h'\n if t==\"I\":return 'i'\n if t==\"l\":return 'i'\n if t==\"S\":return 'S1'\n if t==\"U\":return 'U1'\n return t\n\ndef prepareTransformAttributes(attributes):\n dtype=attributes.get(\"type\")\n dtype=\"{}\".format(dtype)\n min=attributes.get(\"min\")\n max=attributes.get(\"max\")\n ftype=attributes.get(\"ftype\")\n maxV=np.power(2.0,np.iinfo(np.dtype(dtype)).bits)-1.0\n minO=np.iinfo(np.dtype(dtype)).min\n maxO=np.iinfo(np.dtype(dtype)).max\n f=maxV/(max-min)\n return min,max,minO,maxO,f,dtype,ftype\n\n\n\ndef getT(attributes,value,isChar=False):\n ftype=attributes['ftype']\n if \"min\" in attributes and \"max\" in attributes and attributes['type']!=attributes['ftype'] :\n min,max,minO,maxO,f,dtype,ftype=prepareTransformAttributes(attributes)\n value=(((value+np.abs(minO))/f)+min).astype(ftype)\n \n if ftype==\"M\":value=value.astype('datetime64[ms]')\n if ftype==\"S1\":value=value.astype(\"S1\") if isChar else chartostring(value.astype(\"S1\"))\n \n return value\n \ndef setT(attributes,value,isChar=False,variable=None):\n ftype=attributes['ftype']\n if ftype==\"M\":value=value.astype(\"datetime64[ms]\").astype(\"f8\")\n if ftype=='S1' and not isChar:value=stringtochar(np.array(value).astype(\"S{}\".format(variable.shape[1])))\n \n if \"min\" in attributes and \"max\" in attributes and attributes['type']!=attributes['ftype'] :\n min,max,minO,maxO,f,dtype,x=prepareTransformAttributes(attributes)\n value=np.clip(value, min, max)\n value=(value-min)*f-np.abs(minO)\n # value=np.rint(np.nextafter(value, value+1))\n value=np.rint(value)\n 
value=np.clip(value, minO, maxO)\n value=value.astype(dtype) \n \n return value\n\n\nclass NpEncoder(json.JSONEncoder):\n \"\"\" \n Encoder to change numpy type to python type.\n This is used for creating JSON object.\n \"\"\"\n def default(self, obj):\n if isinstance(obj, np.integer):\n return int(obj)\n elif isinstance(obj, np.floating):\n return float(obj)\n elif isinstance(obj, np.ndarray):\n return obj.tolist()\n else:\n return super(NpEncoder, self).default(obj)\n \ndef getDimensionsV(variables,dimensionsV={}):\n for vname in variables:\n for dname in variables[vname]['dimensions']:\n if dname in dimensionsV:dimensionsV[dname].append(vname)\n else:dimensionsV[dname]=[vname]\n return dimensionsV\n \ndef getVariablesG(groups):\n obj={}\n for gname in groups:\n for vname in groups[gname]['variables']:\n if vname in obj:obj[vname].append(gname)\n else:obj[vname]=[gname]\n for vname in obj:\n obj[vname]=sorted(list(set(obj[vname])))\n return obj ",
"step-ids": [
6,
7,
11,
12,
13
]
}
|
[
6,
7,
11,
12,
13
] |
import numpy as np
class RandomPlayer:
    """Player that returns a uniformly sampled valid action."""

    def __init__(self, game):
        self.game = game

    def play(self, board):
        """Sample action indices at random until one is valid."""
        size = self.game.getActionSize()
        valids = self.game.getValidMoves(board, 1)
        action = np.random.randint(size)
        while valids[action] != 1:
            action = np.random.randint(size)
        return action
class HumanPlayer:
    """Interactive player that reads the move index from standard input."""

    def __init__(self, game):
        self.game = game

    def play(self, board):
        """Prompt repeatedly until the user enters a valid action index."""
        valids = self.game.getValidMoves(board, 1)
        while True:
            choice = int(input('Nhập nước bạn đi'))
            if valids[choice]:
                return choice
            print('Invalid')
class MinimaxPlayer:
    """Depth-limited minimax AI for a 12-pit mancala-style game.

    Flat board layout (list): slots 0 and 6 are the two big pits, 1-5 and
    7-11 the two players' small pits, 12/13 the players' captured scores.
    ``to_board`` additionally fills slots 14/15 from the game state
    (presumably turn information -- see the NOTE in ``eat``).
    """
    def __init__(self, game):
        # Game object providing the canonical 2-row board representation.
        self.game = game
    def play(self, Board):
        """Try every (pit, direction) move for pits 7-11 and return the
        best one encoded as ``position + direction * 100``."""
        key = -1
        bem = -99999999
        board = self.to_board(Board)
        print(Board)
        print(board)
        for i in range(7, 12):
            for j in range(0, 2):
                if board[i] != 0:
                    # Simulate on a 14-slot copy; slots 14/15 of the
                    # converted board are NOT carried into the search.
                    cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]
                    for k in range(14):
                        cpboard[k] = board[k]
                    self.Move(i, j, cpboard)
                    mov = self.minimax(cpboard, 0, 1)
                    if mov > bem:
                        bem = mov
                        key = i + j * 100
        return key
    def to_board(self, board):
        """Flatten the game's 2-row board into the 16-slot list layout."""
        Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]
        # board.turn = self.turn
        Board[14] = board[0][7]
        Board[15] = board[1][7]
        Board[0] = board[0][5]
        Board[6] = board[1][5]
        for i in range(5):
            Board[i + 1] = board[0][i]
            Board[i + 7] = board[1][i]
        Board[12] = board[0][6]
        Board[13] = board[1][6]
        return Board
    def checkWin(self, board):
        """Return 1 (player-1 wins), 0 (player-2 wins) or 2 (draw) once
        both big pits are empty; 3 while the game is still running."""
        global point1, point2
        p1 = 0
        p2 = 0
        point1 = board[12]
        point2 = board[13]
        if board[0] == 0 and board[6] == 0:
            # Game over: remaining stones on each side count for that side.
            p1 = point1 + board[1] + board[2] + board[3] + board[4] + board[5]
            p2 = point2 + board[7] + board[8] + board[9] + board[10] + board[11]
            if p1 > p2:
                return 1
            if p1 == p2:
                return 2
            else:
                return 0
        else:
            return 3
    def checkEat(self, position, direc, board):
        """Capture chain: after sowing, an empty pit followed by a
        non-empty one (excluding the big pits at indices 0/6) is captured,
        then the check recurses along the sowing direction."""
        if direc == 1:
            if board[(position + 1) % 12] == 0 and board[(position + 2) % 12] != 0 and ((position + 1) % 12) % 6 != 0:
                self.eat((position + 2) % 12, board)
                self.checkEat((position + 2) % 12, direc, board)
        else:
            if board[(position - 1) % 12] == 0 and board[(position - 2) % 12] != 0 and ((position - 1) % 12) % 6 != 0:
                self.eat((position - 2) % 12, board)
                self.checkEat((position - 2) % 12, direc, board)
    def eat(self, position, board):
        """Move the stones at ``position`` into one player's score slot."""
        global point1, point2
        # NOTE(review): ``self[14]`` looks like a bug -- MinimaxPlayer is
        # not subscriptable, so this raises TypeError whenever a capture
        # happens.  The intent was presumably ``board[14]`` (the flag set
        # by to_board), but the 14-slot search copies do not carry it.
        # Confirm against the game rules before changing.
        if self[14] == 1:
            board[12] = board[12] + board[position]
        else:
            board[13] = board[13] + board[position]
        board[position] = 0
    def Move(self, position, direc, board):
        """Sow the stones from ``position`` (direc == 1 sows toward
        increasing indices, otherwise decreasing), then either chain into
        another sow or attempt a capture."""
        if board[position] != 0 and position != 0 and position != 6:
            if direc == 1:
                index = (position + 1) % 12
                while board[position] != 0:
                    board[position] = board[position] - 1
                    board[index] = board[index] + 1
                    index = (index + 1) % 12
                # Next pit empty -> try to capture from the last pit
                # reached; otherwise keep sowing from that pit.
                if board[index] == 0:
                    self.checkEat((index - 1 + 12) % 12, direc, board)
                else:
                    self.Move(index, direc, board)
            else:
                index = (position - 1) % 12
                while board[position] != 0:
                    board[position] = board[position] - 1
                    board[index] = board[index] + 1
                    index = (index - 1) % 12
                if board[index] == 0:
                    self.checkEat((index + 1 + 12) % 12, direc, board)
                else:
                    self.Move(index, direc, board)
    def minimax(self, board, depth, turn):
        """Plain minimax to depth 5.  The static evaluation is the score
        difference ``board[13] - board[12]`` (favouring player 2, whose
        win is scored +9999)."""
        if self.checkWin(board) == 1: return -9999
        if self.checkWin(board) == 0: return 9999
        if self.checkWin(board) == 2: return 0
        if depth == 5:
            return board[13] - board[12]
        if turn == 0:
            # Maximising side: pits 7-11.
            best = -1000
            for i in range(7, 12):
                for j in range(0, 2):
                    if (board[i] != 0):
                        cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]
                        for k in range(14):
                            cpboard[k] = board[k]
                        self.Move(i, j, cpboard)
                        vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)
                        if vl > best:
                            best = vl
            return best
        else:
            # Minimising side: pits 1-5.
            best = 1000
            for i in range(1, 6):
                for j in range(0, 2):
                    if (board[i] != 0):
                        cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]
                        for k in range(14):
                            cpboard[k] = board[k]
                        self.Move(i, j, cpboard)
                        vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)
                        if vl < best:
                            best = vl
            return best
|
normal
|
{
"blob_id": "efe099bc5cd0319ffefd779f1e854f1a60edc5fa",
"index": 9240,
"step-1": "<mask token>\n\n\nclass MinimaxPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, Board):\n key = -1\n bem = -99999999\n board = self.to_board(Board)\n print(Board)\n print(board)\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n mov = self.minimax(cpboard, 0, 1)\n if mov > bem:\n bem = mov\n key = i + j * 100\n return key\n\n def to_board(self, board):\n Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]\n Board[14] = board[0][7]\n Board[15] = board[1][7]\n Board[0] = board[0][5]\n Board[6] = board[1][5]\n for i in range(5):\n Board[i + 1] = board[0][i]\n Board[i + 7] = board[1][i]\n Board[12] = board[0][6]\n Board[13] = board[1][6]\n return Board\n <mask token>\n <mask token>\n\n def eat(self, position, board):\n global point1, point2\n if self[14] == 1:\n board[12] = board[12] + board[position]\n else:\n board[13] = board[13] + board[position]\n board[position] = 0\n\n def Move(self, position, direc, board):\n if board[position] != 0 and position != 0 and position != 6:\n if direc == 1:\n index = (position + 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index + 1) % 12\n if board[index] == 0:\n self.checkEat((index - 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n else:\n index = (position - 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index - 1) % 12\n if board[index] == 0:\n self.checkEat((index + 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n\n def minimax(self, board, depth, turn):\n if self.checkWin(board) == 1:\n return -9999\n if self.checkWin(board) == 0:\n return 9999\n if self.checkWin(board) == 2:\n return 0\n if depth == 5:\n return board[13] - board[12]\n if 
turn == 0:\n best = -1000\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl > best:\n best = vl\n return best\n else:\n best = 1000\n for i in range(1, 6):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl < best:\n best = vl\n return best\n",
"step-2": "<mask token>\n\n\nclass MinimaxPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, Board):\n key = -1\n bem = -99999999\n board = self.to_board(Board)\n print(Board)\n print(board)\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n mov = self.minimax(cpboard, 0, 1)\n if mov > bem:\n bem = mov\n key = i + j * 100\n return key\n\n def to_board(self, board):\n Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]\n Board[14] = board[0][7]\n Board[15] = board[1][7]\n Board[0] = board[0][5]\n Board[6] = board[1][5]\n for i in range(5):\n Board[i + 1] = board[0][i]\n Board[i + 7] = board[1][i]\n Board[12] = board[0][6]\n Board[13] = board[1][6]\n return Board\n\n def checkWin(self, board):\n global point1, point2\n p1 = 0\n p2 = 0\n point1 = board[12]\n point2 = board[13]\n if board[0] == 0 and board[6] == 0:\n p1 = point1 + board[1] + board[2] + board[3] + board[4] + board[5]\n p2 = point2 + board[7] + board[8] + board[9] + board[10] + board[11\n ]\n if p1 > p2:\n return 1\n if p1 == p2:\n return 2\n else:\n return 0\n else:\n return 3\n\n def checkEat(self, position, direc, board):\n if direc == 1:\n if board[(position + 1) % 12] == 0 and board[(position + 2) % 12\n ] != 0 and (position + 1) % 12 % 6 != 0:\n self.eat((position + 2) % 12, board)\n self.checkEat((position + 2) % 12, direc, board)\n elif board[(position - 1) % 12] == 0 and board[(position - 2) % 12\n ] != 0 and (position - 1) % 12 % 6 != 0:\n self.eat((position - 2) % 12, board)\n self.checkEat((position - 2) % 12, direc, board)\n\n def eat(self, position, board):\n global point1, point2\n if self[14] == 1:\n board[12] = board[12] + board[position]\n else:\n board[13] = board[13] + board[position]\n board[position] = 0\n\n def Move(self, position, direc, board):\n if board[position] != 0 and position != 0 and position 
!= 6:\n if direc == 1:\n index = (position + 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index + 1) % 12\n if board[index] == 0:\n self.checkEat((index - 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n else:\n index = (position - 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index - 1) % 12\n if board[index] == 0:\n self.checkEat((index + 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n\n def minimax(self, board, depth, turn):\n if self.checkWin(board) == 1:\n return -9999\n if self.checkWin(board) == 0:\n return 9999\n if self.checkWin(board) == 2:\n return 0\n if depth == 5:\n return board[13] - board[12]\n if turn == 0:\n best = -1000\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl > best:\n best = vl\n return best\n else:\n best = 1000\n for i in range(1, 6):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl < best:\n best = vl\n return best\n",
"step-3": "<mask token>\n\n\nclass RandomPlayer:\n <mask token>\n\n def play(self, board):\n a = np.random.randint(self.game.getActionSize())\n valids = self.game.getValidMoves(board, 1)\n while valids[a] != 1:\n a = np.random.randint(self.game.getActionSize())\n return a\n\n\nclass HumanPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, board):\n valids = self.game.getValidMoves(board, 1)\n while True:\n a = int(input('Nhập nước bạn đi'))\n if valids[a]:\n break\n else:\n print('Invalid')\n return a\n\n\nclass MinimaxPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, Board):\n key = -1\n bem = -99999999\n board = self.to_board(Board)\n print(Board)\n print(board)\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n mov = self.minimax(cpboard, 0, 1)\n if mov > bem:\n bem = mov\n key = i + j * 100\n return key\n\n def to_board(self, board):\n Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]\n Board[14] = board[0][7]\n Board[15] = board[1][7]\n Board[0] = board[0][5]\n Board[6] = board[1][5]\n for i in range(5):\n Board[i + 1] = board[0][i]\n Board[i + 7] = board[1][i]\n Board[12] = board[0][6]\n Board[13] = board[1][6]\n return Board\n\n def checkWin(self, board):\n global point1, point2\n p1 = 0\n p2 = 0\n point1 = board[12]\n point2 = board[13]\n if board[0] == 0 and board[6] == 0:\n p1 = point1 + board[1] + board[2] + board[3] + board[4] + board[5]\n p2 = point2 + board[7] + board[8] + board[9] + board[10] + board[11\n ]\n if p1 > p2:\n return 1\n if p1 == p2:\n return 2\n else:\n return 0\n else:\n return 3\n\n def checkEat(self, position, direc, board):\n if direc == 1:\n if board[(position + 1) % 12] == 0 and board[(position + 2) % 12\n ] != 0 and (position + 1) % 12 % 6 != 0:\n self.eat((position + 2) % 12, board)\n self.checkEat((position + 2) % 12, 
direc, board)\n elif board[(position - 1) % 12] == 0 and board[(position - 2) % 12\n ] != 0 and (position - 1) % 12 % 6 != 0:\n self.eat((position - 2) % 12, board)\n self.checkEat((position - 2) % 12, direc, board)\n\n def eat(self, position, board):\n global point1, point2\n if self[14] == 1:\n board[12] = board[12] + board[position]\n else:\n board[13] = board[13] + board[position]\n board[position] = 0\n\n def Move(self, position, direc, board):\n if board[position] != 0 and position != 0 and position != 6:\n if direc == 1:\n index = (position + 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index + 1) % 12\n if board[index] == 0:\n self.checkEat((index - 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n else:\n index = (position - 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index - 1) % 12\n if board[index] == 0:\n self.checkEat((index + 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n\n def minimax(self, board, depth, turn):\n if self.checkWin(board) == 1:\n return -9999\n if self.checkWin(board) == 0:\n return 9999\n if self.checkWin(board) == 2:\n return 0\n if depth == 5:\n return board[13] - board[12]\n if turn == 0:\n best = -1000\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl > best:\n best = vl\n return best\n else:\n best = 1000\n for i in range(1, 6):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl < best:\n best = vl\n return best\n",
"step-4": "import numpy as np\n\n\nclass RandomPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, board):\n a = np.random.randint(self.game.getActionSize())\n valids = self.game.getValidMoves(board, 1)\n while valids[a] != 1:\n a = np.random.randint(self.game.getActionSize())\n return a\n\n\nclass HumanPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, board):\n valids = self.game.getValidMoves(board, 1)\n while True:\n a = int(input('Nhập nước bạn đi'))\n if valids[a]:\n break\n else:\n print('Invalid')\n return a\n\n\nclass MinimaxPlayer:\n\n def __init__(self, game):\n self.game = game\n\n def play(self, Board):\n key = -1\n bem = -99999999\n board = self.to_board(Board)\n print(Board)\n print(board)\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n mov = self.minimax(cpboard, 0, 1)\n if mov > bem:\n bem = mov\n key = i + j * 100\n return key\n\n def to_board(self, board):\n Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]\n Board[14] = board[0][7]\n Board[15] = board[1][7]\n Board[0] = board[0][5]\n Board[6] = board[1][5]\n for i in range(5):\n Board[i + 1] = board[0][i]\n Board[i + 7] = board[1][i]\n Board[12] = board[0][6]\n Board[13] = board[1][6]\n return Board\n\n def checkWin(self, board):\n global point1, point2\n p1 = 0\n p2 = 0\n point1 = board[12]\n point2 = board[13]\n if board[0] == 0 and board[6] == 0:\n p1 = point1 + board[1] + board[2] + board[3] + board[4] + board[5]\n p2 = point2 + board[7] + board[8] + board[9] + board[10] + board[11\n ]\n if p1 > p2:\n return 1\n if p1 == p2:\n return 2\n else:\n return 0\n else:\n return 3\n\n def checkEat(self, position, direc, board):\n if direc == 1:\n if board[(position + 1) % 12] == 0 and board[(position + 2) % 12\n ] != 0 and (position + 1) % 12 % 6 != 0:\n self.eat((position + 2) % 12, board)\n 
self.checkEat((position + 2) % 12, direc, board)\n elif board[(position - 1) % 12] == 0 and board[(position - 2) % 12\n ] != 0 and (position - 1) % 12 % 6 != 0:\n self.eat((position - 2) % 12, board)\n self.checkEat((position - 2) % 12, direc, board)\n\n def eat(self, position, board):\n global point1, point2\n if self[14] == 1:\n board[12] = board[12] + board[position]\n else:\n board[13] = board[13] + board[position]\n board[position] = 0\n\n def Move(self, position, direc, board):\n if board[position] != 0 and position != 0 and position != 6:\n if direc == 1:\n index = (position + 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index + 1) % 12\n if board[index] == 0:\n self.checkEat((index - 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n else:\n index = (position - 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index - 1) % 12\n if board[index] == 0:\n self.checkEat((index + 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n\n def minimax(self, board, depth, turn):\n if self.checkWin(board) == 1:\n return -9999\n if self.checkWin(board) == 0:\n return 9999\n if self.checkWin(board) == 2:\n return 0\n if depth == 5:\n return board[13] - board[12]\n if turn == 0:\n best = -1000\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl > best:\n best = vl\n return best\n else:\n best = 1000\n for i in range(1, 6):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl < best:\n best = vl\n 
return best\n",
"step-5": "import numpy as np\n\n\nclass RandomPlayer:\n def __init__(self, game):\n self.game = game\n\n def play(self, board):\n a = np.random.randint(self.game.getActionSize())\n valids = self.game.getValidMoves(board, 1)\n while valids[a] != 1:\n a = np.random.randint(self.game.getActionSize())\n return a\n\n\nclass HumanPlayer:\n def __init__(self, game):\n self.game = game\n\n def play(self, board):\n valids = self.game.getValidMoves(board, 1)\n while True:\n a = int(input('Nhập nước bạn đi'))\n if valids[a]:\n break\n else:\n print('Invalid')\n return a\n\n\nclass MinimaxPlayer:\n def __init__(self, game):\n self.game = game\n\n def play(self, Board):\n key = -1\n bem = -99999999\n board = self.to_board(Board)\n print(Board)\n print(board)\n for i in range(7, 12):\n for j in range(0, 2):\n if board[i] != 0:\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n mov = self.minimax(cpboard, 0, 1)\n\n if mov > bem:\n bem = mov\n key = i + j * 100\n return key\n\n def to_board(self, board):\n Board = [10, 5, 5, 5, 5, 5, 10, 5, 5, 5, 5, 5, 0, 0, 0, 0]\n # board.turn = self.turn\n Board[14] = board[0][7]\n Board[15] = board[1][7]\n Board[0] = board[0][5]\n Board[6] = board[1][5]\n for i in range(5):\n Board[i + 1] = board[0][i]\n Board[i + 7] = board[1][i]\n Board[12] = board[0][6]\n Board[13] = board[1][6]\n return Board\n\n def checkWin(self, board):\n global point1, point2\n p1 = 0\n p2 = 0\n point1 = board[12]\n point2 = board[13]\n if board[0] == 0 and board[6] == 0:\n p1 = point1 + board[1] + board[2] + board[3] + board[4] + board[5]\n p2 = point2 + board[7] + board[8] + board[9] + board[10] + board[11]\n if p1 > p2:\n return 1\n if p1 == p2:\n return 2\n else:\n return 0\n else:\n return 3\n\n def checkEat(self, position, direc, board):\n if direc == 1:\n if board[(position + 1) % 12] == 0 and board[(position + 2) % 12] != 0 and ((position + 1) % 12) % 6 != 0:\n self.eat((position 
+ 2) % 12, board)\n self.checkEat((position + 2) % 12, direc, board)\n else:\n if board[(position - 1) % 12] == 0 and board[(position - 2) % 12] != 0 and ((position - 1) % 12) % 6 != 0:\n self.eat((position - 2) % 12, board)\n self.checkEat((position - 2) % 12, direc, board)\n\n def eat(self, position, board):\n global point1, point2\n if self[14] == 1:\n board[12] = board[12] + board[position]\n else:\n board[13] = board[13] + board[position]\n\n board[position] = 0\n\n def Move(self, position, direc, board):\n if board[position] != 0 and position != 0 and position != 6:\n if direc == 1:\n index = (position + 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index + 1) % 12\n if board[index] == 0:\n self.checkEat((index - 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n else:\n index = (position - 1) % 12\n while board[position] != 0:\n board[position] = board[position] - 1\n board[index] = board[index] + 1\n index = (index - 1) % 12\n if board[index] == 0:\n self.checkEat((index + 1 + 12) % 12, direc, board)\n else:\n self.Move(index, direc, board)\n\n def minimax(self, board, depth, turn):\n if self.checkWin(board) == 1: return -9999\n if self.checkWin(board) == 0: return 9999\n if self.checkWin(board) == 2: return 0\n if depth == 5:\n return board[13] - board[12]\n if turn == 0:\n best = -1000\n for i in range(7, 12):\n for j in range(0, 2):\n if (board[i] != 0):\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl > best:\n best = vl\n return best\n else:\n best = 1000\n for i in range(1, 6):\n for j in range(0, 2):\n if (board[i] != 0):\n cpboard = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 1, 1]\n for k in range(14):\n cpboard[k] = board[k]\n self.Move(i, j, cpboard)\n vl = self.minimax(cpboard, depth + 1, (turn + 1) % 2)\n if vl 
< best:\n best = vl\n return best\n",
"step-ids": [
7,
9,
14,
16,
17
]
}
|
[
7,
9,
14,
16,
17
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"Widget for exporting the data"
import asyncio
from pathlib import Path
from typing import List
from bokeh.models import Div, CustomAction, CustomJS
from view.dialog import FileDialog
from utils.gui import startfile
class SaveFileDialog(FileDialog):
"A file dialog that adds a default save path"
def __init__(self, ctrl):
super().__init__(ctrl, storage = "save")
def _defaultpath(ext, bopen):
assert not bopen
pot = [i for i in self.storedpaths(ctrl, "load", ext) if i.exists()]
ope = next((i for i in pot if i.suffix not in ('', '.gr')), None)
if ope is None:
ope = self.firstexistingpath(pot)
pot = self.storedpaths(ctrl, "save", ext)
sav = self.firstexistingparent(pot)
if ope is None:
return sav
if sav is None:
if Path(ope).is_dir():
return ope
sav = Path(ope).with_suffix(ext[0][1])
else:
psa = Path(sav)
if psa.suffix == '':
sav = (psa/Path(ope).stem).with_suffix(ext[0][1])
else:
sav = (psa.parent/Path(ope).stem).with_suffix(psa.suffix)
self.defaultextension = sav.suffix[1:] if sav.suffix != '' else None
return str(sav)
self.__store = self.access[1]
self.access = _defaultpath, None
self.filetypes = "xlsx:*.xlsx"
self.title = "Export plot data to excel"
def store(self, *_):
"store the path"
return self.__store(*_)
class CSVExporter:
"exports all to csv"
@classmethod
def addtodoc(cls, mainviews, ctrl, doc) -> List[Div]:
"creates the widget"
dlg = SaveFileDialog(ctrl)
div = Div(text = "", width = 0, height = 0)
mainview = mainviews[0] if isinstance(mainviews, (list, tuple)) else mainviews
figure = mainview.getfigure()
figure.tools = (
figure.tools
+ [
CustomAction(
action_tooltip = dlg.title,
callback = CustomJS(
code = 'div.text = div.text + " ";',
args = dict(div = div)
)
)
]
)
if isinstance(mainviews, (list, tuple)):
for i in mainviews[1:]:
i.getfigure().tools = i.getfigure().tools + [figure.tools[-1]]
def _cb(attr, old, new):
if new == " " and div.text == ' ':
div.text = ""
asyncio.create_task(cls._run(dlg, mainview, ctrl, doc))
div.on_change("text", _cb)
return [div]
def reset(self, *_):
"reset all"
@staticmethod
async def _run(dlg: SaveFileDialog, mainview, ctrl, doc):
paths = await mainview.threadmethod(dlg.save)
if paths is None:
return
@doc.add_next_tick_callback
def _toolbarsave():
with ctrl.action:
dlg.store(paths, False) # pylint: disable=not-callable
path = paths if isinstance(paths, (str, Path)) else paths[0]
if mainview.export(path) and Path(path).exists():
startfile(path)
|
normal
|
{
"blob_id": "d120172e65f329b1137df38b693e5fe7145bc80d",
"index": 2840,
"step-1": "<mask token>\n\n\nclass CSVExporter:\n <mask token>\n <mask token>\n\n def reset(self, *_):\n \"\"\"reset all\"\"\"\n\n @staticmethod\n async def _run(dlg: SaveFileDialog, mainview, ctrl, doc):\n paths = await mainview.threadmethod(dlg.save)\n if paths is None:\n return\n\n @doc.add_next_tick_callback\n def _toolbarsave():\n with ctrl.action:\n dlg.store(paths, False)\n path = paths if isinstance(paths, (str, Path)) else paths[0]\n if mainview.export(path) and Path(path).exists():\n startfile(path)\n",
"step-2": "<mask token>\n\n\nclass SaveFileDialog(FileDialog):\n <mask token>\n\n def __init__(self, ctrl):\n super().__init__(ctrl, storage='save')\n\n def _defaultpath(ext, bopen):\n assert not bopen\n pot = [i for i in self.storedpaths(ctrl, 'load', ext) if i.exists()\n ]\n ope = next((i for i in pot if i.suffix not in ('', '.gr')), None)\n if ope is None:\n ope = self.firstexistingpath(pot)\n pot = self.storedpaths(ctrl, 'save', ext)\n sav = self.firstexistingparent(pot)\n if ope is None:\n return sav\n if sav is None:\n if Path(ope).is_dir():\n return ope\n sav = Path(ope).with_suffix(ext[0][1])\n else:\n psa = Path(sav)\n if psa.suffix == '':\n sav = (psa / Path(ope).stem).with_suffix(ext[0][1])\n else:\n sav = (psa.parent / Path(ope).stem).with_suffix(psa.suffix)\n self.defaultextension = sav.suffix[1:\n ] if sav.suffix != '' else None\n return str(sav)\n self.__store = self.access[1]\n self.access = _defaultpath, None\n self.filetypes = 'xlsx:*.xlsx'\n self.title = 'Export plot data to excel'\n\n def store(self, *_):\n \"\"\"store the path\"\"\"\n return self.__store(*_)\n\n\nclass CSVExporter:\n \"\"\"exports all to csv\"\"\"\n\n @classmethod\n def addtodoc(cls, mainviews, ctrl, doc) ->List[Div]:\n \"\"\"creates the widget\"\"\"\n dlg = SaveFileDialog(ctrl)\n div = Div(text='', width=0, height=0)\n mainview = mainviews[0] if isinstance(mainviews, (list, tuple)\n ) else mainviews\n figure = mainview.getfigure()\n figure.tools = figure.tools + [CustomAction(action_tooltip=dlg.\n title, callback=CustomJS(code='div.text = div.text + \" \";',\n args=dict(div=div)))]\n if isinstance(mainviews, (list, tuple)):\n for i in mainviews[1:]:\n i.getfigure().tools = i.getfigure().tools + [figure.tools[-1]]\n\n def _cb(attr, old, new):\n if new == ' ' and div.text == ' ':\n div.text = ''\n asyncio.create_task(cls._run(dlg, mainview, ctrl, doc))\n div.on_change('text', _cb)\n return [div]\n\n def reset(self, *_):\n \"\"\"reset all\"\"\"\n\n @staticmethod\n async def 
_run(dlg: SaveFileDialog, mainview, ctrl, doc):\n paths = await mainview.threadmethod(dlg.save)\n if paths is None:\n return\n\n @doc.add_next_tick_callback\n def _toolbarsave():\n with ctrl.action:\n dlg.store(paths, False)\n path = paths if isinstance(paths, (str, Path)) else paths[0]\n if mainview.export(path) and Path(path).exists():\n startfile(path)\n",
"step-3": "<mask token>\n\n\nclass SaveFileDialog(FileDialog):\n \"\"\"A file dialog that adds a default save path\"\"\"\n\n def __init__(self, ctrl):\n super().__init__(ctrl, storage='save')\n\n def _defaultpath(ext, bopen):\n assert not bopen\n pot = [i for i in self.storedpaths(ctrl, 'load', ext) if i.exists()\n ]\n ope = next((i for i in pot if i.suffix not in ('', '.gr')), None)\n if ope is None:\n ope = self.firstexistingpath(pot)\n pot = self.storedpaths(ctrl, 'save', ext)\n sav = self.firstexistingparent(pot)\n if ope is None:\n return sav\n if sav is None:\n if Path(ope).is_dir():\n return ope\n sav = Path(ope).with_suffix(ext[0][1])\n else:\n psa = Path(sav)\n if psa.suffix == '':\n sav = (psa / Path(ope).stem).with_suffix(ext[0][1])\n else:\n sav = (psa.parent / Path(ope).stem).with_suffix(psa.suffix)\n self.defaultextension = sav.suffix[1:\n ] if sav.suffix != '' else None\n return str(sav)\n self.__store = self.access[1]\n self.access = _defaultpath, None\n self.filetypes = 'xlsx:*.xlsx'\n self.title = 'Export plot data to excel'\n\n def store(self, *_):\n \"\"\"store the path\"\"\"\n return self.__store(*_)\n\n\nclass CSVExporter:\n \"\"\"exports all to csv\"\"\"\n\n @classmethod\n def addtodoc(cls, mainviews, ctrl, doc) ->List[Div]:\n \"\"\"creates the widget\"\"\"\n dlg = SaveFileDialog(ctrl)\n div = Div(text='', width=0, height=0)\n mainview = mainviews[0] if isinstance(mainviews, (list, tuple)\n ) else mainviews\n figure = mainview.getfigure()\n figure.tools = figure.tools + [CustomAction(action_tooltip=dlg.\n title, callback=CustomJS(code='div.text = div.text + \" \";',\n args=dict(div=div)))]\n if isinstance(mainviews, (list, tuple)):\n for i in mainviews[1:]:\n i.getfigure().tools = i.getfigure().tools + [figure.tools[-1]]\n\n def _cb(attr, old, new):\n if new == ' ' and div.text == ' ':\n div.text = ''\n asyncio.create_task(cls._run(dlg, mainview, ctrl, doc))\n div.on_change('text', _cb)\n return [div]\n\n def reset(self, *_):\n \"\"\"reset 
all\"\"\"\n\n @staticmethod\n async def _run(dlg: SaveFileDialog, mainview, ctrl, doc):\n paths = await mainview.threadmethod(dlg.save)\n if paths is None:\n return\n\n @doc.add_next_tick_callback\n def _toolbarsave():\n with ctrl.action:\n dlg.store(paths, False)\n path = paths if isinstance(paths, (str, Path)) else paths[0]\n if mainview.export(path) and Path(path).exists():\n startfile(path)\n",
"step-4": "<mask token>\nimport asyncio\nfrom pathlib import Path\nfrom typing import List\nfrom bokeh.models import Div, CustomAction, CustomJS\nfrom view.dialog import FileDialog\nfrom utils.gui import startfile\n\n\nclass SaveFileDialog(FileDialog):\n \"\"\"A file dialog that adds a default save path\"\"\"\n\n def __init__(self, ctrl):\n super().__init__(ctrl, storage='save')\n\n def _defaultpath(ext, bopen):\n assert not bopen\n pot = [i for i in self.storedpaths(ctrl, 'load', ext) if i.exists()\n ]\n ope = next((i for i in pot if i.suffix not in ('', '.gr')), None)\n if ope is None:\n ope = self.firstexistingpath(pot)\n pot = self.storedpaths(ctrl, 'save', ext)\n sav = self.firstexistingparent(pot)\n if ope is None:\n return sav\n if sav is None:\n if Path(ope).is_dir():\n return ope\n sav = Path(ope).with_suffix(ext[0][1])\n else:\n psa = Path(sav)\n if psa.suffix == '':\n sav = (psa / Path(ope).stem).with_suffix(ext[0][1])\n else:\n sav = (psa.parent / Path(ope).stem).with_suffix(psa.suffix)\n self.defaultextension = sav.suffix[1:\n ] if sav.suffix != '' else None\n return str(sav)\n self.__store = self.access[1]\n self.access = _defaultpath, None\n self.filetypes = 'xlsx:*.xlsx'\n self.title = 'Export plot data to excel'\n\n def store(self, *_):\n \"\"\"store the path\"\"\"\n return self.__store(*_)\n\n\nclass CSVExporter:\n \"\"\"exports all to csv\"\"\"\n\n @classmethod\n def addtodoc(cls, mainviews, ctrl, doc) ->List[Div]:\n \"\"\"creates the widget\"\"\"\n dlg = SaveFileDialog(ctrl)\n div = Div(text='', width=0, height=0)\n mainview = mainviews[0] if isinstance(mainviews, (list, tuple)\n ) else mainviews\n figure = mainview.getfigure()\n figure.tools = figure.tools + [CustomAction(action_tooltip=dlg.\n title, callback=CustomJS(code='div.text = div.text + \" \";',\n args=dict(div=div)))]\n if isinstance(mainviews, (list, tuple)):\n for i in mainviews[1:]:\n i.getfigure().tools = i.getfigure().tools + [figure.tools[-1]]\n\n def _cb(attr, old, new):\n if 
new == ' ' and div.text == ' ':\n div.text = ''\n asyncio.create_task(cls._run(dlg, mainview, ctrl, doc))\n div.on_change('text', _cb)\n return [div]\n\n def reset(self, *_):\n \"\"\"reset all\"\"\"\n\n @staticmethod\n async def _run(dlg: SaveFileDialog, mainview, ctrl, doc):\n paths = await mainview.threadmethod(dlg.save)\n if paths is None:\n return\n\n @doc.add_next_tick_callback\n def _toolbarsave():\n with ctrl.action:\n dlg.store(paths, False)\n path = paths if isinstance(paths, (str, Path)) else paths[0]\n if mainview.export(path) and Path(path).exists():\n startfile(path)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"Widget for exporting the data\"\nimport asyncio\nfrom pathlib import Path\nfrom typing import List\nfrom bokeh.models import Div, CustomAction, CustomJS\nfrom view.dialog import FileDialog\nfrom utils.gui import startfile\n\nclass SaveFileDialog(FileDialog):\n \"A file dialog that adds a default save path\"\n def __init__(self, ctrl):\n super().__init__(ctrl, storage = \"save\")\n\n def _defaultpath(ext, bopen):\n assert not bopen\n pot = [i for i in self.storedpaths(ctrl, \"load\", ext) if i.exists()]\n ope = next((i for i in pot if i.suffix not in ('', '.gr')), None)\n if ope is None:\n ope = self.firstexistingpath(pot)\n\n pot = self.storedpaths(ctrl, \"save\", ext)\n sav = self.firstexistingparent(pot)\n\n if ope is None:\n return sav\n\n if sav is None:\n if Path(ope).is_dir():\n return ope\n sav = Path(ope).with_suffix(ext[0][1])\n else:\n psa = Path(sav)\n if psa.suffix == '':\n sav = (psa/Path(ope).stem).with_suffix(ext[0][1])\n else:\n sav = (psa.parent/Path(ope).stem).with_suffix(psa.suffix)\n\n self.defaultextension = sav.suffix[1:] if sav.suffix != '' else None\n return str(sav)\n\n self.__store = self.access[1]\n self.access = _defaultpath, None\n self.filetypes = \"xlsx:*.xlsx\"\n self.title = \"Export plot data to excel\"\n\n def store(self, *_):\n \"store the path\"\n return self.__store(*_)\n\nclass CSVExporter:\n \"exports all to csv\"\n @classmethod\n def addtodoc(cls, mainviews, ctrl, doc) -> List[Div]:\n \"creates the widget\"\n dlg = SaveFileDialog(ctrl)\n div = Div(text = \"\", width = 0, height = 0)\n\n mainview = mainviews[0] if isinstance(mainviews, (list, tuple)) else mainviews\n figure = mainview.getfigure()\n\n figure.tools = (\n figure.tools\n + [\n CustomAction(\n action_tooltip = dlg.title,\n callback = CustomJS(\n code = 'div.text = div.text + \" \";',\n args = dict(div = div)\n )\n )\n ]\n )\n\n if isinstance(mainviews, (list, tuple)):\n for i in mainviews[1:]:\n 
i.getfigure().tools = i.getfigure().tools + [figure.tools[-1]]\n\n def _cb(attr, old, new):\n if new == \" \" and div.text == ' ':\n div.text = \"\"\n asyncio.create_task(cls._run(dlg, mainview, ctrl, doc))\n\n div.on_change(\"text\", _cb)\n return [div]\n\n def reset(self, *_):\n \"reset all\"\n\n @staticmethod\n async def _run(dlg: SaveFileDialog, mainview, ctrl, doc):\n paths = await mainview.threadmethod(dlg.save)\n if paths is None:\n return\n\n @doc.add_next_tick_callback\n def _toolbarsave():\n with ctrl.action:\n dlg.store(paths, False) # pylint: disable=not-callable\n path = paths if isinstance(paths, (str, Path)) else paths[0]\n if mainview.export(path) and Path(path).exists():\n startfile(path)\n",
"step-ids": [
2,
7,
8,
9,
10
]
}
|
[
2,
7,
8,
9,
10
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('location', '0005_auto_20170303_1625'),
]
operations = [
migrations.RemoveField(
model_name='location',
name='block',
),
migrations.RemoveField(
model_name='location',
name='mandapam',
),
migrations.RemoveField(
model_name='location',
name='others',
),
migrations.RemoveField(
model_name='location',
name='sub_district',
),
migrations.RemoveField(
model_name='location',
name='taluka',
),
migrations.RemoveField(
model_name='location',
name='tehsil',
),
migrations.AlterField(
model_name='location',
name='sub_district_type',
field=models.ForeignKey(related_name='location', blank=True, to='location.SubDistrictType', null=True, on_delete=django.db.models.deletion.CASCADE),
),
]
|
normal
|
{
"blob_id": "ca7b3b5df860d3c3fb0953857ad950affdcc671d",
"index": 9311,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('location', '0005_auto_20170303_1625')]\n operations = [migrations.RemoveField(model_name='location', name=\n 'block'), migrations.RemoveField(model_name='location', name=\n 'mandapam'), migrations.RemoveField(model_name='location', name=\n 'others'), migrations.RemoveField(model_name='location', name=\n 'sub_district'), migrations.RemoveField(model_name='location', name\n ='taluka'), migrations.RemoveField(model_name='location', name=\n 'tehsil'), migrations.AlterField(model_name='location', name=\n 'sub_district_type', field=models.ForeignKey(related_name=\n 'location', blank=True, to='location.SubDistrictType', null=True,\n on_delete=django.db.models.deletion.CASCADE))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('location', '0005_auto_20170303_1625')]\n operations = [migrations.RemoveField(model_name='location', name=\n 'block'), migrations.RemoveField(model_name='location', name=\n 'mandapam'), migrations.RemoveField(model_name='location', name=\n 'others'), migrations.RemoveField(model_name='location', name=\n 'sub_district'), migrations.RemoveField(model_name='location', name\n ='taluka'), migrations.RemoveField(model_name='location', name=\n 'tehsil'), migrations.AlterField(model_name='location', name=\n 'sub_district_type', field=models.ForeignKey(related_name=\n 'location', blank=True, to='location.SubDistrictType', null=True,\n on_delete=django.db.models.deletion.CASCADE))]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('location', '0005_auto_20170303_1625'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='location',\n name='block',\n ),\n migrations.RemoveField(\n model_name='location',\n name='mandapam',\n ),\n migrations.RemoveField(\n model_name='location',\n name='others',\n ),\n migrations.RemoveField(\n model_name='location',\n name='sub_district',\n ),\n migrations.RemoveField(\n model_name='location',\n name='taluka',\n ),\n migrations.RemoveField(\n model_name='location',\n name='tehsil',\n ),\n migrations.AlterField(\n model_name='location',\n name='sub_district_type',\n field=models.ForeignKey(related_name='location', blank=True, to='location.SubDistrictType', null=True, on_delete=django.db.models.deletion.CASCADE),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from functools import partial
def power_func(x, y, a=1, b=0):
return a * x ** y + b
new_func = partial(power_func, 2, a=4)
print(new_func(4, b=1))
print(new_func(1))
|
normal
|
{
"blob_id": "c9f1768e2f2dd47d637c2e577067eb6cd163e972",
"index": 8331,
"step-1": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\n<mask token>\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-3": "<mask token>\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\nnew_func = partial(power_func, 2, a=4)\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-4": "from functools import partial\n\n\ndef power_func(x, y, a=1, b=0):\n return a * x ** y + b\n\n\nnew_func = partial(power_func, 2, a=4)\nprint(new_func(4, b=1))\nprint(new_func(1))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from django.db import models
from django.conf import settings
from django.utils.text import slugify
from six import python_2_unicode_compatible
from ckeditor_uploader.fields import RichTextUploadingField
from ckeditor.fields import RichTextField
# Create your models here.
class topic(models.Model):
name = models.CharField(max_length=255, primary_key=True)
showname = models.CharField(max_length=255, null= True)
def __str__(self):
return self.name
class article(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(max_length=255, unique= True, blank=True, editable=True, null = True)
topic = models.ForeignKey(topic, on_delete=models.CASCADE)
author = models.CharField(max_length=255)
opening = models.TextField()
body = RichTextUploadingField()
date = models.DateTimeField(auto_now_add=True)
image = models.ImageField(null = True)
view = models.IntegerField(default=0, null=True)
def __str__(self):
return self.title
def save(self, *args, **kwargs):
self.slug = slugify(self.title)
super(article, self).save(*args, **kwargs)
class Comment(models.Model):
post = models.ForeignKey(article, on_delete=models.CASCADE, related_name='comments')
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
body = models.TextField()
date = models.DateTimeField(auto_now_add=True)
|
normal
|
{
"blob_id": "31801f62942337b0cdf0e022dc75a9e125be54e3",
"index": 4191,
"step-1": "<mask token>\n\n\nclass article(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-2": "<mask token>\n\n\nclass topic(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-3": "<mask token>\n\n\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-4": "from django.db import models\nfrom django.conf import settings\nfrom django.utils.text import slugify\nfrom six import python_2_unicode_compatible\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom ckeditor.fields import RichTextField\n\n\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-5": "from django.db import models\nfrom django.conf import settings\nfrom django.utils.text import slugify\nfrom six import python_2_unicode_compatible\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom ckeditor.fields import RichTextField\n# Create your models here.\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null= True)\n\n def __str__(self):\n return self.name\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique= True, blank=True, editable=True, null = True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null = True)\n view = models.IntegerField(default=0, null=True)\n \n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n \n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE, related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n\n\n\n ",
"step-ids": [
5,
7,
9,
10,
11
]
}
|
[
5,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
class mrk_file(FileInfo):
<|reserved_special_token_0|>
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
<|reserved_special_token_0|>
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
return str(basename(self.file))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class mrk_file(FileInfo):
<|reserved_special_token_0|>
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
def __getstate__(self):
data = super(mrk_file, self).__getstate__()
return data
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
return str(basename(self.file))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class mrk_file(FileInfo):
"""
.mrk specific file container.
"""
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
def __getstate__(self):
data = super(mrk_file, self).__getstate__()
return data
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
return str(basename(self.file))
<|reserved_special_token_1|>
from os.path import basename
from .FileInfo import FileInfo
class mrk_file(FileInfo):
"""
.mrk specific file container.
"""
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
def __getstate__(self):
data = super(mrk_file, self).__getstate__()
return data
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
return str(basename(self.file))
<|reserved_special_token_1|>
from os.path import basename
from .FileInfo import FileInfo
class mrk_file(FileInfo):
"""
.mrk specific file container.
"""
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
#region class methods
def __getstate__(self):
data = super(mrk_file, self).__getstate__()
return data
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
# Have a separate representation for .mrk files as this is shown in the
# info for each con file under the list of associated mrk's.
return str(basename(self.file))
|
flexible
|
{
"blob_id": "8e9aec7d3653137a05f94e4041d28f3423122751",
"index": 3990,
"step-1": "<mask token>\n\n\nclass mrk_file(FileInfo):\n <mask token>\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n <mask token>\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-2": "<mask token>\n\n\nclass mrk_file(FileInfo):\n <mask token>\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-3": "<mask token>\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-4": "from os.path import basename\nfrom .FileInfo import FileInfo\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-5": "from os.path import basename\n\nfrom .FileInfo import FileInfo\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n#region class methods\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n # Have a separate representation for .mrk files as this is shown in the\n # info for each con file under the list of associated mrk's.\n return str(basename(self.file))\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# load the dependencies
from airflow import DAG
from datetime import date, timedelta, datetime
# default_args are the default arguments applied to the DAG and all inherited tasks
DAG_DEFAULT_ARGS = {
'owner': 'airflow',
'depends_on_past': False,
'retries': 1,
'retry_delay': timedelta(minutes=1)
}
with DAG('twitter_dag_v1', start_date=datetime(2018, 10, 1), schedule_interval="@daily", default_args=DAG_DEFAULT_ARGS, catchup=False) as dag:
None
|
normal
|
{
"blob_id": "436cc06778bf9ac9e04a897f4a4db90c595d943c",
"index": 5969,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith DAG('twitter_dag_v1', start_date=datetime(2018, 10, 1),\n schedule_interval='@daily', default_args=DAG_DEFAULT_ARGS, catchup=False\n ) as dag:\n None\n",
"step-3": "<mask token>\nDAG_DEFAULT_ARGS = {'owner': 'airflow', 'depends_on_past': False, 'retries':\n 1, 'retry_delay': timedelta(minutes=1)}\nwith DAG('twitter_dag_v1', start_date=datetime(2018, 10, 1),\n schedule_interval='@daily', default_args=DAG_DEFAULT_ARGS, catchup=False\n ) as dag:\n None\n",
"step-4": "from airflow import DAG\nfrom datetime import date, timedelta, datetime\nDAG_DEFAULT_ARGS = {'owner': 'airflow', 'depends_on_past': False, 'retries':\n 1, 'retry_delay': timedelta(minutes=1)}\nwith DAG('twitter_dag_v1', start_date=datetime(2018, 10, 1),\n schedule_interval='@daily', default_args=DAG_DEFAULT_ARGS, catchup=False\n ) as dag:\n None\n",
"step-5": "# load the dependencies\nfrom airflow import DAG\nfrom datetime import date, timedelta, datetime\n\n# default_args are the default arguments applied to the DAG and all inherited tasks\nDAG_DEFAULT_ARGS = {\n\t'owner': 'airflow',\n\t'depends_on_past': False,\n\t'retries': 1,\n\t'retry_delay': timedelta(minutes=1)\n}\n\nwith DAG('twitter_dag_v1', start_date=datetime(2018, 10, 1), schedule_interval=\"@daily\", default_args=DAG_DEFAULT_ARGS, catchup=False) as dag:\n\tNone\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
@File : jump.py
@copyright : GG
@Coder: Leslie_s
@Date: 2020/1/26
"""
import requests
from lxml import html
import pandas as pd
import time
import pandas as pd
import datetime
import re
import json
headers = {
'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange',
'accept-encoding':'gzip, deflate, br',
'accept-language':'zh-CN,zh;q=0.8',
'upgrade - insecure - requests': '1',
'user-agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36',# 需要填写
}
url = 'https://3g.dxy.cn/newh5/view/pneumonia?scene=2&clicktime=1579582238&enterid=1579582238&from=timeline&isappinstalled=0'
r = requests.get(url, headers=headers,timeout=15,allow_redirects=False)
r.encoding='utf-8'
t1 = html.fromstring(r.text)
doc = r.text
test_com = r'(?P<first>"provinceName":"[\u4e00-\u9fa5]{1,9}"),(?P<second>"provinceShortName":"[\u4e00-\u9fa5]{1,9}"),(?P<three>"confirmedCount":\d{1,9})'
iter_dict = {}
gg_a = r'provinceName":(?P<first>"[\u4e00-\u9fa5]{1,9}"),"provinceShortName":(?P<second>"[\u4e00-\u9fa5]{1,9}"),"confirmedCount":(?P<three>\d{1,9})'
r=re.finditer(gg_a,doc)
train = re.findall(gg_a,doc)
for i in r:
print(i.group(1))
provinceName=i.group(1)
provinceShortName=i.group(2)
confirmedCount=i.group(3)
iter_dict.setdefault( provinceShortName,confirmedCount)
#
# result = re.finditer(test_com,r.text)
# for i in result:
# print(i.group(1))
#
# search = re.finditer(test_com, r.text)
# print('group 0:', search.group(0))
# list_provincename=[]
# list_confircount=[]
# for match in matches_pro:
# print(match.group(1))
# list_provincename.append(match.group(1))
# for match in matches_confirmedCount:
# print(match.group(1))
# list_confircount.append(match.group(1))
#
# dic_result = dict(zip(list_confircount,list_provincename))
#
|
normal
|
{
"blob_id": "5aecd021297fee4407d6b529c24afb3c6398f7ba",
"index": 7205,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in r:\n print(i.group(1))\n provinceName = i.group(1)\n provinceShortName = i.group(2)\n confirmedCount = i.group(3)\n iter_dict.setdefault(provinceShortName, confirmedCount)\n",
"step-3": "<mask token>\nheaders = {'accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange'\n , 'accept-encoding': 'gzip, deflate, br', 'accept-language':\n 'zh-CN,zh;q=0.8', 'upgrade - insecure - requests': '1', 'user-agent':\n 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36'\n }\nurl = (\n 'https://3g.dxy.cn/newh5/view/pneumonia?scene=2&clicktime=1579582238&enterid=1579582238&from=timeline&isappinstalled=0'\n )\nr = requests.get(url, headers=headers, timeout=15, allow_redirects=False)\nr.encoding = 'utf-8'\nt1 = html.fromstring(r.text)\ndoc = r.text\ntest_com = (\n '(?P<first>\"provinceName\":\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),(?P<second>\"provinceShortName\":\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),(?P<three>\"confirmedCount\":\\\\d{1,9})'\n )\niter_dict = {}\ngg_a = (\n 'provinceName\":(?P<first>\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),\"provinceShortName\":(?P<second>\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),\"confirmedCount\":(?P<three>\\\\d{1,9})'\n )\nr = re.finditer(gg_a, doc)\ntrain = re.findall(gg_a, doc)\nfor i in r:\n print(i.group(1))\n provinceName = i.group(1)\n provinceShortName = i.group(2)\n confirmedCount = i.group(3)\n iter_dict.setdefault(provinceShortName, confirmedCount)\n",
"step-4": "<mask token>\nimport requests\nfrom lxml import html\nimport pandas as pd\nimport time\nimport pandas as pd\nimport datetime\nimport re\nimport json\nheaders = {'accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange'\n , 'accept-encoding': 'gzip, deflate, br', 'accept-language':\n 'zh-CN,zh;q=0.8', 'upgrade - insecure - requests': '1', 'user-agent':\n 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36'\n }\nurl = (\n 'https://3g.dxy.cn/newh5/view/pneumonia?scene=2&clicktime=1579582238&enterid=1579582238&from=timeline&isappinstalled=0'\n )\nr = requests.get(url, headers=headers, timeout=15, allow_redirects=False)\nr.encoding = 'utf-8'\nt1 = html.fromstring(r.text)\ndoc = r.text\ntest_com = (\n '(?P<first>\"provinceName\":\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),(?P<second>\"provinceShortName\":\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),(?P<three>\"confirmedCount\":\\\\d{1,9})'\n )\niter_dict = {}\ngg_a = (\n 'provinceName\":(?P<first>\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),\"provinceShortName\":(?P<second>\"[\\\\u4e00-\\\\u9fa5]{1,9}\"),\"confirmedCount\":(?P<three>\\\\d{1,9})'\n )\nr = re.finditer(gg_a, doc)\ntrain = re.findall(gg_a, doc)\nfor i in r:\n print(i.group(1))\n provinceName = i.group(1)\n provinceShortName = i.group(2)\n confirmedCount = i.group(3)\n iter_dict.setdefault(provinceShortName, confirmedCount)\n",
"step-5": "\"\"\"\n@File : jump.py\n@copyright : GG\n@Coder: Leslie_s\n@Date: 2020/1/26\n\"\"\"\nimport requests\nfrom lxml import html\nimport pandas as pd\nimport time\nimport pandas as pd\nimport datetime\nimport re\nimport json\n\nheaders = {\n\n 'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange',\n\n 'accept-encoding':'gzip, deflate, br',\n\n 'accept-language':'zh-CN,zh;q=0.8',\n 'upgrade - insecure - requests': '1',\n 'user-agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36',# 需要填写\n\n }\nurl = 'https://3g.dxy.cn/newh5/view/pneumonia?scene=2&clicktime=1579582238&enterid=1579582238&from=timeline&isappinstalled=0'\nr = requests.get(url, headers=headers,timeout=15,allow_redirects=False)\nr.encoding='utf-8'\nt1 = html.fromstring(r.text)\ndoc = r.text\ntest_com = r'(?P<first>\"provinceName\":\"[\\u4e00-\\u9fa5]{1,9}\"),(?P<second>\"provinceShortName\":\"[\\u4e00-\\u9fa5]{1,9}\"),(?P<three>\"confirmedCount\":\\d{1,9})'\niter_dict = {}\ngg_a = r'provinceName\":(?P<first>\"[\\u4e00-\\u9fa5]{1,9}\"),\"provinceShortName\":(?P<second>\"[\\u4e00-\\u9fa5]{1,9}\"),\"confirmedCount\":(?P<three>\\d{1,9})'\nr=re.finditer(gg_a,doc)\ntrain = re.findall(gg_a,doc)\nfor i in r:\n print(i.group(1))\n provinceName=i.group(1)\n provinceShortName=i.group(2)\n confirmedCount=i.group(3)\n iter_dict.setdefault( provinceShortName,confirmedCount)\n#\n# result = re.finditer(test_com,r.text)\n# for i in result:\n# print(i.group(1))\n#\n# search = re.finditer(test_com, r.text)\n# print('group 0:', search.group(0))\n# list_provincename=[]\n# list_confircount=[]\n# for match in matches_pro:\n# print(match.group(1))\n# list_provincename.append(match.group(1))\n# for match in matches_confirmedCount:\n# print(match.group(1))\n# list_confircount.append(match.group(1))\n#\n# dic_result = dict(zip(list_confircount,list_provincename))\n#\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import cv2 as cv

# Colour-space notes:
#   RGB - cube-shaped space, three 0-255 channels (red, green, blue).
#   HSV - cylindrical space (hue angle, saturation radius, value height).
#   Also common: HIS, YCrCb, YUV.
# The usual conversion entry point is cv.cvtColor.
def colorSpaceConvert(image):
    """Show ``image`` converted to gray, HSV and YUV, one window each."""
    conversions = (
        ('gray', cv.COLOR_BGR2GRAY),  # single-channel grayscale
        ('hsv', cv.COLOR_BGR2HSV),    # hue / saturation / value
        ('yuv', cv.COLOR_BGR2YUV),    # luma plus two chroma channels
    )
    for window_name, code in conversions:
        cv.imshow(window_name, cv.cvtColor(image, code))

image = cv.imread("../girl.jpg")
colorSpaceConvert(image)
cv.waitKey(0)  # block until any key is pressed
cv.destroyAllWindows()  # tear down every HighGUI window
|
normal
|
{
"blob_id": "6d359d987c50fd0d5e963d467a379eb245e3eb40",
"index": 3756,
"step-1": "<mask token>\n\n\ndef colorSpaceConvert(image):\n \"\"\"转换到灰度空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n cv.imshow('gray', res)\n \"\"\"转换到HSV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2HSV)\n cv.imshow('hsv', res)\n \"\"\"转换到YUV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2YUV)\n cv.imshow('yuv', res)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef colorSpaceConvert(image):\n \"\"\"转换到灰度空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n cv.imshow('gray', res)\n \"\"\"转换到HSV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2HSV)\n cv.imshow('hsv', res)\n \"\"\"转换到YUV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2YUV)\n cv.imshow('yuv', res)\n\n\n<mask token>\ncolorSpaceConvert(image)\n<mask token>\ncv.waitKey(0)\n<mask token>\ncv.destroyAllWindows()\n",
"step-3": "<mask token>\n\n\ndef colorSpaceConvert(image):\n \"\"\"转换到灰度空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n cv.imshow('gray', res)\n \"\"\"转换到HSV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2HSV)\n cv.imshow('hsv', res)\n \"\"\"转换到YUV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2YUV)\n cv.imshow('yuv', res)\n\n\nimage = cv.imread('../girl.jpg')\ncolorSpaceConvert(image)\n<mask token>\ncv.waitKey(0)\n<mask token>\ncv.destroyAllWindows()\n",
"step-4": "import cv2 as cv\n<mask token>\n\n\ndef colorSpaceConvert(image):\n \"\"\"转换到灰度空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n cv.imshow('gray', res)\n \"\"\"转换到HSV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2HSV)\n cv.imshow('hsv', res)\n \"\"\"转换到YUV色彩空间\"\"\"\n res = cv.cvtColor(image, cv.COLOR_BGR2YUV)\n cv.imshow('yuv', res)\n\n\nimage = cv.imread('../girl.jpg')\ncolorSpaceConvert(image)\n<mask token>\ncv.waitKey(0)\n<mask token>\ncv.destroyAllWindows()\n",
"step-5": "import cv2 as cv\n\n'''色彩空间介绍'''\n'''\nRGB:对于RGB的色彩空间是立方体的色彩空间 三通道 红 黄 蓝 每个灰度级为255\nHSV:对于HSV的色彩空间是255度的圆柱体 三通道 高度 圆心角 半径分别是255\nHIS\nYCrCb\nYUV\n\n'''\n'''常用的色彩空间转换函数***cvtColor'''\ndef colorSpaceConvert(image):\n '''转换到灰度空间'''\n res = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n cv.imshow(\"gray\", res)\n '''转换到HSV色彩空间'''\n res = cv.cvtColor(image, cv.COLOR_BGR2HSV)\n cv.imshow(\"hsv\", res)\n '''转换到YUV色彩空间'''\n res = cv.cvtColor(image, cv.COLOR_BGR2YUV)\n cv.imshow(\"yuv\", res)\n\nimage = cv.imread(\"../girl.jpg\")\ncolorSpaceConvert(image)\n'''等待下一个操作的延迟'''\ncv.waitKey(0)\n'''程序操作结束要销毁所有的窗口'''\ncv.destroyAllWindows()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class ViagemSerializer(serializers.ModelSerializer):
detalhes = DetalheSerializer(many=True, read_only=True)
caixa = CaixaSerializer(read_only=True)
localPartida = HospitalSerializer(read_only=True)
localChegada = HospitalSerializer(read_only=True)
class Meta:
model = Viagem
fields = '__all__'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CaixaSerializer(serializers.ModelSerializer):
class Meta:
model = Caixa
fields = '__all__'
class HospitalSerializer(serializers.ModelSerializer):
class Meta:
model = Hospital
fields = '__all__'
class DetalheSerializer(serializers.ModelSerializer):
imeiEquipamento = serializers.CharField(max_length=22)
class Meta:
model = Detalhe
fields = '__all__'
class ViagemSerializer(serializers.ModelSerializer):
detalhes = DetalheSerializer(many=True, read_only=True)
caixa = CaixaSerializer(read_only=True)
localPartida = HospitalSerializer(read_only=True)
localChegada = HospitalSerializer(read_only=True)
class Meta:
model = Viagem
fields = '__all__'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = '__all__'
class CaixaSerializer(serializers.ModelSerializer):
class Meta:
model = Caixa
fields = '__all__'
class HospitalSerializer(serializers.ModelSerializer):
class Meta:
model = Hospital
fields = '__all__'
class DetalheSerializer(serializers.ModelSerializer):
imeiEquipamento = serializers.CharField(max_length=22)
class Meta:
model = Detalhe
fields = '__all__'
class ViagemSerializer(serializers.ModelSerializer):
detalhes = DetalheSerializer(many=True, read_only=True)
caixa = CaixaSerializer(read_only=True)
localPartida = HospitalSerializer(read_only=True)
localChegada = HospitalSerializer(read_only=True)
class Meta:
model = Viagem
fields = '__all__'
<|reserved_special_token_1|>
from rest_framework import serializers
from django.contrib.auth.models import User
from core.models import Detalhe, Viagem, Hospital, Equipamento, Caixa
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = '__all__'
class CaixaSerializer(serializers.ModelSerializer):
class Meta:
model = Caixa
fields = '__all__'
class HospitalSerializer(serializers.ModelSerializer):
class Meta:
model = Hospital
fields = '__all__'
class DetalheSerializer(serializers.ModelSerializer):
imeiEquipamento = serializers.CharField(max_length=22)
class Meta:
model = Detalhe
fields = '__all__'
class ViagemSerializer(serializers.ModelSerializer):
detalhes = DetalheSerializer(many=True, read_only=True)
caixa = CaixaSerializer(read_only=True)
localPartida = HospitalSerializer(read_only=True)
localChegada = HospitalSerializer(read_only=True)
class Meta:
model = Viagem
fields = '__all__'
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# DRF serializers for the transport-tracking models (User, Caixa,
# Hospital, Detalhe, Viagem); every serializer exposes all model fields.
from rest_framework import serializers
from django.contrib.auth.models import User
from core.models import Detalhe, Viagem, Hospital, Equipamento, Caixa

class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every field of Django's User model."""
    class Meta:
        model = User
        fields = '__all__'

class CaixaSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Caixa model."""
    class Meta:
        model = Caixa
        fields = '__all__'

class HospitalSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Hospital model."""
    class Meta:
        model = Hospital
        fields = '__all__'

class DetalheSerializer(serializers.ModelSerializer):
    """Serializer for Detalhe; imeiEquipamento is declared explicitly so
    its 22-character cap is enforced at the serializer level."""
    imeiEquipamento = serializers.CharField(max_length=22)
    class Meta:
        model = Detalhe
        fields = '__all__'

class ViagemSerializer(serializers.ModelSerializer):
    """Serializer for Viagem with its related objects nested read-only."""
    detalhes = DetalheSerializer(many=True, read_only=True)  # nested Detalhe rows
    caixa = CaixaSerializer(read_only=True)  # associated Caixa
    localPartida = HospitalSerializer(read_only=True)  # departure Hospital
    localChegada = HospitalSerializer(read_only=True)  # arrival Hospital
    
    class Meta:
        model = Viagem
        fields = '__all__'
|
flexible
|
{
"blob_id": "b5c68211cfa255e47ee316dc5b0627719eacae78",
"index": 8504,
"step-1": "<mask token>\n\n\nclass ViagemSerializer(serializers.ModelSerializer):\n detalhes = DetalheSerializer(many=True, read_only=True)\n caixa = CaixaSerializer(read_only=True)\n localPartida = HospitalSerializer(read_only=True)\n localChegada = HospitalSerializer(read_only=True)\n\n\n class Meta:\n model = Viagem\n fields = '__all__'\n",
"step-2": "<mask token>\n\n\nclass CaixaSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Caixa\n fields = '__all__'\n\n\nclass HospitalSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Hospital\n fields = '__all__'\n\n\nclass DetalheSerializer(serializers.ModelSerializer):\n imeiEquipamento = serializers.CharField(max_length=22)\n\n\n class Meta:\n model = Detalhe\n fields = '__all__'\n\n\nclass ViagemSerializer(serializers.ModelSerializer):\n detalhes = DetalheSerializer(many=True, read_only=True)\n caixa = CaixaSerializer(read_only=True)\n localPartida = HospitalSerializer(read_only=True)\n localChegada = HospitalSerializer(read_only=True)\n\n\n class Meta:\n model = Viagem\n fields = '__all__'\n",
"step-3": "<mask token>\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n\nclass CaixaSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Caixa\n fields = '__all__'\n\n\nclass HospitalSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Hospital\n fields = '__all__'\n\n\nclass DetalheSerializer(serializers.ModelSerializer):\n imeiEquipamento = serializers.CharField(max_length=22)\n\n\n class Meta:\n model = Detalhe\n fields = '__all__'\n\n\nclass ViagemSerializer(serializers.ModelSerializer):\n detalhes = DetalheSerializer(many=True, read_only=True)\n caixa = CaixaSerializer(read_only=True)\n localPartida = HospitalSerializer(read_only=True)\n localChegada = HospitalSerializer(read_only=True)\n\n\n class Meta:\n model = Viagem\n fields = '__all__'\n",
"step-4": "from rest_framework import serializers\nfrom django.contrib.auth.models import User\nfrom core.models import Detalhe, Viagem, Hospital, Equipamento, Caixa\n\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n\n\n class Meta:\n model = User\n fields = '__all__'\n\n\nclass CaixaSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Caixa\n fields = '__all__'\n\n\nclass HospitalSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Hospital\n fields = '__all__'\n\n\nclass DetalheSerializer(serializers.ModelSerializer):\n imeiEquipamento = serializers.CharField(max_length=22)\n\n\n class Meta:\n model = Detalhe\n fields = '__all__'\n\n\nclass ViagemSerializer(serializers.ModelSerializer):\n detalhes = DetalheSerializer(many=True, read_only=True)\n caixa = CaixaSerializer(read_only=True)\n localPartida = HospitalSerializer(read_only=True)\n localChegada = HospitalSerializer(read_only=True)\n\n\n class Meta:\n model = Viagem\n fields = '__all__'\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom rest_framework import serializers\nfrom django.contrib.auth.models import User\nfrom core.models import Detalhe, Viagem, Hospital, Equipamento, Caixa\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n class Meta:\n model = User\n fields = '__all__'\n\nclass CaixaSerializer(serializers.ModelSerializer):\n class Meta:\n model = Caixa\n fields = '__all__'\n\nclass HospitalSerializer(serializers.ModelSerializer):\n class Meta:\n model = Hospital\n fields = '__all__'\n\nclass DetalheSerializer(serializers.ModelSerializer):\n imeiEquipamento = serializers.CharField(max_length=22)\n class Meta:\n model = Detalhe\n fields = '__all__'\n\nclass ViagemSerializer(serializers.ModelSerializer):\n detalhes = DetalheSerializer(many=True, read_only=True)\n caixa = CaixaSerializer(read_only=True)\n localPartida = HospitalSerializer(read_only=True)\n localChegada = HospitalSerializer(read_only=True)\n \n class Meta:\n model = Viagem\n fields = '__all__'",
"step-ids": [
2,
6,
7,
8,
9
]
}
|
[
2,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def state_preparation(m, name, p):
circ = QuantumCircuit(m, name='State prep')
if name == 'GHZ':
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
elif name == 'noisy_GHZ_bitflip':
prob = np.random.rand(m)
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
if prob[k] <= p:
circ.x(k)
if prob[0] <= p:
circ.x(0)
elif name == 'noisy_GHZ_QDC':
probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
gate_inds = np.random.choice(np.arange(4), size=m, p=probas)
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
if gate_inds[k] == 1:
circ.x(k)
elif gate_inds[k] == 2:
circ.y(k)
elif gate_inds[k] == 3:
circ.z(k)
if gate_inds[0] == 1:
circ.x(0)
elif gate_inds[0] == 2:
circ.y(0)
elif gate_inds[0] == 3:
circ.z(0)
elif name == 'rigged_QDC':
probas_rigged = [1 - p, p / 2, p / 2, 0]
probas_rigged2 = [1 - 29 * p / 30, 2 * p / 5, 2 * p / 5, p / 6]
probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
gate_inds = np.random.choice(np.arange(4), size=m - 1, p=probas)
gate_inds_r = np.random.choice(np.arange(4), p=probas_rigged)
gate_inds_r2 = np.random.choice(np.arange(4), p=probas_rigged2)
circ.h(0)
circ.cx(0, 1)
if gate_inds_r2 == 1:
circ.x(1)
elif gate_inds_r2 == 2:
circ.y(1)
elif gate_inds_r2 == 3:
circ.z(1)
for k in range(2, m):
circ.cx(0, k)
if gate_inds[k - 1] == 1:
circ.x(k)
elif gate_inds[k - 1] == 2:
circ.y(k)
elif gate_inds[k - 1] == 3:
circ.z(k)
if gate_inds_r == 1:
circ.x(0)
elif gate_inds_r == 2:
circ.y(0)
elif gate_inds_r == 3:
circ.z(0)
else:
raise ValueError('Unrecognized name.')
return circ
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
from qiskit import *
def state_preparation(m, name, p):
circ = QuantumCircuit(m, name='State prep')
if name == 'GHZ':
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
elif name == 'noisy_GHZ_bitflip':
prob = np.random.rand(m)
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
if prob[k] <= p:
circ.x(k)
if prob[0] <= p:
circ.x(0)
elif name == 'noisy_GHZ_QDC':
probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
gate_inds = np.random.choice(np.arange(4), size=m, p=probas)
circ.h(0)
for k in range(1, m):
circ.cx(0, k)
if gate_inds[k] == 1:
circ.x(k)
elif gate_inds[k] == 2:
circ.y(k)
elif gate_inds[k] == 3:
circ.z(k)
if gate_inds[0] == 1:
circ.x(0)
elif gate_inds[0] == 2:
circ.y(0)
elif gate_inds[0] == 3:
circ.z(0)
elif name == 'rigged_QDC':
probas_rigged = [1 - p, p / 2, p / 2, 0]
probas_rigged2 = [1 - 29 * p / 30, 2 * p / 5, 2 * p / 5, p / 6]
probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
gate_inds = np.random.choice(np.arange(4), size=m - 1, p=probas)
gate_inds_r = np.random.choice(np.arange(4), p=probas_rigged)
gate_inds_r2 = np.random.choice(np.arange(4), p=probas_rigged2)
circ.h(0)
circ.cx(0, 1)
if gate_inds_r2 == 1:
circ.x(1)
elif gate_inds_r2 == 2:
circ.y(1)
elif gate_inds_r2 == 3:
circ.z(1)
for k in range(2, m):
circ.cx(0, k)
if gate_inds[k - 1] == 1:
circ.x(k)
elif gate_inds[k - 1] == 2:
circ.y(k)
elif gate_inds[k - 1] == 3:
circ.z(k)
if gate_inds_r == 1:
circ.x(0)
elif gate_inds_r == 2:
circ.y(0)
elif gate_inds_r == 3:
circ.z(0)
else:
raise ValueError('Unrecognized name.')
return circ
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 25 18:40:07 2021

@author: tomachache
"""

import numpy as np

from qiskit import QuantumCircuit  # explicit import instead of wildcard


def _apply_pauli(circ, ind, qubit):
    """Apply I/X/Y/Z (ind = 0/1/2/3) to ``qubit`` of ``circ``; 0 is a no-op."""
    if ind == 1:
        circ.x(qubit)
    elif ind == 2:
        circ.y(qubit)
    elif ind == 3:
        circ.z(qubit)


# Various state preparation
def state_preparation(m, name, p):
    """Return an m-qubit circuit preparing the requested (possibly noisy) state.

    Parameters
    ----------
    m : int
        Number of qubits.
    name : str
        Which state to prepare: 'GHZ', 'noisy_GHZ_bitflip',
        'noisy_GHZ_QDC' or 'rigged_QDC'.
    p : float
        Probability associated with the noise.

    Raises
    ------
    ValueError
        If ``name`` is not one of the recognized states.
    """
    circ = QuantumCircuit(m, name = 'State prep')

    if name == 'GHZ':
        # Ideal GHZ: Hadamard on qubit 0, then a CNOT fan-out.
        circ.h(0)
        for k in range(1, m):
            circ.cx(0, k)

    elif name == 'noisy_GHZ_bitflip':
        # GHZ followed by an independent bit flip on each qubit w.p. p.
        prob = np.random.rand(m)
        circ.h(0)
        for k in range(1, m):
            circ.cx(0, k)
            if prob[k] <= p:  # flips each bit with proba p
                circ.x(k)
        if prob[0] <= p:
            circ.x(0)

    elif name == 'noisy_GHZ_QDC':
        # GHZ followed by a depolarizing channel: each qubit gets X, Y or Z
        # with probability p/4 each (identity otherwise).
        probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
        gate_inds = np.random.choice(np.arange(4), size = m, p = probas)
        circ.h(0)
        for k in range(1, m):
            circ.cx(0, k)
            _apply_pauli(circ, gate_inds[k], k)
        _apply_pauli(circ, gate_inds[0], 0)

    elif name == 'rigged_QDC':  # QDC where 1st and 2nd qubits have different probas
        probas_rigged = [1 - p, p / 2, p / 2, 0]
        probas_rigged2 = [1 - 29 * p / 30, 2 * p / 5, 2 * p / 5, p / 6]
        probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]
        gate_inds = np.random.choice(np.arange(4), size = m - 1, p = probas)
        gate_inds_r = np.random.choice(np.arange(4), p = probas_rigged)
        gate_inds_r2 = np.random.choice(np.arange(4), p = probas_rigged2)
        circ.h(0)
        circ.cx(0, 1)
        _apply_pauli(circ, gate_inds_r2, 1)
        for k in range(2, m):
            circ.cx(0, k)
            _apply_pauli(circ, gate_inds[k - 1], k)
        _apply_pauli(circ, gate_inds_r, 0)
    else:
        raise ValueError('Unrecognized name.')

    return circ
|
flexible
|
{
"blob_id": "6962bf99e3ecae473af54ded33fde09527cb82c0",
"index": 8284,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef state_preparation(m, name, p):\n circ = QuantumCircuit(m, name='State prep')\n if name == 'GHZ':\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n elif name == 'noisy_GHZ_bitflip':\n prob = np.random.rand(m)\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n if prob[k] <= p:\n circ.x(k)\n if prob[0] <= p:\n circ.x(0)\n elif name == 'noisy_GHZ_QDC':\n probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]\n gate_inds = np.random.choice(np.arange(4), size=m, p=probas)\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n if gate_inds[k] == 1:\n circ.x(k)\n elif gate_inds[k] == 2:\n circ.y(k)\n elif gate_inds[k] == 3:\n circ.z(k)\n if gate_inds[0] == 1:\n circ.x(0)\n elif gate_inds[0] == 2:\n circ.y(0)\n elif gate_inds[0] == 3:\n circ.z(0)\n elif name == 'rigged_QDC':\n probas_rigged = [1 - p, p / 2, p / 2, 0]\n probas_rigged2 = [1 - 29 * p / 30, 2 * p / 5, 2 * p / 5, p / 6]\n probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]\n gate_inds = np.random.choice(np.arange(4), size=m - 1, p=probas)\n gate_inds_r = np.random.choice(np.arange(4), p=probas_rigged)\n gate_inds_r2 = np.random.choice(np.arange(4), p=probas_rigged2)\n circ.h(0)\n circ.cx(0, 1)\n if gate_inds_r2 == 1:\n circ.x(1)\n elif gate_inds_r2 == 2:\n circ.y(1)\n elif gate_inds_r2 == 3:\n circ.z(1)\n for k in range(2, m):\n circ.cx(0, k)\n if gate_inds[k - 1] == 1:\n circ.x(k)\n elif gate_inds[k - 1] == 2:\n circ.y(k)\n elif gate_inds[k - 1] == 3:\n circ.z(k)\n if gate_inds_r == 1:\n circ.x(0)\n elif gate_inds_r == 2:\n circ.y(0)\n elif gate_inds_r == 3:\n circ.z(0)\n else:\n raise ValueError('Unrecognized name.')\n return circ\n",
"step-3": "<mask token>\nimport numpy as np\nfrom qiskit import *\n\n\ndef state_preparation(m, name, p):\n circ = QuantumCircuit(m, name='State prep')\n if name == 'GHZ':\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n elif name == 'noisy_GHZ_bitflip':\n prob = np.random.rand(m)\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n if prob[k] <= p:\n circ.x(k)\n if prob[0] <= p:\n circ.x(0)\n elif name == 'noisy_GHZ_QDC':\n probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]\n gate_inds = np.random.choice(np.arange(4), size=m, p=probas)\n circ.h(0)\n for k in range(1, m):\n circ.cx(0, k)\n if gate_inds[k] == 1:\n circ.x(k)\n elif gate_inds[k] == 2:\n circ.y(k)\n elif gate_inds[k] == 3:\n circ.z(k)\n if gate_inds[0] == 1:\n circ.x(0)\n elif gate_inds[0] == 2:\n circ.y(0)\n elif gate_inds[0] == 3:\n circ.z(0)\n elif name == 'rigged_QDC':\n probas_rigged = [1 - p, p / 2, p / 2, 0]\n probas_rigged2 = [1 - 29 * p / 30, 2 * p / 5, 2 * p / 5, p / 6]\n probas = [1 - 3 * p / 4, p / 4, p / 4, p / 4]\n gate_inds = np.random.choice(np.arange(4), size=m - 1, p=probas)\n gate_inds_r = np.random.choice(np.arange(4), p=probas_rigged)\n gate_inds_r2 = np.random.choice(np.arange(4), p=probas_rigged2)\n circ.h(0)\n circ.cx(0, 1)\n if gate_inds_r2 == 1:\n circ.x(1)\n elif gate_inds_r2 == 2:\n circ.y(1)\n elif gate_inds_r2 == 3:\n circ.z(1)\n for k in range(2, m):\n circ.cx(0, k)\n if gate_inds[k - 1] == 1:\n circ.x(k)\n elif gate_inds[k - 1] == 2:\n circ.y(k)\n elif gate_inds[k - 1] == 3:\n circ.z(k)\n if gate_inds_r == 1:\n circ.x(0)\n elif gate_inds_r == 2:\n circ.y(0)\n elif gate_inds_r == 3:\n circ.z(0)\n else:\n raise ValueError('Unrecognized name.')\n return circ\n",
"step-4": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jan 25 18:40:07 2021\n\n@author: tomachache\n\"\"\"\n\nimport numpy as np\n\nfrom qiskit import *\n\n\n# Various state preparation\ndef state_preparation(m, name, p): \n # m : nb of qubits \n # name : name of the state we want \n # p : proba associated with noise\n \n circ = QuantumCircuit(m, name = 'State prep')\n \n if name == 'GHZ':\n circ.h(0)\n for k in range(1,m):\n circ.cx(0,k)\n \n elif name == 'noisy_GHZ_bitflip':\n prob = np.random.rand(m)\n circ.h(0)\n for k in range(1,m):\n circ.cx(0,k)\n if prob[k] <= p: # flips each bit with proba p\n circ.x(k)\n if prob[0] <= p:\n circ.x(0)\n \n elif name == 'noisy_GHZ_QDC':\n probas = [1 - 3*p/4, p/4, p/4, p/4]\n gate_inds = np.random.choice(np.arange(4), size = m, p = probas)\n circ.h(0)\n for k in range(1,m):\n circ.cx(0,k)\n if gate_inds[k] == 1:\n circ.x(k)\n elif gate_inds[k] == 2:\n circ.y(k)\n elif gate_inds[k] == 3:\n circ.z(k)\n if gate_inds[0] == 1:\n circ.x(0)\n elif gate_inds[0] == 2:\n circ.y(0)\n elif gate_inds[0] == 3:\n circ.z(0)\n \n elif name == 'rigged_QDC': # QDC where 1st and 2nd qubits have different probas\n probas_rigged = [1-p, p/2, p/2, 0]\n probas_rigged2 = [1 - 29*p/30, 2*p/5, 2*p/5, p/6]\n probas = [1 - 3*p/4, p/4, p/4, p/4]\n gate_inds = np.random.choice(np.arange(4), size = m - 1, p = probas)\n gate_inds_r = np.random.choice(np.arange(4), p = probas_rigged)\n gate_inds_r2 = np.random.choice(np.arange(4), p = probas_rigged2)\n circ.h(0)\n circ.cx(0,1)\n if gate_inds_r2 == 1:\n circ.x(1)\n elif gate_inds_r2 == 2:\n circ.y(1)\n elif gate_inds_r2 == 3:\n circ.z(1)\n for k in range(2,m):\n circ.cx(0,k)\n if gate_inds[k-1] == 1:\n circ.x(k)\n elif gate_inds[k-1] == 2:\n circ.y(k)\n elif gate_inds[k-1] == 3:\n circ.z(k)\n if gate_inds_r == 1:\n circ.x(0)\n elif gate_inds_r == 2:\n circ.y(0)\n elif gate_inds_r == 3:\n circ.z(0)\n else:\n raise ValueError('Unrecognized name.')\n \n return circ\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Time Complexity : O(n)
# Space Complexity : O(n)
# Did this code successfully run on Leetcode : Yes
# Any problem you faced while coding this : No
# (Fixed: the original used C-style '//' comments, which are a Python
#  syntax error, and built the suffix array with right.insert(0, ...),
#  which made that pass O(n^2).)

# Approach: result[i] = (product of everything left of i) *
# (product of everything right of i); build both prefix arrays with one
# forward and one backward pass.
class Solution:
    def productExceptSelf(self, nums: List[int]) -> List[int]:
        n = len(nums)
        left = [1] * n    # left[i]  = product of nums[0..i-1]
        right = [1] * n   # right[i] = product of nums[i+1..n-1]

        product = 1
        for i in range(n):              # prefix products
            left[i] = product
            product *= nums[i]

        product = 1
        for i in range(n - 1, -1, -1):  # suffix products, filled in place
            right[i] = product
            product *= nums[i]

        # Combine the two passes per index.
        return [left[i] * right[i] for i in range(n)]
|
normal
|
{
"blob_id": "23bcef07326db084d4e0e6337beb00faba329193",
"index": 1834,
"step-1": "// Time Complexity : O(n)\n// Space Complexity : O(n)\n// Did this code successfully run on Leetcode : Yes\n// // Any problem you faced while coding this : No\n\n// Your code here along with comments explaining your approach\nclass Solution:\n def productExceptSelf(self, nums: List[int]) -> List[int]:\n res=[]\n left=[]\n right=[]\n product=1\n for i in range(len(nums)): # calculate all the products in left of the index element\n left.append(product)\n product=product*nums[i]\n product=1\n for i in range(len(nums)-1,-1,-1): # calculate all the products in right of the index element starting from reverse\n right.insert(0,product)\n product=product*nums[i]\n print(left,right)\n for i in range(len(nums)): # calculate result by multiplying the left and right of every index\n res.append(left[i]*right[i])\n return res",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django import forms
from .models import Picture
class PictureUploadForm(forms.ModelForm):
    """Upload form for Picture: either a file upload or an image URL."""

    class Meta:
        model = Picture
        exclude = ()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Apply Bootstrap styling to every widget and drop the help texts.
        for field_name, field in self.fields.items():
            field.widget.attrs['class'] = 'form-control text-center'
            field.help_text = ''

    def clean(self):
        """Reject submissions that fill both the file and the URL field."""
        cleaned_data = super().clean()
        # Use .get(): a field that failed its own validation is absent from
        # cleaned_data, and direct indexing would raise KeyError instead of
        # producing a form error.
        if cleaned_data.get('img') and cleaned_data.get('urlImg'):
            raise forms.ValidationError("Должно быть заполнено только одно из полей")
        return cleaned_data
class PictureUpdateForm(forms.Form):
    """Form for resizing a picture: target width, height and size."""
    width = forms.IntegerField()
    height = forms.IntegerField()
    size = forms.FloatField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Apply Bootstrap styling to every widget and drop the help texts.
        for field_name, field in self.fields.items():
            field.widget.attrs['class'] = 'form-control text-center'
            field.help_text = ''

    def clean(self):
        """Ensure every provided dimension is positive."""
        cleaned_data = super().clean()
        # Use .get(): a field that failed its own validation (e.g. a
        # non-numeric value) is missing from cleaned_data, and direct
        # indexing would raise KeyError instead of reporting a form error.
        values = (cleaned_data.get('width'), cleaned_data.get('height'),
                  cleaned_data.get('size'))
        if any(v is not None and v < 1 for v in values):
            raise forms.ValidationError('Значения в полях должны быть больше 0')
        return cleaned_data
|
normal
|
{
"blob_id": "3d45fd7dcb3b382efaefe2797ebeb33216a840fa",
"index": 680,
"step-1": "<mask token>\n\n\nclass PictureUploadForm(forms.ModelForm):\n\n\n class Meta:\n model = Picture\n exclude = ()\n <mask token>\n <mask token>\n\n\nclass PictureUpdateForm(forms.Form):\n width = forms.IntegerField()\n height = forms.IntegerField()\n size = forms.FloatField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['width'] < 1 or cleaned_data['height'\n ] < 1 or cleaned_data['size'] < 1:\n raise forms.ValidationError('Значения в полях должны быть больше 0'\n )\n",
"step-2": "<mask token>\n\n\nclass PictureUploadForm(forms.ModelForm):\n\n\n class Meta:\n model = Picture\n exclude = ()\n <mask token>\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['img'] and cleaned_data['urlImg']:\n raise forms.ValidationError(\n 'Должно быть заполнено только одно из полей')\n\n\nclass PictureUpdateForm(forms.Form):\n width = forms.IntegerField()\n height = forms.IntegerField()\n size = forms.FloatField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['width'] < 1 or cleaned_data['height'\n ] < 1 or cleaned_data['size'] < 1:\n raise forms.ValidationError('Значения в полях должны быть больше 0'\n )\n",
"step-3": "<mask token>\n\n\nclass PictureUploadForm(forms.ModelForm):\n\n\n class Meta:\n model = Picture\n exclude = ()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['img'] and cleaned_data['urlImg']:\n raise forms.ValidationError(\n 'Должно быть заполнено только одно из полей')\n\n\nclass PictureUpdateForm(forms.Form):\n width = forms.IntegerField()\n height = forms.IntegerField()\n size = forms.FloatField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['width'] < 1 or cleaned_data['height'\n ] < 1 or cleaned_data['size'] < 1:\n raise forms.ValidationError('Значения в полях должны быть больше 0'\n )\n",
"step-4": "from django import forms\nfrom .models import Picture\n\n\nclass PictureUploadForm(forms.ModelForm):\n\n\n class Meta:\n model = Picture\n exclude = ()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['img'] and cleaned_data['urlImg']:\n raise forms.ValidationError(\n 'Должно быть заполнено только одно из полей')\n\n\nclass PictureUpdateForm(forms.Form):\n width = forms.IntegerField()\n height = forms.IntegerField()\n size = forms.FloatField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['width'] < 1 or cleaned_data['height'\n ] < 1 or cleaned_data['size'] < 1:\n raise forms.ValidationError('Значения в полях должны быть больше 0'\n )\n",
"step-5": "from django import forms\nfrom .models import Picture\n\n\nclass PictureUploadForm(forms.ModelForm):\n\n class Meta:\n model = Picture\n exclude = ()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['img'] and cleaned_data['urlImg']:\n raise forms.ValidationError(\"Должно быть заполнено только одно из полей\")\n\n\nclass PictureUpdateForm(forms.Form):\n\n width = forms.IntegerField()\n height = forms.IntegerField()\n size = forms.FloatField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n for field_name, field in self.fields.items():\n field.widget.attrs['class'] = 'form-control text-center'\n field.help_text = ''\n\n def clean(self):\n cleaned_data = super().clean()\n if cleaned_data['width'] < 1 or cleaned_data['height'] < 1 or cleaned_data['size'] < 1:\n raise forms.ValidationError('Значения в полях должны быть больше 0')",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def get_accuracy(model, kb):
results = []
for clause in kb.clauses:
o1, o2 = model.forward(clause)
if o2.data.numpy()[0][0] > 0.9:
results.append(1.0)
else:
results.append(0.0)
return sum(results) / len(kb.clauses)
def test_model(model, kb1, kb2, filename):
kb_train = kb1.union(kb2)
optimizor = torch.optim.Adam(model.parameters(), lr=0.001)
mone = torch.FloatTensor([-1])
one = torch.FloatTensor([1])
average_prob = []
averate_loss = []
best_accuracy1 = 0.0
best_accuracy2 = 0.0
for i in tqdm(range(1000)):
optimizor.zero_grad()
total_probability = 0.0
total_loss = 0.0
for clause in kb_train.clauses:
loss, prob = model.forward(clause=clause)
loss.backward(one)
total_probability += prob.data.numpy()[0]
total_loss += loss.data.numpy()[0]
optimizor.step()
average_prob.append(total_probability / len(kb_train.clauses))
averate_loss.append(total_loss / len(kb_train.clauses))
accuracy1 = get_accuracy(model, kb1)
accuracy2 = get_accuracy(model, kb2)
if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:
best_accuracy1 = accuracy1
best_accuracy2 = accuracy2
pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2
), open('./results/%s' % filename, 'wb'))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if 'DISPLAY' not in os.environ:
matplotlib.use('Agg')
else:
pass
<|reserved_special_token_0|>
sns.set(style='white', context='talk')
def get_accuracy(model, kb):
results = []
for clause in kb.clauses:
o1, o2 = model.forward(clause)
if o2.data.numpy()[0][0] > 0.9:
results.append(1.0)
else:
results.append(0.0)
return sum(results) / len(kb.clauses)
def test_model(model, kb1, kb2, filename):
kb_train = kb1.union(kb2)
optimizor = torch.optim.Adam(model.parameters(), lr=0.001)
mone = torch.FloatTensor([-1])
one = torch.FloatTensor([1])
average_prob = []
averate_loss = []
best_accuracy1 = 0.0
best_accuracy2 = 0.0
for i in tqdm(range(1000)):
optimizor.zero_grad()
total_probability = 0.0
total_loss = 0.0
for clause in kb_train.clauses:
loss, prob = model.forward(clause=clause)
loss.backward(one)
total_probability += prob.data.numpy()[0]
total_loss += loss.data.numpy()[0]
optimizor.step()
average_prob.append(total_probability / len(kb_train.clauses))
averate_loss.append(total_loss / len(kb_train.clauses))
accuracy1 = get_accuracy(model, kb1)
accuracy2 = get_accuracy(model, kb2)
if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:
best_accuracy1 = accuracy1
best_accuracy2 = accuracy2
pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2
), open('./results/%s' % filename, 'wb'))
<|reserved_special_token_0|>
for p in propositionals:
gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))
<|reserved_special_token_0|>
for tkb in gkbs1[1:]:
gkb1 = gkb1.union(tkb)
<|reserved_special_token_0|>
for p in propositionals:
gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))
<|reserved_special_token_0|>
for tkb in gkbs2[1:]:
gkb2 = gkb2.union(tkb)
<|reserved_special_token_0|>
for p in propositionals:
gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))
<|reserved_special_token_0|>
for tkb in gkbs3[1:]:
gkb3 = gkb3.union(tkb)
<|reserved_special_token_0|>
for p in propositionals:
gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))
<|reserved_special_token_0|>
for tkb in gkbs4[1:]:
gkb4 = gkb4.union(tkb)
<|reserved_special_token_0|>
for emb_dim in emb_dim_range:
test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [
'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),
filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)
<|reserved_special_token_0|>
for emb_dim in emb_dim_range:
prob, loss, first, second = pickle.load(open(
'./results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))
accuracys1.append(first)
accuracys2.append(second)
plt.plot(emb_dim_range, accuracys1, label='Group1')
plt.plot(emb_dim_range, accuracys2, label='Group2')
plt.legend()
plt.xlabel('Vector Length')
plt.ylabel('Accuracy')
plt.savefig('./Report/img/curve4.pdf')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if 'DISPLAY' not in os.environ:
matplotlib.use('Agg')
else:
pass
<|reserved_special_token_0|>
sns.set(style='white', context='talk')
def get_accuracy(model, kb):
results = []
for clause in kb.clauses:
o1, o2 = model.forward(clause)
if o2.data.numpy()[0][0] > 0.9:
results.append(1.0)
else:
results.append(0.0)
return sum(results) / len(kb.clauses)
def test_model(model, kb1, kb2, filename):
kb_train = kb1.union(kb2)
optimizor = torch.optim.Adam(model.parameters(), lr=0.001)
mone = torch.FloatTensor([-1])
one = torch.FloatTensor([1])
average_prob = []
averate_loss = []
best_accuracy1 = 0.0
best_accuracy2 = 0.0
for i in tqdm(range(1000)):
optimizor.zero_grad()
total_probability = 0.0
total_loss = 0.0
for clause in kb_train.clauses:
loss, prob = model.forward(clause=clause)
loss.backward(one)
total_probability += prob.data.numpy()[0]
total_loss += loss.data.numpy()[0]
optimizor.step()
average_prob.append(total_probability / len(kb_train.clauses))
averate_loss.append(total_loss / len(kb_train.clauses))
accuracy1 = get_accuracy(model, kb1)
accuracy2 = get_accuracy(model, kb2)
if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:
best_accuracy1 = accuracy1
best_accuracy2 = accuracy2
pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2
), open('./results/%s' % filename, 'wb'))
kb1 = load_knowledge_base('./facts1.txt')
kb2 = load_knowledge_base('./facts2.txt')
propositionals = load_propositional('./knowledge.txt')
gkbs1 = []
for p in propositionals:
gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))
gkb1 = gkbs1[0]
for tkb in gkbs1[1:]:
gkb1 = gkb1.union(tkb)
gkbs2 = []
for p in propositionals:
gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))
gkb2 = gkbs2[0]
for tkb in gkbs2[1:]:
gkb2 = gkb2.union(tkb)
gkbs3 = []
for p in propositionals:
gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))
gkb3 = gkbs3[0]
for tkb in gkbs3[1:]:
gkb3 = gkb3.union(tkb)
gkbs4 = []
for p in propositionals:
gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))
gkb4 = gkbs4[0]
for tkb in gkbs4[1:]:
gkb4 = gkb4.union(tkb)
emb_dim = 50
emb_dim_range = list(range(10, 20, 5)) + list(range(20, 101, 20))
emb_dim_range = list(range(160, 161, 20))
for emb_dim in emb_dim_range:
test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [
'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),
filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)
accuracys1 = []
accuracys2 = []
for emb_dim in emb_dim_range:
prob, loss, first, second = pickle.load(open(
'./results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))
accuracys1.append(first)
accuracys2.append(second)
plt.plot(emb_dim_range, accuracys1, label='Group1')
plt.plot(emb_dim_range, accuracys2, label='Group2')
plt.legend()
plt.xlabel('Vector Length')
plt.ylabel('Accuracy')
plt.savefig('./Report/img/curve4.pdf')
plt.show()
<|reserved_special_token_1|>
import matplotlib
import os
if 'DISPLAY' not in os.environ:
matplotlib.use('Agg')
else:
pass
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
from matplotlib import pyplot as plt
import seaborn as sns
from tqdm import tqdm
import copy
from utils import Predicate, Clause, KnowledgeBase, Propositional
from utils import load_knowledge_base, load_propositional
from models import LTN
import pickle
import numpy as np
import seaborn as sns
sns.set(style='white', context='talk')
def get_accuracy(model, kb):
    """Return the fraction of clauses in *kb* that *model* evaluates as true.

    A clause counts as correctly satisfied when the second output of
    ``model.forward(clause)`` (its truth value) exceeds 0.9.
    """
    hits = 0
    for clause in kb.clauses:
        _, truth = model.forward(clause)
        if truth.data.numpy()[0][0] > 0.9:
            hits += 1
    return hits / len(kb.clauses)
def test_model(model, kb1, kb2, filename):
    """Train *model* on the union of the two knowledge bases and record results.

    Runs 1000 epochs of full-batch training, tracking per-epoch average
    satisfaction probability and loss, plus the best accuracy reached on each
    knowledge base separately.  The four series/values are pickled to
    ``./results/<filename>``.
    """
    kb_train = kb1.union(kb2)
    optimizor = torch.optim.Adam(model.parameters(), lr=0.001)
    # Scalar gradient weights for backward(); only `one` is actually used.
    mone = torch.FloatTensor([-1])
    one = torch.FloatTensor([1])
    average_prob = []
    averate_loss = []
    best_accuracy1 = 0.0
    best_accuracy2 = 0.0
    for i in tqdm(range(1000)):
        optimizor.zero_grad()
        total_probability = 0.0
        total_loss = 0.0
        # Gradients accumulate over every clause before the single step()
        # below, i.e. one full-batch update per epoch.
        for clause in kb_train.clauses:
            loss, prob = model.forward(clause=clause)
            loss.backward(one)
            total_probability += prob.data.numpy()[0]
            total_loss += loss.data.numpy()[0]
        optimizor.step()
        average_prob.append(total_probability / len(kb_train.clauses))
        averate_loss.append(total_loss / len(kb_train.clauses))
        # Keep the best joint accuracy seen across the whole run, evaluating
        # each knowledge base on its own.
        accuracy1 = get_accuracy(model, kb1)
        accuracy2 = get_accuracy(model, kb2)
        if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:
            best_accuracy1 = accuracy1
            best_accuracy2 = accuracy2
    # NOTE(review): the file handle from open() is never closed explicitly;
    # consider a `with` block if this is revisited.
    pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2
        ), open('./results/%s' % filename, 'wb'))
kb1 = load_knowledge_base('./facts1.txt')
kb2 = load_knowledge_base('./facts2.txt')
propositionals = load_propositional('./knowledge.txt')
gkbs1 = []
for p in propositionals:
gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))
gkb1 = gkbs1[0]
for tkb in gkbs1[1:]:
gkb1 = gkb1.union(tkb)
gkbs2 = []
for p in propositionals:
gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))
gkb2 = gkbs2[0]
for tkb in gkbs2[1:]:
gkb2 = gkb2.union(tkb)
gkbs3 = []
for p in propositionals:
gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))
gkb3 = gkbs3[0]
for tkb in gkbs3[1:]:
gkb3 = gkb3.union(tkb)
gkbs4 = []
for p in propositionals:
gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))
gkb4 = gkbs4[0]
for tkb in gkbs4[1:]:
gkb4 = gkb4.union(tkb)
emb_dim = 50
emb_dim_range = list(range(10, 20, 5)) + list(range(20, 101, 20))
emb_dim_range = list(range(160, 161, 20))
for emb_dim in emb_dim_range:
test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [
'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),
filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)
accuracys1 = []
accuracys2 = []
for emb_dim in emb_dim_range:
prob, loss, first, second = pickle.load(open(
'./results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))
accuracys1.append(first)
accuracys2.append(second)
plt.plot(emb_dim_range, accuracys1, label='Group1')
plt.plot(emb_dim_range, accuracys2, label='Group2')
plt.legend()
plt.xlabel('Vector Length')
plt.ylabel('Accuracy')
plt.savefig('./Report/img/curve4.pdf')
plt.show()
<|reserved_special_token_1|>
# coding: utf-8
# In[1]:
#coding:utf8
import matplotlib
import os
if 'DISPLAY' not in os.environ:
matplotlib.use('Agg')
else:
pass
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
from matplotlib import pyplot as plt
import seaborn as sns
from tqdm import tqdm
import copy
from utils import Predicate,Clause,KnowledgeBase, Propositional
from utils import load_knowledge_base,load_propositional
from models import LTN
import pickle
import numpy as np
import seaborn as sns
sns.set(style="white", context="talk")
# In[2]:
def get_accuracy(model,kb):
results=[]
for clause in kb.clauses:
o1,o2=model.forward(clause)
if o2.data.numpy()[0][0]>0.9:
results.append(1.0)
else:
results.append(0.0)
return sum(results)/len(kb.clauses)
# In[3]:
def test_model(model,kb1, kb2,filename):
kb_train=kb1.union(kb2)
optimizor=torch.optim.Adam(model.parameters(),lr=0.001)
mone=torch.FloatTensor([-1])
one=torch.FloatTensor([1])
average_prob=[]
averate_loss=[]
best_accuracy1=0.0
best_accuracy2=0.0
for i in tqdm(range(1000)):
optimizor.zero_grad()
total_probability=0.0
total_loss=0.0
for clause in kb_train.clauses:
loss,prob=model.forward(clause=clause)
loss.backward(one)
total_probability+=prob.data.numpy()[0]
total_loss+=loss.data.numpy()[0]
optimizor.step()
average_prob.append(total_probability/len(kb_train.clauses))
averate_loss.append(total_loss/len(kb_train.clauses))
accuracy1=get_accuracy(model,kb1)
accuracy2=get_accuracy(model,kb2)
if accuracy1+accuracy2>best_accuracy1+best_accuracy2:
best_accuracy1=accuracy1
best_accuracy2=accuracy2
pickle.dump((average_prob,averate_loss,best_accuracy1,best_accuracy2), open("./results/%s"%filename, "wb" ))
# In[4]:
kb1=load_knowledge_base('./facts1.txt')
kb2=load_knowledge_base('./facts2.txt')
propositionals=load_propositional('./knowledge.txt')
gkbs1=[]
for p in propositionals:
gkbs1.append(p.generate_knowledge_base('abcdefgh',change_weight=False))
gkb1=gkbs1[0]
for tkb in gkbs1[1:]:
gkb1=gkb1.union(tkb)
gkbs2=[]
for p in propositionals:
gkbs2.append(p.generate_knowledge_base('ijklmn',change_weight=False))
gkb2=gkbs2[0]
for tkb in gkbs2[1:]:
gkb2=gkb2.union(tkb)
gkbs3=[]
for p in propositionals:
gkbs3.append(p.generate_knowledge_base('abcdefgh',change_weight=True))
gkb3=gkbs3[0]
for tkb in gkbs3[1:]:
gkb3=gkb3.union(tkb)
gkbs4=[]
for p in propositionals:
gkbs4.append(p.generate_knowledge_base('ijklmn',change_weight=True))
gkb4=gkbs4[0]
for tkb in gkbs4[1:]:
gkb4=gkb4.union(tkb)
# In[5]:
emb_dim=50
# In[6]:
emb_dim_range=list(range(10,20,5))+list(range(20,101,20))
emb_dim_range=list(range(160,161,20))
# In[ ]:
for emb_dim in emb_dim_range:
test_model(
model=LTN(emb_dim,'abcdefghijklmn',[['S',1],['F',2],['C',1]], CLTN=True),
kb1=kb1.union(gkb3),
kb2=kb2.union(gkb4),
filename='LTN_Learn_emb_dim=%d.pkl'%(emb_dim)
)
# In[80]:
accuracys1=[]
accuracys2=[]
for emb_dim in emb_dim_range:
prob,loss,first,second=pickle.load(open('./results/LTN_Learn_emb_dim=%d.pkl'%(emb_dim),'rb'))
accuracys1.append(first)
accuracys2.append(second)
plt.plot(emb_dim_range,accuracys1,label='Group1')
plt.plot(emb_dim_range,accuracys2,label='Group2')
plt.legend()
plt.xlabel('Vector Length')
plt.ylabel('Accuracy')
plt.savefig('./Report/img/curve4.pdf')
plt.show()
|
flexible
|
{
"blob_id": "3022cade3bfa36925bcbda8023e5cd98ed33d093",
"index": 9901,
"step-1": "<mask token>\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\n<mask token>\n",
"step-2": "<mask token>\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\n<mask token>\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\n<mask token>\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\n<mask token>\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\n<mask token>\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\n<mask token>\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\n<mask token>\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))\n<mask token>\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\n<mask token>\nfor p in propositionals:\n 
gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\n<mask token>\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\n<mask token>\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\n<mask token>\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n",
"step-3": "<mask token>\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\n<mask token>\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\nkb1 = load_knowledge_base('./facts1.txt')\nkb2 = load_knowledge_base('./facts2.txt')\npropositionals = load_propositional('./knowledge.txt')\ngkbs1 = []\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\ngkb1 = gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\ngkbs2 = []\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\ngkb2 = gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\ngkbs3 = []\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', 
change_weight=True))\ngkb3 = gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\ngkbs4 = []\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\ngkb4 = gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\nemb_dim = 50\nemb_dim_range = list(range(10, 20, 5)) + list(range(20, 101, 20))\nemb_dim_range = list(range(160, 161, 20))\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\naccuracys1 = []\naccuracys2 = []\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n",
"step-4": "import matplotlib\nimport os\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.optim as optim\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\nfrom tqdm import tqdm\nimport copy\nfrom utils import Predicate, Clause, KnowledgeBase, Propositional\nfrom utils import load_knowledge_base, load_propositional\nfrom models import LTN\nimport pickle\nimport numpy as np\nimport seaborn as sns\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\nkb1 = load_knowledge_base('./facts1.txt')\nkb2 = load_knowledge_base('./facts2.txt')\npropositionals = load_propositional('./knowledge.txt')\ngkbs1 = []\nfor p in 
propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\ngkb1 = gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\ngkbs2 = []\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\ngkb2 = gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\ngkbs3 = []\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))\ngkb3 = gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\ngkbs4 = []\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\ngkb4 = gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\nemb_dim = 50\nemb_dim_range = list(range(10, 20, 5)) + list(range(20, 101, 20))\nemb_dim_range = list(range(160, 161, 20))\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\naccuracys1 = []\naccuracys2 = []\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n",
"step-5": "\n# coding: utf-8\n\n# In[1]:\n\n\n#coding:utf8\nimport matplotlib\nimport os\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.optim as optim\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\nfrom tqdm import tqdm\nimport copy\nfrom utils import Predicate,Clause,KnowledgeBase, Propositional\nfrom utils import load_knowledge_base,load_propositional\nfrom models import LTN\nimport pickle\nimport numpy as np\nimport seaborn as sns\nsns.set(style=\"white\", context=\"talk\")\n\n\n# In[2]:\n\n\ndef get_accuracy(model,kb):\n results=[]\n for clause in kb.clauses:\n o1,o2=model.forward(clause)\n if o2.data.numpy()[0][0]>0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n\n return sum(results)/len(kb.clauses)\n\n\n# In[3]:\n\n\ndef test_model(model,kb1, kb2,filename):\n kb_train=kb1.union(kb2)\n optimizor=torch.optim.Adam(model.parameters(),lr=0.001)\n mone=torch.FloatTensor([-1])\n one=torch.FloatTensor([1])\n average_prob=[]\n averate_loss=[]\n best_accuracy1=0.0\n best_accuracy2=0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability=0.0\n total_loss=0.0\n for clause in kb_train.clauses:\n loss,prob=model.forward(clause=clause)\n loss.backward(one)\n total_probability+=prob.data.numpy()[0]\n total_loss+=loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability/len(kb_train.clauses))\n averate_loss.append(total_loss/len(kb_train.clauses))\n accuracy1=get_accuracy(model,kb1)\n accuracy2=get_accuracy(model,kb2)\n if accuracy1+accuracy2>best_accuracy1+best_accuracy2:\n best_accuracy1=accuracy1\n best_accuracy2=accuracy2\n pickle.dump((average_prob,averate_loss,best_accuracy1,best_accuracy2), open(\"./results/%s\"%filename, \"wb\" ))\n\n\n# 
In[4]:\n\n\nkb1=load_knowledge_base('./facts1.txt')\nkb2=load_knowledge_base('./facts2.txt')\npropositionals=load_propositional('./knowledge.txt')\ngkbs1=[]\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh',change_weight=False))\ngkb1=gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1=gkb1.union(tkb)\ngkbs2=[]\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn',change_weight=False))\ngkb2=gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2=gkb2.union(tkb)\n\ngkbs3=[]\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh',change_weight=True))\ngkb3=gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3=gkb3.union(tkb)\ngkbs4=[]\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn',change_weight=True))\ngkb4=gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4=gkb4.union(tkb)\n\n\n# In[5]:\n\n\nemb_dim=50\n\n\n# In[6]:\n\n\nemb_dim_range=list(range(10,20,5))+list(range(20,101,20))\nemb_dim_range=list(range(160,161,20))\n\n\n# In[ ]:\n\n\nfor emb_dim in emb_dim_range:\n test_model(\n model=LTN(emb_dim,'abcdefghijklmn',[['S',1],['F',2],['C',1]], CLTN=True),\n kb1=kb1.union(gkb3),\n kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl'%(emb_dim)\n )\n\n\n# In[80]:\n\n\naccuracys1=[]\naccuracys2=[]\nfor emb_dim in emb_dim_range:\n prob,loss,first,second=pickle.load(open('./results/LTN_Learn_emb_dim=%d.pkl'%(emb_dim),'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range,accuracys1,label='Group1')\nplt.plot(emb_dim_range,accuracys2,label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from flask_opencv_streamer.streamer import Streamer
import cv2
import numpy as np
MASK = np.array([
[0, 1, 0],
[1, -4, 1],
[0, 1, 0]
])
port = 3030
require_login = False
streamer = Streamer(port, require_login)
video_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')
while True:
_, frame = video_capture.read()
frame = cv2.medianBlur(frame, 3)
frame = cv2.filter2D(frame, -1, MASK)
_, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)
streamer.update_frame(frame)
if not streamer.is_streaming:
streamer.start_streaming()
# было в примере, но вроде и без этого работает
# cv2.waitKey(30)
|
normal
|
{
"blob_id": "a19b4928c9423dae6c60f39dbc5af0673b433c8e",
"index": 3551,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-3": "<mask token>\nMASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])\nport = 3030\nrequire_login = False\nstreamer = Streamer(port, require_login)\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-4": "from flask_opencv_streamer.streamer import Streamer\nimport cv2\nimport numpy as np\nMASK = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]])\nport = 3030\nrequire_login = False\nstreamer = Streamer(port, require_login)\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\nwhile True:\n _, frame = video_capture.read()\n frame = cv2.medianBlur(frame, 3)\n frame = cv2.filter2D(frame, -1, MASK)\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\n streamer.update_frame(frame)\n if not streamer.is_streaming:\n streamer.start_streaming()\n",
"step-5": "from flask_opencv_streamer.streamer import Streamer\r\nimport cv2\r\nimport numpy as np\r\n\r\nMASK = np.array([\r\n [0, 1, 0],\r\n [1, -4, 1],\r\n [0, 1, 0]\r\n])\r\n\r\nport = 3030\r\nrequire_login = False\r\nstreamer = Streamer(port, require_login)\r\n\r\nvideo_capture = cv2.VideoCapture('http://149.43.156.105/mjpg/video.mjpg')\r\n\r\nwhile True:\r\n _, frame = video_capture.read()\r\n\r\n frame = cv2.medianBlur(frame, 3)\r\n frame = cv2.filter2D(frame, -1, MASK)\r\n _, frame = cv2.threshold(frame, 10, 255, cv2.THRESH_BINARY_INV)\r\n streamer.update_frame(frame)\r\n\r\n if not streamer.is_streaming:\r\n streamer.start_streaming()\r\n # было в примере, но вроде и без этого работает\r\n # cv2.waitKey(30)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from torch import nn
from abc import ABCMeta, abstractmethod
class BaseEncoder(nn.Module):
__metaclass__ = ABCMeta
def __init__(self, **kwargs):
if len(kwargs) > 0:
raise RuntimeError(
"Unrecognized options: {}".format(', '.join(kwargs.keys())))
super(BaseEncoder, self).__init__()
@abstractmethod
def forward(self, features, features_lengths, spkids):
""" Encode a minibatch of audio features
:param features: float32 tensor of size (bs x t x f x c)
:param features_lengths: int64 tensor of size (bs)
:param spkids: string id of speakers
:returns: A tuple with elements:
- encoded: float32 tensor of size (t x bs x d)
- encoded_lens: int64 tensor of size (bs)
"""
pass
def get_parameters_for_optimizer(self):
return self.parameters()
|
normal
|
{
"blob_id": "86ee2300b5270df3dadb22f2cfea626e6556e5db",
"index": 9951,
"step-1": "<mask token>\n\n\nclass BaseEncoder(nn.Module):\n <mask token>\n\n def __init__(self, **kwargs):\n if len(kwargs) > 0:\n raise RuntimeError('Unrecognized options: {}'.format(', '.join(\n kwargs.keys())))\n super(BaseEncoder, self).__init__()\n <mask token>\n\n def get_parameters_for_optimizer(self):\n return self.parameters()\n",
"step-2": "<mask token>\n\n\nclass BaseEncoder(nn.Module):\n <mask token>\n\n def __init__(self, **kwargs):\n if len(kwargs) > 0:\n raise RuntimeError('Unrecognized options: {}'.format(', '.join(\n kwargs.keys())))\n super(BaseEncoder, self).__init__()\n\n @abstractmethod\n def forward(self, features, features_lengths, spkids):\n \"\"\" Encode a minibatch of audio features\n\n :param features: float32 tensor of size (bs x t x f x c)\n :param features_lengths: int64 tensor of size (bs)\n :param spkids: string id of speakers\n :returns: A tuple with elements:\n - encoded: float32 tensor of size (t x bs x d)\n - encoded_lens: int64 tensor of size (bs)\n \"\"\"\n pass\n\n def get_parameters_for_optimizer(self):\n return self.parameters()\n",
"step-3": "<mask token>\n\n\nclass BaseEncoder(nn.Module):\n __metaclass__ = ABCMeta\n\n def __init__(self, **kwargs):\n if len(kwargs) > 0:\n raise RuntimeError('Unrecognized options: {}'.format(', '.join(\n kwargs.keys())))\n super(BaseEncoder, self).__init__()\n\n @abstractmethod\n def forward(self, features, features_lengths, spkids):\n \"\"\" Encode a minibatch of audio features\n\n :param features: float32 tensor of size (bs x t x f x c)\n :param features_lengths: int64 tensor of size (bs)\n :param spkids: string id of speakers\n :returns: A tuple with elements:\n - encoded: float32 tensor of size (t x bs x d)\n - encoded_lens: int64 tensor of size (bs)\n \"\"\"\n pass\n\n def get_parameters_for_optimizer(self):\n return self.parameters()\n",
"step-4": "from torch import nn\nfrom abc import ABCMeta, abstractmethod\n\n\nclass BaseEncoder(nn.Module):\n __metaclass__ = ABCMeta\n\n def __init__(self, **kwargs):\n if len(kwargs) > 0:\n raise RuntimeError('Unrecognized options: {}'.format(', '.join(\n kwargs.keys())))\n super(BaseEncoder, self).__init__()\n\n @abstractmethod\n def forward(self, features, features_lengths, spkids):\n \"\"\" Encode a minibatch of audio features\n\n :param features: float32 tensor of size (bs x t x f x c)\n :param features_lengths: int64 tensor of size (bs)\n :param spkids: string id of speakers\n :returns: A tuple with elements:\n - encoded: float32 tensor of size (t x bs x d)\n - encoded_lens: int64 tensor of size (bs)\n \"\"\"\n pass\n\n def get_parameters_for_optimizer(self):\n return self.parameters()\n",
"step-5": "from torch import nn\nfrom abc import ABCMeta, abstractmethod\n\nclass BaseEncoder(nn.Module):\n __metaclass__ = ABCMeta\n\n def __init__(self, **kwargs):\n if len(kwargs) > 0:\n raise RuntimeError(\n \"Unrecognized options: {}\".format(', '.join(kwargs.keys())))\n super(BaseEncoder, self).__init__()\n\n @abstractmethod\n def forward(self, features, features_lengths, spkids):\n \"\"\" Encode a minibatch of audio features\n\n :param features: float32 tensor of size (bs x t x f x c)\n :param features_lengths: int64 tensor of size (bs)\n :param spkids: string id of speakers\n :returns: A tuple with elements:\n - encoded: float32 tensor of size (t x bs x d)\n - encoded_lens: int64 tensor of size (bs)\n \"\"\"\n pass\n\n def get_parameters_for_optimizer(self):\n return self.parameters()",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*-coding:utf-8 -*
# Copyright (c) 2011-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import subprocess
import unittest
import time
class RemoteCli(object):
def sendCmd(self, cmd, *args):
shell_cmd = " ".join([self.platform_command, cmd])
if args is not None:
shell_cmd += " " + " ".join(args)
print "CMD :",
print "[" + shell_cmd + "]"
try:
p = subprocess.Popen(shell_cmd, shell=True, stdout=subprocess.PIPE)
except Exception as (errno, strerror):
return None, strerror
out, err = p.communicate()
if out is not None:
out = out.strip()
return out, err
class Pfw(RemoteCli):
def __init__(self):
self.platform_command = "remote-process localhost 5000 "
class Hal(RemoteCli):
def __init__(self):
self.platform_command = "remote-process localhost 5001 "
# Starts the HAL exe
def startHal(self):
cmd= "test-platform $PFW_TEST_CONFIGURATION"
subprocess.Popen(cmd, shell=True)
pass
# Send command "stop" to the HAL
def stopHal(self):
subprocess.call("remote-process localhost 5001 exit", shell=True)
def createInclusiveCriterion(self, name, nb):
self.sendCmd("createInclusiveSelectionCriterion", name, nb)
def createExclusiveCriterion(self, name, nb):
self.sendCmd("createExclusiveSelectionCriterion", name, nb)
# Starts the Pfw
def start(self):
self.sendCmd("start")
# A PfwTestCase gather tests performed on one instance of the PFW.
class PfwTestCase(unittest.TestCase):
hal = Hal()
def __init__(self, argv):
super(PfwTestCase, self).__init__(argv)
self.pfw = Pfw()
@classmethod
def setUpClass(cls):
cls.startHal()
@classmethod
def tearDownClass(cls):
cls.stopHal()
@classmethod
def startHal(cls):
# set up the Hal & pfw
cls.hal.startHal()
time.sleep(0.1)
# create criterions
cls.hal.createInclusiveCriterion("Crit_0", "2")
cls.hal.createExclusiveCriterion("Crit_1", "2")
# start the Pfw
cls.hal.start()
@classmethod
def stopHal(cls):
cls.hal.stopHal()
time.sleep(0.1)
|
normal
|
{
"blob_id": "3c2873add66172a5ed038949c31d514dcd5f26b3",
"index": 7152,
"step-1": "# -*-coding:utf-8 -*\n\n# Copyright (c) 2011-2015, Intel Corporation\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without modification,\n# are permitted provided that the following conditions are met:\n#\n# 1. Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# 2. Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation and/or\n# other materials provided with the distribution.\n#\n# 3. Neither the name of the copyright holder nor the names of its contributors\n# may be used to endorse or promote products derived from this software without\n# specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR\n# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nimport subprocess\nimport unittest\nimport time\n\nclass RemoteCli(object):\n def sendCmd(self, cmd, *args):\n shell_cmd = \" \".join([self.platform_command, cmd])\n if args is not None:\n shell_cmd += \" \" + \" \".join(args)\n print \"CMD :\",\n print \"[\" + shell_cmd + \"]\"\n try:\n p = subprocess.Popen(shell_cmd, shell=True, stdout=subprocess.PIPE)\n except Exception as (errno, strerror):\n return None, strerror\n out, err = p.communicate()\n if out is not None:\n out = out.strip()\n return out, err\n\nclass Pfw(RemoteCli):\n def __init__(self):\n self.platform_command = \"remote-process localhost 5000 \"\n\nclass Hal(RemoteCli):\n def __init__(self):\n self.platform_command = \"remote-process localhost 5001 \"\n\n # Starts the HAL exe\n def startHal(self):\n cmd= \"test-platform $PFW_TEST_CONFIGURATION\"\n subprocess.Popen(cmd, shell=True)\n pass\n\n # Send command \"stop\" to the HAL\n def stopHal(self):\n subprocess.call(\"remote-process localhost 5001 exit\", shell=True)\n\n def createInclusiveCriterion(self, name, nb):\n self.sendCmd(\"createInclusiveSelectionCriterion\", name, nb)\n\n def createExclusiveCriterion(self, name, nb):\n self.sendCmd(\"createExclusiveSelectionCriterion\", name, nb)\n\n # Starts the Pfw\n def start(self):\n self.sendCmd(\"start\")\n\n# A PfwTestCase gather tests performed on one instance of the PFW.\nclass PfwTestCase(unittest.TestCase):\n\n hal = Hal()\n\n def __init__(self, argv):\n 
super(PfwTestCase, self).__init__(argv)\n self.pfw = Pfw()\n\n @classmethod\n def setUpClass(cls):\n cls.startHal()\n\n @classmethod\n def tearDownClass(cls):\n cls.stopHal()\n\n @classmethod\n def startHal(cls):\n # set up the Hal & pfw\n cls.hal.startHal()\n time.sleep(0.1)\n # create criterions\n cls.hal.createInclusiveCriterion(\"Crit_0\", \"2\")\n cls.hal.createExclusiveCriterion(\"Crit_1\", \"2\")\n # start the Pfw\n cls.hal.start()\n\n @classmethod\n def stopHal(cls):\n cls.hal.stopHal()\n time.sleep(0.1)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import itertools
def possibleNumber(digitSet, n):
res = [[]]
pools = [digitSet] * n
# print(pools)
for pool in pools:
# print(res)
res = [ x + [y] for x in res for y in pool]
for prod in res:
yield prod
# def possibleNumber(digitSet, n):
# res = []
# temp = itertools.product(digitSet, repeat = n)
# for item in temp:
# res.append(item)
# return res
res = possibleNumber('23', 5)
for i in res:
print(i)
|
normal
|
{
"blob_id": "fcc6dd61b94d5fa7f088fc75b748d976d1b30fa5",
"index": 1781,
"step-1": "<mask token>\n\n\ndef possibleNumber(digitSet, n):\n res = [[]]\n pools = [digitSet] * n\n for pool in pools:\n res = [(x + [y]) for x in res for y in pool]\n for prod in res:\n yield prod\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef possibleNumber(digitSet, n):\n res = [[]]\n pools = [digitSet] * n\n for pool in pools:\n res = [(x + [y]) for x in res for y in pool]\n for prod in res:\n yield prod\n\n\n<mask token>\nfor i in res:\n print(i)\n",
"step-3": "<mask token>\n\n\ndef possibleNumber(digitSet, n):\n res = [[]]\n pools = [digitSet] * n\n for pool in pools:\n res = [(x + [y]) for x in res for y in pool]\n for prod in res:\n yield prod\n\n\nres = possibleNumber('23', 5)\nfor i in res:\n print(i)\n",
"step-4": "import itertools\n\n\ndef possibleNumber(digitSet, n):\n res = [[]]\n pools = [digitSet] * n\n for pool in pools:\n res = [(x + [y]) for x in res for y in pool]\n for prod in res:\n yield prod\n\n\nres = possibleNumber('23', 5)\nfor i in res:\n print(i)\n",
"step-5": "import itertools\n\ndef possibleNumber(digitSet, n):\n res = [[]]\n\n pools = [digitSet] * n\n # print(pools)\n for pool in pools:\n # print(res)\n res = [ x + [y] for x in res for y in pool]\n for prod in res:\n yield prod\n\n# def possibleNumber(digitSet, n):\n# res = []\n# temp = itertools.product(digitSet, repeat = n)\n# for item in temp:\n# res.append(item)\n# return res\n\nres = possibleNumber('23', 5)\nfor i in res:\n print(i)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class Atendent(models.Model):
user = models.ForeignKey(User)
event = models.ForeignKey(Event, null=True, blank=True)
state = models.IntegerField(null=True, blank=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Event(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __unicode__(self):
return self.name
class Atendent(models.Model):
user = models.ForeignKey(User)
event = models.ForeignKey(Event, null=True, blank=True)
state = models.IntegerField(null=True, blank=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Event(models.Model):
name = models.CharField('Назва', max_length=200)
date = models.DateField('Дата')
address = models.CharField('Адреса', max_length=255, blank=True, null=True)
attendents = models.ManyToManyField(User, through='Atendent', blank=
True, null=True)
description = models.TextField('Опис', blank=True, null=True)
def __unicode__(self):
return self.name
class Atendent(models.Model):
user = models.ForeignKey(User)
event = models.ForeignKey(Event, null=True, blank=True)
state = models.IntegerField(null=True, blank=True)
<|reserved_special_token_1|>
from django.db import models
from django.contrib.auth.models import User
class Event(models.Model):
name = models.CharField('Назва', max_length=200)
date = models.DateField('Дата')
address = models.CharField('Адреса', max_length=255, blank=True, null=True)
attendents = models.ManyToManyField(User, through='Atendent', blank=
True, null=True)
description = models.TextField('Опис', blank=True, null=True)
def __unicode__(self):
return self.name
class Atendent(models.Model):
user = models.ForeignKey(User)
event = models.ForeignKey(Event, null=True, blank=True)
state = models.IntegerField(null=True, blank=True)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Event(models.Model):
name = models.CharField('Назва', max_length=200)
date = models.DateField('Дата')
address = models.CharField('Адреса', max_length=255, blank=True, null=True)
attendents = models.ManyToManyField(User, through='Atendent', blank=True, null=True)
description = models.TextField('Опис', blank=True, null=True)
def __unicode__(self):
return self.name
class Atendent(models.Model):
user = models.ForeignKey(User)
event = models.ForeignKey(Event, null=True, blank=True)
state = models.IntegerField(null=True, blank=True)
|
flexible
|
{
"blob_id": "137f9310256f66ccd9fbe6626659c3c4daea0efc",
"index": 8949,
"step-1": "<mask token>\n\n\nclass Atendent(models.Model):\n user = models.ForeignKey(User)\n event = models.ForeignKey(Event, null=True, blank=True)\n state = models.IntegerField(null=True, blank=True)\n",
"step-2": "<mask token>\n\n\nclass Event(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return self.name\n\n\nclass Atendent(models.Model):\n user = models.ForeignKey(User)\n event = models.ForeignKey(Event, null=True, blank=True)\n state = models.IntegerField(null=True, blank=True)\n",
"step-3": "<mask token>\n\n\nclass Event(models.Model):\n name = models.CharField('Назва', max_length=200)\n date = models.DateField('Дата')\n address = models.CharField('Адреса', max_length=255, blank=True, null=True)\n attendents = models.ManyToManyField(User, through='Atendent', blank=\n True, null=True)\n description = models.TextField('Опис', blank=True, null=True)\n\n def __unicode__(self):\n return self.name\n\n\nclass Atendent(models.Model):\n user = models.ForeignKey(User)\n event = models.ForeignKey(Event, null=True, blank=True)\n state = models.IntegerField(null=True, blank=True)\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass Event(models.Model):\n name = models.CharField('Назва', max_length=200)\n date = models.DateField('Дата')\n address = models.CharField('Адреса', max_length=255, blank=True, null=True)\n attendents = models.ManyToManyField(User, through='Atendent', blank=\n True, null=True)\n description = models.TextField('Опис', blank=True, null=True)\n\n def __unicode__(self):\n return self.name\n\n\nclass Atendent(models.Model):\n user = models.ForeignKey(User)\n event = models.ForeignKey(Event, null=True, blank=True)\n state = models.IntegerField(null=True, blank=True)\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom django.db import models\nfrom django.contrib.auth.models import User\n# Create your models here.\n\nclass Event(models.Model):\n name = models.CharField('Назва', max_length=200)\n date = models.DateField('Дата')\n address = models.CharField('Адреса', max_length=255, blank=True, null=True)\n attendents = models.ManyToManyField(User, through='Atendent', blank=True, null=True)\n description = models.TextField('Опис', blank=True, null=True)\n \n def __unicode__(self):\n return self.name\n\nclass Atendent(models.Model):\n user = models.ForeignKey(User)\n event = models.ForeignKey(Event, null=True, blank=True)\n state = models.IntegerField(null=True, blank=True)",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import numpy as np
import os
# ----------------------------------------------------------------------------
# Common variables
# shifting channels based on rules:
# CH_SHIFT[rule_name] = {src_1_based_ch:new_1_based_ch}
CH_SHIFT = {}
CH_SHIFT[None] = None
# for 1-to-1 cards
CH_SHIFT['1to1'] = {}
for ch1 in xrange(1, 49):
CH_SHIFT['1to1'][ch1] = ch1
for ch1 in xrange(81, 129):
CH_SHIFT['1to1'][ch1] = ch1 - 32
# for 20110720A: assign all 40 A channels to 1-40
# and all 70 M channels to 41-110
CH_SHIFT['20110720A'] = {1: 41, 2: 42, 3: 43, 4: 44, 5: 45, 6: 46,
7: 47, 8: 48, 9: 49, 10: 50, 11: 51, 12: 52, 13: 53, 14: 54,
15: 55, 16: 56, 17: 57, 18: 58, 19: 59, 20: 60, 21: 61,
22: 62, 23: 63, 24: 64, 25: 65, 26: 66, 27: 67, 28: 68,
29: 69, 30: 70, 31: 71, 32: 72, 33: 73, 34: 74, 35: 75,
44: 1, 45: 2, 46: 3, 47: 4, 48: 5, 49: 6, 50: 7, 51: 8,
52: 9, 53: 10, 54: 11, 55: 12, 56: 13, 57: 14, 58: 15,
59: 16, 60: 17, 61: 18, 62: 19, 63: 20, 64: 21, 65: 22,
66: 23, 67: 24, 68: 25, 69: 26, 70: 27, 71: 28, 72: 29,
73: 30, 74: 31, 75: 32, 76: 33, 77: 34, 78: 35, 79: 36,
80: 37, 81: 38, 82: 39, 83: 40, 94: 76, 95: 77, 96: 78,
97: 79, 98: 80, 99: 81, 100: 82, 101: 83, 102: 84, 103: 85,
104: 86, 105: 87, 106: 88, 107: 89, 108: 90, 109: 91,
110: 92, 111: 93, 112: 94, 113: 95, 114: 96, 115: 97,
116: 98, 117: 99, 118: 100, 119: 101, 120: 102, 121: 103,
122: 104, 123: 105, 124: 106, 125: 107, 126: 108,
127: 109, 128: 110}
# ----------------------------------------------------------------------------
# Common functions
def seq_search(iterable, target):
"""do sequential search"""
for i, e in enumerate(iterable):
if e != target:
continue
return i
return None
def sort_uniq(base, *args):
"""sort and remove duplicates based on `base` and apply on to `args`"""
if len(args) == 0:
return None
res = []
# sort
si = np.argsort(base)
base = np.array(base[si])
for arg in args:
res.append(np.array(arg[si]))
# remove duplicates
di = np.nonzero(np.diff(base) == 0)[0]
si = list(set(range(len(base))) - set(list(di)))
for i in xrange(len(res)):
res[i] = np.array(res[i][si])
return res
# -----------------------------------------------------------------------------
def parse_opts(opts0):
"""Parse the options in the command line. This somewhat
archaic function mainly exists for backward-compatability."""
opts = {}
# parse the stuff in "opts"
for opt in opts0:
parsed = opt.split('=')
key = parsed[0].strip()
if len(parsed) > 1:
# OLD: cmd = parsed[1].strip()
cmd = '='.join(parsed[1:]).strip()
else:
cmd = ''
opts[key] = cmd
return opts
def parse_opts2(tokens, optpx='--', argparam=False):
"""A newer option parser. (from perf102)"""
opts0 = []
args = []
n = len(optpx)
for token in tokens:
if token[:2] == optpx:
opts0.append(token[n:])
else:
if argparam:
token = token.split('=')
args.append(token)
opts = parse_opts(opts0)
return args, opts
def parse_opts_adapter(tokens, delim, optpx='--', argparam=False):
"""Adapter to support both old- and new-style options"""
if any([t.startswith(optpx) for t in tokens]):
# new style
args, opts = parse_opts2(tokens, optpx=optpx, argparam=argparam)
else:
# old style
args = tokens[:delim]
opts = parse_opts(tokens[delim:])
return args, opts
def makeavail(sth, sth2idx, idx2sth, query=None):
if sth not in sth2idx:
if query is not None and not query(sth):
return
sth2idx[sth] = len(idx2sth)
idx2sth.append(sth)
def prep_files(flist, sep=',', extchk=True):
flist = flist.split(sep)
if flist[0][0] == '+':
flist = [f.strip() for f in open(flist[0][1:]).readlines()]
if extchk:
assert all([os.path.exists(f) for f in flist])
return flist
def prepare_save_dir(sav_dir):
if sav_dir != '' and not os.path.exists(sav_dir):
try:
os.makedirs(sav_dir)
# in massively-parallel env, it is possible that
# the sav_dir is created after os.path.exists() check.
# We just ignore if makedirs fails.
except Exception:
pass
def detect_cpus():
# Linux, Unix and MacOS:
if hasattr(os, "sysconf"):
if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
# Linux & Unix:
ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
if isinstance(ncpus, int) and ncpus > 0:
return ncpus
else: # OSX:
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# Windows:
if 'NUMBER_OF_PROCESSORS' in os.environ:
ncpus = int(os.environ["NUMBER_OF_PROCESSORS"])
if ncpus > 0:
return ncpus
return 1
# -----------------------------------------------------------------------------
# Peri-stimulus data extraction related
N_PRE_PT = 11
SEARCH_RNG = [6, 16]
T_REJECT = 10
N_REJECT = 50
def invalidate_artifacts(buf0, t_reject=T_REJECT,
n_reject=N_REJECT, verbose=True):
"""If there are more than `N_REJET` spikes within `T_REJECT`us window,
invalidate all of them.
"""
ti_all = [(b['timestamp'], i) for i, b in enumerate(buf0)]
ti_all = sorted(ti_all)
t_all = np.array([t[0] for t in ti_all])
i_all = [t[1] for t in ti_all]
nb = len(buf0)
ri = range(nb)
i = 0
while i < nb - 1:
ii = []
t0 = t_all[i]
for j in xrange(i + 1, nb):
if t_all[j] < t0 + t_reject:
ii.append(j)
else:
break
i = j
if len(ii) < n_reject:
continue
for ix in ii:
try:
ri.remove(i_all[ix])
except ValueError:
pass
buf = [buf0[i] for i in ri]
if verbose and len(buf) != nb:
print '* Rejecting', nb - len(buf), 'spikes.'
return buf
def set_new_threshold(wavform, thr, n_pre=N_PRE_PT, rng=SEARCH_RNG, i_chg=20):
"""Set new threshold `thr`.
If the `waveform` cannot pass `thr` returns None.
The new waveform is re-aligned based on the steepest point.
The returned new waveform has `n_pre` points before the alignment point.
"""
wav = np.array(wavform)
sgn = np.sign(thr)
if np.max(wav[rng[0]:rng[1]] * sgn) < np.abs(thr): return None # reject
""" NOT USED -- GIVES IMPRECISE RESULT
# -- align: find the steepest point having the same sign as `sgn`
df = np.diff(wav)
si = np.argsort(-sgn * df) # reverse sorted
for i in si:
if np.sign(wav[i]) == sgn: break
"""
# -- align: find the point where waveform crosses `thr`
n = len(wav)
for i in range(n - 1):
if sgn * wav[i] <= sgn * thr and sgn * thr <= sgn * wav[i + 1]:
break
if i == n - 2:
# although i could be n - 2, it's highly likely an artifact
return None
n_shift = n_pre - i - 1 # > 0: right shift, < 0: left shift
if n_shift == 0:
return wav
wavnew = np.empty(wav.shape)
wavnew[n_shift:] = wav[:-n_shift] # PBC shifting
wavnew[:n_shift] = wav[-n_shift:]
# -- done: but if the spike doesn't change its sign
# within `i_chg`, reject.
if np.max(-sgn * wavnew[n_pre:i_chg]) < 0:
return None
""" DEBUG
if np.abs(n_shift) > 3:
print '!!!', n_shift, '/', i, '/', n
print '---', np.max(-sgn * wavnew[n_pre:i_chg])
print list(wav)
print list(wavnew)
"""
return wavnew
def set_new_threshold_rng(wav, thr, rng=(11, 13), i_chg=32):
return set_new_threshold(wav, thr, rng=rng, i_chg=i_chg)
# return set_new_threshold(wav, thr)
# -----------------------------------------------------------------------------
# Math codes
DEFAULT_N_PCA = 3
def fastnorm(x):
# fastnorm: from Nicolas' code
xv = x.ravel()
return np.dot(xv, xv) ** 0.5
# fastsvd: from Nicolas' code
def fastsvd(M):
h, w = M.shape
# -- thin matrix
if h >= w:
# subspace of M'M
U, S, V = np.linalg.svd(np.dot(M.T, M))
U = np.dot(M, V.T)
# normalize
for i in xrange(w):
S[i] = fastnorm(U[:, i])
U[:, i] = U[:, i] / S[i]
# -- fat matrix
else:
# subspace of MM'
U, S, V = np.linalg.svd(np.dot(M, M.T))
V = np.dot(U.T, M)
# normalize
for i in xrange(h):
S[i] = fastnorm(V[i])
V[i, :] = V[i] / S[i]
return U, S, V
def pca_eigvec(M, pca_threshold=DEFAULT_N_PCA):
    """Return the leading `pca_threshold` principal axes of `M`.

    The columns of the returned array are the top right singular
    vectors of `M`; projecting with `np.dot(M, eigvectors)` gives the
    PCA-reduced data.
    """
    _, _, Vt = fastsvd(M)
    # rows of Vt are the principal axes; keep the leading ones as columns
    return Vt.T[:, :pca_threshold]
|
normal
|
{
"blob_id": "c2ee716b72652035502a1f07dfe8aa68a104b2bb",
"index": 8255,
"step-1": "import numpy as np\nimport os\n\n# ----------------------------------------------------------------------------\n# Common variables\n\n# shifting channels based on rules:\n# CH_SHIFT[rule_name] = {src_1_based_ch:new_1_based_ch}\nCH_SHIFT = {}\nCH_SHIFT[None] = None\n# for 1-to-1 cards\nCH_SHIFT['1to1'] = {}\nfor ch1 in xrange(1, 49):\n CH_SHIFT['1to1'][ch1] = ch1\nfor ch1 in xrange(81, 129):\n CH_SHIFT['1to1'][ch1] = ch1 - 32\n\n# for 20110720A: assign all 40 A channels to 1-40\n# and all 70 M channels to 41-110\nCH_SHIFT['20110720A'] = {1: 41, 2: 42, 3: 43, 4: 44, 5: 45, 6: 46,\n 7: 47, 8: 48, 9: 49, 10: 50, 11: 51, 12: 52, 13: 53, 14: 54,\n 15: 55, 16: 56, 17: 57, 18: 58, 19: 59, 20: 60, 21: 61,\n 22: 62, 23: 63, 24: 64, 25: 65, 26: 66, 27: 67, 28: 68,\n 29: 69, 30: 70, 31: 71, 32: 72, 33: 73, 34: 74, 35: 75,\n 44: 1, 45: 2, 46: 3, 47: 4, 48: 5, 49: 6, 50: 7, 51: 8,\n 52: 9, 53: 10, 54: 11, 55: 12, 56: 13, 57: 14, 58: 15,\n 59: 16, 60: 17, 61: 18, 62: 19, 63: 20, 64: 21, 65: 22,\n 66: 23, 67: 24, 68: 25, 69: 26, 70: 27, 71: 28, 72: 29,\n 73: 30, 74: 31, 75: 32, 76: 33, 77: 34, 78: 35, 79: 36,\n 80: 37, 81: 38, 82: 39, 83: 40, 94: 76, 95: 77, 96: 78,\n 97: 79, 98: 80, 99: 81, 100: 82, 101: 83, 102: 84, 103: 85,\n 104: 86, 105: 87, 106: 88, 107: 89, 108: 90, 109: 91,\n 110: 92, 111: 93, 112: 94, 113: 95, 114: 96, 115: 97,\n 116: 98, 117: 99, 118: 100, 119: 101, 120: 102, 121: 103,\n 122: 104, 123: 105, 124: 106, 125: 107, 126: 108,\n 127: 109, 128: 110}\n\n\n# ----------------------------------------------------------------------------\n# Common functions\ndef seq_search(iterable, target):\n \"\"\"do sequential search\"\"\"\n for i, e in enumerate(iterable):\n if e != target:\n continue\n return i\n return None\n\n\ndef sort_uniq(base, *args):\n \"\"\"sort and remove duplicates based on `base` and apply on to `args`\"\"\"\n if len(args) == 0:\n return None\n res = []\n # sort\n si = np.argsort(base)\n base = np.array(base[si])\n for arg in args:\n 
res.append(np.array(arg[si]))\n # remove duplicates\n di = np.nonzero(np.diff(base) == 0)[0]\n si = list(set(range(len(base))) - set(list(di)))\n for i in xrange(len(res)):\n res[i] = np.array(res[i][si])\n return res\n\n\n# -----------------------------------------------------------------------------\ndef parse_opts(opts0):\n \"\"\"Parse the options in the command line. This somewhat\n archaic function mainly exists for backward-compatability.\"\"\"\n opts = {}\n # parse the stuff in \"opts\"\n for opt in opts0:\n parsed = opt.split('=')\n key = parsed[0].strip()\n if len(parsed) > 1:\n # OLD: cmd = parsed[1].strip()\n cmd = '='.join(parsed[1:]).strip()\n else:\n cmd = ''\n opts[key] = cmd\n\n return opts\n\n\ndef parse_opts2(tokens, optpx='--', argparam=False):\n \"\"\"A newer option parser. (from perf102)\"\"\"\n opts0 = []\n args = []\n n = len(optpx)\n\n for token in tokens:\n if token[:2] == optpx:\n opts0.append(token[n:])\n else:\n if argparam:\n token = token.split('=')\n args.append(token)\n\n opts = parse_opts(opts0)\n\n return args, opts\n\n\ndef parse_opts_adapter(tokens, delim, optpx='--', argparam=False):\n \"\"\"Adapter to support both old- and new-style options\"\"\"\n if any([t.startswith(optpx) for t in tokens]):\n # new style\n args, opts = parse_opts2(tokens, optpx=optpx, argparam=argparam)\n else:\n # old style\n args = tokens[:delim]\n opts = parse_opts(tokens[delim:])\n return args, opts\n\n\ndef makeavail(sth, sth2idx, idx2sth, query=None):\n if sth not in sth2idx:\n if query is not None and not query(sth):\n return\n sth2idx[sth] = len(idx2sth)\n idx2sth.append(sth)\n\n\ndef prep_files(flist, sep=',', extchk=True):\n flist = flist.split(sep)\n if flist[0][0] == '+':\n flist = [f.strip() for f in open(flist[0][1:]).readlines()]\n if extchk:\n assert all([os.path.exists(f) for f in flist])\n\n return flist\n\n\ndef prepare_save_dir(sav_dir):\n if sav_dir != '' and not os.path.exists(sav_dir):\n try:\n os.makedirs(sav_dir)\n # in 
massively-parallel env, it is possible that\n # the sav_dir is created after os.path.exists() check.\n # We just ignore if makedirs fails.\n except Exception:\n pass\n\n\ndef detect_cpus():\n # Linux, Unix and MacOS:\n if hasattr(os, \"sysconf\"):\n if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:\n # Linux & Unix:\n ncpus = os.sysconf(\"SC_NPROCESSORS_ONLN\")\n if isinstance(ncpus, int) and ncpus > 0:\n return ncpus\n else: # OSX:\n return int(os.popen2(\"sysctl -n hw.ncpu\")[1].read())\n # Windows:\n if 'NUMBER_OF_PROCESSORS' in os.environ:\n ncpus = int(os.environ[\"NUMBER_OF_PROCESSORS\"])\n if ncpus > 0:\n return ncpus\n return 1\n\n\n# -----------------------------------------------------------------------------\n# Peri-stimulus data extraction related\nN_PRE_PT = 11\nSEARCH_RNG = [6, 16]\nT_REJECT = 10\nN_REJECT = 50\n\n\ndef invalidate_artifacts(buf0, t_reject=T_REJECT,\n n_reject=N_REJECT, verbose=True):\n \"\"\"If there are more than `N_REJET` spikes within `T_REJECT`us window,\n invalidate all of them.\n \"\"\"\n ti_all = [(b['timestamp'], i) for i, b in enumerate(buf0)]\n ti_all = sorted(ti_all)\n t_all = np.array([t[0] for t in ti_all])\n i_all = [t[1] for t in ti_all]\n\n nb = len(buf0)\n ri = range(nb)\n i = 0\n while i < nb - 1:\n ii = []\n t0 = t_all[i]\n for j in xrange(i + 1, nb):\n if t_all[j] < t0 + t_reject:\n ii.append(j)\n else:\n break\n i = j\n\n if len(ii) < n_reject:\n continue\n for ix in ii:\n try:\n ri.remove(i_all[ix])\n except ValueError:\n pass\n\n buf = [buf0[i] for i in ri]\n if verbose and len(buf) != nb:\n print '* Rejecting', nb - len(buf), 'spikes.'\n return buf\n\n\ndef set_new_threshold(wavform, thr, n_pre=N_PRE_PT, rng=SEARCH_RNG, i_chg=20):\n \"\"\"Set new threshold `thr`.\n If the `waveform` cannot pass `thr` returns None.\n The new waveform is re-aligned based on the steepest point.\n The returned new waveform has `n_pre` points before the alignment point.\n \"\"\"\n wav = np.array(wavform)\n sgn = np.sign(thr)\n if 
np.max(wav[rng[0]:rng[1]] * sgn) < np.abs(thr): return None # reject\n\n \"\"\" NOT USED -- GIVES IMPRECISE RESULT\n # -- align: find the steepest point having the same sign as `sgn`\n df = np.diff(wav)\n si = np.argsort(-sgn * df) # reverse sorted\n for i in si:\n if np.sign(wav[i]) == sgn: break\n \"\"\"\n # -- align: find the point where waveform crosses `thr`\n n = len(wav)\n for i in range(n - 1):\n if sgn * wav[i] <= sgn * thr and sgn * thr <= sgn * wav[i + 1]:\n break\n if i == n - 2:\n # although i could be n - 2, it's highly likely an artifact\n return None\n n_shift = n_pre - i - 1 # > 0: right shift, < 0: left shift\n if n_shift == 0:\n return wav\n\n wavnew = np.empty(wav.shape)\n wavnew[n_shift:] = wav[:-n_shift] # PBC shifting\n wavnew[:n_shift] = wav[-n_shift:]\n\n # -- done: but if the spike doesn't change its sign\n # within `i_chg`, reject.\n if np.max(-sgn * wavnew[n_pre:i_chg]) < 0:\n return None\n\n \"\"\" DEBUG\n if np.abs(n_shift) > 3:\n print '!!!', n_shift, '/', i, '/', n\n print '---', np.max(-sgn * wavnew[n_pre:i_chg])\n print list(wav)\n print list(wavnew)\n \"\"\"\n\n return wavnew\n\n\ndef set_new_threshold_rng(wav, thr, rng=(11, 13), i_chg=32):\n return set_new_threshold(wav, thr, rng=rng, i_chg=i_chg)\n # return set_new_threshold(wav, thr)\n\n\n# -----------------------------------------------------------------------------\n# Math codes\nDEFAULT_N_PCA = 3\n\n\ndef fastnorm(x):\n # fastnorm: from Nicolas' code\n xv = x.ravel()\n return np.dot(xv, xv) ** 0.5\n\n\n# fastsvd: from Nicolas' code\ndef fastsvd(M):\n h, w = M.shape\n # -- thin matrix\n if h >= w:\n # subspace of M'M\n U, S, V = np.linalg.svd(np.dot(M.T, M))\n U = np.dot(M, V.T)\n # normalize\n for i in xrange(w):\n S[i] = fastnorm(U[:, i])\n U[:, i] = U[:, i] / S[i]\n # -- fat matrix\n else:\n # subspace of MM'\n U, S, V = np.linalg.svd(np.dot(M, M.T))\n V = np.dot(U.T, M)\n # normalize\n for i in xrange(h):\n S[i] = fastnorm(V[i])\n V[i, :] = V[i] / S[i]\n return U, S, 
V\n\n\ndef pca_eigvec(M, pca_threshold=DEFAULT_N_PCA):\n U, S, V = fastsvd(M)\n eigvectors = V.T\n eigvectors = eigvectors[:, :pca_threshold]\n # this gives PCA:\n # M = np.dot(M, eigvectors)\n return eigvectors\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
class BinaryTree:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data != None:
arr = [self]
while len(arr) > 0:
node = arr[0]
if node.left:
arr.append(node.left)
else:
node.left = BinaryTree(data)
break
if node.right:
arr.append(node.right)
else:
node.right = BinaryTree(data)
break
arr = arr[1:]
else:
self.data = data
<|reserved_special_token_0|>
def preorder(self):
print(self.data, end=' ')
if self.left:
self.left.preorder()
if self.right:
self.right.preorder()
def inorder(self):
if self.left:
self.left.inorder()
print(self.data, end=' ')
if self.right:
self.right.inorder()
def postorder(self):
if self.left:
self.left.postorder()
if self.right:
self.right.postorder()
print(self.data, end=' ')
<|reserved_special_token_0|>
def height(self):
if self.left == None or self.right == None:
return 0
lh = self.left.height()
rh = self.right.height()
return max(lh, rh) + 1
def level(self):
if self.left == None or self.right == None:
return 0
lh = self.left.level()
rh = self.right.level()
return max(lh, rh) + 1
<|reserved_special_token_0|>
def size(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.size()
if self.right:
rs = self.right.size()
return ls + rs + 1
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def deepest(self):
if self == None:
return None
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
return temp.data
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def halfNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if (node.left == None and node.right or node.left and node.
right == None):
count += 1
arr = arr[1:]
return count
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def delete(self):
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
temp = None
<|reserved_special_token_1|>
class BinaryTree:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data != None:
arr = [self]
while len(arr) > 0:
node = arr[0]
if node.left:
arr.append(node.left)
else:
node.left = BinaryTree(data)
break
if node.right:
arr.append(node.right)
else:
node.right = BinaryTree(data)
break
arr = arr[1:]
else:
self.data = data
def insertNodes(self, arr):
for i in arr:
self.insert(i)
def preorder(self):
print(self.data, end=' ')
if self.left:
self.left.preorder()
if self.right:
self.right.preorder()
def inorder(self):
if self.left:
self.left.inorder()
print(self.data, end=' ')
if self.right:
self.right.inorder()
def postorder(self):
if self.left:
self.left.postorder()
if self.right:
self.right.postorder()
print(self.data, end=' ')
<|reserved_special_token_0|>
def height(self):
if self.left == None or self.right == None:
return 0
lh = self.left.height()
rh = self.right.height()
return max(lh, rh) + 1
def level(self):
if self.left == None or self.right == None:
return 0
lh = self.left.level()
rh = self.right.level()
return max(lh, rh) + 1
<|reserved_special_token_0|>
def size(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.size()
if self.right:
rs = self.right.size()
return ls + rs + 1
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def deepest(self):
if self == None:
return None
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
return temp.data
<|reserved_special_token_0|>
def fullNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if node.left and node.right:
count += 1
arr = arr[1:]
return count
def halfNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if (node.left == None and node.right or node.left and node.
right == None):
count += 1
arr = arr[1:]
return count
def allPaths(self, path=[0] * 1000, pathlen=0):
if self == None:
return
path[pathlen] = self.data
pathlen += 1
if self.left == None and self.right == None:
for i in range(pathlen - 1):
print(path[i], end='->')
print(path[pathlen])
return
if self.left:
self.left.allPaths(path, pathlen)
if self.right:
self.right.allPaths(path, pathlen)
<|reserved_special_token_0|>
def delete(self):
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
temp = None
<|reserved_special_token_1|>
class BinaryTree:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data != None:
arr = [self]
while len(arr) > 0:
node = arr[0]
if node.left:
arr.append(node.left)
else:
node.left = BinaryTree(data)
break
if node.right:
arr.append(node.right)
else:
node.right = BinaryTree(data)
break
arr = arr[1:]
else:
self.data = data
def insertNodes(self, arr):
for i in arr:
self.insert(i)
def preorder(self):
print(self.data, end=' ')
if self.left:
self.left.preorder()
if self.right:
self.right.preorder()
def inorder(self):
if self.left:
self.left.inorder()
print(self.data, end=' ')
if self.right:
self.right.inorder()
def postorder(self):
if self.left:
self.left.postorder()
if self.right:
self.right.postorder()
print(self.data, end=' ')
<|reserved_special_token_0|>
def height(self):
if self.left == None or self.right == None:
return 0
lh = self.left.height()
rh = self.right.height()
return max(lh, rh) + 1
def level(self):
if self.left == None or self.right == None:
return 0
lh = self.left.level()
rh = self.right.level()
return max(lh, rh) + 1
<|reserved_special_token_0|>
def size(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.size()
if self.right:
rs = self.right.size()
return ls + rs + 1
def max(self):
if self == None:
return 0
lmx = rmx = 0
if self.left:
lmx = self.left.max()
if self.right:
rmx = self.right.max()
return max(lmx, rmx, self.data)
def min(self):
if self == None:
return 0
lmn = rmn = 0
if self.left:
lmn = self.left.min()
if self.right:
rmn = self.right.min()
return min(lmn, rmn, self.data)
def deepest(self):
if self == None:
return None
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
return temp.data
<|reserved_special_token_0|>
def fullNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if node.left and node.right:
count += 1
arr = arr[1:]
return count
def halfNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if (node.left == None and node.right or node.left and node.
right == None):
count += 1
arr = arr[1:]
return count
def allPaths(self, path=[0] * 1000, pathlen=0):
if self == None:
return
path[pathlen] = self.data
pathlen += 1
if self.left == None and self.right == None:
for i in range(pathlen - 1):
print(path[i], end='->')
print(path[pathlen])
return
if self.left:
self.left.allPaths(path, pathlen)
if self.right:
self.right.allPaths(path, pathlen)
def sum(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.sum()
if self.right:
rs = self.right.sum()
return self.data + ls + rs
def delete(self):
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
temp = None
<|reserved_special_token_1|>
class BinaryTree:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data != None:
arr = [self]
while len(arr) > 0:
node = arr[0]
if node.left:
arr.append(node.left)
else:
node.left = BinaryTree(data)
break
if node.right:
arr.append(node.right)
else:
node.right = BinaryTree(data)
break
arr = arr[1:]
else:
self.data = data
def insertNodes(self, arr):
for i in arr:
self.insert(i)
def preorder(self):
print(self.data, end=' ')
if self.left:
self.left.preorder()
if self.right:
self.right.preorder()
def inorder(self):
if self.left:
self.left.inorder()
print(self.data, end=' ')
if self.right:
self.right.inorder()
def postorder(self):
if self.left:
self.left.postorder()
if self.right:
self.right.postorder()
print(self.data, end=' ')
<|reserved_special_token_0|>
def height(self):
if self.left == None or self.right == None:
return 0
lh = self.left.height()
rh = self.right.height()
return max(lh, rh) + 1
def level(self):
if self.left == None or self.right == None:
return 0
lh = self.left.level()
rh = self.right.level()
return max(lh, rh) + 1
<|reserved_special_token_0|>
def size(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.size()
if self.right:
rs = self.right.size()
return ls + rs + 1
def max(self):
if self == None:
return 0
lmx = rmx = 0
if self.left:
lmx = self.left.max()
if self.right:
rmx = self.right.max()
return max(lmx, rmx, self.data)
def min(self):
if self == None:
return 0
lmn = rmn = 0
if self.left:
lmn = self.left.min()
if self.right:
rmn = self.right.min()
return min(lmn, rmn, self.data)
def deepest(self):
if self == None:
return None
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
return temp.data
def leafNodes(self):
if self.left == None and self.right == None:
return 1
lln = rln = 0
if self.left:
lln = self.left.leafNodes()
if self.right:
rln = self.right.leafNodes()
return lln + rln
def fullNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if node.left and node.right:
count += 1
arr = arr[1:]
return count
def halfNodes(self):
if self == None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if (node.left == None and node.right or node.left and node.
right == None):
count += 1
arr = arr[1:]
return count
def allPaths(self, path=[0] * 1000, pathlen=0):
if self == None:
return
path[pathlen] = self.data
pathlen += 1
if self.left == None and self.right == None:
for i in range(pathlen - 1):
print(path[i], end='->')
print(path[pathlen])
return
if self.left:
self.left.allPaths(path, pathlen)
if self.right:
self.right.allPaths(path, pathlen)
def sum(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.sum()
if self.right:
rs = self.right.sum()
return self.data + ls + rs
def delete(self):
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
temp = None
<|reserved_special_token_1|>
##############################################
# Binary Tree #
# by Vishal Nirmal #
# #
# A Binary Tree ADT implementation. #
##############################################
class BinaryTree:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data != None:
arr = [self]
while len(arr) > 0:
node = arr[0]
if node.left:
arr.append(node.left)
else:
node.left = BinaryTree(data)
break
if node.right:
arr.append(node.right)
else:
node.right = BinaryTree(data)
break
arr = arr[1:]
else:
self.data = data
def insertNodes(self, arr):
for i in arr:
self.insert(i)
def preorder(self):
print(self.data, end=' ')
if self.left:
self.left.preorder()
if self.right:
self.right.preorder()
def inorder(self):
if self.left:
self.left.inorder()
print(self.data, end=' ')
if self.right:
self.right.inorder()
def postorder(self):
if self.left:
self.left.postorder()
if self.right:
self.right.postorder()
print(self.data, end=' ')
def levelorder(self):
arr = [self]
while len(arr):
node = arr[0]
print(node.data, end=' ')
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
arr = arr[1:]
def height(self):
if self.left == None or self.right==None:
return 0
lh = self.left.height()
rh = self.right.height()
return max(lh, rh)+1
def level(self):
if self.left == None or self.right==None:
return 0
lh = self.left.level()
rh = self.right.level()
return max(lh, rh)+1
def search(self, data):
if self == None:
return False
if self.data == data:
return True
if self.left and self.left.search(data) == True:
return True
if self.right:
return self.right.search(data)
def size(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.size()
if self.right:
rs = self.right.size()
return ls + rs + 1
def max(self):
if self == None:
return 0
lmx = rmx = 0
if self.left:
lmx = self.left.max()
if self.right:
rmx = self.right.max()
return max(lmx, rmx, self.data)
def min(self):
if self == None:
return 0
lmn = rmn = 0
if self.left:
lmn = self.left.min()
if self.right:
rmn = self.right.min()
return min(lmn, rmn, self.data)
def deepest(self):
if self==None:
return None
arr = [self]
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
temp = arr[-1]
arr = arr[1:]
return temp.data
def leafNodes(self):
if self.left == None and self.right == None:
return 1
lln = rln = 0
if self.left:
lln = self.left.leafNodes()
if self.right:
rln = self.right.leafNodes()
return lln + rln
def fullNodes(self):
if self==None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if node.left and node.right:
count+=1
arr = arr[1:]
return count
def halfNodes(self):
if self==None:
return 0
arr = [self]
count = 0
while len(arr):
node = arr[0]
if node.left:
arr.append(node.left)
if node.right:
arr.append(node.right)
if (node.left==None and node.right) or (node.left and node.right==None):
count+=1
arr = arr[1:]
return count
def allPaths(self, path=[0]*1000, pathlen=0):
if self == None:
return
path[pathlen] = self.data
pathlen+=1
if self.left == None and self.right == None:
for i in range(pathlen-1):
print(path[i], end='->')
print(path[pathlen])
return
if self.left:
self.left.allPaths(path, pathlen)
if self.right:
self.right.allPaths(path, pathlen)
def sum(self):
if self == None:
return 0
ls = rs = 0
if self.left:
ls = self.left.sum()
if self.right:
rs = self.right.sum()
return self.data+ls+rs
    def delete(self):
        """Level-order walk apparently intended to release every node.

        NOTE(review): rebinding the local name ``temp`` to None does not
        detach or free anything — Python reclaims nodes via garbage
        collection once they become unreferenced, so as written this method
        has no observable effect on the tree.  It reads like a direct
        translation of a C-style "delete deepest node" loop; confirm whether
        it should instead clear ``left``/``right``/``data`` references.
        """
        arr = [self]  # queue of nodes to visit, in level order
        while len(arr):
            node = arr[0]
            if node.left:
                arr.append(node.left)
            if node.right:
                arr.append(node.right)
            temp = arr[-1]  # last node currently in the queue
            arr = arr[1:]  # pop the front of the queue
            temp = None  # only rebinds the local; the node itself is untouched
|
flexible
|
{
"blob_id": "3eaced9609c7adfa5457d7dcad8b2dfaeb697b16",
"index": 3220,
"step-1": "class BinaryTree:\n\n def __init__(self, data=None):\n self.data = data\n self.left = None\n self.right = None\n\n def insert(self, data):\n if self.data != None:\n arr = [self]\n while len(arr) > 0:\n node = arr[0]\n if node.left:\n arr.append(node.left)\n else:\n node.left = BinaryTree(data)\n break\n if node.right:\n arr.append(node.right)\n else:\n node.right = BinaryTree(data)\n break\n arr = arr[1:]\n else:\n self.data = data\n <mask token>\n\n def preorder(self):\n print(self.data, end=' ')\n if self.left:\n self.left.preorder()\n if self.right:\n self.right.preorder()\n\n def inorder(self):\n if self.left:\n self.left.inorder()\n print(self.data, end=' ')\n if self.right:\n self.right.inorder()\n\n def postorder(self):\n if self.left:\n self.left.postorder()\n if self.right:\n self.right.postorder()\n print(self.data, end=' ')\n <mask token>\n\n def height(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.height()\n rh = self.right.height()\n return max(lh, rh) + 1\n\n def level(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.level()\n rh = self.right.level()\n return max(lh, rh) + 1\n <mask token>\n\n def size(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.size()\n if self.right:\n rs = self.right.size()\n return ls + rs + 1\n <mask token>\n <mask token>\n\n def deepest(self):\n if self == None:\n return None\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n return temp.data\n <mask token>\n <mask token>\n\n def halfNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if (node.left == None and node.right or node.left and node.\n right == None):\n count += 1\n arr = arr[1:]\n return count\n <mask 
token>\n <mask token>\n\n def delete(self):\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n temp = None\n",
"step-2": "class BinaryTree:\n\n def __init__(self, data=None):\n self.data = data\n self.left = None\n self.right = None\n\n def insert(self, data):\n if self.data != None:\n arr = [self]\n while len(arr) > 0:\n node = arr[0]\n if node.left:\n arr.append(node.left)\n else:\n node.left = BinaryTree(data)\n break\n if node.right:\n arr.append(node.right)\n else:\n node.right = BinaryTree(data)\n break\n arr = arr[1:]\n else:\n self.data = data\n\n def insertNodes(self, arr):\n for i in arr:\n self.insert(i)\n\n def preorder(self):\n print(self.data, end=' ')\n if self.left:\n self.left.preorder()\n if self.right:\n self.right.preorder()\n\n def inorder(self):\n if self.left:\n self.left.inorder()\n print(self.data, end=' ')\n if self.right:\n self.right.inorder()\n\n def postorder(self):\n if self.left:\n self.left.postorder()\n if self.right:\n self.right.postorder()\n print(self.data, end=' ')\n <mask token>\n\n def height(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.height()\n rh = self.right.height()\n return max(lh, rh) + 1\n\n def level(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.level()\n rh = self.right.level()\n return max(lh, rh) + 1\n <mask token>\n\n def size(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.size()\n if self.right:\n rs = self.right.size()\n return ls + rs + 1\n <mask token>\n <mask token>\n\n def deepest(self):\n if self == None:\n return None\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n return temp.data\n <mask token>\n\n def fullNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if node.left and node.right:\n count += 1\n arr = arr[1:]\n return count\n\n def halfNodes(self):\n 
if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if (node.left == None and node.right or node.left and node.\n right == None):\n count += 1\n arr = arr[1:]\n return count\n\n def allPaths(self, path=[0] * 1000, pathlen=0):\n if self == None:\n return\n path[pathlen] = self.data\n pathlen += 1\n if self.left == None and self.right == None:\n for i in range(pathlen - 1):\n print(path[i], end='->')\n print(path[pathlen])\n return\n if self.left:\n self.left.allPaths(path, pathlen)\n if self.right:\n self.right.allPaths(path, pathlen)\n <mask token>\n\n def delete(self):\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n temp = None\n",
"step-3": "class BinaryTree:\n\n def __init__(self, data=None):\n self.data = data\n self.left = None\n self.right = None\n\n def insert(self, data):\n if self.data != None:\n arr = [self]\n while len(arr) > 0:\n node = arr[0]\n if node.left:\n arr.append(node.left)\n else:\n node.left = BinaryTree(data)\n break\n if node.right:\n arr.append(node.right)\n else:\n node.right = BinaryTree(data)\n break\n arr = arr[1:]\n else:\n self.data = data\n\n def insertNodes(self, arr):\n for i in arr:\n self.insert(i)\n\n def preorder(self):\n print(self.data, end=' ')\n if self.left:\n self.left.preorder()\n if self.right:\n self.right.preorder()\n\n def inorder(self):\n if self.left:\n self.left.inorder()\n print(self.data, end=' ')\n if self.right:\n self.right.inorder()\n\n def postorder(self):\n if self.left:\n self.left.postorder()\n if self.right:\n self.right.postorder()\n print(self.data, end=' ')\n <mask token>\n\n def height(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.height()\n rh = self.right.height()\n return max(lh, rh) + 1\n\n def level(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.level()\n rh = self.right.level()\n return max(lh, rh) + 1\n <mask token>\n\n def size(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.size()\n if self.right:\n rs = self.right.size()\n return ls + rs + 1\n\n def max(self):\n if self == None:\n return 0\n lmx = rmx = 0\n if self.left:\n lmx = self.left.max()\n if self.right:\n rmx = self.right.max()\n return max(lmx, rmx, self.data)\n\n def min(self):\n if self == None:\n return 0\n lmn = rmn = 0\n if self.left:\n lmn = self.left.min()\n if self.right:\n rmn = self.right.min()\n return min(lmn, rmn, self.data)\n\n def deepest(self):\n if self == None:\n return None\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n 
return temp.data\n <mask token>\n\n def fullNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if node.left and node.right:\n count += 1\n arr = arr[1:]\n return count\n\n def halfNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if (node.left == None and node.right or node.left and node.\n right == None):\n count += 1\n arr = arr[1:]\n return count\n\n def allPaths(self, path=[0] * 1000, pathlen=0):\n if self == None:\n return\n path[pathlen] = self.data\n pathlen += 1\n if self.left == None and self.right == None:\n for i in range(pathlen - 1):\n print(path[i], end='->')\n print(path[pathlen])\n return\n if self.left:\n self.left.allPaths(path, pathlen)\n if self.right:\n self.right.allPaths(path, pathlen)\n\n def sum(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.sum()\n if self.right:\n rs = self.right.sum()\n return self.data + ls + rs\n\n def delete(self):\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n temp = None\n",
"step-4": "class BinaryTree:\n\n def __init__(self, data=None):\n self.data = data\n self.left = None\n self.right = None\n\n def insert(self, data):\n if self.data != None:\n arr = [self]\n while len(arr) > 0:\n node = arr[0]\n if node.left:\n arr.append(node.left)\n else:\n node.left = BinaryTree(data)\n break\n if node.right:\n arr.append(node.right)\n else:\n node.right = BinaryTree(data)\n break\n arr = arr[1:]\n else:\n self.data = data\n\n def insertNodes(self, arr):\n for i in arr:\n self.insert(i)\n\n def preorder(self):\n print(self.data, end=' ')\n if self.left:\n self.left.preorder()\n if self.right:\n self.right.preorder()\n\n def inorder(self):\n if self.left:\n self.left.inorder()\n print(self.data, end=' ')\n if self.right:\n self.right.inorder()\n\n def postorder(self):\n if self.left:\n self.left.postorder()\n if self.right:\n self.right.postorder()\n print(self.data, end=' ')\n <mask token>\n\n def height(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.height()\n rh = self.right.height()\n return max(lh, rh) + 1\n\n def level(self):\n if self.left == None or self.right == None:\n return 0\n lh = self.left.level()\n rh = self.right.level()\n return max(lh, rh) + 1\n <mask token>\n\n def size(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.size()\n if self.right:\n rs = self.right.size()\n return ls + rs + 1\n\n def max(self):\n if self == None:\n return 0\n lmx = rmx = 0\n if self.left:\n lmx = self.left.max()\n if self.right:\n rmx = self.right.max()\n return max(lmx, rmx, self.data)\n\n def min(self):\n if self == None:\n return 0\n lmn = rmn = 0\n if self.left:\n lmn = self.left.min()\n if self.right:\n rmn = self.right.min()\n return min(lmn, rmn, self.data)\n\n def deepest(self):\n if self == None:\n return None\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n 
return temp.data\n\n def leafNodes(self):\n if self.left == None and self.right == None:\n return 1\n lln = rln = 0\n if self.left:\n lln = self.left.leafNodes()\n if self.right:\n rln = self.right.leafNodes()\n return lln + rln\n\n def fullNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if node.left and node.right:\n count += 1\n arr = arr[1:]\n return count\n\n def halfNodes(self):\n if self == None:\n return 0\n arr = [self]\n count = 0\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n if (node.left == None and node.right or node.left and node.\n right == None):\n count += 1\n arr = arr[1:]\n return count\n\n def allPaths(self, path=[0] * 1000, pathlen=0):\n if self == None:\n return\n path[pathlen] = self.data\n pathlen += 1\n if self.left == None and self.right == None:\n for i in range(pathlen - 1):\n print(path[i], end='->')\n print(path[pathlen])\n return\n if self.left:\n self.left.allPaths(path, pathlen)\n if self.right:\n self.right.allPaths(path, pathlen)\n\n def sum(self):\n if self == None:\n return 0\n ls = rs = 0\n if self.left:\n ls = self.left.sum()\n if self.right:\n rs = self.right.sum()\n return self.data + ls + rs\n\n def delete(self):\n arr = [self]\n while len(arr):\n node = arr[0]\n if node.left:\n arr.append(node.left)\n if node.right:\n arr.append(node.right)\n temp = arr[-1]\n arr = arr[1:]\n temp = None\n",
"step-5": "##############################################\n# Binary Tree #\n# by Vishal Nirmal #\n# #\n# A Binary Tree ADT implementation. #\n##############################################\n\n\n\n\nclass BinaryTree:\n\n def __init__(self, data=None):\n\n self.data = data\n\n self.left = None\n\n self.right = None\n\n def insert(self, data):\n\n if self.data != None:\n\n arr = [self]\n\n while len(arr) > 0:\n\n node = arr[0]\n\n if node.left:\n\n arr.append(node.left)\n\n else:\n\n node.left = BinaryTree(data)\n\n break\n\n if node.right:\n\n arr.append(node.right)\n\n else:\n\n node.right = BinaryTree(data)\n\n break\n\n arr = arr[1:]\n\n else:\n\n self.data = data\n\n def insertNodes(self, arr):\n\n for i in arr:\n\n self.insert(i)\n\n def preorder(self):\n\n print(self.data, end=' ')\n\n if self.left:\n\n self.left.preorder()\n\n if self.right:\n\n self.right.preorder()\n\n def inorder(self):\n\n if self.left:\n\n self.left.inorder()\n\n print(self.data, end=' ')\n\n if self.right:\n\n self.right.inorder()\n\n def postorder(self):\n\n if self.left:\n\n self.left.postorder()\n\n if self.right:\n\n self.right.postorder()\n\n print(self.data, end=' ')\n\n def levelorder(self):\n\n arr = [self]\n\n while len(arr):\n\n node = arr[0]\n\n print(node.data, end=' ')\n\n if node.left:\n\n arr.append(node.left)\n\n if node.right:\n\n arr.append(node.right)\n\n arr = arr[1:]\n\n def height(self):\n\n if self.left == None or self.right==None:\n\n return 0\n\n lh = self.left.height()\n\n rh = self.right.height()\n\n return max(lh, rh)+1\n\n def level(self):\n\n if self.left == None or self.right==None:\n\n return 0\n\n lh = self.left.level()\n\n rh = self.right.level()\n\n return max(lh, rh)+1\n\n def search(self, data):\n\n if self == None:\n\n return False\n\n if self.data == data:\n\n return True\n\n if self.left and self.left.search(data) == True:\n\n return True\n\n if self.right:\n\n return self.right.search(data)\n\n def size(self):\n\n if self == None:\n\n return 0\n\n 
ls = rs = 0\n\n if self.left:\n\n ls = self.left.size()\n\n if self.right:\n\n rs = self.right.size()\n\n return ls + rs + 1\n\n def max(self):\n\n if self == None:\n\n return 0\n\n lmx = rmx = 0\n\n if self.left:\n\n lmx = self.left.max()\n\n if self.right:\n\n rmx = self.right.max()\n\n return max(lmx, rmx, self.data)\n\n def min(self):\n\n if self == None:\n\n return 0\n\n lmn = rmn = 0\n\n if self.left:\n\n lmn = self.left.min()\n\n if self.right:\n\n rmn = self.right.min()\n\n return min(lmn, rmn, self.data)\n\n def deepest(self):\n\n if self==None:\n\n return None\n\n arr = [self]\n\n while len(arr):\n\n node = arr[0]\n\n if node.left:\n\n arr.append(node.left)\n\n if node.right:\n\n arr.append(node.right)\n\n temp = arr[-1]\n\n arr = arr[1:]\n\n return temp.data\n\n def leafNodes(self):\n\n if self.left == None and self.right == None:\n\n return 1\n\n lln = rln = 0\n\n if self.left:\n\n lln = self.left.leafNodes()\n\n if self.right:\n\n rln = self.right.leafNodes()\n\n return lln + rln\n\n def fullNodes(self):\n\n if self==None:\n\n return 0\n\n arr = [self]\n\n count = 0\n\n while len(arr):\n\n node = arr[0]\n\n if node.left:\n\n arr.append(node.left)\n\n if node.right:\n\n arr.append(node.right)\n\n if node.left and node.right:\n\n count+=1\n\n arr = arr[1:]\n\n return count\n\n def halfNodes(self):\n\n if self==None:\n\n return 0\n\n arr = [self]\n\n count = 0\n\n while len(arr):\n\n node = arr[0]\n\n if node.left:\n\n arr.append(node.left)\n\n if node.right:\n\n arr.append(node.right)\n\n if (node.left==None and node.right) or (node.left and node.right==None):\n\n count+=1\n\n arr = arr[1:]\n\n return count\n\n def allPaths(self, path=[0]*1000, pathlen=0):\n\n if self == None:\n\n return\n\n path[pathlen] = self.data\n\n pathlen+=1\n\n if self.left == None and self.right == None:\n\n for i in range(pathlen-1):\n\n print(path[i], end='->')\n\n print(path[pathlen])\n\n return\n\n if self.left:\n\n self.left.allPaths(path, pathlen)\n\n if self.right:\n\n 
self.right.allPaths(path, pathlen)\n \n def sum(self):\n\n if self == None:\n\n return 0\n\n ls = rs = 0\n\n if self.left:\n\n ls = self.left.sum()\n\n if self.right:\n\n rs = self.right.sum()\n\n return self.data+ls+rs\n\n def delete(self):\n\n arr = [self]\n\n while len(arr):\n\n node = arr[0]\n\n if node.left:\n\n arr.append(node.left)\n\n if node.right:\n\n arr.append(node.right)\n\n temp = arr[-1]\n\n arr = arr[1:]\n\n temp = None",
"step-ids": [
12,
15,
18,
19,
22
]
}
|
[
12,
15,
18,
19,
22
] |
#!/bin/env python
# coding: utf-8
"""
Dakara Online protocol generator, by Alejandro Santos
"""
from genpackets import *
from gendefs_js import *
# Module-level accumulators shared across write_packets_from() passes;
# write_packets() and the ServerPacket dispatcher emitter drain them later.
BUILDERS = []  # generated JS packet-builder snippets, one per packet
HANDLERS = []  # generated handler snippets (filled for ServerPacket only)
DECODE_DISPATCH = []  # per-packet decode-and-dispatch case bodies (ServerPacket)
ARGS_HANDLER = []  # handler argument lists, consumed in packet order
def write_packets_from(f, fph, base_name, namespace, P):
    """Emit the JavaScript for one packet family into the generated files.

    f         -- open handle of the main generated file (protocol.js).
    fph       -- open handle of the handler-stub helper file.
    base_name -- family name: "ClientPacket", "ClientGMPacket" or "ServerPacket".
    namespace -- family label; accepted but not used inside this function.
    P         -- indexable packet table; the index is the packet id, and
                 None entries are skipped.  Each descriptor exposes .name,
                 .args and the get_*_fmt() template accessors.

    Side effects: appends generated snippets to the module-level BUILDERS,
    HANDLERS, DECODE_DISPATCH and ARGS_HANDLER accumulators.
    """
    # Enum with IDs (skipped for ServerPacket, which instead gets the
    # decode-and-dispatch switch emitted further below)
    if base_name != "ServerPacket" :
        f.write("""var {base_name}ID = {{ \n""".format(base_name=base_name))
        for i, x in enumerate(P):
            if x:
                f.write("    {name} : {packet_id}".format(base_name=base_name, name=x.name, packet_id=i))
                f.write(",\n")
        f.write("""    {base_name}ID_PACKET_COUNT : {packet_id}\n}};\n""".format(base_name=base_name, packet_id=len(P)))
    # Factory
    # NOTE(review): the Factory emitter below is disabled by wrapping it in a
    # bare string literal (a no-op expression statement).
    '''
    f.write("""
function {base_name}Factory(buffer) {{
    if (buffer.length() < 1) return 0;
    var p;
    PacketID = buffer.PeekByte();
    switch (PacketID) {{
""".format(base_name=base_name))
    for i, x in enumerate(P):
        if not x: continue
        f.write("""
        case {i}:
            p = new {name}(buffer);
            break;
""".format(i=i, name=x.name))
    f.write("""
    }}
    return p;
}}
""".format())
    '''
    # Per-packet pass: build all the template substitution pieces for each
    # packet descriptor, then render headers/builders/handlers from them.
    for i, x in enumerate(P):
        if not x: continue
        header_fields = []
        header_fields_signature = []
        items_assign_e = []
        items_assign_build = []
        ctor_fields = ""
        min_byte_count = 0
        ctor_fields_bytequeue = ""
        parametros_fields = ""
        parametros_args = ""
        serialize_fields = ""
        if x.name == "MultiMessage":
            # MultiMessage cannot be generated from field templates; its
            # decode snippet is hand-written in escribir_multimessage().
            escribir_multimessage(f)
            continue
        # Each arg descriptor is (name, type-flags[, array_size]); the high
        # bits of y[1] carry the array flag, the low byte the scalar type.
        for y in x.args:
            arg_name = y[0]
            arg_type = y[1] & 0xff
            arg_type_str = TYPE_TO_STR[arg_type]
            arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]
            arg_is_array = ((y[1] & TYPE_ARRAY) == TYPE_ARRAY)
            type_reader_name = TYPE_TO_READER_NAME[arg_type]
            type_writer_name = TYPE_TO_WRITER_NAME[arg_type]
            ctor_fields += ", " + arg_name + "()"
            items_assign_e.append("        {arg_name}: {arg_name},".format(arg_name=arg_name))
            items_assign_build.append("        e.{arg_name}= {arg_name};".format(arg_name=arg_name))
            if arg_is_array:
                array_size=y[2]
                min_byte_count += TYPE_SIZE[arg_type] * array_size
                header_fields.append("    {arg_name}; ".format(arg_type_str=arg_type_str, arg_name=arg_name, array_size=array_size))
                header_fields_signature.append("{arg_name} ".format(arg_type_str=arg_type_sig_str, arg_name=arg_name, array_size=array_size))
                ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                parametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                parametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name, array_size=array_size)
            else:
                min_byte_count += TYPE_SIZE[arg_type]
                header_fields.append("    {arg_type_str} {arg_name}; ".format(arg_type_str=arg_type_str, arg_name=arg_name))
                header_fields_signature.append("{arg_type_str} {arg_name}".format(arg_type_str=arg_type_sig_str, arg_name=arg_name))
                ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                parametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                parametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name)
        # Single substitution dict used by every template for this packet.
        format_args = {
            'base_name': base_name,
            'name': x.name,
            'header_fields': '\n'.join(header_fields),
            'header_fields_signature': ', '.join(header_fields_signature),
            'items_assign_e': '\n'.join(items_assign_e),
            'items_assign_build': '\n'.join(items_assign_build),
            'ctor_fields': ctor_fields,
            'packet_id': i,
            'min_byte_count': min_byte_count,
            'ctor_fields_bytequeue': ctor_fields_bytequeue,
            'serialize_fields': serialize_fields,
            'parametros_fields' : parametros_fields,
            'parametros_args' : parametros_args
        }
        # Individual packet header
        if base_name != "ServerPacket" :
            f.write(x.get_header_fmt().format(**format_args))
        BUILDERS.append(x.get_builder_fmt().format(**format_args))
        if base_name == "ServerPacket" :
            HANDLERS.append(x.get_handler_fmt().format(**format_args))
        # For ServerPacketDecodeAndDispatch (without having to create packet objects)
        if base_name == "ServerPacket" :
            dec_dispatch = x.get_parametros_fmt().format(**format_args);
            # strip the last comma, if there is one:
            pos = dec_dispatch.rfind(",")
            if pos > 0:
                dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos+1:]
            DECODE_DISPATCH.append(dec_dispatch)
        if base_name == "ServerPacket" :
            args_handler = x.get_argumentosHandler_fmt().format(**format_args);
            # strip the last comma, if there is one:
            pos = args_handler.rfind(",")
            if pos > 0:
                args_handler = args_handler[:pos] + args_handler[pos+1:]
            # strip the trailing newline
            pos = args_handler.rfind("\n")
            args_handler = args_handler[:pos] + args_handler[pos+1:]
            ARGS_HANDLER.append(args_handler)
    # Decode and Dispatch, keeping the Packet in the stack
    # Suggested by hmk
    if base_name == "ServerPacket" :
        f.write("""
function {base_name}DecodeAndDispatch(buffer, handler) {{
    if (buffer.length() < 1) return;
    var PacketID = buffer.ReadByte();
    switch (PacketID) {{
""".format(base_name=base_name))
        for i, x in enumerate(P):
            if not x: continue
            f.write("""
        case {i}:
            {{
                {decode_dispatch}
                break;
            }}
""".format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))
        f.write("""
        default:
            {{
                msg = "error decoding packet id: " + PacketID;
                throw new Error(msg);
            }}
    }}
}}
""".format())
    fph.write("""
/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/
""".format(base_name=base_name))
    # NOTE(review): ARGS_HANDLER is only populated during the "ServerPacket"
    # pass (and by MultiMessage), yet this loop runs for every family; on a
    # client pass with regular packets, pop(0) on an empty list would raise
    # IndexError — confirm the intended behavior.
    for i, x in enumerate(P):
        if not x: continue
        fph.write("""\n\thandle{name}: function ({arg_handler}){{ \n""".format(base_name=base_name, name=x.name, arg_handler = ARGS_HANDLER.pop(0)))
        #fph.write(HANDLERS.pop(0))
        fph.write("""\t\tlog.network("TODO: handle{name} ");\n\t}},\n""".format(base_name=base_name, name=x.name))
    for i, x in enumerate(P):
        if not x: continue
        #fph.write("""\n\thandle{name}: function (p){{ \n""".format(base_name=base_name, name=x.name))
        #fph.write(HANDLERS.pop(0))
        #fph.write("""\t\talert("TODO: handle{name} ");\n\t}},\n""".format(base_name=base_name, name=x.name))
    fph.write("""
/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/
""")
def write_packets():
    """Generate protocol.js (packet IDs, builders, dispatcher) and the
    handler-stub helper file protocolhandlerAux.js from the packet tables.

    Improvement: the two output files are now opened with context managers,
    so they are flushed and closed even if a generation pass raises; the
    emitted content is unchanged.
    """
    with open("protocol.js", "w") as f, open("protocolhandlerAux.js", "w") as fph:
        f.write("""
/* Automatically generated file */
define(['enums'], function (Enums) {
""")
        # One pass per packet family; the ServerPacket pass also emits the
        # decode-and-dispatch switch.
        write_packets_from(f,fph, "ClientPacket", "client", CLIENT_PACKETS)
        write_packets_from(f,fph, "ClientGMPacket", "clientgm", CLIENT_GM_PACKETS)
        write_packets_from(f,fph, "ServerPacket", "server", SERVER_PACKETS)
        # Multimessages hard-coded for now: // TODO: do this properly
        f.write("""
class Protocolo{
""")
        for builder in BUILDERS:
            f.write(builder)
        f.write("""
    ServerPacketDecodeAndDispatch(buffer, handler){
        ServerPacketDecodeAndDispatch(buffer, handler);
    }
""")
        f.write("""
}
return Protocolo;
}); """)
def escribir_multimessage(f):
    """Register the hand-written decode snippet for the MultiMessage packet.

    MultiMessage carries a sub-message index as its first byte, so it cannot
    be generated from the regular per-field templates.  This pushes a
    hand-coded JS switch onto DECODE_DISPATCH (consumed later when the
    ServerPacket dispatcher is emitted) and the matching handler argument
    list onto ARGS_HANDLER.  The ``f`` parameter is accepted for symmetry
    with the generated-packet path but is not used here.
    """
    DECODE_DISPATCH.append('''
            var msgIdx = buffer.ReadByte();
            switch (msgIdx) {
            case Enums.eMessage.NPCHitUser:
                handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());
                break;
            case Enums.eMessage.UserHitNPC:
                handler.handleUserHitNPC(buffer.ReadLong());
                break;
            case Enums.eMessage.UserAttackedSwing:
                handler.handleUserAttackedSwing(buffer.ReadInteger());
                break;
            case Enums.eMessage.UserHittedByUser:
                handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());
                break;
            case Enums.eMessage.UserHittedUser:
                handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());
                break;
            case Enums.eMessage.WorkRequestTarget:
                handler.handleWorkRequestTarget(buffer.ReadByte());
                break;
            case Enums.eMessage.HaveKilledUser:
                handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());
                break;
            case Enums.eMessage.UserKill:
                handler.handleUserKill(buffer.ReadInteger());
                break;
            case Enums.eMessage.Home:
                handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());
                break;
            case Enums.eMessage.DontSeeAnything:
                handler.handleDontSeeAnything();
                break;
            case Enums.eMessage.NPCSwing:
                handler.handleNPCSwing();
                break;
            case Enums.eMessage.NPCKillUser:
                handler.handleNPCKillUser();
                break;
            case Enums.eMessage.BlockedWithShieldUser:
                handler.handleBlockedWithShieldUser();
                break;
            case Enums.eMessage.BlockedWithShieldOther:
                handler.handleBlockedWithShieldOther();
                break;
            case Enums.eMessage.UserSwing:
                handler.handleUserSwing();
                break;
            case Enums.eMessage.SafeModeOn:
                handler.handleSafeModeOn();
                break;
            case Enums.eMessage.SafeModeOff:
                handler.handleSafeModeOff();
                break;
            case Enums.eMessage.ResuscitationSafeOff:
                handler.handleResuscitationSafeOff();
                break;
            case Enums.eMessage.ResuscitationSafeOn:
                handler.handleResuscitationSafeOn();
                break;
            case Enums.eMessage.NobilityLost:
                handler.handleNobilityLost();
                break;
            case Enums.eMessage.CantUseWhileMeditating:
                handler.handleCantUseWhileMeditating();
                break;
            case Enums.eMessage.EarnExp:
                handler.handleEarnExp();
                break;
            case Enums.eMessage.FinishHome:
                handler.handleFinishHome();
                break;
            case Enums.eMessage.CancelHome:
                handler.handleCancelHome();
                break;
            default:
                throw new Error("Multimessage: " + msgIdx + " no reconocido por el protocolo");
            }
    ''')
    ARGS_HANDLER.append("msgIdx,args")
def main():
write_packets()
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "22dccf6bb76dab735f373089d0772f475b2d5a5d",
"index": 6849,
"step-1": "<mask token>\n\n\ndef write_packets_from(f, fph, base_name, namespace, P):\n if base_name != 'ServerPacket':\n f.write('var {base_name}ID = {{ \\n'.format(base_name=base_name))\n for i, x in enumerate(P):\n if x:\n f.write(' {name} : {packet_id}'.format(base_name=\n base_name, name=x.name, packet_id=i))\n f.write(',\\n')\n f.write(' {base_name}ID_PACKET_COUNT : {packet_id}\\n}};\\n'.\n format(base_name=base_name, packet_id=len(P)))\n \"\"\"\n f.write(\"\"\\\"\nfunction {base_name}Factory(buffer) {{\n if (buffer.length() < 1) return 0;\n var p;\n PacketID = buffer.PeekByte();\n\n switch (PacketID) {{\n\"\"\\\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\\\"\n case {i}:\n p = new {name}(buffer);\n break;\n\"\"\\\".format(i=i, name=x.name))\n\n f.write(\"\"\\\"\n }}\n return p;\n}}\n\"\"\\\".format())\n \"\"\"\n for i, x in enumerate(P):\n if not x:\n continue\n header_fields = []\n header_fields_signature = []\n items_assign_e = []\n items_assign_build = []\n ctor_fields = ''\n min_byte_count = 0\n ctor_fields_bytequeue = ''\n parametros_fields = ''\n parametros_args = ''\n serialize_fields = ''\n if x.name == 'MultiMessage':\n escribir_multimessage(f)\n continue\n for y in x.args:\n arg_name = y[0]\n arg_type = y[1] & 255\n arg_type_str = TYPE_TO_STR[arg_type]\n arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]\n arg_is_array = y[1] & TYPE_ARRAY == TYPE_ARRAY\n type_reader_name = TYPE_TO_READER_NAME[arg_type]\n type_writer_name = TYPE_TO_WRITER_NAME[arg_type]\n ctor_fields += ', ' + arg_name + '()'\n items_assign_e.append(' {arg_name}: {arg_name},'.\n format(arg_name=arg_name))\n items_assign_build.append(' e.{arg_name}= {arg_name};'.\n format(arg_name=arg_name))\n if arg_is_array:\n array_size = y[2]\n min_byte_count += TYPE_SIZE[arg_type] * array_size\n header_fields.append(' {arg_name}; '.format(arg_type_str\n =arg_type_str, arg_name=arg_name, array_size=array_size))\n 
header_fields_signature.append('{arg_name} '.format(\n arg_type_str=arg_type_sig_str, arg_name=arg_name,\n array_size=array_size))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name, array_size=array_size)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name, array_size=array_size)\n else:\n min_byte_count += TYPE_SIZE[arg_type]\n header_fields.append(' {arg_type_str} {arg_name}; '.\n format(arg_type_str=arg_type_str, arg_name=arg_name))\n header_fields_signature.append('{arg_type_str} {arg_name}'.\n format(arg_type_str=arg_type_sig_str, arg_name=arg_name))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name)\n format_args = {'base_name': base_name, 'name': x.name,\n 'header_fields': '\\n'.join(header_fields),\n 'header_fields_signature': ', '.join(header_fields_signature),\n 'items_assign_e': '\\n'.join(items_assign_e),\n 'items_assign_build': '\\n'.join(items_assign_build),\n 'ctor_fields': ctor_fields, 'packet_id': i, 'min_byte_count':\n min_byte_count, 'ctor_fields_bytequeue': ctor_fields_bytequeue,\n 'serialize_fields': 
serialize_fields, 'parametros_fields':\n parametros_fields, 'parametros_args': parametros_args}\n if base_name != 'ServerPacket':\n f.write(x.get_header_fmt().format(**format_args))\n BUILDERS.append(x.get_builder_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n HANDLERS.append(x.get_handler_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n dec_dispatch = x.get_parametros_fmt().format(**format_args)\n pos = dec_dispatch.rfind(',')\n if pos > 0:\n dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos + 1:]\n DECODE_DISPATCH.append(dec_dispatch)\n if base_name == 'ServerPacket':\n args_handler = x.get_argumentosHandler_fmt().format(**format_args)\n pos = args_handler.rfind(',')\n if pos > 0:\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n pos = args_handler.rfind('\\n')\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n ARGS_HANDLER.append(args_handler)\n if base_name == 'ServerPacket':\n f.write(\n \"\"\"\nfunction {base_name}DecodeAndDispatch(buffer, handler) {{\n if (buffer.length() < 1) return;\n var PacketID = buffer.ReadByte();\n\n switch (PacketID) {{\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n f.write(\n \"\"\"\n case {i}:\n {{\n {decode_dispatch}\n break;\n }}\n\"\"\"\n .format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))\n f.write(\n \"\"\"\n default:\n {{\n msg = \"error decoding packet id: \" + PacketID;\n throw new Error(msg);\n }}\n }}\n}}\n\"\"\"\n .format())\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write('\\n\\thandle{name}: function ({arg_handler}){{ \\n'.\n format(base_name=base_name, name=x.name, arg_handler=\n 
ARGS_HANDLER.pop(0)))\n fph.write('\\t\\tlog.network(\"TODO: handle{name} \");\\n\\t}},\\n'.\n format(base_name=base_name, name=x.name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n )\n\n\ndef write_packets():\n f = open('protocol.js', 'w')\n fph = open('protocolhandlerAux.js', 'w')\n f.write(\n \"\\n/* Automatically generated file */\\n\\ndefine(['enums'], function (Enums) {\\n\"\n )\n write_packets_from(f, fph, 'ClientPacket', 'client', CLIENT_PACKETS)\n write_packets_from(f, fph, 'ClientGMPacket', 'clientgm', CLIENT_GM_PACKETS)\n write_packets_from(f, fph, 'ServerPacket', 'server', SERVER_PACKETS)\n f.write(\"\"\"\n class Protocolo{\n\"\"\")\n for builder in BUILDERS:\n f.write(builder)\n f.write(\n \"\"\"\n ServerPacketDecodeAndDispatch(buffer, handler){\n ServerPacketDecodeAndDispatch(buffer, handler);\n }\n \"\"\"\n )\n f.write('\\n }\\n\\n return Protocolo;\\n}); ')\n f.close()\n fph.close()\n\n\ndef escribir_multimessage(f):\n DECODE_DISPATCH.append(\n \"\"\"\n\n var msgIdx = buffer.ReadByte();\n switch (msgIdx) {\n\n case Enums.eMessage.NPCHitUser:\n handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHitNPC:\n handler.handleUserHitNPC(buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserAttackedSwing:\n handler.handleUserAttackedSwing(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedByUser:\n handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedUser:\n handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.WorkRequestTarget:\n 
handler.handleWorkRequestTarget(buffer.ReadByte());\n break;\n\n case Enums.eMessage.HaveKilledUser:\n handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserKill:\n handler.handleUserKill(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.Home:\n handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());\n break;\n\n case Enums.eMessage.DontSeeAnything:\n handler.handleDontSeeAnything();\n break;\n\n case Enums.eMessage.NPCSwing:\n\n handler.handleNPCSwing();\n break;\n\n case Enums.eMessage.NPCKillUser:\n\n handler.handleNPCKillUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldUser:\n\n handler.handleBlockedWithShieldUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldOther:\n\n handler.handleBlockedWithShieldOther();\n break;\n\n case Enums.eMessage.UserSwing:\n\n handler.handleUserSwing();\n break;\n\n case Enums.eMessage.SafeModeOn:\n\n handler.handleSafeModeOn();\n break;\n\n case Enums.eMessage.SafeModeOff:\n\n handler.handleSafeModeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOff:\n\n handler.handleResuscitationSafeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOn:\n\n handler.handleResuscitationSafeOn();\n break;\n\n case Enums.eMessage.NobilityLost:\n\n handler.handleNobilityLost();\n break;\n\n case Enums.eMessage.CantUseWhileMeditating:\n\n handler.handleCantUseWhileMeditating();\n break;\n\n case Enums.eMessage.EarnExp:\n\n handler.handleEarnExp();\n break;\n\n case Enums.eMessage.FinishHome:\n\n handler.handleFinishHome();\n break;\n\n case Enums.eMessage.CancelHome:\n\n handler.handleCancelHome();\n break;\n\n default:\n throw new Error(\"Multimessage: \" + msgIdx + \" no reconocido por el protocolo\");\n }\n\"\"\"\n )\n ARGS_HANDLER.append('msgIdx,args')\n\n\ndef main():\n write_packets()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef write_packets_from(f, fph, base_name, namespace, P):\n if base_name != 'ServerPacket':\n f.write('var {base_name}ID = {{ \\n'.format(base_name=base_name))\n for i, x in enumerate(P):\n if x:\n f.write(' {name} : {packet_id}'.format(base_name=\n base_name, name=x.name, packet_id=i))\n f.write(',\\n')\n f.write(' {base_name}ID_PACKET_COUNT : {packet_id}\\n}};\\n'.\n format(base_name=base_name, packet_id=len(P)))\n \"\"\"\n f.write(\"\"\\\"\nfunction {base_name}Factory(buffer) {{\n if (buffer.length() < 1) return 0;\n var p;\n PacketID = buffer.PeekByte();\n\n switch (PacketID) {{\n\"\"\\\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\\\"\n case {i}:\n p = new {name}(buffer);\n break;\n\"\"\\\".format(i=i, name=x.name))\n\n f.write(\"\"\\\"\n }}\n return p;\n}}\n\"\"\\\".format())\n \"\"\"\n for i, x in enumerate(P):\n if not x:\n continue\n header_fields = []\n header_fields_signature = []\n items_assign_e = []\n items_assign_build = []\n ctor_fields = ''\n min_byte_count = 0\n ctor_fields_bytequeue = ''\n parametros_fields = ''\n parametros_args = ''\n serialize_fields = ''\n if x.name == 'MultiMessage':\n escribir_multimessage(f)\n continue\n for y in x.args:\n arg_name = y[0]\n arg_type = y[1] & 255\n arg_type_str = TYPE_TO_STR[arg_type]\n arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]\n arg_is_array = y[1] & TYPE_ARRAY == TYPE_ARRAY\n type_reader_name = TYPE_TO_READER_NAME[arg_type]\n type_writer_name = TYPE_TO_WRITER_NAME[arg_type]\n ctor_fields += ', ' + arg_name + '()'\n items_assign_e.append(' {arg_name}: {arg_name},'.\n format(arg_name=arg_name))\n items_assign_build.append(' e.{arg_name}= {arg_name};'.\n format(arg_name=arg_name))\n if arg_is_array:\n array_size = y[2]\n min_byte_count += TYPE_SIZE[arg_type] * array_size\n header_fields.append(' {arg_name}; '.format(arg_type_str\n =arg_type_str, arg_name=arg_name, array_size=array_size))\n 
header_fields_signature.append('{arg_name} '.format(\n arg_type_str=arg_type_sig_str, arg_name=arg_name,\n array_size=array_size))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name, array_size=array_size)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name, array_size=array_size)\n else:\n min_byte_count += TYPE_SIZE[arg_type]\n header_fields.append(' {arg_type_str} {arg_name}; '.\n format(arg_type_str=arg_type_str, arg_name=arg_name))\n header_fields_signature.append('{arg_type_str} {arg_name}'.\n format(arg_type_str=arg_type_sig_str, arg_name=arg_name))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name)\n format_args = {'base_name': base_name, 'name': x.name,\n 'header_fields': '\\n'.join(header_fields),\n 'header_fields_signature': ', '.join(header_fields_signature),\n 'items_assign_e': '\\n'.join(items_assign_e),\n 'items_assign_build': '\\n'.join(items_assign_build),\n 'ctor_fields': ctor_fields, 'packet_id': i, 'min_byte_count':\n min_byte_count, 'ctor_fields_bytequeue': ctor_fields_bytequeue,\n 'serialize_fields': 
serialize_fields, 'parametros_fields':\n parametros_fields, 'parametros_args': parametros_args}\n if base_name != 'ServerPacket':\n f.write(x.get_header_fmt().format(**format_args))\n BUILDERS.append(x.get_builder_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n HANDLERS.append(x.get_handler_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n dec_dispatch = x.get_parametros_fmt().format(**format_args)\n pos = dec_dispatch.rfind(',')\n if pos > 0:\n dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos + 1:]\n DECODE_DISPATCH.append(dec_dispatch)\n if base_name == 'ServerPacket':\n args_handler = x.get_argumentosHandler_fmt().format(**format_args)\n pos = args_handler.rfind(',')\n if pos > 0:\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n pos = args_handler.rfind('\\n')\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n ARGS_HANDLER.append(args_handler)\n if base_name == 'ServerPacket':\n f.write(\n \"\"\"\nfunction {base_name}DecodeAndDispatch(buffer, handler) {{\n if (buffer.length() < 1) return;\n var PacketID = buffer.ReadByte();\n\n switch (PacketID) {{\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n f.write(\n \"\"\"\n case {i}:\n {{\n {decode_dispatch}\n break;\n }}\n\"\"\"\n .format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))\n f.write(\n \"\"\"\n default:\n {{\n msg = \"error decoding packet id: \" + PacketID;\n throw new Error(msg);\n }}\n }}\n}}\n\"\"\"\n .format())\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write('\\n\\thandle{name}: function ({arg_handler}){{ \\n'.\n format(base_name=base_name, name=x.name, arg_handler=\n 
ARGS_HANDLER.pop(0)))\n fph.write('\\t\\tlog.network(\"TODO: handle{name} \");\\n\\t}},\\n'.\n format(base_name=base_name, name=x.name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n )\n\n\ndef write_packets():\n f = open('protocol.js', 'w')\n fph = open('protocolhandlerAux.js', 'w')\n f.write(\n \"\\n/* Automatically generated file */\\n\\ndefine(['enums'], function (Enums) {\\n\"\n )\n write_packets_from(f, fph, 'ClientPacket', 'client', CLIENT_PACKETS)\n write_packets_from(f, fph, 'ClientGMPacket', 'clientgm', CLIENT_GM_PACKETS)\n write_packets_from(f, fph, 'ServerPacket', 'server', SERVER_PACKETS)\n f.write(\"\"\"\n class Protocolo{\n\"\"\")\n for builder in BUILDERS:\n f.write(builder)\n f.write(\n \"\"\"\n ServerPacketDecodeAndDispatch(buffer, handler){\n ServerPacketDecodeAndDispatch(buffer, handler);\n }\n \"\"\"\n )\n f.write('\\n }\\n\\n return Protocolo;\\n}); ')\n f.close()\n fph.close()\n\n\ndef escribir_multimessage(f):\n DECODE_DISPATCH.append(\n \"\"\"\n\n var msgIdx = buffer.ReadByte();\n switch (msgIdx) {\n\n case Enums.eMessage.NPCHitUser:\n handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHitNPC:\n handler.handleUserHitNPC(buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserAttackedSwing:\n handler.handleUserAttackedSwing(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedByUser:\n handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedUser:\n handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.WorkRequestTarget:\n 
handler.handleWorkRequestTarget(buffer.ReadByte());\n break;\n\n case Enums.eMessage.HaveKilledUser:\n handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserKill:\n handler.handleUserKill(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.Home:\n handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());\n break;\n\n case Enums.eMessage.DontSeeAnything:\n handler.handleDontSeeAnything();\n break;\n\n case Enums.eMessage.NPCSwing:\n\n handler.handleNPCSwing();\n break;\n\n case Enums.eMessage.NPCKillUser:\n\n handler.handleNPCKillUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldUser:\n\n handler.handleBlockedWithShieldUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldOther:\n\n handler.handleBlockedWithShieldOther();\n break;\n\n case Enums.eMessage.UserSwing:\n\n handler.handleUserSwing();\n break;\n\n case Enums.eMessage.SafeModeOn:\n\n handler.handleSafeModeOn();\n break;\n\n case Enums.eMessage.SafeModeOff:\n\n handler.handleSafeModeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOff:\n\n handler.handleResuscitationSafeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOn:\n\n handler.handleResuscitationSafeOn();\n break;\n\n case Enums.eMessage.NobilityLost:\n\n handler.handleNobilityLost();\n break;\n\n case Enums.eMessage.CantUseWhileMeditating:\n\n handler.handleCantUseWhileMeditating();\n break;\n\n case Enums.eMessage.EarnExp:\n\n handler.handleEarnExp();\n break;\n\n case Enums.eMessage.FinishHome:\n\n handler.handleFinishHome();\n break;\n\n case Enums.eMessage.CancelHome:\n\n handler.handleCancelHome();\n break;\n\n default:\n throw new Error(\"Multimessage: \" + msgIdx + \" no reconocido por el protocolo\");\n }\n\"\"\"\n )\n ARGS_HANDLER.append('msgIdx,args')\n\n\ndef main():\n write_packets()\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nBUILDERS = []\nHANDLERS = []\nDECODE_DISPATCH = []\nARGS_HANDLER = []\n\n\ndef write_packets_from(f, fph, base_name, namespace, P):\n if base_name != 'ServerPacket':\n f.write('var {base_name}ID = {{ \\n'.format(base_name=base_name))\n for i, x in enumerate(P):\n if x:\n f.write(' {name} : {packet_id}'.format(base_name=\n base_name, name=x.name, packet_id=i))\n f.write(',\\n')\n f.write(' {base_name}ID_PACKET_COUNT : {packet_id}\\n}};\\n'.\n format(base_name=base_name, packet_id=len(P)))\n \"\"\"\n f.write(\"\"\\\"\nfunction {base_name}Factory(buffer) {{\n if (buffer.length() < 1) return 0;\n var p;\n PacketID = buffer.PeekByte();\n\n switch (PacketID) {{\n\"\"\\\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\\\"\n case {i}:\n p = new {name}(buffer);\n break;\n\"\"\\\".format(i=i, name=x.name))\n\n f.write(\"\"\\\"\n }}\n return p;\n}}\n\"\"\\\".format())\n \"\"\"\n for i, x in enumerate(P):\n if not x:\n continue\n header_fields = []\n header_fields_signature = []\n items_assign_e = []\n items_assign_build = []\n ctor_fields = ''\n min_byte_count = 0\n ctor_fields_bytequeue = ''\n parametros_fields = ''\n parametros_args = ''\n serialize_fields = ''\n if x.name == 'MultiMessage':\n escribir_multimessage(f)\n continue\n for y in x.args:\n arg_name = y[0]\n arg_type = y[1] & 255\n arg_type_str = TYPE_TO_STR[arg_type]\n arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]\n arg_is_array = y[1] & TYPE_ARRAY == TYPE_ARRAY\n type_reader_name = TYPE_TO_READER_NAME[arg_type]\n type_writer_name = TYPE_TO_WRITER_NAME[arg_type]\n ctor_fields += ', ' + arg_name + '()'\n items_assign_e.append(' {arg_name}: {arg_name},'.\n format(arg_name=arg_name))\n items_assign_build.append(' e.{arg_name}= {arg_name};'.\n format(arg_name=arg_name))\n if arg_is_array:\n array_size = y[2]\n min_byte_count += TYPE_SIZE[arg_type] * array_size\n header_fields.append(' {arg_name}; '.format(arg_type_str\n =arg_type_str, 
arg_name=arg_name, array_size=array_size))\n header_fields_signature.append('{arg_name} '.format(\n arg_type_str=arg_type_sig_str, arg_name=arg_name,\n array_size=array_size))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name, array_size=array_size)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name, array_size=array_size)\n else:\n min_byte_count += TYPE_SIZE[arg_type]\n header_fields.append(' {arg_type_str} {arg_name}; '.\n format(arg_type_str=arg_type_str, arg_name=arg_name))\n header_fields_signature.append('{arg_type_str} {arg_name}'.\n format(arg_type_str=arg_type_sig_str, arg_name=arg_name))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name)\n format_args = {'base_name': base_name, 'name': x.name,\n 'header_fields': '\\n'.join(header_fields),\n 'header_fields_signature': ', '.join(header_fields_signature),\n 'items_assign_e': '\\n'.join(items_assign_e),\n 'items_assign_build': '\\n'.join(items_assign_build),\n 'ctor_fields': ctor_fields, 'packet_id': i, 'min_byte_count':\n min_byte_count, 'ctor_fields_bytequeue': 
ctor_fields_bytequeue,\n 'serialize_fields': serialize_fields, 'parametros_fields':\n parametros_fields, 'parametros_args': parametros_args}\n if base_name != 'ServerPacket':\n f.write(x.get_header_fmt().format(**format_args))\n BUILDERS.append(x.get_builder_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n HANDLERS.append(x.get_handler_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n dec_dispatch = x.get_parametros_fmt().format(**format_args)\n pos = dec_dispatch.rfind(',')\n if pos > 0:\n dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos + 1:]\n DECODE_DISPATCH.append(dec_dispatch)\n if base_name == 'ServerPacket':\n args_handler = x.get_argumentosHandler_fmt().format(**format_args)\n pos = args_handler.rfind(',')\n if pos > 0:\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n pos = args_handler.rfind('\\n')\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n ARGS_HANDLER.append(args_handler)\n if base_name == 'ServerPacket':\n f.write(\n \"\"\"\nfunction {base_name}DecodeAndDispatch(buffer, handler) {{\n if (buffer.length() < 1) return;\n var PacketID = buffer.ReadByte();\n\n switch (PacketID) {{\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n f.write(\n \"\"\"\n case {i}:\n {{\n {decode_dispatch}\n break;\n }}\n\"\"\"\n .format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))\n f.write(\n \"\"\"\n default:\n {{\n msg = \"error decoding packet id: \" + PacketID;\n throw new Error(msg);\n }}\n }}\n}}\n\"\"\"\n .format())\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write('\\n\\thandle{name}: function ({arg_handler}){{ \\n'.\n 
format(base_name=base_name, name=x.name, arg_handler=\n ARGS_HANDLER.pop(0)))\n fph.write('\\t\\tlog.network(\"TODO: handle{name} \");\\n\\t}},\\n'.\n format(base_name=base_name, name=x.name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n )\n\n\ndef write_packets():\n f = open('protocol.js', 'w')\n fph = open('protocolhandlerAux.js', 'w')\n f.write(\n \"\\n/* Automatically generated file */\\n\\ndefine(['enums'], function (Enums) {\\n\"\n )\n write_packets_from(f, fph, 'ClientPacket', 'client', CLIENT_PACKETS)\n write_packets_from(f, fph, 'ClientGMPacket', 'clientgm', CLIENT_GM_PACKETS)\n write_packets_from(f, fph, 'ServerPacket', 'server', SERVER_PACKETS)\n f.write(\"\"\"\n class Protocolo{\n\"\"\")\n for builder in BUILDERS:\n f.write(builder)\n f.write(\n \"\"\"\n ServerPacketDecodeAndDispatch(buffer, handler){\n ServerPacketDecodeAndDispatch(buffer, handler);\n }\n \"\"\"\n )\n f.write('\\n }\\n\\n return Protocolo;\\n}); ')\n f.close()\n fph.close()\n\n\ndef escribir_multimessage(f):\n DECODE_DISPATCH.append(\n \"\"\"\n\n var msgIdx = buffer.ReadByte();\n switch (msgIdx) {\n\n case Enums.eMessage.NPCHitUser:\n handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHitNPC:\n handler.handleUserHitNPC(buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserAttackedSwing:\n handler.handleUserAttackedSwing(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedByUser:\n handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedUser:\n handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case 
Enums.eMessage.WorkRequestTarget:\n handler.handleWorkRequestTarget(buffer.ReadByte());\n break;\n\n case Enums.eMessage.HaveKilledUser:\n handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserKill:\n handler.handleUserKill(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.Home:\n handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());\n break;\n\n case Enums.eMessage.DontSeeAnything:\n handler.handleDontSeeAnything();\n break;\n\n case Enums.eMessage.NPCSwing:\n\n handler.handleNPCSwing();\n break;\n\n case Enums.eMessage.NPCKillUser:\n\n handler.handleNPCKillUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldUser:\n\n handler.handleBlockedWithShieldUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldOther:\n\n handler.handleBlockedWithShieldOther();\n break;\n\n case Enums.eMessage.UserSwing:\n\n handler.handleUserSwing();\n break;\n\n case Enums.eMessage.SafeModeOn:\n\n handler.handleSafeModeOn();\n break;\n\n case Enums.eMessage.SafeModeOff:\n\n handler.handleSafeModeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOff:\n\n handler.handleResuscitationSafeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOn:\n\n handler.handleResuscitationSafeOn();\n break;\n\n case Enums.eMessage.NobilityLost:\n\n handler.handleNobilityLost();\n break;\n\n case Enums.eMessage.CantUseWhileMeditating:\n\n handler.handleCantUseWhileMeditating();\n break;\n\n case Enums.eMessage.EarnExp:\n\n handler.handleEarnExp();\n break;\n\n case Enums.eMessage.FinishHome:\n\n handler.handleFinishHome();\n break;\n\n case Enums.eMessage.CancelHome:\n\n handler.handleCancelHome();\n break;\n\n default:\n throw new Error(\"Multimessage: \" + msgIdx + \" no reconocido por el protocolo\");\n }\n\"\"\"\n )\n ARGS_HANDLER.append('msgIdx,args')\n\n\ndef main():\n write_packets()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nfrom genpackets import *\nfrom gendefs_js import *\nBUILDERS = []\nHANDLERS = []\nDECODE_DISPATCH = []\nARGS_HANDLER = []\n\n\ndef write_packets_from(f, fph, base_name, namespace, P):\n if base_name != 'ServerPacket':\n f.write('var {base_name}ID = {{ \\n'.format(base_name=base_name))\n for i, x in enumerate(P):\n if x:\n f.write(' {name} : {packet_id}'.format(base_name=\n base_name, name=x.name, packet_id=i))\n f.write(',\\n')\n f.write(' {base_name}ID_PACKET_COUNT : {packet_id}\\n}};\\n'.\n format(base_name=base_name, packet_id=len(P)))\n \"\"\"\n f.write(\"\"\\\"\nfunction {base_name}Factory(buffer) {{\n if (buffer.length() < 1) return 0;\n var p;\n PacketID = buffer.PeekByte();\n\n switch (PacketID) {{\n\"\"\\\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\\\"\n case {i}:\n p = new {name}(buffer);\n break;\n\"\"\\\".format(i=i, name=x.name))\n\n f.write(\"\"\\\"\n }}\n return p;\n}}\n\"\"\\\".format())\n \"\"\"\n for i, x in enumerate(P):\n if not x:\n continue\n header_fields = []\n header_fields_signature = []\n items_assign_e = []\n items_assign_build = []\n ctor_fields = ''\n min_byte_count = 0\n ctor_fields_bytequeue = ''\n parametros_fields = ''\n parametros_args = ''\n serialize_fields = ''\n if x.name == 'MultiMessage':\n escribir_multimessage(f)\n continue\n for y in x.args:\n arg_name = y[0]\n arg_type = y[1] & 255\n arg_type_str = TYPE_TO_STR[arg_type]\n arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]\n arg_is_array = y[1] & TYPE_ARRAY == TYPE_ARRAY\n type_reader_name = TYPE_TO_READER_NAME[arg_type]\n type_writer_name = TYPE_TO_WRITER_NAME[arg_type]\n ctor_fields += ', ' + arg_name + '()'\n items_assign_e.append(' {arg_name}: {arg_name},'.\n format(arg_name=arg_name))\n items_assign_build.append(' e.{arg_name}= {arg_name};'.\n format(arg_name=arg_name))\n if arg_is_array:\n array_size = y[2]\n min_byte_count += TYPE_SIZE[arg_type] * array_size\n header_fields.append(' 
{arg_name}; '.format(arg_type_str\n =arg_type_str, arg_name=arg_name, array_size=array_size))\n header_fields_signature.append('{arg_name} '.format(\n arg_type_str=arg_type_sig_str, arg_name=arg_name,\n array_size=array_size))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name, array_size=array_size)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name, array_size=array_size)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name, array_size=array_size)\n else:\n min_byte_count += TYPE_SIZE[arg_type]\n header_fields.append(' {arg_type_str} {arg_name}; '.\n format(arg_type_str=arg_type_str, arg_name=arg_name))\n header_fields_signature.append('{arg_type_str} {arg_name}'.\n format(arg_type_str=arg_type_sig_str, arg_name=arg_name))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(\n arg_is_array).format(arg_name=arg_name,\n type_reader_name=type_reader_name)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n parametros_args += x.get_parametros_args_fmt(arg_is_array\n ).format(arg_name=arg_name, type_reader_name=\n type_reader_name)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array\n ).format(arg_name=arg_name, type_writer_name=\n type_writer_name)\n format_args = {'base_name': base_name, 'name': x.name,\n 'header_fields': '\\n'.join(header_fields),\n 'header_fields_signature': ', '.join(header_fields_signature),\n 'items_assign_e': '\\n'.join(items_assign_e),\n 'items_assign_build': '\\n'.join(items_assign_build),\n 'ctor_fields': ctor_fields, 'packet_id': i, 'min_byte_count':\n 
min_byte_count, 'ctor_fields_bytequeue': ctor_fields_bytequeue,\n 'serialize_fields': serialize_fields, 'parametros_fields':\n parametros_fields, 'parametros_args': parametros_args}\n if base_name != 'ServerPacket':\n f.write(x.get_header_fmt().format(**format_args))\n BUILDERS.append(x.get_builder_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n HANDLERS.append(x.get_handler_fmt().format(**format_args))\n if base_name == 'ServerPacket':\n dec_dispatch = x.get_parametros_fmt().format(**format_args)\n pos = dec_dispatch.rfind(',')\n if pos > 0:\n dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos + 1:]\n DECODE_DISPATCH.append(dec_dispatch)\n if base_name == 'ServerPacket':\n args_handler = x.get_argumentosHandler_fmt().format(**format_args)\n pos = args_handler.rfind(',')\n if pos > 0:\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n pos = args_handler.rfind('\\n')\n args_handler = args_handler[:pos] + args_handler[pos + 1:]\n ARGS_HANDLER.append(args_handler)\n if base_name == 'ServerPacket':\n f.write(\n \"\"\"\nfunction {base_name}DecodeAndDispatch(buffer, handler) {{\n if (buffer.length() < 1) return;\n var PacketID = buffer.ReadByte();\n\n switch (PacketID) {{\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n f.write(\n \"\"\"\n case {i}:\n {{\n {decode_dispatch}\n break;\n }}\n\"\"\"\n .format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))\n f.write(\n \"\"\"\n default:\n {{\n msg = \"error decoding packet id: \" + PacketID;\n throw new Error(msg);\n }}\n }}\n}}\n\"\"\"\n .format())\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n .format(base_name=base_name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write('\\n\\thandle{name}: function 
({arg_handler}){{ \\n'.\n format(base_name=base_name, name=x.name, arg_handler=\n ARGS_HANDLER.pop(0)))\n fph.write('\\t\\tlog.network(\"TODO: handle{name} \");\\n\\t}},\\n'.\n format(base_name=base_name, name=x.name))\n for i, x in enumerate(P):\n if not x:\n continue\n fph.write(\n \"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\"\n )\n\n\ndef write_packets():\n f = open('protocol.js', 'w')\n fph = open('protocolhandlerAux.js', 'w')\n f.write(\n \"\\n/* Automatically generated file */\\n\\ndefine(['enums'], function (Enums) {\\n\"\n )\n write_packets_from(f, fph, 'ClientPacket', 'client', CLIENT_PACKETS)\n write_packets_from(f, fph, 'ClientGMPacket', 'clientgm', CLIENT_GM_PACKETS)\n write_packets_from(f, fph, 'ServerPacket', 'server', SERVER_PACKETS)\n f.write(\"\"\"\n class Protocolo{\n\"\"\")\n for builder in BUILDERS:\n f.write(builder)\n f.write(\n \"\"\"\n ServerPacketDecodeAndDispatch(buffer, handler){\n ServerPacketDecodeAndDispatch(buffer, handler);\n }\n \"\"\"\n )\n f.write('\\n }\\n\\n return Protocolo;\\n}); ')\n f.close()\n fph.close()\n\n\ndef escribir_multimessage(f):\n DECODE_DISPATCH.append(\n \"\"\"\n\n var msgIdx = buffer.ReadByte();\n switch (msgIdx) {\n\n case Enums.eMessage.NPCHitUser:\n handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHitNPC:\n handler.handleUserHitNPC(buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserAttackedSwing:\n handler.handleUserAttackedSwing(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedByUser:\n handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedUser:\n handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), 
buffer.ReadInteger());\n break;\n\n case Enums.eMessage.WorkRequestTarget:\n handler.handleWorkRequestTarget(buffer.ReadByte());\n break;\n\n case Enums.eMessage.HaveKilledUser:\n handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserKill:\n handler.handleUserKill(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.Home:\n handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());\n break;\n\n case Enums.eMessage.DontSeeAnything:\n handler.handleDontSeeAnything();\n break;\n\n case Enums.eMessage.NPCSwing:\n\n handler.handleNPCSwing();\n break;\n\n case Enums.eMessage.NPCKillUser:\n\n handler.handleNPCKillUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldUser:\n\n handler.handleBlockedWithShieldUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldOther:\n\n handler.handleBlockedWithShieldOther();\n break;\n\n case Enums.eMessage.UserSwing:\n\n handler.handleUserSwing();\n break;\n\n case Enums.eMessage.SafeModeOn:\n\n handler.handleSafeModeOn();\n break;\n\n case Enums.eMessage.SafeModeOff:\n\n handler.handleSafeModeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOff:\n\n handler.handleResuscitationSafeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOn:\n\n handler.handleResuscitationSafeOn();\n break;\n\n case Enums.eMessage.NobilityLost:\n\n handler.handleNobilityLost();\n break;\n\n case Enums.eMessage.CantUseWhileMeditating:\n\n handler.handleCantUseWhileMeditating();\n break;\n\n case Enums.eMessage.EarnExp:\n\n handler.handleEarnExp();\n break;\n\n case Enums.eMessage.FinishHome:\n\n handler.handleFinishHome();\n break;\n\n case Enums.eMessage.CancelHome:\n\n handler.handleCancelHome();\n break;\n\n default:\n throw new Error(\"Multimessage: \" + msgIdx + \" no reconocido por el protocolo\");\n }\n\"\"\"\n )\n ARGS_HANDLER.append('msgIdx,args')\n\n\ndef main():\n write_packets()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/bin/env python\n# coding: utf-8\n\n\"\"\"\nDakara Online protocol generator, by Alejandro Santos\n\"\"\"\n\nfrom genpackets import *\nfrom gendefs_js import *\n\nBUILDERS = []\nHANDLERS = []\nDECODE_DISPATCH = []\nARGS_HANDLER = []\ndef write_packets_from(f, fph, base_name, namespace, P):\n\n\n # Enum with IDs\n if base_name != \"ServerPacket\" :\n \tf.write(\"\"\"var {base_name}ID = {{ \\n\"\"\".format(base_name=base_name))\n \tfor i, x in enumerate(P):\n \t\tif x:\n \t\t\tf.write(\" {name} : {packet_id}\".format(base_name=base_name, name=x.name, packet_id=i))\n \t\t\tf.write(\",\\n\")\n \tf.write(\"\"\" {base_name}ID_PACKET_COUNT : {packet_id}\\n}};\\n\"\"\".format(base_name=base_name, packet_id=len(P)))\n\n# Factory\n '''\n f.write(\"\"\"\nfunction {base_name}Factory(buffer) {{\n if (buffer.length() < 1) return 0;\n var p;\n PacketID = buffer.PeekByte();\n\n switch (PacketID) {{\n\"\"\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\"\n case {i}:\n p = new {name}(buffer);\n break;\n\"\"\".format(i=i, name=x.name))\n\n f.write(\"\"\"\n }}\n return p;\n}}\n\"\"\".format())\n '''\n \n for i, x in enumerate(P):\n if not x: continue\n\n header_fields = []\n header_fields_signature = []\n items_assign_e = []\n items_assign_build = []\n ctor_fields = \"\"\n min_byte_count = 0\n ctor_fields_bytequeue = \"\"\n parametros_fields = \"\"\n parametros_args = \"\"\n serialize_fields = \"\"\n\n if x.name == \"MultiMessage\":\n escribir_multimessage(f)\n continue\n\n for y in x.args:\n arg_name = y[0]\n arg_type = y[1] & 0xff\n arg_type_str = TYPE_TO_STR[arg_type]\n arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]\n arg_is_array = ((y[1] & TYPE_ARRAY) == TYPE_ARRAY)\n type_reader_name = TYPE_TO_READER_NAME[arg_type]\n type_writer_name = TYPE_TO_WRITER_NAME[arg_type]\n\n ctor_fields += \", \" + arg_name + \"()\"\n\n items_assign_e.append(\" {arg_name}: {arg_name},\".format(arg_name=arg_name))\n 
items_assign_build.append(\" e.{arg_name}= {arg_name};\".format(arg_name=arg_name))\n\n if arg_is_array:\n array_size=y[2]\n min_byte_count += TYPE_SIZE[arg_type] * array_size\n header_fields.append(\" {arg_name}; \".format(arg_type_str=arg_type_str, arg_name=arg_name, array_size=array_size))\n header_fields_signature.append(\"{arg_name} \".format(arg_type_str=arg_type_sig_str, arg_name=arg_name, array_size=array_size))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)\n \tparametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)\n \tparametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name, array_size=array_size)\n else:\n min_byte_count += TYPE_SIZE[arg_type]\n header_fields.append(\" {arg_type_str} {arg_name}; \".format(arg_type_str=arg_type_str, arg_name=arg_name))\n header_fields_signature.append(\"{arg_type_str} {arg_name}\".format(arg_type_str=arg_type_sig_str, arg_name=arg_name))\n ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)\n parametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)\n parametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)\n serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name)\n\n format_args = {\n 'base_name': base_name,\n 'name': x.name,\n 'header_fields': '\\n'.join(header_fields),\n 'header_fields_signature': ', '.join(header_fields_signature),\n 
'items_assign_e': '\\n'.join(items_assign_e),\n 'items_assign_build': '\\n'.join(items_assign_build),\n 'ctor_fields': ctor_fields,\n 'packet_id': i,\n 'min_byte_count': min_byte_count,\n 'ctor_fields_bytequeue': ctor_fields_bytequeue,\n 'serialize_fields': serialize_fields,\n 'parametros_fields' : parametros_fields,\n 'parametros_args' : parametros_args\n }\n\n # Individual packet header\n if base_name != \"ServerPacket\" :\n \tf.write(x.get_header_fmt().format(**format_args))\n \tBUILDERS.append(x.get_builder_fmt().format(**format_args))\n\n if base_name == \"ServerPacket\" :\n HANDLERS.append(x.get_handler_fmt().format(**format_args))\n\n #para el serverpacketdecodeanddispatch (sin tener que crear packetes)\n if base_name == \"ServerPacket\" :\n \tdec_dispatch = x.get_parametros_fmt().format(**format_args);\n \t#le saco la ultima coma si es que tiene:\n \tpos = dec_dispatch.rfind(\",\")\n \tif pos > 0:\n \t\tdec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos+1:]\n \tDECODE_DISPATCH.append(dec_dispatch)\n\n if base_name == \"ServerPacket\" :\n args_handler = x.get_argumentosHandler_fmt().format(**format_args);\n #le saco la ultima coma si es que tiene:\n pos = args_handler.rfind(\",\")\n if pos > 0:\n \targs_handler = args_handler[:pos] + args_handler[pos+1:]\n #le saco fin de linea\n pos = args_handler.rfind(\"\\n\")\n args_handler = args_handler[:pos] + args_handler[pos+1:]\n ARGS_HANDLER.append(args_handler)\n\n\n\n\n\n \n # Decode and Dispatch, keeping the Packet in the stack\n # Suggested by hmk\n if base_name == \"ServerPacket\" :\n f.write(\"\"\"\nfunction {base_name}DecodeAndDispatch(buffer, handler) {{\n if (buffer.length() < 1) return;\n var PacketID = buffer.ReadByte();\n\n switch (PacketID) {{\n\"\"\".format(base_name=base_name))\n\n for i, x in enumerate(P):\n if not x: continue\n f.write(\"\"\"\n case {i}:\n {{\n {decode_dispatch}\n break;\n }}\n\"\"\".format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))\n\n f.write(\"\"\"\n default:\n {{\n msg = 
\"error decoding packet id: \" + PacketID;\n throw new Error(msg);\n }}\n }}\n}}\n\"\"\".format())\n\n fph.write(\"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\".format(base_name=base_name))\n for i, x in enumerate(P):\n if not x: continue\n fph.write(\"\"\"\\n\\thandle{name}: function ({arg_handler}){{ \\n\"\"\".format(base_name=base_name, name=x.name, arg_handler = ARGS_HANDLER.pop(0)))\n #fph.write(HANDLERS.pop(0))\n fph.write(\"\"\"\\t\\tlog.network(\"TODO: handle{name} \");\\n\\t}},\\n\"\"\".format(base_name=base_name, name=x.name))\n\n for i, x in enumerate(P):\n if not x: continue\n #fph.write(\"\"\"\\n\\thandle{name}: function (p){{ \\n\"\"\".format(base_name=base_name, name=x.name))\n #fph.write(HANDLERS.pop(0))\n #fph.write(\"\"\"\\t\\talert(\"TODO: handle{name} \");\\n\\t}},\\n\"\"\".format(base_name=base_name, name=x.name))\n\n fph.write(\"\"\"\n/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/\n\"\"\")\n\n\ndef write_packets():\n f = open(\"protocol.js\", \"w\")\n fph = open(\"protocolhandlerAux.js\", \"w\")\n\n f.write(\"\"\"\n/* Automatically generated file */\n\ndefine(['enums'], function (Enums) {\n\"\"\")\n\n write_packets_from(f,fph, \"ClientPacket\", \"client\", CLIENT_PACKETS)\n write_packets_from(f,fph, \"ClientGMPacket\", \"clientgm\", CLIENT_GM_PACKETS)\n write_packets_from(f,fph, \"ServerPacket\", \"server\", SERVER_PACKETS)\n\n #Multimessages hardcodeado: // TODO ; hacerlo bien\n f.write(\"\"\"\n class Protocolo{\n\"\"\")\n for builder in BUILDERS:\n f.write(builder)\n\n f.write(\"\"\"\n ServerPacketDecodeAndDispatch(buffer, 
handler){\n ServerPacketDecodeAndDispatch(buffer, handler);\n }\n \"\"\")\n f.write(\"\"\"\n }\n\n return Protocolo;\n}); \"\"\")\n\n\n\n\n\n f.close()\n fph.close()\n\n\n\ndef escribir_multimessage(f):\n DECODE_DISPATCH.append('''\n\n var msgIdx = buffer.ReadByte();\n switch (msgIdx) {\n\n case Enums.eMessage.NPCHitUser:\n handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHitNPC:\n handler.handleUserHitNPC(buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserAttackedSwing:\n handler.handleUserAttackedSwing(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedByUser:\n handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.UserHittedUser:\n handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());\n break;\n\n case Enums.eMessage.WorkRequestTarget:\n handler.handleWorkRequestTarget(buffer.ReadByte());\n break;\n\n case Enums.eMessage.HaveKilledUser:\n handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());\n break;\n\n case Enums.eMessage.UserKill:\n handler.handleUserKill(buffer.ReadInteger());\n break;\n\n case Enums.eMessage.Home:\n handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());\n break;\n\n case Enums.eMessage.DontSeeAnything:\n handler.handleDontSeeAnything();\n break;\n\n case Enums.eMessage.NPCSwing:\n\n handler.handleNPCSwing();\n break;\n\n case Enums.eMessage.NPCKillUser:\n\n handler.handleNPCKillUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldUser:\n\n handler.handleBlockedWithShieldUser();\n break;\n\n case Enums.eMessage.BlockedWithShieldOther:\n\n handler.handleBlockedWithShieldOther();\n break;\n\n case Enums.eMessage.UserSwing:\n\n handler.handleUserSwing();\n break;\n\n case Enums.eMessage.SafeModeOn:\n\n handler.handleSafeModeOn();\n break;\n\n case Enums.eMessage.SafeModeOff:\n\n 
handler.handleSafeModeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOff:\n\n handler.handleResuscitationSafeOff();\n break;\n\n case Enums.eMessage.ResuscitationSafeOn:\n\n handler.handleResuscitationSafeOn();\n break;\n\n case Enums.eMessage.NobilityLost:\n\n handler.handleNobilityLost();\n break;\n\n case Enums.eMessage.CantUseWhileMeditating:\n\n handler.handleCantUseWhileMeditating();\n break;\n\n case Enums.eMessage.EarnExp:\n\n handler.handleEarnExp();\n break;\n\n case Enums.eMessage.FinishHome:\n\n handler.handleFinishHome();\n break;\n\n case Enums.eMessage.CancelHome:\n\n handler.handleCancelHome();\n break;\n\n default:\n throw new Error(\"Multimessage: \" + msgIdx + \" no reconocido por el protocolo\");\n }\n''')\n ARGS_HANDLER.append(\"msgIdx,args\")\n\n\ndef main():\n write_packets()\n\nif __name__ == '__main__':\n main()",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from datetime import datetime
import warnings
import numpy as np
import xarray as xr
from .common import HDF4, expects_file_info
# Optional dependency: pyhdf is required to actually read HDF4 files.
# The flag records whether the import succeeded so callers can give a
# helpful error message instead of a bare ImportError.
pyhdf_is_installed = False
try:
    from pyhdf import HDF, V, VS
    from pyhdf.SD import SD, SDC
except ImportError:
    pass
else:
    pyhdf_is_installed = True
# Public API of this module (controls ``from <module> import *``):
__all__ = [
    'CloudSat',
]
class CloudSat(HDF4):
    """File handler for CloudSat data in HDF4 files.

    Always provides ``time``, ``lat`` and ``lon`` fields in the returned
    dataset, regardless of which fields the user requested.
    """

    # This file handler always wants to return at least time, lat and lon
    # fields. These fields are required for this:
    standard_fields = {
        "UTC_start",
        "Profile_time",
        "Latitude",
        "Longitude",
    }

    # Map the standard fields to standard names:
    mapping = {
        "Latitude": "lat",
        "Longitude": "lon",
        "dim_0": "scnline",
    }

    def __init__(self, **kwargs):
        """Initialize a CloudSat file handler.

        Args:
            **kwargs: Additional keyword arguments that are forwarded to
                the HDF4 base class initializer.
        """
        super().__init__(**kwargs)

    @expects_file_info()
    def get_info(self, file_info, **kwargs):
        """Return a :class:`FileInfo` object with parameters about the
        file content.

        Args:
            file_info: Path and name of the file of which to retrieve the info
                about.
            **kwargs: Additional keyword arguments.

        Returns:
            A FileInfo object with ``times`` set to the granule's start
            and end time.
        """
        file = SD(file_info.path, SDC.READ)
        try:
            # The global attributes *start_time* / *end_time* hold the
            # granule coverage as "YYYYmmddHHMMSS" strings:
            file_info.times[0] = datetime.strptime(
                file.start_time, "%Y%m%d%H%M%S")
            file_info.times[1] = datetime.strptime(
                file.end_time, "%Y%m%d%H%M%S")
        finally:
            # SD handles are not context managers; close explicitly so the
            # underlying HDF4 file descriptor is not leaked.
            file.end()

        return file_info

    @expects_file_info()
    def read(self, file_info, **kwargs):
        """Read and parse HDF4 files and load them to a xarray.Dataset

        A description about all variables in CloudSat dataset can be found in
        http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-ice?term=53.

        Args:
            file_info: Path and name of the file as string or FileInfo object.
            **kwargs: Additional keyword arguments that are valid for
                :class:`typhon.files.handlers.common.HDF4`.

        Returns:
            A xarray.Dataset object.
        """
        # We need to import at least the standard fields since they are
        # required to build the time/lat/lon coordinates:
        user_fields = set(kwargs.pop("fields", {}))
        fields = self.standard_fields | user_fields

        # We catch the user mapping here, since we do not want to deal with
        # user-defined names in the further processing. Instead, we use our
        # own mapping and apply the user's one only at the very end:
        user_mapping = kwargs.pop("mapping", None)

        # Load the dataset from the file:
        dataset = super().read(
            file_info, fields=fields, mapping=self.mapping, **kwargs
        )

        dataset["time"] = self._get_time_field(dataset, file_info)

        # Remove the helper fields that we do not need any longer (except
        # the user asked for them explicitly):
        dataset = dataset.drop_vars(
            {"UTC_start", "Profile_time"} - user_fields,
        )

        if user_mapping is not None:
            dataset = dataset.rename(user_mapping)

        return dataset

    def _get_time_field(self, dataset, file_info):
        """Build one full timestamp (date + time) per profile.

        Args:
            dataset: Dataset containing *UTC_start* and *Profile_time*.
            file_info: FileInfo object; its start time provides the date.

        Returns:
            An array of ``datetime64`` timestamps, one per profile.
        """
        # Starting time of the first profile in seconds since midnight UTC:
        first_profile_time = round(dataset['UTC_start'].item(0))

        # Starting time of all other profiles in seconds relative to the
        # first profile. Compute a scaled copy instead of multiplying
        # in-place so we never mutate *Profile_time* itself (the user may
        # have requested that field explicitly):
        profile_times = (dataset['Profile_time'] * 1000).astype("int")

        try:
            date = file_info.times[0].date()
        except AttributeError:
            # The file info does not carry the times yet; load them by
            # ourselves:
            date = self.get_info(file_info).times[0].date()

        # Put all times together so we obtain one full timestamp
        # (date + time) for each data point. We are using the
        # starting date coming from parsing the filename.
        return (
            np.datetime64(date)
            + np.timedelta64(first_profile_time, "s")
            + profile_times.astype("timedelta64[ms]")
        )
|
normal
|
{
"blob_id": "4328d526da14db756fad8d05457724a23e3e3ef6",
"index": 3939,
"step-1": "<mask token>\n\n\nclass CloudSat(HDF4):\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n @expects_file_info()\n def get_info(self, file_info, **kwargs):\n \"\"\"Return a :class:`FileInfo` object with parameters about the\n file content.\n\n Args:\n file_info: Path and name of the file of which to retrieve the info\n about.\n **kwargs: Additional keyword arguments.\n\n Returns:\n A FileInfo object.\n \"\"\"\n file = SD(file_info.path, SDC.READ)\n file_info.times[0] = datetime.strptime(getattr(file, 'start_time'),\n '%Y%m%d%H%M%S')\n file_info.times[1] = datetime.strptime(getattr(file, 'end_time'),\n '%Y%m%d%H%M%S')\n return file_info\n <mask token>\n\n def _get_time_field(self, dataset, file_info):\n first_profile_time = round(dataset['UTC_start'].item(0))\n profile_times = dataset['Profile_time']\n profile_times *= 1000\n profile_times = profile_times.astype('int')\n try:\n date = file_info.times[0].date()\n except AttributeError:\n date = self.get_info(file_info).times[0].date()\n profile_times = np.datetime64(date) + np.timedelta64(first_profile_time\n , 's') + profile_times.astype('timedelta64[ms]')\n return profile_times\n",
"step-2": "<mask token>\n\n\nclass CloudSat(HDF4):\n \"\"\"File handler for CloudSat data in HDF4 files.\n \"\"\"\n standard_fields = {'UTC_start', 'Profile_time', 'Latitude', 'Longitude'}\n mapping = {'Latitude': 'lat', 'Longitude': 'lon', 'dim_0': 'scnline'}\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n @expects_file_info()\n def get_info(self, file_info, **kwargs):\n \"\"\"Return a :class:`FileInfo` object with parameters about the\n file content.\n\n Args:\n file_info: Path and name of the file of which to retrieve the info\n about.\n **kwargs: Additional keyword arguments.\n\n Returns:\n A FileInfo object.\n \"\"\"\n file = SD(file_info.path, SDC.READ)\n file_info.times[0] = datetime.strptime(getattr(file, 'start_time'),\n '%Y%m%d%H%M%S')\n file_info.times[1] = datetime.strptime(getattr(file, 'end_time'),\n '%Y%m%d%H%M%S')\n return file_info\n\n @expects_file_info()\n def read(self, file_info, **kwargs):\n \"\"\"Read and parse HDF4 files and load them to a xarray.Dataset\n\n A description about all variables in CloudSat dataset can be found in\n http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-ice?term=53.\n\n Args:\n file_info: Path and name of the file as string or FileInfo object.\n **kwargs: Additional keyword arguments that are valid for\n :class:`typhon.files.handlers.common.HDF4`.\n\n Returns:\n A xarray.Dataset object.\n \"\"\"\n user_fields = kwargs.pop('fields', {})\n fields = self.standard_fields | set(user_fields)\n user_mapping = kwargs.pop('mapping', None)\n dataset = super().read(file_info, fields=fields, mapping=self.\n mapping, **kwargs)\n dataset['time'] = self._get_time_field(dataset, file_info)\n dataset = dataset.drop_vars({'UTC_start', 'Profile_time'} - set(\n user_fields))\n if user_mapping is not None:\n dataset = dataset.rename(user_mapping)\n return dataset\n\n def _get_time_field(self, dataset, file_info):\n first_profile_time = round(dataset['UTC_start'].item(0))\n profile_times = 
dataset['Profile_time']\n profile_times *= 1000\n profile_times = profile_times.astype('int')\n try:\n date = file_info.times[0].date()\n except AttributeError:\n date = self.get_info(file_info).times[0].date()\n profile_times = np.datetime64(date) + np.timedelta64(first_profile_time\n , 's') + profile_times.astype('timedelta64[ms]')\n return profile_times\n",
"step-3": "<mask token>\ntry:\n from pyhdf import HDF, VS, V\n from pyhdf.SD import SD, SDC\n pyhdf_is_installed = True\nexcept ImportError:\n pass\n<mask token>\n\n\nclass CloudSat(HDF4):\n \"\"\"File handler for CloudSat data in HDF4 files.\n \"\"\"\n standard_fields = {'UTC_start', 'Profile_time', 'Latitude', 'Longitude'}\n mapping = {'Latitude': 'lat', 'Longitude': 'lon', 'dim_0': 'scnline'}\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n @expects_file_info()\n def get_info(self, file_info, **kwargs):\n \"\"\"Return a :class:`FileInfo` object with parameters about the\n file content.\n\n Args:\n file_info: Path and name of the file of which to retrieve the info\n about.\n **kwargs: Additional keyword arguments.\n\n Returns:\n A FileInfo object.\n \"\"\"\n file = SD(file_info.path, SDC.READ)\n file_info.times[0] = datetime.strptime(getattr(file, 'start_time'),\n '%Y%m%d%H%M%S')\n file_info.times[1] = datetime.strptime(getattr(file, 'end_time'),\n '%Y%m%d%H%M%S')\n return file_info\n\n @expects_file_info()\n def read(self, file_info, **kwargs):\n \"\"\"Read and parse HDF4 files and load them to a xarray.Dataset\n\n A description about all variables in CloudSat dataset can be found in\n http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-ice?term=53.\n\n Args:\n file_info: Path and name of the file as string or FileInfo object.\n **kwargs: Additional keyword arguments that are valid for\n :class:`typhon.files.handlers.common.HDF4`.\n\n Returns:\n A xarray.Dataset object.\n \"\"\"\n user_fields = kwargs.pop('fields', {})\n fields = self.standard_fields | set(user_fields)\n user_mapping = kwargs.pop('mapping', None)\n dataset = super().read(file_info, fields=fields, mapping=self.\n mapping, **kwargs)\n dataset['time'] = self._get_time_field(dataset, file_info)\n dataset = dataset.drop_vars({'UTC_start', 'Profile_time'} - set(\n user_fields))\n if user_mapping is not None:\n dataset = dataset.rename(user_mapping)\n return dataset\n\n 
def _get_time_field(self, dataset, file_info):\n first_profile_time = round(dataset['UTC_start'].item(0))\n profile_times = dataset['Profile_time']\n profile_times *= 1000\n profile_times = profile_times.astype('int')\n try:\n date = file_info.times[0].date()\n except AttributeError:\n date = self.get_info(file_info).times[0].date()\n profile_times = np.datetime64(date) + np.timedelta64(first_profile_time\n , 's') + profile_times.astype('timedelta64[ms]')\n return profile_times\n",
"step-4": "<mask token>\npyhdf_is_installed = False\ntry:\n from pyhdf import HDF, VS, V\n from pyhdf.SD import SD, SDC\n pyhdf_is_installed = True\nexcept ImportError:\n pass\n__all__ = ['CloudSat']\n\n\nclass CloudSat(HDF4):\n \"\"\"File handler for CloudSat data in HDF4 files.\n \"\"\"\n standard_fields = {'UTC_start', 'Profile_time', 'Latitude', 'Longitude'}\n mapping = {'Latitude': 'lat', 'Longitude': 'lon', 'dim_0': 'scnline'}\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n @expects_file_info()\n def get_info(self, file_info, **kwargs):\n \"\"\"Return a :class:`FileInfo` object with parameters about the\n file content.\n\n Args:\n file_info: Path and name of the file of which to retrieve the info\n about.\n **kwargs: Additional keyword arguments.\n\n Returns:\n A FileInfo object.\n \"\"\"\n file = SD(file_info.path, SDC.READ)\n file_info.times[0] = datetime.strptime(getattr(file, 'start_time'),\n '%Y%m%d%H%M%S')\n file_info.times[1] = datetime.strptime(getattr(file, 'end_time'),\n '%Y%m%d%H%M%S')\n return file_info\n\n @expects_file_info()\n def read(self, file_info, **kwargs):\n \"\"\"Read and parse HDF4 files and load them to a xarray.Dataset\n\n A description about all variables in CloudSat dataset can be found in\n http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-ice?term=53.\n\n Args:\n file_info: Path and name of the file as string or FileInfo object.\n **kwargs: Additional keyword arguments that are valid for\n :class:`typhon.files.handlers.common.HDF4`.\n\n Returns:\n A xarray.Dataset object.\n \"\"\"\n user_fields = kwargs.pop('fields', {})\n fields = self.standard_fields | set(user_fields)\n user_mapping = kwargs.pop('mapping', None)\n dataset = super().read(file_info, fields=fields, mapping=self.\n mapping, **kwargs)\n dataset['time'] = self._get_time_field(dataset, file_info)\n dataset = dataset.drop_vars({'UTC_start', 'Profile_time'} - set(\n user_fields))\n if user_mapping is not None:\n dataset = 
dataset.rename(user_mapping)\n return dataset\n\n def _get_time_field(self, dataset, file_info):\n first_profile_time = round(dataset['UTC_start'].item(0))\n profile_times = dataset['Profile_time']\n profile_times *= 1000\n profile_times = profile_times.astype('int')\n try:\n date = file_info.times[0].date()\n except AttributeError:\n date = self.get_info(file_info).times[0].date()\n profile_times = np.datetime64(date) + np.timedelta64(first_profile_time\n , 's') + profile_times.astype('timedelta64[ms]')\n return profile_times\n",
"step-5": "from datetime import datetime\nimport warnings\n\nimport numpy as np\nimport xarray as xr\n\nfrom .common import HDF4, expects_file_info\n\npyhdf_is_installed = False\ntry:\n from pyhdf import HDF, VS, V\n from pyhdf.SD import SD, SDC\n pyhdf_is_installed = True\nexcept ImportError:\n pass\n\n__all__ = [\n 'CloudSat',\n]\n\n\nclass CloudSat(HDF4):\n \"\"\"File handler for CloudSat data in HDF4 files.\n \"\"\"\n\n # This file handler always wants to return at least time, lat and lon\n # fields. These fields are required for this:\n standard_fields = {\n \"UTC_start\",\n \"Profile_time\",\n \"Latitude\",\n \"Longitude\"\n }\n\n # Map the standard fields to standard names:\n mapping = {\n \"Latitude\": \"lat\",\n \"Longitude\": \"lon\",\n \"dim_0\": \"scnline\",\n }\n\n def __init__(self, **kwargs):\n\n # Call the base class initializer\n super().__init__(**kwargs)\n\n @expects_file_info()\n def get_info(self, file_info, **kwargs):\n \"\"\"Return a :class:`FileInfo` object with parameters about the\n file content.\n\n Args:\n file_info: Path and name of the file of which to retrieve the info\n about.\n **kwargs: Additional keyword arguments.\n\n Returns:\n A FileInfo object.\n \"\"\"\n\n file = SD(file_info.path, SDC.READ)\n file_info.times[0] = \\\n datetime.strptime(getattr(file, 'start_time'), \"%Y%m%d%H%M%S\")\n file_info.times[1] = \\\n datetime.strptime(getattr(file, 'end_time'), \"%Y%m%d%H%M%S\")\n\n return file_info\n\n @expects_file_info()\n def read(self, file_info, **kwargs):\n \"\"\"Read and parse HDF4 files and load them to a xarray.Dataset\n\n A description about all variables in CloudSat dataset can be found in\n http://www.cloudsat.cira.colostate.edu/data-products/level-2c/2c-ice?term=53.\n\n Args:\n file_info: Path and name of the file as string or FileInfo object.\n **kwargs: Additional keyword arguments that are valid for\n :class:`typhon.files.handlers.common.HDF4`.\n\n Returns:\n A xarray.Dataset object.\n \"\"\"\n\n # We need to import 
at least the standard fields\n user_fields = kwargs.pop(\"fields\", {})\n fields = self.standard_fields | set(user_fields)\n\n # We catch the user mapping here, since we do not want to deal with\n # user-defined names in the further processing. Instead, we use our own\n # mapping\n user_mapping = kwargs.pop(\"mapping\", None)\n\n # Load the dataset from the file:\n dataset = super().read(\n file_info, fields=fields, mapping=self.mapping, **kwargs\n )\n\n dataset[\"time\"] = self._get_time_field(dataset, file_info)\n\n # Remove fields that we do not need any longer (expect the user asked\n # for them explicitly)\n dataset = dataset.drop_vars(\n {\"UTC_start\", \"Profile_time\"} - set(user_fields),\n )\n\n if user_mapping is not None:\n dataset = dataset.rename(user_mapping)\n\n return dataset\n\n def _get_time_field(self, dataset, file_info):\n # This gives us the starting time of the first profile in seconds\n # since midnight in UTC:\n first_profile_time = round(dataset['UTC_start'].item(0))\n\n # This gives us the starting time of all other profiles in seconds\n # since the start of the first profile.\n profile_times = dataset['Profile_time']\n\n # Convert the seconds to milliseconds\n profile_times *= 1000\n profile_times = profile_times.astype(\"int\")\n\n try:\n date = file_info.times[0].date()\n except AttributeError:\n # We have to load the info by ourselves:\n date = self.get_info(file_info).times[0].date()\n\n # Put all times together so we obtain one full timestamp\n # (date + time) for each data point. We are using the\n # starting date coming from parsing the filename.\n profile_times = \\\n np.datetime64(date) \\\n + np.timedelta64(first_profile_time, \"s\") \\\n + profile_times.astype(\"timedelta64[ms]\")\n\n return profile_times\n",
"step-ids": [
4,
7,
8,
9,
11
]
}
|
[
4,
7,
8,
9,
11
] |
import time
class SequenceHeuristic(object):
    """Motion heuristic that only fires after sustained change.

    A frame counts toward the sequence when its change count reaches
    ``minChanges``; the heuristic reports motion once ``minDuration``
    consecutive qualifying frames have accumulated.
    """

    def __init__(self, minChanges, minDuration, noMotionDelay):
        self._minChanges = minChanges
        self._minDuration = minDuration
        self._noMotionDelay = noMotionDelay
        self._duration = 0

    def isValid(self, image, data):
        """Return True once enough consecutive frames showed motion.

        ``image`` is unused; ``data`` must carry a 'numOfChanges' entry.
        ``noMotionDelay`` (milliseconds) throttles idle frames only when
        no streak is pending.
        """
        changes = data['numOfChanges']
        if changes >= self._minChanges:
            # Qualifying frame: extend the streak and fire when long enough.
            self._duration += 1
            if self._duration >= self._minDuration:
                return True
            return False
        if self._duration > 0:
            # Streak still in effect: decay it instead of sleeping.
            self._duration -= 1
        elif self._noMotionDelay:
            # Idle frame with no streak pending: throttle the capture loop.
            time.sleep(self._noMotionDelay / 1000.0)
        return False
|
normal
|
{
"blob_id": "e07bd4cd13209bff8bc1119a619a2954abd52592",
"index": 1515,
"step-1": "<mask token>\n\n\nclass SequenceHeuristic(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SequenceHeuristic(object):\n\n def __init__(self, minChanges, minDuration, noMotionDelay):\n self._minChanges = minChanges\n self._minDuration = minDuration\n self._noMotionDelay = noMotionDelay\n self._duration = 0\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SequenceHeuristic(object):\n\n def __init__(self, minChanges, minDuration, noMotionDelay):\n self._minChanges = minChanges\n self._minDuration = minDuration\n self._noMotionDelay = noMotionDelay\n self._duration = 0\n\n def isValid(self, image, data):\n numOfChanges = data['numOfChanges']\n if numOfChanges >= self._minChanges:\n self._duration += 1\n if self._duration >= self._minDuration:\n return True\n elif self._duration > 0:\n self._duration -= 1\n elif self._noMotionDelay:\n time.sleep(self._noMotionDelay / 1000.0)\n return False\n",
"step-4": "import time\n\n\nclass SequenceHeuristic(object):\n\n def __init__(self, minChanges, minDuration, noMotionDelay):\n self._minChanges = minChanges\n self._minDuration = minDuration\n self._noMotionDelay = noMotionDelay\n self._duration = 0\n\n def isValid(self, image, data):\n numOfChanges = data['numOfChanges']\n if numOfChanges >= self._minChanges:\n self._duration += 1\n if self._duration >= self._minDuration:\n return True\n elif self._duration > 0:\n self._duration -= 1\n elif self._noMotionDelay:\n time.sleep(self._noMotionDelay / 1000.0)\n return False\n",
"step-5": "import time\n\nclass SequenceHeuristic(object):\n def __init__(self, minChanges, minDuration, noMotionDelay):\n self._minChanges = minChanges\n self._minDuration = minDuration\n self._noMotionDelay = noMotionDelay\n self._duration = 0\n \n def isValid(self, image, data):\n numOfChanges = data['numOfChanges']\n if numOfChanges >= self._minChanges:\n self._duration += 1\n if self._duration >= self._minDuration:\n return True\n else:\n if self._duration > 0: # No sleep if duration is in effect\n self._duration -= 1\n else:\n if self._noMotionDelay:\n time.sleep(self._noMotionDelay/1000.0)\n return False\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open('got_info.json', 'w') as f:
f.write(x)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
headers = {'User-Agent':
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'
}
url = (
'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes')
response = requests.get(url, headers=headers)
content = json.loads(response.text)
x = json.dumps(content, ensure_ascii=False, indent=2)
with open('got_info.json', 'w') as f:
f.write(x)
<|reserved_special_token_1|>
import requests
from bs4 import BeautifulSoup
import json
headers = {'User-Agent':
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'
}
url = (
'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes')
response = requests.get(url, headers=headers)
content = json.loads(response.text)
x = json.dumps(content, ensure_ascii=False, indent=2)
with open('got_info.json', 'w') as f:
f.write(x)
<|reserved_special_token_1|>
import requests
from bs4 import BeautifulSoup
import json
# Browser-like User-Agent: some endpoints refuse requests from unknown clients.
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
# TVmaze single-show search for "game of throne" with its episodes embedded.
url = 'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes'
response = requests.get(url, headers = headers)
# Round-trip through json so the saved copy is pretty-printed (2-space indent,
# non-ASCII characters preserved).
content = json.loads(response.text)
x = json.dumps(content, ensure_ascii=False,indent =2)
with open('got_info.json','w') as f:
	f.write(x)
# print(cele_info.split(' '))
|
flexible
|
{
"blob_id": "d625e6724a3fe077a6f80b6de6b1f5bb0b95d47d",
"index": 4612,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('got_info.json', 'w') as f:\n f.write(x)\n",
"step-3": "<mask token>\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'\n }\nurl = (\n 'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes')\nresponse = requests.get(url, headers=headers)\ncontent = json.loads(response.text)\nx = json.dumps(content, ensure_ascii=False, indent=2)\nwith open('got_info.json', 'w') as f:\n f.write(x)\n",
"step-4": "import requests\nfrom bs4 import BeautifulSoup\nimport json\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'\n }\nurl = (\n 'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes')\nresponse = requests.get(url, headers=headers)\ncontent = json.loads(response.text)\nx = json.dumps(content, ensure_ascii=False, indent=2)\nwith open('got_info.json', 'w') as f:\n f.write(x)\n",
"step-5": "import requests\nfrom bs4 import BeautifulSoup\nimport json\n\nheaders = {'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}\nurl = 'http://api.tvmaze.com/singlesearch/shows?q=game+of+throne&embed=episodes'\n\nresponse = requests.get(url, headers = headers)\n\ncontent = json.loads(response.text)\nx = json.dumps(content, ensure_ascii=False,indent =2)\nwith open('got_info.json','w') as f:\n\tf.write(x)\n\n# print(cele_info.split(' '))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# model class for a sale record
from app.models.product import Product
class Sale(Product):
    """A recorded sale of a product.

    Extends the Product model with who made the sale and when.
    """

    def __init__(self, product_name, quantity, unit_price, attendant, date):
        # Product-specific fields are handled by the base class.
        super(Sale, self).__init__(product_name, quantity, unit_price)
        self.date = date
        self.attendant = attendant
|
normal
|
{
"blob_id": "8ed14bb9af23055f4689e06df872a1d36185cd09",
"index": 6865,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Sale(Product):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Sale(Product):\n\n def __init__(self, product_name, quantity, unit_price, attendant, date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date\n",
"step-4": "from app.models.product import Product\n\n\nclass Sale(Product):\n\n def __init__(self, product_name, quantity, unit_price, attendant, date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date\n",
"step-5": "# model class for a sale record\nfrom app.models.product import Product\nclass Sale(Product):\n def __init__(self,product_name,quantity,unit_price,attendant,date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
# Load the puzzle input: one signed integer frequency change per line.
data = None
with open('./01-data.txt') as f:
    data = f.read().splitlines()
# ss: frequencies seen so far (dict used as a set); s: running frequency.
# The starting frequency 0 counts as "seen".
ss = {}
s = 0
ss[s] = True
def check(data):
    """Apply every change in *data* once, updating the module-level
    running frequency ``s`` and seen-set ``ss``.

    Returns the first repeated frequency hit during this pass, or
    None when the pass finishes without a repeat.
    """
    global ss
    global s
    for change in data:
        s += int(change)
        if s in ss:
            return s
        ss[s] = True
    return None
v = check(data)
print('after first pass:', s)
# Keep cycling through the input until some frequency repeats.
while v is None:
    v = check(data)
print('first duplicate:', v)
|
normal
|
{
"blob_id": "7e1dd242c60ee12dfc4130e379fa35ae626a4d63",
"index": 5217,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\n<mask token>\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\n<mask token>\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)\n",
"step-4": "data = None\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\nss = {}\ns = 0\nss[s] = True\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\nv = check(data)\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)\n",
"step-5": "#!/usr/bin/env python3\n\ndata = None\n\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\n\nss = {}\ns = 0\nss[s] = True\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n\n if ss.get(s, False):\n return s\n\n ss[s] = True\n return None\n\n\nv = check(data)\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import json
from google.appengine.ext import webapp
from generic import JsonRpcService
class ViewService(JsonRpcService):
    """Service handler whose 'create' RPC returns a fixed greeting."""

    def json_create(self):
        """Return the canonical greeting string."""
        greeting = "Hello, World!"
        return greeting
|
normal
|
{
"blob_id": "1b091d139635e90fb53b3fecc09bb879514c7b38",
"index": 7352,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ViewService(JsonRpcService):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ViewService(JsonRpcService):\n\n def json_create(self):\n return 'Hello, World!'\n",
"step-4": "import os\nimport json\nfrom google.appengine.ext import webapp\nfrom generic import JsonRpcService\n\n\nclass ViewService(JsonRpcService):\n\n def json_create(self):\n return 'Hello, World!'\n",
"step-5": "import os\nimport json\n\nfrom google.appengine.ext import webapp\nfrom generic import JsonRpcService\n\nclass ViewService(JsonRpcService):\n def json_create(self):\n return \"Hello, World!\"\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def talk(text):
engine.say(text)
engine.runAndWait()
def takeCommand():
try:
with sr.Microphone() as sc:
print('Listening......')
vc = listner.listen(sc)
cmd = listner.recognize_google(vc)
cmd = cmd.lower()
if 'alexa' in cmd:
cmd = cmd.replace('alexa', '')
except:
pass
return cmd
def run_alexa():
command = takeCommand()
print(command)
if 'play' in command:
song = command.replace('play', '')
talk('playing ' + song)
pywhatkit.playonyt(song)
if 'time' in command:
time = datetime.datetime.now().strftime('%I:%M %p')
talk('time is ' + time)
print(time)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
engine.setProperty('voice', voices[10].id)
<|reserved_special_token_0|>
engine.setProperty('rate', 150)
def talk(text):
engine.say(text)
engine.runAndWait()
def takeCommand():
try:
with sr.Microphone() as sc:
print('Listening......')
vc = listner.listen(sc)
cmd = listner.recognize_google(vc)
cmd = cmd.lower()
if 'alexa' in cmd:
cmd = cmd.replace('alexa', '')
except:
pass
return cmd
def run_alexa():
command = takeCommand()
print(command)
if 'play' in command:
song = command.replace('play', '')
talk('playing ' + song)
pywhatkit.playonyt(song)
if 'time' in command:
time = datetime.datetime.now().strftime('%I:%M %p')
talk('time is ' + time)
print(time)
run_alexa()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
listner = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[10].id)
rate = engine.getProperty('rate')
engine.setProperty('rate', 150)
def talk(text):
engine.say(text)
engine.runAndWait()
def takeCommand():
try:
with sr.Microphone() as sc:
print('Listening......')
vc = listner.listen(sc)
cmd = listner.recognize_google(vc)
cmd = cmd.lower()
if 'alexa' in cmd:
cmd = cmd.replace('alexa', '')
except:
pass
return cmd
def run_alexa():
command = takeCommand()
print(command)
if 'play' in command:
song = command.replace('play', '')
talk('playing ' + song)
pywhatkit.playonyt(song)
if 'time' in command:
time = datetime.datetime.now().strftime('%I:%M %p')
talk('time is ' + time)
print(time)
run_alexa()
<|reserved_special_token_1|>
import speech_recognition as sr
import pyttsx3
import pywhatkit
import datetime
listner = sr.Recognizer()
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[10].id)
rate = engine.getProperty('rate')
engine.setProperty('rate', 150)
def talk(text):
engine.say(text)
engine.runAndWait()
def takeCommand():
try:
with sr.Microphone() as sc:
print('Listening......')
vc = listner.listen(sc)
cmd = listner.recognize_google(vc)
cmd = cmd.lower()
if 'alexa' in cmd:
cmd = cmd.replace('alexa', '')
except:
pass
return cmd
def run_alexa():
command = takeCommand()
print(command)
if 'play' in command:
song = command.replace('play', '')
talk('playing ' + song)
pywhatkit.playonyt(song)
if 'time' in command:
time = datetime.datetime.now().strftime('%I:%M %p')
talk('time is ' + time)
print(time)
run_alexa()
<|reserved_special_token_1|>
import speech_recognition as sr
import pyttsx3
import pywhatkit
import datetime
listner = sr.Recognizer()
engine = pyttsx3.init()
# Select a specific installed voice; index 10 assumes at least 11 voices
# exist on this machine -- TODO confirm on the target system.
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[10].id)
# Set the speech rate to 150 (pyttsx3 'rate' property).
rate = engine.getProperty('rate')
engine.setProperty('rate', 150)
# Text-to-speech helper used by the command handlers below.
def talk(text):
    """Speak *text* aloud via the pyttsx3 engine; returns when playback ends."""
    engine.say(text)
    engine.runAndWait()
def takeCommand():
    """Listen on the microphone and return the recognised command text.

    The wake word 'alexa' is stripped from the transcript. Returns an
    empty string when listening or recognition fails, so callers always
    receive a str.

    Bug fix: the original used a bare ``except: pass`` and then returned
    ``cmd`` unconditionally, so any recognition error crashed with
    UnboundLocalError because ``cmd`` was never assigned.
    """
    cmd = ''
    try:
        with sr.Microphone() as sc:
            print("Listening......")
            vc = listner.listen(sc)
            cmd = listner.recognize_google(vc)
            cmd = cmd.lower()
            if 'alexa' in cmd:
                cmd = cmd.replace('alexa', '')
    except Exception:
        # Microphone/recognition failures (e.g. UnknownValueError,
        # RequestError) are non-fatal: fall through with cmd == ''.
        pass
    return cmd
def run_alexa():
    """Grab one voice command and dispatch it.

    Supported commands: 'play <song>' (plays it on YouTube) and
    'time' (speaks the current clock time).
    """
    command = takeCommand()
    print(command)
    if 'play' in command:
        song = command.replace('play', '')
        talk('playing ' + song)
        pywhatkit.playonyt(song)
    if 'time' in command:
        now = datetime.datetime.now().strftime('%I:%M %p')
        talk('time is ' + now)
        print(now)


run_alexa()
|
flexible
|
{
"blob_id": "c4f437e6f5aaeccb6dd0948c3ed1f1d465bb29ce",
"index": 1200,
"step-1": "<mask token>\n\n\ndef talk(text):\n engine.say(text)\n engine.runAndWait()\n\n\ndef takeCommand():\n try:\n with sr.Microphone() as sc:\n print('Listening......')\n vc = listner.listen(sc)\n cmd = listner.recognize_google(vc)\n cmd = cmd.lower()\n if 'alexa' in cmd:\n cmd = cmd.replace('alexa', '')\n except:\n pass\n return cmd\n\n\ndef run_alexa():\n command = takeCommand()\n print(command)\n if 'play' in command:\n song = command.replace('play', '')\n talk('playing ' + song)\n pywhatkit.playonyt(song)\n if 'time' in command:\n time = datetime.datetime.now().strftime('%I:%M %p')\n talk('time is ' + time)\n print(time)\n\n\n<mask token>\n",
"step-2": "<mask token>\nengine.setProperty('voice', voices[10].id)\n<mask token>\nengine.setProperty('rate', 150)\n\n\ndef talk(text):\n engine.say(text)\n engine.runAndWait()\n\n\ndef takeCommand():\n try:\n with sr.Microphone() as sc:\n print('Listening......')\n vc = listner.listen(sc)\n cmd = listner.recognize_google(vc)\n cmd = cmd.lower()\n if 'alexa' in cmd:\n cmd = cmd.replace('alexa', '')\n except:\n pass\n return cmd\n\n\ndef run_alexa():\n command = takeCommand()\n print(command)\n if 'play' in command:\n song = command.replace('play', '')\n talk('playing ' + song)\n pywhatkit.playonyt(song)\n if 'time' in command:\n time = datetime.datetime.now().strftime('%I:%M %p')\n talk('time is ' + time)\n print(time)\n\n\nrun_alexa()\n",
"step-3": "<mask token>\nlistner = sr.Recognizer()\nengine = pyttsx3.init()\nvoices = engine.getProperty('voices')\nengine.setProperty('voice', voices[10].id)\nrate = engine.getProperty('rate')\nengine.setProperty('rate', 150)\n\n\ndef talk(text):\n engine.say(text)\n engine.runAndWait()\n\n\ndef takeCommand():\n try:\n with sr.Microphone() as sc:\n print('Listening......')\n vc = listner.listen(sc)\n cmd = listner.recognize_google(vc)\n cmd = cmd.lower()\n if 'alexa' in cmd:\n cmd = cmd.replace('alexa', '')\n except:\n pass\n return cmd\n\n\ndef run_alexa():\n command = takeCommand()\n print(command)\n if 'play' in command:\n song = command.replace('play', '')\n talk('playing ' + song)\n pywhatkit.playonyt(song)\n if 'time' in command:\n time = datetime.datetime.now().strftime('%I:%M %p')\n talk('time is ' + time)\n print(time)\n\n\nrun_alexa()\n",
"step-4": "import speech_recognition as sr\nimport pyttsx3\nimport pywhatkit\nimport datetime\nlistner = sr.Recognizer()\nengine = pyttsx3.init()\nvoices = engine.getProperty('voices')\nengine.setProperty('voice', voices[10].id)\nrate = engine.getProperty('rate')\nengine.setProperty('rate', 150)\n\n\ndef talk(text):\n engine.say(text)\n engine.runAndWait()\n\n\ndef takeCommand():\n try:\n with sr.Microphone() as sc:\n print('Listening......')\n vc = listner.listen(sc)\n cmd = listner.recognize_google(vc)\n cmd = cmd.lower()\n if 'alexa' in cmd:\n cmd = cmd.replace('alexa', '')\n except:\n pass\n return cmd\n\n\ndef run_alexa():\n command = takeCommand()\n print(command)\n if 'play' in command:\n song = command.replace('play', '')\n talk('playing ' + song)\n pywhatkit.playonyt(song)\n if 'time' in command:\n time = datetime.datetime.now().strftime('%I:%M %p')\n talk('time is ' + time)\n print(time)\n\n\nrun_alexa()\n",
"step-5": "import speech_recognition as sr\nimport pyttsx3\nimport pywhatkit\nimport datetime\n\n\nlistner = sr.Recognizer()\nengine = pyttsx3.init()\n\n#change voices\nvoices = engine.getProperty('voices')\nengine.setProperty('voice',voices[10].id)\nrate = engine.getProperty('rate')\nengine.setProperty('rate', 150)\n\n#for machine to say\ndef talk(text):\n engine.say(text)\n engine.runAndWait()\n\ndef takeCommand():\n try:\n with sr.Microphone() as sc:\n print(\"Listening......\")\n vc = listner.listen(sc)\n cmd = listner.recognize_google(vc)\n cmd = cmd.lower()\n if 'alexa' in cmd:\n cmd = cmd.replace('alexa','')\n except:\n pass\n return cmd\n\ndef run_alexa():\n command = takeCommand()\n print(command)\n if 'play' in command:\n song = command.replace('play','')\n talk('playing '+song)\n pywhatkit.playonyt(song)\n \n if 'time' in command:\n time = datetime.datetime.now().strftime('%I:%M %p')\n talk('time is '+time)\n print(time)\n\nrun_alexa()",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def GetPrediction(X, regr):
return regr.predict(X)
def GetRMSE(Y, YP):
return sqrt(mean_squared_error(Y, YP))
def SplitFitGKRR(X, Y):
Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)
regr = setBestParameters(len(Xt))
regr.fit(Xt, Yt)
return GetRMSE(YT, GetPrediction(XT, regr))
def setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):
if f > L:
f = L - 2
return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={
'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})
<|reserved_special_token_0|>
def FWDS(i=50):
X, Y, L = imp.FullImport(1)
T = len(X[0])
BestDesc = []
while len(BestDesc) != 2 and len(BestDesc) < T:
D = np.arange(T)
TestRMS = []
for z in range(T):
TestSet = []
setRMS = []
for n in range(len(BestDesc)):
desc, _ = GODesc(X, BestDesc[n])
TestSet.append(desc)
tryfit = True
if BestDesc.count(z) == 0:
desc, _ = GODesc(X, z)
TestSet.append(desc)
else:
tryfit = False
print(TestSet)
if tryfit == True:
TestSet = np.swapaxes(TestSet, 0, 1)
for n in range(i):
rms = MethodSelect(TestSet, Y)
setRMS.append(rms)
TestRMS.append(np.mean(setRMS))
print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))
else:
TestRMS.append(10)
print('\n')
TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))
for num in range(len(TestRMS)):
print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]
)
BestDesc.append(D[t])
print('Desc', D[t], '(' + str(L[0][D[t]]) +
') added as descriptor number', len(BestDesc), '\n\n')
print('Best Descs are:')
for n in range(len(BestDesc) - 1):
print(L[0][BestDesc[n]])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def MethodSelect(TestSet, Y):
return SplitFitGKRR(TestSet, Y)
def GetPrediction(X, regr):
return regr.predict(X)
def GetRMSE(Y, YP):
return sqrt(mean_squared_error(Y, YP))
def SplitFitGKRR(X, Y):
Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)
regr = setBestParameters(len(Xt))
regr.fit(Xt, Yt)
return GetRMSE(YT, GetPrediction(XT, regr))
def setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):
if f > L:
f = L - 2
return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={
'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})
def GODesc(X, n, label=None):
if label != None:
L = label[0][n]
else:
L = 'None'
X = X[:, n]
return list(X), L
def FWDS(i=50):
X, Y, L = imp.FullImport(1)
T = len(X[0])
BestDesc = []
while len(BestDesc) != 2 and len(BestDesc) < T:
D = np.arange(T)
TestRMS = []
for z in range(T):
TestSet = []
setRMS = []
for n in range(len(BestDesc)):
desc, _ = GODesc(X, BestDesc[n])
TestSet.append(desc)
tryfit = True
if BestDesc.count(z) == 0:
desc, _ = GODesc(X, z)
TestSet.append(desc)
else:
tryfit = False
print(TestSet)
if tryfit == True:
TestSet = np.swapaxes(TestSet, 0, 1)
for n in range(i):
rms = MethodSelect(TestSet, Y)
setRMS.append(rms)
TestRMS.append(np.mean(setRMS))
print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))
else:
TestRMS.append(10)
print('\n')
TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))
for num in range(len(TestRMS)):
print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]
)
BestDesc.append(D[t])
print('Desc', D[t], '(' + str(L[0][D[t]]) +
') added as descriptor number', len(BestDesc), '\n\n')
print('Best Descs are:')
for n in range(len(BestDesc) - 1):
print(L[0][BestDesc[n]])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def MethodSelect(TestSet, Y):
return SplitFitGKRR(TestSet, Y)
def GetPrediction(X, regr):
return regr.predict(X)
def GetRMSE(Y, YP):
return sqrt(mean_squared_error(Y, YP))
def SplitFitGKRR(X, Y):
Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)
regr = setBestParameters(len(Xt))
regr.fit(Xt, Yt)
return GetRMSE(YT, GetPrediction(XT, regr))
def setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):
if f > L:
f = L - 2
return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={
'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})
def GODesc(X, n, label=None):
if label != None:
L = label[0][n]
else:
L = 'None'
X = X[:, n]
return list(X), L
def FWDS(i=50):
X, Y, L = imp.FullImport(1)
T = len(X[0])
BestDesc = []
while len(BestDesc) != 2 and len(BestDesc) < T:
D = np.arange(T)
TestRMS = []
for z in range(T):
TestSet = []
setRMS = []
for n in range(len(BestDesc)):
desc, _ = GODesc(X, BestDesc[n])
TestSet.append(desc)
tryfit = True
if BestDesc.count(z) == 0:
desc, _ = GODesc(X, z)
TestSet.append(desc)
else:
tryfit = False
print(TestSet)
if tryfit == True:
TestSet = np.swapaxes(TestSet, 0, 1)
for n in range(i):
rms = MethodSelect(TestSet, Y)
setRMS.append(rms)
TestRMS.append(np.mean(setRMS))
print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))
else:
TestRMS.append(10)
print('\n')
TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))
for num in range(len(TestRMS)):
print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]
)
BestDesc.append(D[t])
print('Desc', D[t], '(' + str(L[0][D[t]]) +
') added as descriptor number', len(BestDesc), '\n\n')
print('Best Descs are:')
for n in range(len(BestDesc) - 1):
print(L[0][BestDesc[n]])
if __name__ == '__main__':
FWS(1)
<|reserved_special_token_1|>
import FitImport as imp
import numpy as np
from math import *
from sklearn.kernel_ridge import KernelRidge
from sklearn.grid_search import GridSearchCV
from sklearn import cross_validation
from sklearn.cross_validation import train_test_split
from sklearn.metrics import mean_squared_error
# Fold counts for the grid search / outer CV, and the resolution of the
# alpha/gamma parameter grids.
GSFOLDS = 3
FOLDS = 5
NPTS = 25
def MethodSelect(TestSet, Y):
    # Indirection point for the learning method: to use a different
    # learner (NN, decision trees, ...), return that method's test RMSE
    # here instead of the kernel-ridge fit.
    return SplitFitGKRR(TestSet, Y)
def GetPrediction(X, regr):
    """Return *regr*'s predictions for the sample matrix *X*."""
    predictions = regr.predict(X)
    return predictions
def GetRMSE(Y, YP):
    """Root-mean-squared error between targets *Y* and predictions *YP*."""
    mse = mean_squared_error(Y, YP)
    return sqrt(mse)
def SplitFitGKRR(X, Y):
    """Hold out 20% of the data, fit a grid-searched kernel ridge model
    on the rest, and return the RMSE on the held-out portion."""
    X_train, X_test, Y_train, Y_test = cross_validation.train_test_split(
        X, Y, test_size=0.2)
    regr = setBestParameters(len(X_train))
    regr.fit(X_train, Y_train)
    predictions = GetPrediction(X_test, regr)
    return GetRMSE(Y_test, predictions)
def setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):
    """Build a GridSearchCV over an RBF kernel ridge regressor.

    *L* is the training-set size; when the requested fold count *f*
    exceeds it, the folds are reduced to L - 2. *ker* is accepted for
    interface compatibility but unused here.
    """
    folds = f if f <= L else L - 2
    param_grid = {'alpha': np.logspace(-6, 3, npts),
                  'gamma': np.logspace(-10, 0, npts)}
    return GridSearchCV(KernelRidge(kernel='rbf'), cv=folds,
                        param_grid=param_grid)
def GODesc(X, n, label=None):
    """Extract descriptor column *n* from the sample matrix *X*.

    Returns ``(values, name)``: the column as a plain list and, when a
    *label* table is given, the descriptor's name taken from its first
    row (otherwise the string 'None').
    """
    # Fix: compare to None with 'is not' rather than '!='. '!=' invokes
    # __ne__, which label tables such as numpy arrays overload
    # elementwise, making the truth test ambiguous or wrong.
    if label is not None:
        L = label[0][n]
    else:
        L = 'None'
    X = X[:, n]
    return list(X), L
def FWDS(i=50):
    """Greedy forward descriptor selection.

    Repeatedly tries every unused descriptor alongside the already chosen
    ones, scores each candidate set by its mean RMSE over `i` random
    train/test splits, and adds the best-scoring descriptor to the set.

    i -- number of fit/score repetitions per candidate descriptor.
    """
    X, Y, L = imp.FullImport(1)
    T = len(X[0])   # total number of descriptors available
    BestDesc = []   # indices of the descriptors chosen so far
    while len(BestDesc) != 2 and len(BestDesc) < T:
        D = np.arange(T)
        TestRMS = []
        for z in range(T):
            TestSet = []
            setRMS = []
            # start each candidate set from the descriptors already selected
            for n in range(len(BestDesc)):
                desc, _ = GODesc(X, BestDesc[n])
                TestSet.append(desc)
            tryfit = True
            if BestDesc.count(z) == 0:
                desc, _ = GODesc(X, z)
                TestSet.append(desc)
            else:
                # already selected: skip the fit but keep TestRMS aligned with D
                tryfit = False
            print(TestSet)
            if tryfit == True:
                TestSet = np.swapaxes(TestSet, 0, 1)
                for n in range(i):
                    rms = MethodSelect(TestSet, Y)
                    setRMS.append(rms)
                TestRMS.append(np.mean(setRMS))
                print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))
            else:
                # sentinel RMS so an already-picked descriptor never wins
                TestRMS.append(10)
        print('\n')
        # sort descriptor indices by ascending RMS
        TestRMS, D = (list(pair) for pair in zip(*sorted(zip(TestRMS, D))))
        for num in range(len(TestRMS)):
            print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]
                )
        # BUG FIX: the original indexed D[t] with an undefined name `t`
        # (NameError); after the ascending sort the winner is D[0].
        best = D[0]
        BestDesc.append(best)
        print('Desc', best, '(' + str(L[0][best]) +
            ') added as descriptor number', len(BestDesc), '\n\n')
    print('Best Descs are:')
    # BUG FIX: the original ranged to len(BestDesc) - 1, dropping the last
    # selected descriptor from the summary.
    for n in range(len(BestDesc)):
        print(L[0][BestDesc[n]])
if __name__ == '__main__':
    # BUG FIX: the original called FWS(1), but the function defined above
    # is named FWDS — the call raised NameError.
    FWDS(1)
<|reserved_special_token_1|>
import FitImport as imp
import numpy as np
from math import *
from sklearn.kernel_ridge import KernelRidge
from sklearn.grid_search import GridSearchCV
from sklearn import cross_validation
from sklearn.cross_validation import train_test_split
from sklearn.metrics import mean_squared_error
GSFOLDS = 3
FOLDS = 5
NPTS = 25
#Method Select function that allows you tyo use a differnet learning method
# (NN,decision trees, etc) Simply replace GKRR Methods section with functions
#that run your method and have method select return the rms. MethodSelect
#provides the X to test in TestSet and the target values in Y.
def MethodSelect(TestSet,Y):
    """Score the candidate descriptor set with the active backend (GKRR)."""
    return SplitFitGKRR(TestSet,Y)
##--------GKRR METHODS---------#
def GetPrediction(X,regr):
    """Return predictions of the fitted estimator `regr` for samples X."""
    return regr.predict(X)
def GetRMSE(Y,YP):
    """Root-mean-squared error between targets Y and predictions YP."""
    return sqrt(mean_squared_error(Y,YP))
def SplitFitGKRR(X,Y):
    """80/20 split, grid-search-fit KRR on the train part, return test RMSE."""
    Xt,XT,Yt,YT = cross_validation.train_test_split(
        X, Y, test_size = 0.2)
    regr = setBestParameters(len(Xt))
    regr.fit(Xt,Yt)
    return GetRMSE(YT,GetPrediction(XT,regr))
def setBestParameters(L,ker=0,npts=NPTS,f=GSFOLDS):
    """GridSearchCV over an RBF kernel-ridge model; cv folds clamped to L."""
    if f > L:
        # ROBUSTNESS FIX: L - 2 can reach 0 for tiny sample counts and
        # GridSearchCV rejects cv < 2, so keep at least 2 folds.
        f = max(2, L-2)
    return GridSearchCV(KernelRidge(kernel='rbf'), cv=f,
                        param_grid={"alpha": np.logspace(-6,3,npts),
                                    "gamma": np.logspace(-10,0,npts)})
#--------Descriptor Methods---------#
#Gets Descriptor from position n and returns it.
#If a label Array from fitimport is passed in as well gets grabs the label that
#corresponds to the descriptor returned
def GODesc(X,n,label=None):
    """Return (column n of X as a list, its label — or "None" if no table)."""
    # IDIOM FIX: identity check (`is not None`) is the correct idiom here
    # and avoids element-wise comparison should an array be passed as label
    if label is not None:
        L = label[0][n]
    else:
        L = "None"
    X = X[:,n]
    return list(X),L
##---Main-----##
def FWDS(i = 50):
    """Greedy forward descriptor selection driver.

    i -- number of random train/test splits used to score each candidate.
    """
    X,Y,L = imp.FullImport(1)
    T = len(X[0]) #Number Of Descriptors
    BestDesc = [] #Holds Best Descriptors

    while ((len(BestDesc) != 2) and len(BestDesc) < T) :
        #continue until enough Best Descriptors or until no more Descr to add
        D = np.arange(T)
        TestRMS = []

        for z in range(T): #Test Each descriptor
            TestSet = []
            setRMS = []
            for n in range(len(BestDesc)): #Add already known bests
                desc,_ = GODesc(X,BestDesc[n])
                TestSet.append(desc)

            tryfit = True #add step new descriptors?
            if BestDesc.count(z) == 0: #Test if already one of best before try
                desc,_ = GODesc(X,z)
                TestSet.append(desc)
            else: tryfit = False

            print(TestSet)

            if tryfit == True: #If not best, test with current best
                TestSet = np.swapaxes(TestSet,0,1)
                for n in range(i):
                    rms = MethodSelect(TestSet,Y)
                    setRMS.append(rms)
                TestRMS.append(np.mean(setRMS))
                print("Descr ",z,"",L[0][z]+" has rms",np.mean(setRMS))

            else: #Skip if already a best descriptor (sentinel keeps it last)
                TestRMS.append(10)

        #Sorts and prints list ascending by RMS
        print("\n");
        TestRMS, D = (list(pair) for pair in zip(*sorted(zip(TestRMS, D))))
        for num in range(len(TestRMS)):
            print("Descr ",D[num],"",L[0][D[num]]+" has rms",TestRMS[num])

        #Add best Descriptor to list
        #BUG FIX: the original indexed D[t] with an undefined name `t`
        #(NameError); after the ascending sort the winner is D[0].
        best = D[0]
        BestDesc.append(best)
        print("Desc",best,"("+str(L[0][best])+
              ") added as descriptor number",len(BestDesc),"\n\n")

    #When done print best Descriptors in order
    #BUG FIX: range(len(BestDesc)-1) dropped the last selected descriptor
    print("Best Descs are:")
    for n in range(len(BestDesc)):
        print(L[0][BestDesc[n]])
#FWS take 1 argument which is the number of iterations to test each Descriptor
#each time will be with a different training and testing set.
if __name__ == '__main__':
    # BUG FIX: the function defined above is FWDS, not FWS (NameError)
    FWDS(1)
|
flexible
|
{
"blob_id": "7d3a33968a375141c1c451ecd531ce8d97906c7f",
"index": 3065,
"step-1": "<mask token>\n\n\ndef GetPrediction(X, regr):\n return regr.predict(X)\n\n\ndef GetRMSE(Y, YP):\n return sqrt(mean_squared_error(Y, YP))\n\n\ndef SplitFitGKRR(X, Y):\n Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)\n regr = setBestParameters(len(Xt))\n regr.fit(Xt, Yt)\n return GetRMSE(YT, GetPrediction(XT, regr))\n\n\ndef setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):\n if f > L:\n f = L - 2\n return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={\n 'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})\n\n\n<mask token>\n\n\ndef FWDS(i=50):\n X, Y, L = imp.FullImport(1)\n T = len(X[0])\n BestDesc = []\n while len(BestDesc) != 2 and len(BestDesc) < T:\n D = np.arange(T)\n TestRMS = []\n for z in range(T):\n TestSet = []\n setRMS = []\n for n in range(len(BestDesc)):\n desc, _ = GODesc(X, BestDesc[n])\n TestSet.append(desc)\n tryfit = True\n if BestDesc.count(z) == 0:\n desc, _ = GODesc(X, z)\n TestSet.append(desc)\n else:\n tryfit = False\n print(TestSet)\n if tryfit == True:\n TestSet = np.swapaxes(TestSet, 0, 1)\n for n in range(i):\n rms = MethodSelect(TestSet, Y)\n setRMS.append(rms)\n TestRMS.append(np.mean(setRMS))\n print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))\n else:\n TestRMS.append(10)\n print('\\n')\n TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))\n for num in range(len(TestRMS)):\n print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]\n )\n BestDesc.append(D[t])\n print('Desc', D[t], '(' + str(L[0][D[t]]) +\n ') added as descriptor number', len(BestDesc), '\\n\\n')\n print('Best Descs are:')\n for n in range(len(BestDesc) - 1):\n print(L[0][BestDesc[n]])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef MethodSelect(TestSet, Y):\n return SplitFitGKRR(TestSet, Y)\n\n\ndef GetPrediction(X, regr):\n return regr.predict(X)\n\n\ndef GetRMSE(Y, YP):\n return sqrt(mean_squared_error(Y, YP))\n\n\ndef SplitFitGKRR(X, Y):\n Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)\n regr = setBestParameters(len(Xt))\n regr.fit(Xt, Yt)\n return GetRMSE(YT, GetPrediction(XT, regr))\n\n\ndef setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):\n if f > L:\n f = L - 2\n return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={\n 'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})\n\n\ndef GODesc(X, n, label=None):\n if label != None:\n L = label[0][n]\n else:\n L = 'None'\n X = X[:, n]\n return list(X), L\n\n\ndef FWDS(i=50):\n X, Y, L = imp.FullImport(1)\n T = len(X[0])\n BestDesc = []\n while len(BestDesc) != 2 and len(BestDesc) < T:\n D = np.arange(T)\n TestRMS = []\n for z in range(T):\n TestSet = []\n setRMS = []\n for n in range(len(BestDesc)):\n desc, _ = GODesc(X, BestDesc[n])\n TestSet.append(desc)\n tryfit = True\n if BestDesc.count(z) == 0:\n desc, _ = GODesc(X, z)\n TestSet.append(desc)\n else:\n tryfit = False\n print(TestSet)\n if tryfit == True:\n TestSet = np.swapaxes(TestSet, 0, 1)\n for n in range(i):\n rms = MethodSelect(TestSet, Y)\n setRMS.append(rms)\n TestRMS.append(np.mean(setRMS))\n print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))\n else:\n TestRMS.append(10)\n print('\\n')\n TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))\n for num in range(len(TestRMS)):\n print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]\n )\n BestDesc.append(D[t])\n print('Desc', D[t], '(' + str(L[0][D[t]]) +\n ') added as descriptor number', len(BestDesc), '\\n\\n')\n print('Best Descs are:')\n for n in range(len(BestDesc) - 1):\n print(L[0][BestDesc[n]])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef MethodSelect(TestSet, Y):\n return SplitFitGKRR(TestSet, Y)\n\n\ndef GetPrediction(X, regr):\n return regr.predict(X)\n\n\ndef GetRMSE(Y, YP):\n return sqrt(mean_squared_error(Y, YP))\n\n\ndef SplitFitGKRR(X, Y):\n Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)\n regr = setBestParameters(len(Xt))\n regr.fit(Xt, Yt)\n return GetRMSE(YT, GetPrediction(XT, regr))\n\n\ndef setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):\n if f > L:\n f = L - 2\n return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={\n 'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})\n\n\ndef GODesc(X, n, label=None):\n if label != None:\n L = label[0][n]\n else:\n L = 'None'\n X = X[:, n]\n return list(X), L\n\n\ndef FWDS(i=50):\n X, Y, L = imp.FullImport(1)\n T = len(X[0])\n BestDesc = []\n while len(BestDesc) != 2 and len(BestDesc) < T:\n D = np.arange(T)\n TestRMS = []\n for z in range(T):\n TestSet = []\n setRMS = []\n for n in range(len(BestDesc)):\n desc, _ = GODesc(X, BestDesc[n])\n TestSet.append(desc)\n tryfit = True\n if BestDesc.count(z) == 0:\n desc, _ = GODesc(X, z)\n TestSet.append(desc)\n else:\n tryfit = False\n print(TestSet)\n if tryfit == True:\n TestSet = np.swapaxes(TestSet, 0, 1)\n for n in range(i):\n rms = MethodSelect(TestSet, Y)\n setRMS.append(rms)\n TestRMS.append(np.mean(setRMS))\n print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))\n else:\n TestRMS.append(10)\n print('\\n')\n TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))\n for num in range(len(TestRMS)):\n print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]\n )\n BestDesc.append(D[t])\n print('Desc', D[t], '(' + str(L[0][D[t]]) +\n ') added as descriptor number', len(BestDesc), '\\n\\n')\n print('Best Descs are:')\n for n in range(len(BestDesc) - 1):\n print(L[0][BestDesc[n]])\n\n\nif __name__ == '__main__':\n FWS(1)\n",
"step-4": "import FitImport as imp\nimport numpy as np\nfrom math import *\nfrom sklearn.kernel_ridge import KernelRidge\nfrom sklearn.grid_search import GridSearchCV\nfrom sklearn import cross_validation\nfrom sklearn.cross_validation import train_test_split\nfrom sklearn.metrics import mean_squared_error\nGSFOLDS = 3\nFOLDS = 5\nNPTS = 25\n\n\ndef MethodSelect(TestSet, Y):\n return SplitFitGKRR(TestSet, Y)\n\n\ndef GetPrediction(X, regr):\n return regr.predict(X)\n\n\ndef GetRMSE(Y, YP):\n return sqrt(mean_squared_error(Y, YP))\n\n\ndef SplitFitGKRR(X, Y):\n Xt, XT, Yt, YT = cross_validation.train_test_split(X, Y, test_size=0.2)\n regr = setBestParameters(len(Xt))\n regr.fit(Xt, Yt)\n return GetRMSE(YT, GetPrediction(XT, regr))\n\n\ndef setBestParameters(L, ker=0, npts=NPTS, f=GSFOLDS):\n if f > L:\n f = L - 2\n return GridSearchCV(KernelRidge(kernel='rbf'), cv=f, param_grid={\n 'alpha': np.logspace(-6, 3, npts), 'gamma': np.logspace(-10, 0, npts)})\n\n\ndef GODesc(X, n, label=None):\n if label != None:\n L = label[0][n]\n else:\n L = 'None'\n X = X[:, n]\n return list(X), L\n\n\ndef FWDS(i=50):\n X, Y, L = imp.FullImport(1)\n T = len(X[0])\n BestDesc = []\n while len(BestDesc) != 2 and len(BestDesc) < T:\n D = np.arange(T)\n TestRMS = []\n for z in range(T):\n TestSet = []\n setRMS = []\n for n in range(len(BestDesc)):\n desc, _ = GODesc(X, BestDesc[n])\n TestSet.append(desc)\n tryfit = True\n if BestDesc.count(z) == 0:\n desc, _ = GODesc(X, z)\n TestSet.append(desc)\n else:\n tryfit = False\n print(TestSet)\n if tryfit == True:\n TestSet = np.swapaxes(TestSet, 0, 1)\n for n in range(i):\n rms = MethodSelect(TestSet, Y)\n setRMS.append(rms)\n TestRMS.append(np.mean(setRMS))\n print('Descr ', z, '', L[0][z] + ' has rms', np.mean(setRMS))\n else:\n TestRMS.append(10)\n print('\\n')\n TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))\n for num in range(len(TestRMS)):\n print('Descr ', D[num], '', L[0][D[num]] + ' has rms', TestRMS[num]\n )\n 
BestDesc.append(D[t])\n print('Desc', D[t], '(' + str(L[0][D[t]]) +\n ') added as descriptor number', len(BestDesc), '\\n\\n')\n print('Best Descs are:')\n for n in range(len(BestDesc) - 1):\n print(L[0][BestDesc[n]])\n\n\nif __name__ == '__main__':\n FWS(1)\n",
"step-5": "import FitImport as imp\nimport numpy as np\nfrom math import *\nfrom sklearn.kernel_ridge import KernelRidge\nfrom sklearn.grid_search import GridSearchCV\nfrom sklearn import cross_validation\nfrom sklearn.cross_validation import train_test_split\nfrom sklearn.metrics import mean_squared_error\n\nGSFOLDS = 3\nFOLDS = 5\nNPTS = 25\n\n\n#Method Select function that allows you tyo use a differnet learning method\n# (NN,decision trees, etc) Simply replace GKRR Methods section with functions\n#that run your method and have method select return the rms. MethodSelect\n#provides the X to test in TestSet and the target values in Y.\n\ndef MethodSelect(TestSet,Y):\n return SplitFitGKRR(TestSet,Y)\n\n##--------GKRR METHODS---------#\n\ndef GetPrediction(X,regr):\n return regr.predict(X)\n\ndef GetRMSE(Y,YP):\n return sqrt(mean_squared_error(Y,YP))\n\ndef SplitFitGKRR(X,Y):\n Xt,XT,Yt,YT = cross_validation.train_test_split(\n X, Y, test_size = 0.2)\n regr = setBestParameters(len(Xt))\n regr.fit(Xt,Yt)\n return GetRMSE(YT,GetPrediction(XT,regr))\n\ndef setBestParameters(L,ker=0,npts=NPTS,f=GSFOLDS):\n if f > L:\n f = L-2\n return GridSearchCV(KernelRidge(kernel='rbf'), cv=f,\n param_grid={\"alpha\": np.logspace(-6,3,npts),\n \"gamma\": np.logspace(-10,0,npts)})\n\n\n#--------Descriptor Methods---------#\n\n#Gets Descriptor from position n and returns it.\n#If a label Array from fitimport is passed in as well gets grabs the label that\n#corresponds to the descriptor returned \n\ndef GODesc(X,n,label=None):\n if label != None:\n L = label[0][n]\n else:\n L = \"None\"\n X = X[:,n] \n return list(X),L\n\n\n\n##---Main-----##\n\ndef FWDS(i = 50):\n X,Y,L = imp.FullImport(1)\n T = len(X[0]) #Number Of Descriptors\n BestDesc = [] #Holds Best Descriptors\n \n while ((len(BestDesc) != 2) and len(BestDesc) < T) :\n #continue until found 30 Best Descriptors or until no more Descr to add\n D = np.arange(T)\n TestRMS = []\n \n for z in range(T): #Test Each descriptor\n 
#print(\"Testing Descr \",z,\"\",L[0][z])\n TestSet = []\n setRMS = []\n for n in range(len(BestDesc)): #Add already known bests\n desc,_ = GODesc(X,BestDesc[n])\n TestSet.append(desc)\n \n tryfit = True #add step new descriptors?\n if BestDesc.count(z) == 0: #Test if already one of best before try\n desc,_ = GODesc(X,z)\n TestSet.append(desc)\n else: tryfit = False\n\n print(TestSet)\n \n if tryfit == True: #If not best, test with current best\n TestSet = np.swapaxes(TestSet,0,1)\n for n in range(i):\n rms = MethodSelect(TestSet,Y)\n setRMS.append(rms)\n TestRMS.append(np.mean(setRMS))\n print(\"Descr \",z,\"\",L[0][z]+\" has rms\",np.mean(setRMS))\n \n else: #Skip if already a best descriptor\n TestRMS.append(10)\n ##print(\"Descr \",z,\"\",L[0][z]+\" already added\")\n\n #Sorts and prints sorted list by worst Descriptor\n print(\"\\n\");\n TestRMS, D = (list(t) for t in zip(*sorted(zip(TestRMS, D))))\n for num in range(len(TestRMS)):\n print(\"Descr \",D[num],\"\",L[0][D[num]]+\" has rms\",TestRMS[num])\n\n #Add best Descriptor to list\n BestDesc.append(D[t])\n print(\"Desc\",D[t],\"(\"+str(L[0][D[t]])+\n \") added as descriptor number\",len(BestDesc),\"\\n\\n\")\n\n #When done print best Descriptors in order\n print(\"Best Descs are:\")\n for n in range(len(BestDesc)-1):\n print(L[0][BestDesc[n]])\n\n#FWS take 1 argument which is the number of iterations to test each Descriptor\n#each time will be with a different training and testing set.\n\nif __name__ == '__main__':\n FWS(1)\n",
"step-ids": [
5,
7,
8,
10,
11
]
}
|
[
5,
7,
8,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@view_defaults(context=IndividualResource)
class IndividualView(CRUDCommonView):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@view_defaults(context=IndividualResource)
class IndividualView(CRUDCommonView):
@view_config(name='equipment', request_method='GET', renderer='json',
permission='read')
def getEquipment(self):
return self.context.getEquipment()
<|reserved_special_token_1|>
from pyramid.view import view_config, view_defaults
from ecoreleve_server.core.base_view import CRUDCommonView
from .individual_resource import IndividualResource, IndividualsResource, IndividualLocationsResource
@view_defaults(context=IndividualResource)
class IndividualView(CRUDCommonView):
    """CRUD view whose request context is an IndividualResource."""

    @view_config(name='equipment', request_method='GET', renderer='json',
        permission='read')
    def getEquipment(self):
        """GET <individual>/equipment: JSON equipment data delegated to the resource."""
        return self.context.getEquipment()
|
flexible
|
{
"blob_id": "a3cfd507e30cf232f351fbc66d347aaca99a0447",
"index": 4059,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n <mask token>\n",
"step-3": "<mask token>\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n\n @view_config(name='equipment', request_method='GET', renderer='json',\n permission='read')\n def getEquipment(self):\n return self.context.getEquipment()\n",
"step-4": "from pyramid.view import view_config, view_defaults\nfrom ecoreleve_server.core.base_view import CRUDCommonView\nfrom .individual_resource import IndividualResource, IndividualsResource, IndividualLocationsResource\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n\n @view_config(name='equipment', request_method='GET', renderer='json',\n permission='read')\n def getEquipment(self):\n return self.context.getEquipment()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def solution2(n):
return sum([i for i in range(1, n + 1) if n % i == 0])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def solution(n):
answer = []
for i in range(1, n + 1):
if n % i == 0:
answer.append(i)
return sum(answer)
def solution2(n):
return sum([i for i in range(1, n + 1) if n % i == 0])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def solution(n):
answer = []
for i in range(1, n + 1):
if n % i == 0:
answer.append(i)
return sum(answer)
def solution2(n):
return sum([i for i in range(1, n + 1) if n % i == 0])
print(solution(12))
print(solution(5))
print(solution2(12))
print(solution2(5))
<|reserved_special_token_1|>
def solution(n):
    """Return the sum of all positive divisors of n.

    Example: solution(12) == 1 + 2 + 3 + 4 + 6 + 12 == 28.
    """
    # IDIOM: sum a generator directly instead of building and then summing
    # an intermediate list with a manual append loop.
    return sum(i for i in range(1, n + 1) if n % i == 0)
def solution2(n):
    """Sum of the positive divisors of n (comprehension variant)."""
    divisors = [d for d in range(1, n + 1) if n % d == 0]
    return sum(divisors)
# quick smoke checks — expected output: 28, 6, 28, 6
print(solution(12))
print(solution(5))
print(solution2(12))
print(solution2(5))
# n return
# 12 28
# 5 6
|
flexible
|
{
"blob_id": "7cfbc36cc6cd6ff7c30f02d979667448f2003546",
"index": 9267,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n",
"step-3": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n",
"step-4": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n",
"step-5": "def solution(n):\n answer = []\n for i in range(1,n+1):\n if n % i == 0:\n answer.append(i)\n\n return sum(answer)\n\ndef solution2(n):\n return sum([i for i in range(1,n+1) if n % i == 0])\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n# n\treturn\n# 12\t28\n# 5\t6",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from ShazamAPI import Shazam
import json
import sys
# placeholder entry point; the Shazam/json/sys imports above are unused here
print("oi")
|
normal
|
{
"blob_id": "c248d653556ecdf27e56b57930832eb293dfd579",
"index": 5413,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('oi')\n",
"step-3": "from ShazamAPI import Shazam\nimport json\nimport sys\nprint('oi')\n",
"step-4": "from ShazamAPI import Shazam\nimport json\nimport sys\n\nprint(\"oi\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
@mod.route('/shutdown')
def shutdown():
flash(
'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'
)
subprocess.call(['sudo', 'halt'])
return redirect(url_for('system.index'))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_output(*args):
return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]
<|reserved_special_token_0|>
@mod.route('/')
def index():
uptime = check_output(['uptime'])
return render_template('system/system.html', uptime=uptime)
@mod.route('/shutdown')
def shutdown():
flash(
'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'
)
subprocess.call(['sudo', 'halt'])
return redirect(url_for('system.index'))
@mod.route('/reboot')
def reboot():
flash('Rebooting... please wait.<br>This will take approx. one minute.')
subprocess.call(['sudo', 'reboot'])
return redirect(url_for('system.index'))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_output(*args):
return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]
mod = Blueprint('system', __name__)
@mod.route('/')
def index():
uptime = check_output(['uptime'])
return render_template('system/system.html', uptime=uptime)
@mod.route('/shutdown')
def shutdown():
flash(
'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'
)
subprocess.call(['sudo', 'halt'])
return redirect(url_for('system.index'))
@mod.route('/reboot')
def reboot():
flash('Rebooting... please wait.<br>This will take approx. one minute.')
subprocess.call(['sudo', 'reboot'])
return redirect(url_for('system.index'))
<|reserved_special_token_1|>
from flask import Blueprint, render_template, redirect, url_for, flash
import subprocess
def check_output(*args):
return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]
mod = Blueprint('system', __name__)
@mod.route('/')
def index():
uptime = check_output(['uptime'])
return render_template('system/system.html', uptime=uptime)
@mod.route('/shutdown')
def shutdown():
flash(
'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'
)
subprocess.call(['sudo', 'halt'])
return redirect(url_for('system.index'))
@mod.route('/reboot')
def reboot():
flash('Rebooting... please wait.<br>This will take approx. one minute.')
subprocess.call(['sudo', 'reboot'])
return redirect(url_for('system.index'))
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from flask import Blueprint, render_template, redirect, url_for, flash
import subprocess
def check_output(*args):
    """Run a command and return its captured stdout as bytes."""
    proc = subprocess.Popen(*args, stdout=subprocess.PIPE)
    stdout, _ = proc.communicate()
    return stdout
mod = Blueprint('system', __name__)
@mod.route('/')
def index():
    """Render the system page with the output of the `uptime` command."""
    return render_template('system/system.html', uptime=check_output(["uptime"]))
@mod.route('/shutdown')
def shutdown():
    """Flash a shutdown notice, halt the machine, redirect to the system page."""
    # the trailing backslash splices the next (indented) line into the same
    # string literal, so the flashed message carries the extra indent spaces
    flash("Shutting down.<br>When the LEDs on the board stop flashing, \
    it should be safe to unplug your Raspberry Pi.")
    subprocess.call(["sudo", "halt"])
    return redirect(url_for('system.index'))
@mod.route('/reboot')
def reboot():
    """Flash a notice, trigger `sudo reboot`, and bounce back to the index."""
    message = "Rebooting... please wait.<br>This will take approx. one minute."
    flash(message)
    subprocess.call(["sudo", "reboot"])
    return redirect(url_for('system.index'))
|
flexible
|
{
"blob_id": "e056a1600b620519e729c597dcec57793284019a",
"index": 1470,
"step-1": "<mask token>\n\n\n@mod.route('/shutdown')\ndef shutdown():\n flash(\n 'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'\n )\n subprocess.call(['sudo', 'halt'])\n return redirect(url_for('system.index'))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_output(*args):\n return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]\n\n\n<mask token>\n\n\n@mod.route('/')\ndef index():\n uptime = check_output(['uptime'])\n return render_template('system/system.html', uptime=uptime)\n\n\n@mod.route('/shutdown')\ndef shutdown():\n flash(\n 'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'\n )\n subprocess.call(['sudo', 'halt'])\n return redirect(url_for('system.index'))\n\n\n@mod.route('/reboot')\ndef reboot():\n flash('Rebooting... please wait.<br>This will take approx. one minute.')\n subprocess.call(['sudo', 'reboot'])\n return redirect(url_for('system.index'))\n",
"step-3": "<mask token>\n\n\ndef check_output(*args):\n return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]\n\n\nmod = Blueprint('system', __name__)\n\n\n@mod.route('/')\ndef index():\n uptime = check_output(['uptime'])\n return render_template('system/system.html', uptime=uptime)\n\n\n@mod.route('/shutdown')\ndef shutdown():\n flash(\n 'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'\n )\n subprocess.call(['sudo', 'halt'])\n return redirect(url_for('system.index'))\n\n\n@mod.route('/reboot')\ndef reboot():\n flash('Rebooting... please wait.<br>This will take approx. one minute.')\n subprocess.call(['sudo', 'reboot'])\n return redirect(url_for('system.index'))\n",
"step-4": "from flask import Blueprint, render_template, redirect, url_for, flash\nimport subprocess\n\n\ndef check_output(*args):\n return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]\n\n\nmod = Blueprint('system', __name__)\n\n\n@mod.route('/')\ndef index():\n uptime = check_output(['uptime'])\n return render_template('system/system.html', uptime=uptime)\n\n\n@mod.route('/shutdown')\ndef shutdown():\n flash(\n 'Shutting down.<br>When the LEDs on the board stop flashing, it should be safe to unplug your Raspberry Pi.'\n )\n subprocess.call(['sudo', 'halt'])\n return redirect(url_for('system.index'))\n\n\n@mod.route('/reboot')\ndef reboot():\n flash('Rebooting... please wait.<br>This will take approx. one minute.')\n subprocess.call(['sudo', 'reboot'])\n return redirect(url_for('system.index'))\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom flask import Blueprint, render_template, redirect, url_for, flash\nimport subprocess\n\ndef check_output(*args):\n return subprocess.Popen(*args, stdout=subprocess.PIPE).communicate()[0]\n\nmod = Blueprint('system', __name__)\n\n@mod.route('/')\ndef index():\n uptime = check_output([\"uptime\"])\n return render_template('system/system.html', uptime=uptime)\n\n@mod.route('/shutdown')\ndef shutdown():\n flash(\"Shutting down.<br>When the LEDs on the board stop flashing, \\\n it should be safe to unplug your Raspberry Pi.\")\n subprocess.call([\"sudo\", \"halt\"])\n return redirect(url_for('system.index'))\n\n@mod.route('/reboot')\ndef reboot():\n flash(\"Rebooting... please wait.<br>This will take approx. one minute.\")\n subprocess.call([\"sudo\", \"reboot\"])\n return redirect(url_for('system.index'))\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
from .models import Video, VideoClass
from rest_framework import serializers
# Video info: serializer for a single video record
class VideoSerializer(serializers.ModelSerializer): 
    """Serializes a Video's URL, subtitle, and like/play counters."""
    class Meta:
        model = Video
        fields = ['videoURL','subTitle', 'numOfLike', 'numOfPlay']
# Video classification: a category together with its nested videos
class VideoClassSerializer(serializers.ModelSerializer):
    """Serializes a VideoClass title together with its nested videos."""
    video_set = serializers.SerializerMethodField()

    class Meta:
        model = VideoClass
        fields = ('title', 'video_set')

    def get_video_set(self, instance):
        # nested serialization of every Video attached to this class
        return VideoSerializer(instance.video_set.all(), many=True).data
|
normal
|
{
"blob_id": "b20a8160ba455a39e990b8b37c5017645530ced3",
"index": 1545,
"step-1": "<mask token>\n\n\nclass VideoClassSerializer(serializers.ModelSerializer):\n <mask token>\n\n\n class Meta:\n model = VideoClass\n fields = 'title', 'video_set'\n\n def get_video_set(self, instance):\n videos = instance.video_set.all()\n return VideoSerializer(videos, many=True).data\n",
"step-2": "<mask token>\n\n\nclass VideoClassSerializer(serializers.ModelSerializer):\n video_set = serializers.SerializerMethodField()\n\n\n class Meta:\n model = VideoClass\n fields = 'title', 'video_set'\n\n def get_video_set(self, instance):\n videos = instance.video_set.all()\n return VideoSerializer(videos, many=True).data\n",
"step-3": "<mask token>\n\n\nclass VideoSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Video\n fields = ['videoURL', 'subTitle', 'numOfLike', 'numOfPlay']\n\n\nclass VideoClassSerializer(serializers.ModelSerializer):\n video_set = serializers.SerializerMethodField()\n\n\n class Meta:\n model = VideoClass\n fields = 'title', 'video_set'\n\n def get_video_set(self, instance):\n videos = instance.video_set.all()\n return VideoSerializer(videos, many=True).data\n",
"step-4": "from .models import Video, VideoClass\nfrom rest_framework import serializers\n\n\nclass VideoSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Video\n fields = ['videoURL', 'subTitle', 'numOfLike', 'numOfPlay']\n\n\nclass VideoClassSerializer(serializers.ModelSerializer):\n video_set = serializers.SerializerMethodField()\n\n\n class Meta:\n model = VideoClass\n fields = 'title', 'video_set'\n\n def get_video_set(self, instance):\n videos = instance.video_set.all()\n return VideoSerializer(videos, many=True).data\n",
"step-5": "from .models import Video, VideoClass\nfrom rest_framework import serializers\n\n\n# Video 정보\nclass VideoSerializer(serializers.ModelSerializer): \n class Meta:\n model = Video\n fields = ['videoURL','subTitle', 'numOfLike', 'numOfPlay']\n\n# Video 분류\nclass VideoClassSerializer(serializers.ModelSerializer):\n video_set = serializers.SerializerMethodField()\n\n class Meta:\n model = VideoClass\n fields = ('title', 'video_set')\n\n def get_video_set(self, instance):\n videos = instance.video_set.all()\n return VideoSerializer(videos, many=True).data\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from typing import Any
from electionguard.ballot import CiphertextAcceptedBallot
from electionguard.decryption import compute_decryption_share_for_ballot
from electionguard.election import CiphertextElectionContext
from electionguard.scheduler import Scheduler
from electionguard.serializable import write_json_object
from fastapi import APIRouter, Body, Depends
from app.core.scheduler import get_scheduler
from ..models import (
convert_guardian,
DecryptBallotSharesRequest,
DecryptBallotSharesResponse,
)
from ..tags import TALLY
router = APIRouter()
@router.post("/decrypt-shares", tags=[TALLY])
def decrypt_ballot_shares(
    request: DecryptBallotSharesRequest = Body(...),
    scheduler: Scheduler = Depends(get_scheduler),
) -> Any:
    """
    Decrypt this guardian's share of one or more ballots
    """
    # deserialize the submitted ballots first, then the election context
    # and the guardian acting on them
    ballots = []
    for raw_ballot in request.encrypted_ballots:
        ballots.append(CiphertextAcceptedBallot.from_json_object(raw_ballot))
    context = CiphertextElectionContext.from_json_object(request.context)
    guardian = convert_guardian(request.guardian)

    # compute this guardian's decryption share for every ballot
    shares = []
    for ballot in ballots:
        shares.append(
            compute_decryption_share_for_ballot(guardian, ballot, context, scheduler)
        )

    serialized = [write_json_object(share) for share in shares]
    return DecryptBallotSharesResponse(shares=serialized)
|
normal
|
{
"blob_id": "0544c67cb14549e32b6ff8ea3215c6c65c8416ec",
"index": 5542,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@router.post('/decrypt-shares', tags=[TALLY])\ndef decrypt_ballot_shares(request: DecryptBallotSharesRequest=Body(...),\n scheduler: Scheduler=Depends(get_scheduler)) ->Any:\n \"\"\"\n Decrypt this guardian's share of one or more ballots\n \"\"\"\n ballots = [CiphertextAcceptedBallot.from_json_object(ballot) for ballot in\n request.encrypted_ballots]\n context = CiphertextElectionContext.from_json_object(request.context)\n guardian = convert_guardian(request.guardian)\n shares = [compute_decryption_share_for_ballot(guardian, ballot, context,\n scheduler) for ballot in ballots]\n response = DecryptBallotSharesResponse(shares=[write_json_object(share) for\n share in shares])\n return response\n",
"step-3": "<mask token>\nrouter = APIRouter()\n\n\n@router.post('/decrypt-shares', tags=[TALLY])\ndef decrypt_ballot_shares(request: DecryptBallotSharesRequest=Body(...),\n scheduler: Scheduler=Depends(get_scheduler)) ->Any:\n \"\"\"\n Decrypt this guardian's share of one or more ballots\n \"\"\"\n ballots = [CiphertextAcceptedBallot.from_json_object(ballot) for ballot in\n request.encrypted_ballots]\n context = CiphertextElectionContext.from_json_object(request.context)\n guardian = convert_guardian(request.guardian)\n shares = [compute_decryption_share_for_ballot(guardian, ballot, context,\n scheduler) for ballot in ballots]\n response = DecryptBallotSharesResponse(shares=[write_json_object(share) for\n share in shares])\n return response\n",
"step-4": "from typing import Any\nfrom electionguard.ballot import CiphertextAcceptedBallot\nfrom electionguard.decryption import compute_decryption_share_for_ballot\nfrom electionguard.election import CiphertextElectionContext\nfrom electionguard.scheduler import Scheduler\nfrom electionguard.serializable import write_json_object\nfrom fastapi import APIRouter, Body, Depends\nfrom app.core.scheduler import get_scheduler\nfrom ..models import convert_guardian, DecryptBallotSharesRequest, DecryptBallotSharesResponse\nfrom ..tags import TALLY\nrouter = APIRouter()\n\n\n@router.post('/decrypt-shares', tags=[TALLY])\ndef decrypt_ballot_shares(request: DecryptBallotSharesRequest=Body(...),\n scheduler: Scheduler=Depends(get_scheduler)) ->Any:\n \"\"\"\n Decrypt this guardian's share of one or more ballots\n \"\"\"\n ballots = [CiphertextAcceptedBallot.from_json_object(ballot) for ballot in\n request.encrypted_ballots]\n context = CiphertextElectionContext.from_json_object(request.context)\n guardian = convert_guardian(request.guardian)\n shares = [compute_decryption_share_for_ballot(guardian, ballot, context,\n scheduler) for ballot in ballots]\n response = DecryptBallotSharesResponse(shares=[write_json_object(share) for\n share in shares])\n return response\n",
"step-5": "from typing import Any\nfrom electionguard.ballot import CiphertextAcceptedBallot\nfrom electionguard.decryption import compute_decryption_share_for_ballot\nfrom electionguard.election import CiphertextElectionContext\nfrom electionguard.scheduler import Scheduler\nfrom electionguard.serializable import write_json_object\nfrom fastapi import APIRouter, Body, Depends\n\nfrom app.core.scheduler import get_scheduler\nfrom ..models import (\n convert_guardian,\n DecryptBallotSharesRequest,\n DecryptBallotSharesResponse,\n)\nfrom ..tags import TALLY\n\nrouter = APIRouter()\n\n\n@router.post(\"/decrypt-shares\", tags=[TALLY])\ndef decrypt_ballot_shares(\n request: DecryptBallotSharesRequest = Body(...),\n scheduler: Scheduler = Depends(get_scheduler),\n) -> Any:\n \"\"\"\n Decrypt this guardian's share of one or more ballots\n \"\"\"\n ballots = [\n CiphertextAcceptedBallot.from_json_object(ballot)\n for ballot in request.encrypted_ballots\n ]\n context = CiphertextElectionContext.from_json_object(request.context)\n guardian = convert_guardian(request.guardian)\n\n shares = [\n compute_decryption_share_for_ballot(guardian, ballot, context, scheduler)\n for ballot in ballots\n ]\n\n response = DecryptBallotSharesResponse(\n shares=[write_json_object(share) for share in shares]\n )\n\n return response\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
start=0
last=100
middle=50
counter=1
print(" Guess a number between 0 and 100")
condition = int(input("Is your guess " + str(middle) + "? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "))
while condition != 1:
counter += 1
if condition == 0:
last = middle
elif condition == 2:
start = middle
middle=int((start+last)/2)
condition = int(input("Is your guess " + str(middle) + "? (0 means it's too low, 1 means it's your guess and 2 means it's too high) "))
print("It took us {} guesses to get it right! Cheers!".format(counter))
|
normal
|
{
"blob_id": "42d03aabef7d75c813f30bb6d8a835d76fd1fc83",
"index": 603,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Guess a number between 0 and 100')\n<mask token>\nwhile condition != 1:\n counter += 1\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n middle = int((start + last) / 2)\n condition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nprint('It took us {} guesses to get it right! Cheers!'.format(counter))\n",
"step-3": "start = 0\nlast = 100\nmiddle = 50\ncounter = 1\nprint(' Guess a number between 0 and 100')\ncondition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nwhile condition != 1:\n counter += 1\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n middle = int((start + last) / 2)\n condition = int(input('Is your guess ' + str(middle) +\n \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"\n ))\nprint('It took us {} guesses to get it right! Cheers!'.format(counter))\n",
"step-4": "start=0\nlast=100\nmiddle=50\ncounter=1\n\nprint(\" Guess a number between 0 and 100\")\ncondition = int(input(\"Is your guess \" + str(middle) + \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"))\n\nwhile condition != 1:\n counter += 1\n\n if condition == 0:\n last = middle\n elif condition == 2:\n start = middle\n\n middle=int((start+last)/2)\n condition = int(input(\"Is your guess \" + str(middle) + \"? (0 means it's too low, 1 means it's your guess and 2 means it's too high) \"))\n\nprint(\"It took us {} guesses to get it right! Cheers!\".format(counter))\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2023 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **standard Python module globals** (i.e., global constants
describing modules and packages bundled with CPython's standard library).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# ....................{ NAMES }....................
BUILTINS_MODULE_NAME = 'builtins'
'''
Fully-qualified name of the **builtins module** (i.e., objects defined by the
standard :mod:`builtins` module and thus globally available by default
*without* requiring explicit importation).
'''
|
normal
|
{
"blob_id": "a42f36fca2f65d0c5c9b65055af1814d8b4b3d42",
"index": 89,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBUILTINS_MODULE_NAME = 'builtins'\n<mask token>\n",
"step-3": "#!/usr/bin/env python3\n# --------------------( LICENSE )--------------------\n# Copyright (c) 2014-2023 Beartype authors.\n# See \"LICENSE\" for further details.\n\n'''\nProject-wide **standard Python module globals** (i.e., global constants\ndescribing modules and packages bundled with CPython's standard library).\n\nThis private submodule is *not* intended for importation by downstream callers.\n'''\n\n# ....................{ IMPORTS }....................\n\n# ....................{ NAMES }....................\nBUILTINS_MODULE_NAME = 'builtins'\n'''\nFully-qualified name of the **builtins module** (i.e., objects defined by the\nstandard :mod:`builtins` module and thus globally available by default\n*without* requiring explicit importation).\n'''\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""component URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from product.views import product_list_view, component, product_detail_view
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import TemplateView
from .views import home_page, login_page, register_page, logout_page
from tracker.views import tracker
urlpatterns = [
url(r'^login/$', login_page, name='login'),
url(r'^logout/$', logout_page, name='logout'),
url(r'^register/$', register_page, name='register'),
url(r'^product/$', product_list_view, name='product'),
url(r'^component/$', component, name='component'),
url(r'^tracker/$', tracker, name='tracker'),
url(r'^cart/', include(('cart.urls', 'cart'), namespace='cart')),
#url(r'^detail/$', product_detail_view, name='detail'),
#url(r'^product/product-(?P<parameter>[\w-]+).html', 'views.product', name="product"),
#url(r'^stores/\w+/',.....)
url(r'^detail/(?P<parameter>[\w-]+)/$', product_detail_view, name='detail'),
url(r'^$', home_page, name='home'),
url(r'^admin/', admin.site.urls),
]
|
normal
|
{
"blob_id": "0de735647cf87f64ab64af081da6e11b0ed8a7a7",
"index": 1173,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^login/$', login_page, name='login'), url('^logout/$',\n logout_page, name='logout'), url('^register/$', register_page, name=\n 'register'), url('^product/$', product_list_view, name='product'), url(\n '^component/$', component, name='component'), url('^tracker/$', tracker,\n name='tracker'), url('^cart/', include(('cart.urls', 'cart'), namespace\n ='cart')), url('^detail/(?P<parameter>[\\\\w-]+)/$', product_detail_view,\n name='detail'), url('^$', home_page, name='home'), url('^admin/', admin\n .site.urls)]\n",
"step-3": "<mask token>\nfrom django.conf import settings\nfrom django.conf.urls.static import static\nfrom product.views import product_list_view, component, product_detail_view\nfrom django.conf.urls import url, include\nfrom django.contrib import admin\nfrom django.views.generic import TemplateView\nfrom .views import home_page, login_page, register_page, logout_page\nfrom tracker.views import tracker\nurlpatterns = [url('^login/$', login_page, name='login'), url('^logout/$',\n logout_page, name='logout'), url('^register/$', register_page, name=\n 'register'), url('^product/$', product_list_view, name='product'), url(\n '^component/$', component, name='component'), url('^tracker/$', tracker,\n name='tracker'), url('^cart/', include(('cart.urls', 'cart'), namespace\n ='cart')), url('^detail/(?P<parameter>[\\\\w-]+)/$', product_detail_view,\n name='detail'), url('^$', home_page, name='home'), url('^admin/', admin\n .site.urls)]\n",
"step-4": "\"\"\"component URL Configuration\r\n\r\nThe `urlpatterns` list routes URLs to views. For more information please see:\r\n https://docs.djangoproject.com/en/1.11/topics/http/urls/\r\nExamples:\r\nFunction views\r\n 1. Add an import: from my_app import views\r\n 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')\r\nClass-based views\r\n 1. Add an import: from other_app.views import Home\r\n 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')\r\nIncluding another URLconf\r\n 1. Import the include() function: from django.conf.urls import url, include\r\n 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))\r\n\"\"\"\r\nfrom django.conf import settings\r\nfrom django.conf.urls.static import static\r\nfrom product.views import product_list_view, component, product_detail_view\r\nfrom django.conf.urls import url, include\r\nfrom django.contrib import admin\r\n\r\nfrom django.views.generic import TemplateView\r\nfrom .views import home_page, login_page, register_page, logout_page\r\nfrom tracker.views import tracker\r\n\r\nurlpatterns = [\r\n url(r'^login/$', login_page, name='login'),\r\n url(r'^logout/$', logout_page, name='logout'),\r\n url(r'^register/$', register_page, name='register'),\r\n url(r'^product/$', product_list_view, name='product'),\r\n\r\n url(r'^component/$', component, name='component'),\r\n url(r'^tracker/$', tracker, name='tracker'),\r\n\r\n url(r'^cart/', include(('cart.urls', 'cart'), namespace='cart')),\r\n #url(r'^detail/$', product_detail_view, name='detail'),\r\n #url(r'^product/product-(?P<parameter>[\\w-]+).html', 'views.product', name=\"product\"),\r\n #url(r'^stores/\\w+/',.....)\r\n url(r'^detail/(?P<parameter>[\\w-]+)/$', product_detail_view, name='detail'),\r\n url(r'^$', home_page, name='home'),\r\n url(r'^admin/', admin.site.urls),\r\n\r\n]\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class MessageUnpacker:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def unpack_json_to_dict(self, incoming_json):
record_as_dict = json.loads(incoming_json)
return record_as_dict
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MessageUnpacker:
<|reserved_special_token_0|>
def unpack_string_to_dict(self, incoming_values):
FIELD_DELIMITER = ';'
fields = ['message_num', 'time_stamp', 'car_id', 'device_id',
'data_type', 'error_flag', 'data']
values = incoming_values.split(FIELD_DELIMITER)
record_as_dict = {}
for f, v in zip(fields, values):
record_as_dict[f] = v
record_as_dict['data'] = record_as_dict['data'].strip('\n')
return record_as_dict
def unpack_json_to_dict(self, incoming_json):
record_as_dict = json.loads(incoming_json)
return record_as_dict
def unpickle_to_dict(self, pickled_message):
record_as_dict = pickle.loads(pickled_message)
return record_as_dict
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MessageUnpacker:
def __init__(self):
print('Generating message unpacker...')
def unpack_string_to_dict(self, incoming_values):
FIELD_DELIMITER = ';'
fields = ['message_num', 'time_stamp', 'car_id', 'device_id',
'data_type', 'error_flag', 'data']
values = incoming_values.split(FIELD_DELIMITER)
record_as_dict = {}
for f, v in zip(fields, values):
record_as_dict[f] = v
record_as_dict['data'] = record_as_dict['data'].strip('\n')
return record_as_dict
def unpack_json_to_dict(self, incoming_json):
record_as_dict = json.loads(incoming_json)
return record_as_dict
def unpickle_to_dict(self, pickled_message):
record_as_dict = pickle.loads(pickled_message)
return record_as_dict
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pickle
import json
class MessageUnpacker:
def __init__(self):
print('Generating message unpacker...')
def unpack_string_to_dict(self, incoming_values):
FIELD_DELIMITER = ';'
fields = ['message_num', 'time_stamp', 'car_id', 'device_id',
'data_type', 'error_flag', 'data']
values = incoming_values.split(FIELD_DELIMITER)
record_as_dict = {}
for f, v in zip(fields, values):
record_as_dict[f] = v
record_as_dict['data'] = record_as_dict['data'].strip('\n')
return record_as_dict
def unpack_json_to_dict(self, incoming_json):
record_as_dict = json.loads(incoming_json)
return record_as_dict
def unpickle_to_dict(self, pickled_message):
record_as_dict = pickle.loads(pickled_message)
return record_as_dict
<|reserved_special_token_1|>
'''CLASS message_unpacker
Message bodies sent through RabbitMQ may take various forms. They were packed
accordingly by the message_packager.
This class reverses the process. Currently, only implemented for message bodies
represented as strings, but could also handle various image formats in a real use
situation
Encapsulating the "unpacking" aspect into this class makes it easier to extend the
functionality of methods needed for unpacking data as a function of the data types
(e.g. lidar, radar, numeric, GPS) that are packaged by message_packager.
'''
import pickle
import json
class MessageUnpacker():
def __init__(self):
print('Generating message unpacker...')
# Unpacks messages that were packaged as a field-delimited (';') string representation
def unpack_string_to_dict(self, incoming_values):
FIELD_DELIMITER = ';'
fields = ['message_num', 'time_stamp', 'car_id', 'device_id', 'data_type', 'error_flag', 'data']
values = incoming_values.split(FIELD_DELIMITER)
record_as_dict = {}
for f, v in zip(fields, values):
record_as_dict[f] = v
record_as_dict['data'] = record_as_dict['data'].strip('\n') # artifact of message body
return record_as_dict
# Unpacks messages that were packaged as JSON
def unpack_json_to_dict(self, incoming_json):
record_as_dict = json.loads(incoming_json)
return record_as_dict
# Unpacks messages that were pickled
def unpickle_to_dict(self, pickled_message):
record_as_dict = pickle.loads(pickled_message)
return record_as_dict
|
flexible
|
{
"blob_id": "2afc1027c6866e8ab9584a5f7feef4470661f763",
"index": 4246,
"step-1": "<mask token>\n\n\nclass MessageUnpacker:\n <mask token>\n <mask token>\n\n def unpack_json_to_dict(self, incoming_json):\n record_as_dict = json.loads(incoming_json)\n return record_as_dict\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MessageUnpacker:\n <mask token>\n\n def unpack_string_to_dict(self, incoming_values):\n FIELD_DELIMITER = ';'\n fields = ['message_num', 'time_stamp', 'car_id', 'device_id',\n 'data_type', 'error_flag', 'data']\n values = incoming_values.split(FIELD_DELIMITER)\n record_as_dict = {}\n for f, v in zip(fields, values):\n record_as_dict[f] = v\n record_as_dict['data'] = record_as_dict['data'].strip('\\n')\n return record_as_dict\n\n def unpack_json_to_dict(self, incoming_json):\n record_as_dict = json.loads(incoming_json)\n return record_as_dict\n\n def unpickle_to_dict(self, pickled_message):\n record_as_dict = pickle.loads(pickled_message)\n return record_as_dict\n",
"step-3": "<mask token>\n\n\nclass MessageUnpacker:\n\n def __init__(self):\n print('Generating message unpacker...')\n\n def unpack_string_to_dict(self, incoming_values):\n FIELD_DELIMITER = ';'\n fields = ['message_num', 'time_stamp', 'car_id', 'device_id',\n 'data_type', 'error_flag', 'data']\n values = incoming_values.split(FIELD_DELIMITER)\n record_as_dict = {}\n for f, v in zip(fields, values):\n record_as_dict[f] = v\n record_as_dict['data'] = record_as_dict['data'].strip('\\n')\n return record_as_dict\n\n def unpack_json_to_dict(self, incoming_json):\n record_as_dict = json.loads(incoming_json)\n return record_as_dict\n\n def unpickle_to_dict(self, pickled_message):\n record_as_dict = pickle.loads(pickled_message)\n return record_as_dict\n",
"step-4": "<mask token>\nimport pickle\nimport json\n\n\nclass MessageUnpacker:\n\n def __init__(self):\n print('Generating message unpacker...')\n\n def unpack_string_to_dict(self, incoming_values):\n FIELD_DELIMITER = ';'\n fields = ['message_num', 'time_stamp', 'car_id', 'device_id',\n 'data_type', 'error_flag', 'data']\n values = incoming_values.split(FIELD_DELIMITER)\n record_as_dict = {}\n for f, v in zip(fields, values):\n record_as_dict[f] = v\n record_as_dict['data'] = record_as_dict['data'].strip('\\n')\n return record_as_dict\n\n def unpack_json_to_dict(self, incoming_json):\n record_as_dict = json.loads(incoming_json)\n return record_as_dict\n\n def unpickle_to_dict(self, pickled_message):\n record_as_dict = pickle.loads(pickled_message)\n return record_as_dict\n",
"step-5": "'''CLASS message_unpacker\n\n Message bodies sent through RabbitMQ may take various forms. They were packed\n accordingly by the message_packager.\n\n This class reverses the process. Currently, only implemented for message bodies\n represented as strings, but could also handle various image formats in a real use\n situation\n\n Encapsulating the \"unpacking\" aspect into this class makes it easier to extend the\n functionality of methods needed for unpacking data as a function of the data types \n (e.g. lidar, radar, numeric, GPS) that are packaged by message_packager.\n'''\nimport pickle\nimport json\n\nclass MessageUnpacker():\n\n def __init__(self):\n print('Generating message unpacker...')\n\n # Unpacks messages that were packaged as a field-delimited (';') string representation\n def unpack_string_to_dict(self, incoming_values):\n FIELD_DELIMITER = ';'\n fields = ['message_num', 'time_stamp', 'car_id', 'device_id', 'data_type', 'error_flag', 'data']\n values = incoming_values.split(FIELD_DELIMITER)\n record_as_dict = {}\n\n for f, v in zip(fields, values):\n record_as_dict[f] = v\n record_as_dict['data'] = record_as_dict['data'].strip('\\n') # artifact of message body\n\n return record_as_dict \n\n # Unpacks messages that were packaged as JSON\n def unpack_json_to_dict(self, incoming_json):\n record_as_dict = json.loads(incoming_json)\n return record_as_dict\n\n # Unpacks messages that were pickled\n def unpickle_to_dict(self, pickled_message):\n record_as_dict = pickle.loads(pickled_message)\n return record_as_dict\n\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@bp_admin.route('/profile', methods=['GET', 'POST'])
@login_required
def profile_edit():
"""Show user profile edition form."""
form = ProfileForm(obj=current_user)
if form.validate_on_submit():
form.populate_obj(current_user)
try:
correct = True
db.session.commit()
flash(_('Profile updated correctly'), 'success')
return render_template('admin/profile/edit.html', form=form)
except IntegrityError:
correct = False
form.errors.email.append(_('Email is already registered'))
return render_template('admin/profile/edit.html', form=form)
except Exception:
correct = False
flash(_('Failed to update profile, contact an administrator'),
'error')
return render_template('admin/profile/edit.html', form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/edit.html', form=form)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@bp_admin.route('/profile', methods=['GET', 'POST'])
@login_required
def profile_edit():
"""Show user profile edition form."""
form = ProfileForm(obj=current_user)
if form.validate_on_submit():
form.populate_obj(current_user)
try:
correct = True
db.session.commit()
flash(_('Profile updated correctly'), 'success')
return render_template('admin/profile/edit.html', form=form)
except IntegrityError:
correct = False
form.errors.email.append(_('Email is already registered'))
return render_template('admin/profile/edit.html', form=form)
except Exception:
correct = False
flash(_('Failed to update profile, contact an administrator'),
'error')
return render_template('admin/profile/edit.html', form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/edit.html', form=form)
@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])
@fresh_login_required
def change_password():
"""Show form to update user password.
Requires confirming current password.
"""
form = PasswordResetForm()
if form.validate_on_submit():
current_user.password = crypto_manager.hash(form.password.data)
try:
correct = True
db.session.commit()
flash(_('Password updated correctly'), 'success')
return redirect(url_for('admin.profile_edit'))
except Exception:
correct = False
current_app.logger.exception('Failed to update user password')
flash(_('Error updating password, contact an administrator'),
'error')
return render_template('admin/profile/change_password.html',
form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/change_password.html', form=form)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from flask import current_app, flash, redirect, render_template, url_for
from flask_babel import _
from flask_login import current_user, fresh_login_required, login_required
from sqlalchemy.exc import IntegrityError
from akamatsu import crypto_manager, db
from akamatsu.views.admin import bp_admin
from akamatsu.forms import PasswordResetForm, ProfileForm
@bp_admin.route('/profile', methods=['GET', 'POST'])
@login_required
def profile_edit():
"""Show user profile edition form."""
form = ProfileForm(obj=current_user)
if form.validate_on_submit():
form.populate_obj(current_user)
try:
correct = True
db.session.commit()
flash(_('Profile updated correctly'), 'success')
return render_template('admin/profile/edit.html', form=form)
except IntegrityError:
correct = False
form.errors.email.append(_('Email is already registered'))
return render_template('admin/profile/edit.html', form=form)
except Exception:
correct = False
flash(_('Failed to update profile, contact an administrator'),
'error')
return render_template('admin/profile/edit.html', form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/edit.html', form=form)
@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])
@fresh_login_required
def change_password():
"""Show form to update user password.
Requires confirming current password.
"""
form = PasswordResetForm()
if form.validate_on_submit():
current_user.password = crypto_manager.hash(form.password.data)
try:
correct = True
db.session.commit()
flash(_('Password updated correctly'), 'success')
return redirect(url_for('admin.profile_edit'))
except Exception:
correct = False
current_app.logger.exception('Failed to update user password')
flash(_('Error updating password, contact an administrator'),
'error')
return render_template('admin/profile/change_password.html',
form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/change_password.html', form=form)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
#
# Akamatsu CMS
# https://github.com/rmed/akamatsu
#
# MIT License
#
# Copyright (c) 2020 Rafael Medina García <rafamedgar@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""This module contains user profile views."""
from flask import current_app, flash, redirect, render_template, url_for
from flask_babel import _
from flask_login import current_user, fresh_login_required, login_required
from sqlalchemy.exc import IntegrityError
from akamatsu import crypto_manager, db
from akamatsu.views.admin import bp_admin
from akamatsu.forms import PasswordResetForm, ProfileForm
@bp_admin.route('/profile', methods=['GET', 'POST'])
@login_required
def profile_edit():
"""Show user profile edition form."""
form = ProfileForm(obj=current_user)
if form.validate_on_submit():
form.populate_obj(current_user)
try:
correct = True
db.session.commit()
flash(_('Profile updated correctly'), 'success')
return render_template('admin/profile/edit.html', form=form)
except IntegrityError:
# Email already exists
correct = False
form.errors.email.append(_('Email is already registered'))
return render_template('admin/profile/edit.html', form=form)
except Exception:
# Catch anything unknown
correct = False
flash(_('Failed to update profile, contact an administrator'), 'error')
return render_template('admin/profile/edit.html', form=form)
finally:
if not correct:
db.session.rollback()
return render_template('admin/profile/edit.html', form=form)
@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])
@fresh_login_required
def change_password():
    """Show and process the password-change form for the current user.

    ``fresh_login_required`` demands a recent login, so the user has
    recently proven their identity before the password may be changed.
    """
    form = PasswordResetForm()
    if form.validate_on_submit():
        # Update user: store only the hash, never the plaintext password.
        current_user.password = crypto_manager.hash(form.password.data)
        try:
            # ``correct`` drives the rollback decision in ``finally`` below.
            correct = True
            db.session.commit()
            flash(_('Password updated correctly'), 'success')
            # Redirect (rather than render) so a refresh cannot re-submit.
            return redirect(url_for('admin.profile_edit'))
        except Exception:
            correct = False
            current_app.logger.exception('Failed to update user password')
            flash(_('Error updating password, contact an administrator'), 'error')
            return render_template('admin/profile/change_password.html', form=form)
        finally:
            # Runs even though the try/except branches return early.
            if not correct:
                db.session.rollback()
    # GET request or failed validation: show the form (with any errors).
    return render_template('admin/profile/change_password.html', form=form)
|
flexible
|
{
"blob_id": "cde62c5032109bb22aa81d813e30097dad80a9c3",
"index": 4924,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@bp_admin.route('/profile', methods=['GET', 'POST'])\n@login_required\ndef profile_edit():\n \"\"\"Show user profile edition form.\"\"\"\n form = ProfileForm(obj=current_user)\n if form.validate_on_submit():\n form.populate_obj(current_user)\n try:\n correct = True\n db.session.commit()\n flash(_('Profile updated correctly'), 'success')\n return render_template('admin/profile/edit.html', form=form)\n except IntegrityError:\n correct = False\n form.errors.email.append(_('Email is already registered'))\n return render_template('admin/profile/edit.html', form=form)\n except Exception:\n correct = False\n flash(_('Failed to update profile, contact an administrator'),\n 'error')\n return render_template('admin/profile/edit.html', form=form)\n finally:\n if not correct:\n db.session.rollback()\n return render_template('admin/profile/edit.html', form=form)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@bp_admin.route('/profile', methods=['GET', 'POST'])\n@login_required\ndef profile_edit():\n \"\"\"Show user profile edition form.\"\"\"\n form = ProfileForm(obj=current_user)\n if form.validate_on_submit():\n form.populate_obj(current_user)\n try:\n correct = True\n db.session.commit()\n flash(_('Profile updated correctly'), 'success')\n return render_template('admin/profile/edit.html', form=form)\n except IntegrityError:\n correct = False\n form.errors.email.append(_('Email is already registered'))\n return render_template('admin/profile/edit.html', form=form)\n except Exception:\n correct = False\n flash(_('Failed to update profile, contact an administrator'),\n 'error')\n return render_template('admin/profile/edit.html', form=form)\n finally:\n if not correct:\n db.session.rollback()\n return render_template('admin/profile/edit.html', form=form)\n\n\n@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])\n@fresh_login_required\ndef change_password():\n \"\"\"Show form to update user password.\n\n Requires confirming current password.\n \"\"\"\n form = PasswordResetForm()\n if form.validate_on_submit():\n current_user.password = crypto_manager.hash(form.password.data)\n try:\n correct = True\n db.session.commit()\n flash(_('Password updated correctly'), 'success')\n return redirect(url_for('admin.profile_edit'))\n except Exception:\n correct = False\n current_app.logger.exception('Failed to update user password')\n flash(_('Error updating password, contact an administrator'),\n 'error')\n return render_template('admin/profile/change_password.html',\n form=form)\n finally:\n if not correct:\n db.session.rollback()\n return render_template('admin/profile/change_password.html', form=form)\n",
"step-4": "<mask token>\nfrom flask import current_app, flash, redirect, render_template, url_for\nfrom flask_babel import _\nfrom flask_login import current_user, fresh_login_required, login_required\nfrom sqlalchemy.exc import IntegrityError\nfrom akamatsu import crypto_manager, db\nfrom akamatsu.views.admin import bp_admin\nfrom akamatsu.forms import PasswordResetForm, ProfileForm\n\n\n@bp_admin.route('/profile', methods=['GET', 'POST'])\n@login_required\ndef profile_edit():\n \"\"\"Show user profile edition form.\"\"\"\n form = ProfileForm(obj=current_user)\n if form.validate_on_submit():\n form.populate_obj(current_user)\n try:\n correct = True\n db.session.commit()\n flash(_('Profile updated correctly'), 'success')\n return render_template('admin/profile/edit.html', form=form)\n except IntegrityError:\n correct = False\n form.errors.email.append(_('Email is already registered'))\n return render_template('admin/profile/edit.html', form=form)\n except Exception:\n correct = False\n flash(_('Failed to update profile, contact an administrator'),\n 'error')\n return render_template('admin/profile/edit.html', form=form)\n finally:\n if not correct:\n db.session.rollback()\n return render_template('admin/profile/edit.html', form=form)\n\n\n@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])\n@fresh_login_required\ndef change_password():\n \"\"\"Show form to update user password.\n\n Requires confirming current password.\n \"\"\"\n form = PasswordResetForm()\n if form.validate_on_submit():\n current_user.password = crypto_manager.hash(form.password.data)\n try:\n correct = True\n db.session.commit()\n flash(_('Password updated correctly'), 'success')\n return redirect(url_for('admin.profile_edit'))\n except Exception:\n correct = False\n current_app.logger.exception('Failed to update user password')\n flash(_('Error updating password, contact an administrator'),\n 'error')\n return render_template('admin/profile/change_password.html',\n form=form)\n 
finally:\n if not correct:\n db.session.rollback()\n return render_template('admin/profile/change_password.html', form=form)\n",
"step-5": "# -*- coding: utf-8 -*-\n#\n# Akamatsu CMS\n# https://github.com/rmed/akamatsu\n#\n# MIT License\n#\n# Copyright (c) 2020 Rafael Medina García <rafamedgar@gmail.com>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n\"\"\"This module contains user profile views.\"\"\"\n\nfrom flask import current_app, flash, redirect, render_template, url_for\nfrom flask_babel import _\nfrom flask_login import current_user, fresh_login_required, login_required\nfrom sqlalchemy.exc import IntegrityError\n\nfrom akamatsu import crypto_manager, db\nfrom akamatsu.views.admin import bp_admin\nfrom akamatsu.forms import PasswordResetForm, ProfileForm\n\n\n@bp_admin.route('/profile', methods=['GET', 'POST'])\n@login_required\ndef profile_edit():\n \"\"\"Show user profile edition form.\"\"\"\n form = ProfileForm(obj=current_user)\n\n if form.validate_on_submit():\n form.populate_obj(current_user)\n\n try:\n correct = True\n db.session.commit()\n\n flash(_('Profile 
updated correctly'), 'success')\n\n return render_template('admin/profile/edit.html', form=form)\n\n except IntegrityError:\n # Email already exists\n correct = False\n form.errors.email.append(_('Email is already registered'))\n\n return render_template('admin/profile/edit.html', form=form)\n\n except Exception:\n # Catch anything unknown\n correct = False\n\n flash(_('Failed to update profile, contact an administrator'), 'error')\n\n return render_template('admin/profile/edit.html', form=form)\n\n finally:\n if not correct:\n db.session.rollback()\n\n return render_template('admin/profile/edit.html', form=form)\n\n\n@bp_admin.route('/profile/change-password', methods=['GET', 'POST'])\n@fresh_login_required\ndef change_password():\n \"\"\"Show form to update user password.\n\n Requires confirming current password.\n \"\"\"\n form = PasswordResetForm()\n\n if form.validate_on_submit():\n # Update user\n current_user.password = crypto_manager.hash(form.password.data)\n\n try:\n correct = True\n db.session.commit()\n\n flash(_('Password updated correctly'), 'success')\n\n return redirect(url_for('admin.profile_edit'))\n\n except Exception:\n correct = False\n current_app.logger.exception('Failed to update user password')\n\n flash(_('Error updating password, contact an administrator'), 'error')\n\n return render_template('admin/profile/change_password.html', form=form)\n\n finally:\n if not correct:\n db.session.rollback()\n\n return render_template('admin/profile/change_password.html', form=form)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
input_object.close()
<|reserved_special_token_0|>
for line in input_data:
cleaned_data.append(int(line.strip()))
<|reserved_special_token_0|>
for i in range(0, input_size):
for j in range(i, input_size):
for k in range(j, input_size):
if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:
ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]
print(ans)
break
<|reserved_special_token_1|>
input_object = open('input.txt', 'r')
input_data = input_object.readlines()
input_object.close()
cleaned_data = []
for line in input_data:
cleaned_data.append(int(line.strip()))
input_size = len(cleaned_data)
for i in range(0, input_size):
for j in range(i, input_size):
for k in range(j, input_size):
if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:
ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]
print(ans)
break
<|reserved_special_token_1|>
from itertools import combinations

# Read one integer per line from the puzzle input; ``with`` guarantees
# the file is closed (the original handle was left open on error).
with open("input.txt", "r") as input_object:
    cleaned_data = [int(line.strip()) for line in input_object]

# Find three distinct entries summing to 2020 and print their product.
# ``combinations`` never reuses an index (the old ``range(i, ...)`` loops
# allowed i == j == k), and breaking here actually stops the search —
# the original ``break`` only exited the innermost of three loops, so
# the product could be printed more than once.
for a, b, c in combinations(cleaned_data, 3):
    if a + b + c == 2020:
        print(a * b * c)
        break
|
flexible
|
{
"blob_id": "72f3ae476581ff5acd6c7101764f4764285a47bd",
"index": 4426,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ninput_object.close()\n<mask token>\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\n<mask token>\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]\n print(ans)\n break\n",
"step-3": "input_object = open('input.txt', 'r')\ninput_data = input_object.readlines()\ninput_object.close()\ncleaned_data = []\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\ninput_size = len(cleaned_data)\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]\n print(ans)\n break\n",
"step-4": "input_object = open(\"input.txt\", \"r\")\ninput_data = input_object.readlines()\ninput_object.close()\ncleaned_data = []\n\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\ninput_size = len(cleaned_data)\n\n\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i]*cleaned_data[j]*cleaned_data[k]\n print(ans)\n break",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.http import response
from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import User
from .serializers import UserSerializer,UserCreationSerialier,UserEditionSerializer
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
class Users(APIView):
    """List every user that has not been soft-deleted."""

    # permission_classes = [IsAuthenticated]

    def get(self, request):
        """Return all active users serialized as JSON with HTTP 200."""
        active = User.objects.filter(is_removed=False)
        payload = UserSerializer(instance=active, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
class UserDetail(APIView):
    """Retrieve a single active user by primary key."""

    def get(self, request, pk):
        """Return the serialized user, or a 422 payload when absent."""
        target = User.objects.filter(pk=pk, is_removed=False).first()
        if target is None:
            return Response(
                {'error': 'User Does Not Exists', 'success': False},
                status=status.HTTP_422_UNPROCESSABLE_ENTITY,
            )
        return Response(UserSerializer(instance=target).data,
                        status=status.HTTP_200_OK)
class CreateUser(APIView):
    """Register a new user account."""

    def post(self, request):
        """Validate the payload, reject duplicate emails, create the user."""
        serializer = UserCreationSerialier(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

        data = serializer.validated_data
        duplicate = User.objects.filter(
            email=data['email'], is_removed=False).first()
        if duplicate is not None:
            return Response(
                {'error': 'This email is Already Taken!', 'success': False},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Hash the password via set_password; never store plaintext.
        new_user = User(email=data['email'], full_name=data['full_name'])
        new_user.set_password(data['password'])
        new_user.save()
        return Response(UserSerializer(instance=new_user).data,
                        status=status.HTTP_201_CREATED)
class EditUser(APIView):
    """Update an existing active user."""

    def put(self, request, pk):
        """Apply the edition serializer to the user, or report why not."""
        target = User.objects.filter(pk=pk, is_removed=False).first()
        if target is None:
            return Response(
                {'error': 'User Does Not Exists', 'success': False},
                status=status.HTTP_422_UNPROCESSABLE_ENTITY,
            )
        serializer = UserEditionSerializer(data=request.data, instance=target)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        updated = serializer.save()
        return Response(UserSerializer(instance=updated).data,
                        status=status.HTTP_202_ACCEPTED)
class RemoveUser(APIView):
    """Soft-delete a user by flagging it as removed."""

    def delete(self, request, pk):
        """Mark the user removed; the row itself is kept in the database."""
        target = User.objects.filter(pk=pk, is_removed=False).first()
        if target is None:
            return Response(
                {'error': 'User Does Not Exists', 'success': False},
                status=status.HTTP_422_UNPROCESSABLE_ENTITY,
            )
        target.is_removed = True
        target.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class GetUserFromToken(APIView):
    """Resolve the authenticated user from the request's credentials."""

    permission_classes = [IsAuthenticated]

    def get(self, request):
        """Serialize and return the user attached to the request."""
        return Response(UserSerializer(instance=request.user).data)
|
normal
|
{
"blob_id": "dff454cbde985a08b34377b80dd8e3b22f1cc13a",
"index": 3948,
"step-1": "<mask token>\n\n\nclass CreateUser(APIView):\n <mask token>\n\n\nclass EditUser(APIView):\n\n def put(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serialized_user = UserEditionSerializer(data=request.data, instance\n =user)\n if serialized_user.is_valid():\n user = serialized_user.save()\n return Response(UserSerializer(instance=user).data, status=\n status.HTTP_202_ACCEPTED)\n return Response(serialized_user.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass RemoveUser(APIView):\n\n def delete(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n user.is_removed = True\n user.save()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass GetUserFromToken(APIView):\n permission_classes = [IsAuthenticated]\n\n def get(self, request):\n user = request.user\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data)\n",
"step-2": "<mask token>\n\n\nclass UserDetail(APIView):\n <mask token>\n\n\nclass CreateUser(APIView):\n\n def post(self, request):\n serialized_data = UserCreationSerialier(data=request.data)\n if serialized_data.is_valid():\n data = serialized_data.validated_data\n user = User.objects.filter(email=data['email'], is_removed=False\n ).first()\n if user is not None:\n return Response({'error': 'This email is Already Taken!',\n 'success': False}, status=status.HTTP_400_BAD_REQUEST)\n user = User(email=data['email'], full_name=data['full_name'])\n user.set_password(data['password'])\n user.save()\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data, status=status.\n HTTP_201_CREATED)\n return Response(serialized_data.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass EditUser(APIView):\n\n def put(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serialized_user = UserEditionSerializer(data=request.data, instance\n =user)\n if serialized_user.is_valid():\n user = serialized_user.save()\n return Response(UserSerializer(instance=user).data, status=\n status.HTTP_202_ACCEPTED)\n return Response(serialized_user.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass RemoveUser(APIView):\n\n def delete(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n user.is_removed = True\n user.save()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass GetUserFromToken(APIView):\n permission_classes = [IsAuthenticated]\n\n def get(self, request):\n user = request.user\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data)\n",
"step-3": "<mask token>\n\n\nclass UserDetail(APIView):\n\n def get(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serailized_data = UserSerializer(instance=user)\n return Response(serailized_data.data, status=status.HTTP_200_OK)\n\n\nclass CreateUser(APIView):\n\n def post(self, request):\n serialized_data = UserCreationSerialier(data=request.data)\n if serialized_data.is_valid():\n data = serialized_data.validated_data\n user = User.objects.filter(email=data['email'], is_removed=False\n ).first()\n if user is not None:\n return Response({'error': 'This email is Already Taken!',\n 'success': False}, status=status.HTTP_400_BAD_REQUEST)\n user = User(email=data['email'], full_name=data['full_name'])\n user.set_password(data['password'])\n user.save()\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data, status=status.\n HTTP_201_CREATED)\n return Response(serialized_data.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass EditUser(APIView):\n\n def put(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serialized_user = UserEditionSerializer(data=request.data, instance\n =user)\n if serialized_user.is_valid():\n user = serialized_user.save()\n return Response(UserSerializer(instance=user).data, status=\n status.HTTP_202_ACCEPTED)\n return Response(serialized_user.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass RemoveUser(APIView):\n\n def delete(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n 
user.is_removed = True\n user.save()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass GetUserFromToken(APIView):\n permission_classes = [IsAuthenticated]\n\n def get(self, request):\n user = request.user\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data)\n",
"step-4": "<mask token>\n\n\nclass Users(APIView):\n\n def get(self, request):\n users = User.objects.filter(is_removed=False)\n serialized_users = UserSerializer(instance=users, many=True)\n return Response(serialized_users.data, status=status.HTTP_200_OK)\n\n\nclass UserDetail(APIView):\n\n def get(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serailized_data = UserSerializer(instance=user)\n return Response(serailized_data.data, status=status.HTTP_200_OK)\n\n\nclass CreateUser(APIView):\n\n def post(self, request):\n serialized_data = UserCreationSerialier(data=request.data)\n if serialized_data.is_valid():\n data = serialized_data.validated_data\n user = User.objects.filter(email=data['email'], is_removed=False\n ).first()\n if user is not None:\n return Response({'error': 'This email is Already Taken!',\n 'success': False}, status=status.HTTP_400_BAD_REQUEST)\n user = User(email=data['email'], full_name=data['full_name'])\n user.set_password(data['password'])\n user.save()\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data, status=status.\n HTTP_201_CREATED)\n return Response(serialized_data.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass EditUser(APIView):\n\n def put(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serialized_user = UserEditionSerializer(data=request.data, instance\n =user)\n if serialized_user.is_valid():\n user = serialized_user.save()\n return Response(UserSerializer(instance=user).data, status=\n status.HTTP_202_ACCEPTED)\n return Response(serialized_user.errors, status=status.\n HTTP_400_BAD_REQUEST)\n\n\nclass RemoveUser(APIView):\n\n def 
delete(self, request, pk):\n user = User.objects.filter(pk=pk, is_removed=False).first()\n if user is None:\n return Response({'error': 'User Does Not Exists', 'success': \n False}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n user.is_removed = True\n user.save()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass GetUserFromToken(APIView):\n permission_classes = [IsAuthenticated]\n\n def get(self, request):\n user = request.user\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data)\n",
"step-5": "from django.http import response\nfrom django.shortcuts import render\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom .models import User\nfrom .serializers import UserSerializer,UserCreationSerialier,UserEditionSerializer\nfrom rest_framework import status\nfrom rest_framework.permissions import IsAuthenticated\n\nclass Users(APIView):\n # permission_classes = [IsAuthenticated]\n\n def get(self,request):\n users = User.objects.filter(is_removed=False)\n serialized_users = UserSerializer(instance=users,many=True)\n return Response(serialized_users.data,status=status.HTTP_200_OK)\n \n\nclass UserDetail(APIView):\n\n def get(self,request,pk):\n user = User.objects.filter(pk=pk,is_removed=False).first()\n if user is None:\n return Response({'error':'User Does Not Exists','success':False},status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n serailized_data = UserSerializer(instance=user)\n return Response(serailized_data.data,status=status.HTTP_200_OK)\n\n\nclass CreateUser(APIView):\n \n def post(self,request):\n serialized_data = UserCreationSerialier(data=request.data)\n if serialized_data.is_valid():\n data = serialized_data.validated_data\n user = User.objects.filter(email=data['email'],is_removed=False).first()\n if user is not None:\n return Response({'error':'This email is Already Taken!','success':False},status=status.HTTP_400_BAD_REQUEST)\n \n user = User(email=data['email'],full_name=data['full_name'])\n user.set_password(data['password'])\n user.save()\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data,status=status.HTTP_201_CREATED)\n return Response(serialized_data.errors,status=status.HTTP_400_BAD_REQUEST)\n\n\nclass EditUser(APIView):\n \n def put(self,request,pk):\n user = User.objects.filter(pk=pk,is_removed=False).first()\n if user is None:\n return Response({'error':'User Does Not Exists','success':False},status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n 
serialized_user = UserEditionSerializer(data=request.data,instance=user)\n if serialized_user.is_valid():\n user = serialized_user.save()\n return Response(UserSerializer(instance=user).data,status=status.HTTP_202_ACCEPTED)\n return Response(serialized_user.errors,status=status.HTTP_400_BAD_REQUEST)\n\n\nclass RemoveUser(APIView):\n\n def delete(self,request,pk):\n user = User.objects.filter(pk=pk,is_removed=False).first()\n if user is None:\n return Response({'error':'User Does Not Exists','success':False},status=status.HTTP_422_UNPROCESSABLE_ENTITY)\n user.is_removed = True\n user.save()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\n\nclass GetUserFromToken(APIView):\n permission_classes = [IsAuthenticated]\n \n def get(self,request):\n user = request.user\n serialized_user = UserSerializer(instance=user)\n return Response(serialized_user.data)",
"step-ids": [
8,
10,
11,
13,
15
]
}
|
[
8,
10,
11,
13,
15
] |
# Pairs of (filename, occurrence count). Kept as an ordered list of
# tuples — order is preserved and duplicate filenames would be allowed,
# which a dict could not represent.
ii = [('CookGHP3.py', 1), ('AubePRP2.py', 1), ('WilkJMC3.py', 1), (
    'LeakWTI3.py', 1), ('AubePRP.py', 2), ('GellWPT.py', 2), ('AdamWEP.py',
    1), ('KiddJAE.py', 1), ('CoolWHM.py', 1), ('WadeJEB.py', 1), (
    'SoutRD.py', 2), ('WheeJPT.py', 1), ('HowiWRL2.py', 1), ('WilkJMC.py',
    1), ('WestJIT.py', 1), ('DequTKM.py', 2), ('StorJCC.py', 1), (
    'DibdTRL.py', 1), ('TaylIF.py', 1), ('ThomWEC.py', 1)]
|
normal
|
{
"blob_id": "dce496c9ae6605e95ffbbb2885ec15b19fb756ef",
"index": 2799,
"step-1": "<mask token>\n",
"step-2": "ii = [('CookGHP3.py', 1), ('AubePRP2.py', 1), ('WilkJMC3.py', 1), (\n 'LeakWTI3.py', 1), ('AubePRP.py', 2), ('GellWPT.py', 2), ('AdamWEP.py',\n 1), ('KiddJAE.py', 1), ('CoolWHM.py', 1), ('WadeJEB.py', 1), (\n 'SoutRD.py', 2), ('WheeJPT.py', 1), ('HowiWRL2.py', 1), ('WilkJMC.py', \n 1), ('WestJIT.py', 1), ('DequTKM.py', 2), ('StorJCC.py', 1), (\n 'DibdTRL.py', 1), ('TaylIF.py', 1), ('ThomWEC.py', 1)]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from django.db import models
from datetime import datetime
class Folder(models.Model):
	"""A named folder/category that groups bookmarks."""
	# Display label; "misc" acts as the catch-all default category.
	folder = models.CharField(max_length=200, default = "misc")
	# Denormalized bookmark count; presumably maintained by the views —
	# TODO confirm nothing expects the database to keep it in sync.
	num_of_entries = models.IntegerField(default=0)
	def __str__(self):
		return self.folder
	class Meta:
		verbose_name_plural = "Folders/Categories"
class Bookmark(models.Model):
	"""A saved URL filed under a :class:`Folder`."""
	name = models.CharField(max_length=200)
	url = models.CharField(max_length=400)
	# Deleting a folder deletes its bookmarks.
	folder = models.ForeignKey(Folder, on_delete=models.CASCADE)
	# Pass the callable, not its result: ``default=datetime.now()`` is
	# evaluated once at import time, stamping every bookmark with the
	# moment the server started instead of its own creation time.
	date_of_creation = models.DateTimeField(default=datetime.now)
	notes = models.TextField()

	def __str__(self):
		return self.name
|
normal
|
{
"blob_id": "ca3cdbd5d5d30be4f40925366994c3ea9d9b9614",
"index": 3195,
"step-1": "<mask token>\n\n\nclass Folder(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name_plural = 'Folders/Categories'\n\n\nclass Bookmark(models.Model):\n name = models.CharField(max_length=200)\n url = models.CharField(max_length=400)\n folder = models.ForeignKey(Folder, on_delete=models.CASCADE)\n date_of_creation = models.DateTimeField(default=datetime.now())\n notes = models.TextField()\n\n def __str__(self):\n return self.name\n",
"step-2": "<mask token>\n\n\nclass Folder(models.Model):\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.folder\n\n\n class Meta:\n verbose_name_plural = 'Folders/Categories'\n\n\nclass Bookmark(models.Model):\n name = models.CharField(max_length=200)\n url = models.CharField(max_length=400)\n folder = models.ForeignKey(Folder, on_delete=models.CASCADE)\n date_of_creation = models.DateTimeField(default=datetime.now())\n notes = models.TextField()\n\n def __str__(self):\n return self.name\n",
"step-3": "<mask token>\n\n\nclass Folder(models.Model):\n folder = models.CharField(max_length=200, default='misc')\n num_of_entries = models.IntegerField(default=0)\n\n def __str__(self):\n return self.folder\n\n\n class Meta:\n verbose_name_plural = 'Folders/Categories'\n\n\nclass Bookmark(models.Model):\n name = models.CharField(max_length=200)\n url = models.CharField(max_length=400)\n folder = models.ForeignKey(Folder, on_delete=models.CASCADE)\n date_of_creation = models.DateTimeField(default=datetime.now())\n notes = models.TextField()\n\n def __str__(self):\n return self.name\n",
"step-4": "from django.db import models\nfrom datetime import datetime\n\n\nclass Folder(models.Model):\n folder = models.CharField(max_length=200, default='misc')\n num_of_entries = models.IntegerField(default=0)\n\n def __str__(self):\n return self.folder\n\n\n class Meta:\n verbose_name_plural = 'Folders/Categories'\n\n\nclass Bookmark(models.Model):\n name = models.CharField(max_length=200)\n url = models.CharField(max_length=400)\n folder = models.ForeignKey(Folder, on_delete=models.CASCADE)\n date_of_creation = models.DateTimeField(default=datetime.now())\n notes = models.TextField()\n\n def __str__(self):\n return self.name\n",
"step-5": "from django.db import models\nfrom datetime import datetime\n\nclass Folder(models.Model):\n\tfolder = models.CharField(max_length=200, default = \"misc\")\n\tnum_of_entries = models.IntegerField(default=0)\n\n\tdef __str__(self):\n\t\treturn self.folder\n\n\tclass Meta:\n\t\tverbose_name_plural = \"Folders/Categories\"\n\nclass Bookmark(models.Model):\n\tname = models.CharField(max_length=200)\n\turl = models.CharField(max_length=400)\n\tfolder = models.ForeignKey(Folder, on_delete=models.CASCADE)\n\tdate_of_creation = models.DateTimeField(default=datetime.now())\n\tnotes = models.TextField()\n\n\tdef __str__(self):\n\t\treturn self.name\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(N):
a = A[i]
if a < 400:
C[0] += 1
elif a < 800:
C[1] += 1
elif a < 1200:
C[2] += 1
elif a < 1600:
C[3] += 1
elif a < 2000:
C[4] += 1
elif a < 2400:
C[5] += 1
elif a < 2800:
C[6] += 1
elif a < 3200:
C[7] += 1
else:
C[8] += 1
<|reserved_special_token_0|>
for i in range(8):
if C[i] > 0:
cmin += 1
if cmin == 0:
cmin = 1
cmax = C[8]
else:
cmax = cmin + C[8]
print(cmin, cmax)
<|reserved_special_token_1|>
C = {i: (0) for i in range(9)}
N = int(input())
A = list(map(int, input().split()))
for i in range(N):
a = A[i]
if a < 400:
C[0] += 1
elif a < 800:
C[1] += 1
elif a < 1200:
C[2] += 1
elif a < 1600:
C[3] += 1
elif a < 2000:
C[4] += 1
elif a < 2400:
C[5] += 1
elif a < 2800:
C[6] += 1
elif a < 3200:
C[7] += 1
else:
C[8] += 1
cmin = 0
for i in range(8):
if C[i] > 0:
cmin += 1
if cmin == 0:
cmin = 1
cmax = C[8]
else:
cmax = cmin + C[8]
print(cmin, cmax)
<|reserved_special_token_1|>
# Competitive-programming task: ratings below 3200 fix one of eight
# colours (one per 400-point band); contestants at 3200+ may pick any
# colour. Print the minimum and maximum number of distinct colours.
n = int(input())
ratings = list(map(int, input().split()))

fixed_bands = set()   # distinct 400-point bands seen below 3200
free_choosers = 0     # contestants rated 3200 or higher

for r in ratings[:n]:
    if r >= 3200:
        free_choosers += 1
    else:
        # max(0, ...) folds any sub-zero rating into band 0, matching
        # the original "a < 400" bucket.
        fixed_bands.add(max(0, r // 400))

low = len(fixed_bands)
if low == 0:
    # Everyone chooses freely: at least one colour, at most one each.
    print(1, free_choosers)
else:
    print(low, low + free_choosers)
|
flexible
|
{
"blob_id": "a1ca6c258298feda99b568f236611c1c496e3262",
"index": 8993,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n a = A[i]\n if a < 400:\n C[0] += 1\n elif a < 800:\n C[1] += 1\n elif a < 1200:\n C[2] += 1\n elif a < 1600:\n C[3] += 1\n elif a < 2000:\n C[4] += 1\n elif a < 2400:\n C[5] += 1\n elif a < 2800:\n C[6] += 1\n elif a < 3200:\n C[7] += 1\n else:\n C[8] += 1\n<mask token>\nfor i in range(8):\n if C[i] > 0:\n cmin += 1\nif cmin == 0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin + C[8]\nprint(cmin, cmax)\n",
"step-3": "C = {i: (0) for i in range(9)}\nN = int(input())\nA = list(map(int, input().split()))\nfor i in range(N):\n a = A[i]\n if a < 400:\n C[0] += 1\n elif a < 800:\n C[1] += 1\n elif a < 1200:\n C[2] += 1\n elif a < 1600:\n C[3] += 1\n elif a < 2000:\n C[4] += 1\n elif a < 2400:\n C[5] += 1\n elif a < 2800:\n C[6] += 1\n elif a < 3200:\n C[7] += 1\n else:\n C[8] += 1\ncmin = 0\nfor i in range(8):\n if C[i] > 0:\n cmin += 1\nif cmin == 0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin + C[8]\nprint(cmin, cmax)\n",
"step-4": "C = {i:0 for i in range(9)}\nN = int(input())\nA = list(map(int,input().split()))\nfor i in range(N):\n a = A[i]\n if a<400:\n C[0] += 1\n elif a<800:\n C[1] += 1\n elif a<1200:\n C[2] += 1\n elif a<1600:\n C[3] += 1\n elif a<2000:\n C[4] += 1\n elif a<2400:\n C[5] += 1\n elif a<2800:\n C[6] += 1\n elif a<3200:\n C[7] += 1\n else:\n C[8] += 1\ncmin = 0\nfor i in range(8):\n if C[i]>0:\n cmin += 1\nif cmin==0:\n cmin = 1\n cmax = C[8]\nelse:\n cmax = cmin+C[8]\nprint(cmin,cmax)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This script reads in video information frame-by-frame, and then calculates
visual edge information for each frame, storing the information in a vector.
This can be averaged within TRs in an fMRI analysis to 'regress out'
high-frequency visual information in the video.
@author: zreagh
"""
import cv2
import numpy as np
# Can uncomment this pyplot import for frame plotting - see below
#from matplotlib import pyplot as plt
# Define the paths to your video file and eventual JPEG image files
vidpath = '/Users/zreagh/Desktop/edge_vector_analysis/test.mov'
imgpath = '/Users/zreagh/Desktop/edge_vector_analysis/'
edge_outfile = open('edge_outfile.csv','w')
edge_outfile.write('frame,prop_edge_pix\n')
# Function to extract video info including frames
def AnalyzeFrames(vidpath):
print("\nGetting video info & writing out image files for each frame...\n")
# Path to video file
vidObj = cv2.VideoCapture(vidpath)
# Get FPS
fps = vidObj.get(cv2.CAP_PROP_FPS)
print("Frames per second: {0}\n".format(fps))
# Used as counter variable
count = 0
# Create an empty list to be filled with image names for calculations below
jpeglist = []
# Checks whether frames were extracted
success = 1
# Make sure vidObj call is read
while success:
# Function extract frames
success, frame = vidObj.read()
# Saves the frames indexed with frame number as jpeg frames
cv2.imwrite("frame{0}.jpg".format(count), frame)
# Iteratively fill our list to be called in frame analyses below
jpeglist.append("frame{0}.jpg".format(count))
# Tick up our counter with each frame
count += 1
# Drop the video from the buffer
vidObj.release()
# Print some useful info to the console
print('Total number of frames: {0}\n'.format(count))
print('Video duration in seconds: {0}\n'.format(round(count/fps)))
# Loop through the images and do edge calculations
# NOTE: I am constraining to range 0:193 here because my 193rd image is
# empty for some reason. You can probably delete this for your purposes
# so that it reads "for jpeg in jpeglist:" instead!
print("Analyzing visual edges and writing output file...\n")
for jpeg in jpeglist[0:193]:
img = cv2.imread(imgpath + jpeg,0)
edges = cv2.Canny(img,100,200)
# Get the total number of pixels for each image
n_pix = np.sum(edges > -1)
# Get the proportion of white (edge) pixels for each image
n_white_pix = np.sum(edges == 255)
# Calculate the proportion of edge pixels (white/total) for each image
prop_edge_pix = float(n_white_pix/n_pix)
edge_outfile.write('{0},{1}\n'.format(jpeg,prop_edge_pix))
# Prints out relevant calculations above for each image - uncomment to
# debug or peek under the hood
# print('\nFrame image:', jpeg)
# print('Total number of pixels:', n_pix)
# print('Number of white pixels:', n_white_pix)
# print('Proportion of edge pixels:', prop_edge_pix)
# Plot each raw frame and edge frame side-by-side - uncomment to
# peek under the hood (will slow things down a bunch FYI)
# plt.subplot(121),plt.imshow(img,cmap = 'gray')
# plt.title('Original Image'), plt.xticks([]), plt.yticks([])
# plt.subplot(122),plt.imshow(edges,cmap = 'gray')
# plt.title('Edge Image'), plt.xticks([]), plt.yticks([])
# plt.show()
print("Done! Check your output file: edge_outfile.csv")
# Do the damn thing
if __name__ == '__main__':
# Calling the function
AnalyzeFrames(vidpath)
|
normal
|
{
"blob_id": "d70d3d8eef711441ac89c2d98c72a5f95e0ab20d",
"index": 5261,
"step-1": "<mask token>\n\n\ndef AnalyzeFrames(vidpath):\n print('\\nGetting video info & writing out image files for each frame...\\n')\n vidObj = cv2.VideoCapture(vidpath)\n fps = vidObj.get(cv2.CAP_PROP_FPS)\n print('Frames per second: {0}\\n'.format(fps))\n count = 0\n jpeglist = []\n success = 1\n while success:\n success, frame = vidObj.read()\n cv2.imwrite('frame{0}.jpg'.format(count), frame)\n jpeglist.append('frame{0}.jpg'.format(count))\n count += 1\n vidObj.release()\n print('Total number of frames: {0}\\n'.format(count))\n print('Video duration in seconds: {0}\\n'.format(round(count / fps)))\n print('Analyzing visual edges and writing output file...\\n')\n for jpeg in jpeglist[0:193]:\n img = cv2.imread(imgpath + jpeg, 0)\n edges = cv2.Canny(img, 100, 200)\n n_pix = np.sum(edges > -1)\n n_white_pix = np.sum(edges == 255)\n prop_edge_pix = float(n_white_pix / n_pix)\n edge_outfile.write('{0},{1}\\n'.format(jpeg, prop_edge_pix))\n print('Done! Check your output file: edge_outfile.csv')\n\n\n<mask token>\n",
"step-2": "<mask token>\nedge_outfile.write('frame,prop_edge_pix\\n')\n\n\ndef AnalyzeFrames(vidpath):\n print('\\nGetting video info & writing out image files for each frame...\\n')\n vidObj = cv2.VideoCapture(vidpath)\n fps = vidObj.get(cv2.CAP_PROP_FPS)\n print('Frames per second: {0}\\n'.format(fps))\n count = 0\n jpeglist = []\n success = 1\n while success:\n success, frame = vidObj.read()\n cv2.imwrite('frame{0}.jpg'.format(count), frame)\n jpeglist.append('frame{0}.jpg'.format(count))\n count += 1\n vidObj.release()\n print('Total number of frames: {0}\\n'.format(count))\n print('Video duration in seconds: {0}\\n'.format(round(count / fps)))\n print('Analyzing visual edges and writing output file...\\n')\n for jpeg in jpeglist[0:193]:\n img = cv2.imread(imgpath + jpeg, 0)\n edges = cv2.Canny(img, 100, 200)\n n_pix = np.sum(edges > -1)\n n_white_pix = np.sum(edges == 255)\n prop_edge_pix = float(n_white_pix / n_pix)\n edge_outfile.write('{0},{1}\\n'.format(jpeg, prop_edge_pix))\n print('Done! Check your output file: edge_outfile.csv')\n\n\nif __name__ == '__main__':\n AnalyzeFrames(vidpath)\n",
"step-3": "<mask token>\nvidpath = '/Users/zreagh/Desktop/edge_vector_analysis/test.mov'\nimgpath = '/Users/zreagh/Desktop/edge_vector_analysis/'\nedge_outfile = open('edge_outfile.csv', 'w')\nedge_outfile.write('frame,prop_edge_pix\\n')\n\n\ndef AnalyzeFrames(vidpath):\n print('\\nGetting video info & writing out image files for each frame...\\n')\n vidObj = cv2.VideoCapture(vidpath)\n fps = vidObj.get(cv2.CAP_PROP_FPS)\n print('Frames per second: {0}\\n'.format(fps))\n count = 0\n jpeglist = []\n success = 1\n while success:\n success, frame = vidObj.read()\n cv2.imwrite('frame{0}.jpg'.format(count), frame)\n jpeglist.append('frame{0}.jpg'.format(count))\n count += 1\n vidObj.release()\n print('Total number of frames: {0}\\n'.format(count))\n print('Video duration in seconds: {0}\\n'.format(round(count / fps)))\n print('Analyzing visual edges and writing output file...\\n')\n for jpeg in jpeglist[0:193]:\n img = cv2.imread(imgpath + jpeg, 0)\n edges = cv2.Canny(img, 100, 200)\n n_pix = np.sum(edges > -1)\n n_white_pix = np.sum(edges == 255)\n prop_edge_pix = float(n_white_pix / n_pix)\n edge_outfile.write('{0},{1}\\n'.format(jpeg, prop_edge_pix))\n print('Done! Check your output file: edge_outfile.csv')\n\n\nif __name__ == '__main__':\n AnalyzeFrames(vidpath)\n",
"step-4": "<mask token>\nimport cv2\nimport numpy as np\nvidpath = '/Users/zreagh/Desktop/edge_vector_analysis/test.mov'\nimgpath = '/Users/zreagh/Desktop/edge_vector_analysis/'\nedge_outfile = open('edge_outfile.csv', 'w')\nedge_outfile.write('frame,prop_edge_pix\\n')\n\n\ndef AnalyzeFrames(vidpath):\n print('\\nGetting video info & writing out image files for each frame...\\n')\n vidObj = cv2.VideoCapture(vidpath)\n fps = vidObj.get(cv2.CAP_PROP_FPS)\n print('Frames per second: {0}\\n'.format(fps))\n count = 0\n jpeglist = []\n success = 1\n while success:\n success, frame = vidObj.read()\n cv2.imwrite('frame{0}.jpg'.format(count), frame)\n jpeglist.append('frame{0}.jpg'.format(count))\n count += 1\n vidObj.release()\n print('Total number of frames: {0}\\n'.format(count))\n print('Video duration in seconds: {0}\\n'.format(round(count / fps)))\n print('Analyzing visual edges and writing output file...\\n')\n for jpeg in jpeglist[0:193]:\n img = cv2.imread(imgpath + jpeg, 0)\n edges = cv2.Canny(img, 100, 200)\n n_pix = np.sum(edges > -1)\n n_white_pix = np.sum(edges == 255)\n prop_edge_pix = float(n_white_pix / n_pix)\n edge_outfile.write('{0},{1}\\n'.format(jpeg, prop_edge_pix))\n print('Done! Check your output file: edge_outfile.csv')\n\n\nif __name__ == '__main__':\n AnalyzeFrames(vidpath)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nThis script reads in video information frame-by-frame, and then calculates\nvisual edge information for each frame, storing the information in a vector.\nThis can be averaged within TRs in an fMRI analysis to 'regress out'\nhigh-frequency visual information in the video.\n\n@author: zreagh\n\"\"\"\n\nimport cv2\nimport numpy as np\n\n# Can uncomment this pyplot import for frame plotting - see below\n#from matplotlib import pyplot as plt\n\n# Define the paths to your video file and eventual JPEG image files\nvidpath = '/Users/zreagh/Desktop/edge_vector_analysis/test.mov'\nimgpath = '/Users/zreagh/Desktop/edge_vector_analysis/'\n\nedge_outfile = open('edge_outfile.csv','w')\nedge_outfile.write('frame,prop_edge_pix\\n')\n \n# Function to extract video info including frames\ndef AnalyzeFrames(vidpath): \n \n print(\"\\nGetting video info & writing out image files for each frame...\\n\")\n \n # Path to video file \n vidObj = cv2.VideoCapture(vidpath) \n \n # Get FPS\n fps = vidObj.get(cv2.CAP_PROP_FPS)\n print(\"Frames per second: {0}\\n\".format(fps))\n \n # Used as counter variable \n count = 0\n \n # Create an empty list to be filled with image names for calculations below\n jpeglist = []\n \n # Checks whether frames were extracted \n success = 1\n \n # Make sure vidObj call is read\n while success: \n \n # Function extract frames \n success, frame = vidObj.read()\n \n # Saves the frames indexed with frame number as jpeg frames\n cv2.imwrite(\"frame{0}.jpg\".format(count), frame)\n \n # Iteratively fill our list to be called in frame analyses below\n jpeglist.append(\"frame{0}.jpg\".format(count))\n \n # Tick up our counter with each frame\n count += 1\n \n # Drop the video from the buffer\n vidObj.release()\n\n # Print some useful info to the console\n print('Total number of frames: {0}\\n'.format(count))\n print('Video duration in seconds: {0}\\n'.format(round(count/fps)))\n \n # Loop through the 
images and do edge calculations\n # NOTE: I am constraining to range 0:193 here because my 193rd image is\n # empty for some reason. You can probably delete this for your purposes\n # so that it reads \"for jpeg in jpeglist:\" instead!\n print(\"Analyzing visual edges and writing output file...\\n\")\n \n for jpeg in jpeglist[0:193]:\n \n img = cv2.imread(imgpath + jpeg,0)\n edges = cv2.Canny(img,100,200)\n \n # Get the total number of pixels for each image\n n_pix = np.sum(edges > -1)\n \n # Get the proportion of white (edge) pixels for each image\n n_white_pix = np.sum(edges == 255)\n \n # Calculate the proportion of edge pixels (white/total) for each image\n prop_edge_pix = float(n_white_pix/n_pix)\n \n edge_outfile.write('{0},{1}\\n'.format(jpeg,prop_edge_pix))\n\n # Prints out relevant calculations above for each image - uncomment to\n # debug or peek under the hood\n# print('\\nFrame image:', jpeg)\n# print('Total number of pixels:', n_pix)\n# print('Number of white pixels:', n_white_pix)\n# print('Proportion of edge pixels:', prop_edge_pix)\n \n # Plot each raw frame and edge frame side-by-side - uncomment to\n # peek under the hood (will slow things down a bunch FYI)\n# plt.subplot(121),plt.imshow(img,cmap = 'gray')\n# plt.title('Original Image'), plt.xticks([]), plt.yticks([])\n# plt.subplot(122),plt.imshow(edges,cmap = 'gray')\n# plt.title('Edge Image'), plt.xticks([]), plt.yticks([])\n# plt.show()\n\n print(\"Done! Check your output file: edge_outfile.csv\")\n \n# Do the damn thing\nif __name__ == '__main__': \n \n # Calling the function \n AnalyzeFrames(vidpath) ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
默认查询所有
> db.test1000.find()
{ "_id" : ObjectId("5c3559ab648171cce9135dd6"), "name" : "zhangdapeng" }
{ "_id" : ObjectId("5c3559af648171cce9135dd7"), "name" : "zhangdapeng1" }
{ "_id" : ObjectId("5c3559b2648171cce9135dd8"), "name" : "zhangdapeng2" }
{ "_id" : ObjectId("5c3559b4648171cce9135dd9"), "name" : "zhangdapeng3" }
查询匹配参数
> db.test1000.find({'name':'zhangdapeng'})
{ "_id" : ObjectId("5c3559ab648171cce9135dd6"), "name" : "zhangdapeng" }
>
"""
"""
小于$lt
小于等于$lte
大于$gt
大于等于$gte
不等于$ne
查询年龄小于等于18岁的
> db.test1000.find({age:{$lte:18}})
{ "_id" : ObjectId("5c355a61648171cce9135dda"), "name" : "zhangdapeng3", "age" : 18 }
{ "_id" : ObjectId("5c355a69648171cce9135ddc"), "name" : "zhangdapeng3", "age" : 17 }
>
查询年龄大于等于18岁的
> db.test1000.find({age:{$gte:18}})
{ "_id" : ObjectId("5c355a61648171cce9135dda"), "name" : "zhangdapeng3", "age" : 18 }
{ "_id" : ObjectId("5c355a65648171cce9135ddb"), "name" : "zhangdapeng3", "age" : 19 }
范围 $in $nin不在某个范围类
> db.test1000.find({age:{$in:[17,18,19]}})
{ "_id" : ObjectId("5c355a61648171cce9135dda"), "name" : "zhangdapeng3", "age" : 18 }
{ "_id" : ObjectId("5c355a65648171cce9135ddb"), "name" : "zhangdapeng3", "age" : 19 }
{ "_id" : ObjectId("5c355a69648171cce9135ddc"), "name" : "zhangdapeng3", "age" : 17 }
逻辑查询
并且关系直接用,逗号
或关系$or
> db.test1000.find({$or:[{'age':18},{'age':19}]})
{ "_id" : ObjectId("5c355a61648171cce9135dda"), "name" : "zhangdapeng3", "age" : 18 }
{ "_id" : ObjectId("5c355a65648171cce9135ddb"), "name" : "zhangdapeng3", "age" : 19 }
>
正则表达式
直接用两个/正则表达式就行/
> db.test1000.find({'name':/zhangdapeng*/})
{ "_id" : ObjectId("5c3559ab648171cce9135dd6"), "name" : "zhangdapeng" }
{ "_id" : ObjectId("5c3559af648171cce9135dd7"), "name" : "zhangdapeng1" }
{ "_id" : ObjectId("5c3559b2648171cce9135dd8"), "name" : "zhangdapeng2" }
{ "_id" : ObjectId("5c3559b4648171cce9135dd9"), "name" : "zhangdapeng3" }
{ "_id" : ObjectId("5c355a61648171cce9135dda"), "name" : "zhangdapeng3", "age" : 18 }
{ "_id" : ObjectId("5c355a65648171cce9135ddb"), "name" : "zhangdapeng3", "age" : 19 }
{ "_id" : ObjectId("5c355a69648171cce9135ddc"), "name" : "zhangdapeng3", "age" : 17 }
>
限制内容-输出控制
find().limit(数字)
find().skip(数字)
同时使用可以实现翻页
find().skip(5).limit(20)
自定义查询
db.stu.find({
$where:function(){
return this.age>30;
}
})
"""
|
normal
|
{
"blob_id": "d8e0198244c3df77fa0258cc97a55042e36d056f",
"index": 7756,
"step-1": "<mask token>\n",
"step-2": "\"\"\"\n默认查询所有\n > db.test1000.find()\n { \"_id\" : ObjectId(\"5c3559ab648171cce9135dd6\"), \"name\" : \"zhangdapeng\" }\n { \"_id\" : ObjectId(\"5c3559af648171cce9135dd7\"), \"name\" : \"zhangdapeng1\" }\n { \"_id\" : ObjectId(\"5c3559b2648171cce9135dd8\"), \"name\" : \"zhangdapeng2\" }\n { \"_id\" : ObjectId(\"5c3559b4648171cce9135dd9\"), \"name\" : \"zhangdapeng3\" }\n\n查询匹配参数\n > db.test1000.find({'name':'zhangdapeng'})\n { \"_id\" : ObjectId(\"5c3559ab648171cce9135dd6\"), \"name\" : \"zhangdapeng\" }\n >\n\n\"\"\"\n\n\"\"\"\n小于$lt\n小于等于$lte\n大于$gt\n大于等于$gte\n不等于$ne\n\n查询年龄小于等于18岁的\n > db.test1000.find({age:{$lte:18}})\n { \"_id\" : ObjectId(\"5c355a61648171cce9135dda\"), \"name\" : \"zhangdapeng3\", \"age\" : 18 }\n { \"_id\" : ObjectId(\"5c355a69648171cce9135ddc\"), \"name\" : \"zhangdapeng3\", \"age\" : 17 }\n > \n查询年龄大于等于18岁的\n > db.test1000.find({age:{$gte:18}})\n { \"_id\" : ObjectId(\"5c355a61648171cce9135dda\"), \"name\" : \"zhangdapeng3\", \"age\" : 18 }\n { \"_id\" : ObjectId(\"5c355a65648171cce9135ddb\"), \"name\" : \"zhangdapeng3\", \"age\" : 19 }\n\n范围 $in $nin不在某个范围类\n > db.test1000.find({age:{$in:[17,18,19]}})\n { \"_id\" : ObjectId(\"5c355a61648171cce9135dda\"), \"name\" : \"zhangdapeng3\", \"age\" : 18 }\n { \"_id\" : ObjectId(\"5c355a65648171cce9135ddb\"), \"name\" : \"zhangdapeng3\", \"age\" : 19 }\n { \"_id\" : ObjectId(\"5c355a69648171cce9135ddc\"), \"name\" : \"zhangdapeng3\", \"age\" : 17 }\n\n\n逻辑查询\n 并且关系直接用,逗号\n 或关系$or\n > db.test1000.find({$or:[{'age':18},{'age':19}]})\n { \"_id\" : ObjectId(\"5c355a61648171cce9135dda\"), \"name\" : \"zhangdapeng3\", \"age\" : 18 }\n { \"_id\" : ObjectId(\"5c355a65648171cce9135ddb\"), \"name\" : \"zhangdapeng3\", \"age\" : 19 }\n > \n正则表达式\n 直接用两个/正则表达式就行/ \n > db.test1000.find({'name':/zhangdapeng*/})\n { \"_id\" : ObjectId(\"5c3559ab648171cce9135dd6\"), \"name\" : \"zhangdapeng\" }\n { \"_id\" : ObjectId(\"5c3559af648171cce9135dd7\"), \"name\" : \"zhangdapeng1\" }\n { \"_id\" : 
ObjectId(\"5c3559b2648171cce9135dd8\"), \"name\" : \"zhangdapeng2\" }\n { \"_id\" : ObjectId(\"5c3559b4648171cce9135dd9\"), \"name\" : \"zhangdapeng3\" }\n { \"_id\" : ObjectId(\"5c355a61648171cce9135dda\"), \"name\" : \"zhangdapeng3\", \"age\" : 18 }\n { \"_id\" : ObjectId(\"5c355a65648171cce9135ddb\"), \"name\" : \"zhangdapeng3\", \"age\" : 19 }\n { \"_id\" : ObjectId(\"5c355a69648171cce9135ddc\"), \"name\" : \"zhangdapeng3\", \"age\" : 17 }\n > \n限制内容-输出控制\n find().limit(数字)\n find().skip(数字)\n 同时使用可以实现翻页\n find().skip(5).limit(20)\n \n自定义查询\n db.stu.find({\n $where:function(){\n return this.age>30;\n }\n })\n\"\"\"\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Base.metadata.create_all(bind=engine)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
SQLALCHEMY_DATABASE_URL = (
f'sqlite:///{Path(__name__).parent.absolute()}/sql_app.db')
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={
'check_same_thread': False})
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
<|reserved_special_token_0|>
Base.metadata.create_all(bind=engine)
<|reserved_special_token_1|>
from pathlib import Path
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, Session
SQLALCHEMY_DATABASE_URL = (
f'sqlite:///{Path(__name__).parent.absolute()}/sql_app.db')
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={
'check_same_thread': False})
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
from flashcards_core.database.algorithms.model import Algorithm
from flashcards_core.database.algorithm_params.model import AlgorithmParam
from flashcards_core.database.cards.model import Card
from flashcards_core.database.decks.model import Deck
from flashcards_core.database.faces.model import Face
from flashcards_core.database.facts.model import Fact
from flashcards_core.database.reviews.model import Review
from flashcards_core.database.tags.model import Tag
from flashcards_core.database.many_to_many.model import FaceFact, DeckTag, CardTag, FaceTag, FactTag
Base.metadata.create_all(bind=engine)
<|reserved_special_token_1|>
from pathlib import Path
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, Session
# SQLALCHEMY_DATABASE_URL = "postgresql://user:password@postgresserver/db"
SQLALCHEMY_DATABASE_URL = f"sqlite:///{Path(__name__).parent.absolute()}/sql_app.db"
engine = create_engine(
SQLALCHEMY_DATABASE_URL,
connect_args={"check_same_thread": False} # Needed only for SQLite
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# # FastAPI "Dependency" (used with Depends)
# def get_db():
# db = SessionLocal()
# try:
# yield db
# finally:
# db.close()
Base = declarative_base()
from flashcards_core.database.algorithms.model import Algorithm
from flashcards_core.database.algorithm_params.model import AlgorithmParam
from flashcards_core.database.cards.model import Card
from flashcards_core.database.decks.model import Deck
from flashcards_core.database.faces.model import Face
from flashcards_core.database.facts.model import Fact
from flashcards_core.database.reviews.model import Review
from flashcards_core.database.tags.model import Tag
from flashcards_core.database.many_to_many.model import FaceFact, DeckTag, CardTag, FaceTag, FactTag
# Create all the tables imported above
Base.metadata.create_all(bind=engine)
|
flexible
|
{
"blob_id": "0656c3e1d8f84cfb33c4531e41efb4a349d08aac",
"index": 6747,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBase.metadata.create_all(bind=engine)\n",
"step-3": "<mask token>\nSQLALCHEMY_DATABASE_URL = (\n f'sqlite:///{Path(__name__).parent.absolute()}/sql_app.db')\nengine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={\n 'check_same_thread': False})\nSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)\nBase = declarative_base()\n<mask token>\nBase.metadata.create_all(bind=engine)\n",
"step-4": "from pathlib import Path\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import sessionmaker, Session\nSQLALCHEMY_DATABASE_URL = (\n f'sqlite:///{Path(__name__).parent.absolute()}/sql_app.db')\nengine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={\n 'check_same_thread': False})\nSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)\nBase = declarative_base()\nfrom flashcards_core.database.algorithms.model import Algorithm\nfrom flashcards_core.database.algorithm_params.model import AlgorithmParam\nfrom flashcards_core.database.cards.model import Card\nfrom flashcards_core.database.decks.model import Deck\nfrom flashcards_core.database.faces.model import Face\nfrom flashcards_core.database.facts.model import Fact\nfrom flashcards_core.database.reviews.model import Review\nfrom flashcards_core.database.tags.model import Tag\nfrom flashcards_core.database.many_to_many.model import FaceFact, DeckTag, CardTag, FaceTag, FactTag\nBase.metadata.create_all(bind=engine)\n",
"step-5": "from pathlib import Path\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import sessionmaker, Session\n\n# SQLALCHEMY_DATABASE_URL = \"postgresql://user:password@postgresserver/db\"\nSQLALCHEMY_DATABASE_URL = f\"sqlite:///{Path(__name__).parent.absolute()}/sql_app.db\"\n\nengine = create_engine(\n SQLALCHEMY_DATABASE_URL, \n connect_args={\"check_same_thread\": False} # Needed only for SQLite\n)\n\nSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)\n\n# # FastAPI \"Dependency\" (used with Depends)\n# def get_db():\n# db = SessionLocal()\n# try:\n# yield db\n# finally:\n# db.close()\n\nBase = declarative_base()\n\nfrom flashcards_core.database.algorithms.model import Algorithm\nfrom flashcards_core.database.algorithm_params.model import AlgorithmParam\nfrom flashcards_core.database.cards.model import Card\nfrom flashcards_core.database.decks.model import Deck\nfrom flashcards_core.database.faces.model import Face\nfrom flashcards_core.database.facts.model import Fact\nfrom flashcards_core.database.reviews.model import Review\nfrom flashcards_core.database.tags.model import Tag\nfrom flashcards_core.database.many_to_many.model import FaceFact, DeckTag, CardTag, FaceTag, FactTag\n\n# Create all the tables imported above\nBase.metadata.create_all(bind=engine)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
np.random.seed(50)
<|reserved_special_token_0|>
rl.load_weight(load_file_name)
<|reserved_special_token_0|>
print(rl.w)
<|reserved_special_token_0|>
plt.scatter(x, track.y[0][:, 0], c='b', label='random')
plt.scatter(x, track.y[1][:, 0], c='b')
plt.scatter(x, track.y[2][:, 0], c='b')
plt.scatter(x, track.y[3][:, 0], c='b')
plt.scatter(x, track.y[4][:, 0], c='b')
plt.scatter(x, traj_init[0][:, 0], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('raw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 1], c='b', label='random')
plt.scatter(x, track.y[1][:, 1], c='b')
plt.scatter(x, track.y[2][:, 1], c='b')
plt.scatter(x, track.y[3][:, 1], c='b')
plt.scatter(x, track.y[4][:, 1], c='b')
plt.scatter(x, traj_init[0][:, 1], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('yaw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 2], c='b', label='random')
plt.scatter(x, track.y[1][:, 2], c='b')
plt.scatter(x, track.y[2][:, 2], c='b')
plt.scatter(x, track.y[3][:, 2], c='b')
plt.scatter(x, track.y[4][:, 2], c='b')
plt.scatter(x, traj_init[0][:, 2], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('pitch (rad)')
plt.legend(loc=4)
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
np.random.seed(50)
dmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])
dmp_goal = np.array([-1.50848603, 0.0591503, 1.44347592])
load_file_name = 'w_0_2_right_3_100_1000.0_0.01_4'
load_file_name_list = load_file_name.split('_')
ep = int(load_file_name_list[1])
numofball = int(load_file_name_list[2])
pour_arm = load_file_name_list[3]
n_dmps = int(load_file_name_list[4])
n_bfs = int(load_file_name_list[5])
decay = float(load_file_name_list[6])
dt = float(load_file_name_list[7])
rl = RLDMPs(n_dmps=n_dmps, n_bfs=n_bfs, decay=decay, y0=dmp_y0, goal=
dmp_goal, ay=np.ones(n_dmps) * 10.0, dt=dt)
rl.load_weight(load_file_name)
traj_init = rl.predict().y
track = rl.rollout()
print(rl.w)
x = np.linspace(0, 1, len(traj_init[0][:, 0]))
plt.scatter(x, track.y[0][:, 0], c='b', label='random')
plt.scatter(x, track.y[1][:, 0], c='b')
plt.scatter(x, track.y[2][:, 0], c='b')
plt.scatter(x, track.y[3][:, 0], c='b')
plt.scatter(x, track.y[4][:, 0], c='b')
plt.scatter(x, traj_init[0][:, 0], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('raw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 1], c='b', label='random')
plt.scatter(x, track.y[1][:, 1], c='b')
plt.scatter(x, track.y[2][:, 1], c='b')
plt.scatter(x, track.y[3][:, 1], c='b')
plt.scatter(x, track.y[4][:, 1], c='b')
plt.scatter(x, traj_init[0][:, 1], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('yaw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 2], c='b', label='random')
plt.scatter(x, track.y[1][:, 2], c='b')
plt.scatter(x, track.y[2][:, 2], c='b')
plt.scatter(x, track.y[3][:, 2], c='b')
plt.scatter(x, track.y[4][:, 2], c='b')
plt.scatter(x, traj_init[0][:, 2], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('pitch (rad)')
plt.legend(loc=4)
plt.show()
<|reserved_special_token_1|>
import numpy as np
from DMP.PIDMP import RLDMPs
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
np.random.seed(50)
dmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])
dmp_goal = np.array([-1.50848603, 0.0591503, 1.44347592])
load_file_name = 'w_0_2_right_3_100_1000.0_0.01_4'
load_file_name_list = load_file_name.split('_')
ep = int(load_file_name_list[1])
numofball = int(load_file_name_list[2])
pour_arm = load_file_name_list[3]
n_dmps = int(load_file_name_list[4])
n_bfs = int(load_file_name_list[5])
decay = float(load_file_name_list[6])
dt = float(load_file_name_list[7])
rl = RLDMPs(n_dmps=n_dmps, n_bfs=n_bfs, decay=decay, y0=dmp_y0, goal=
dmp_goal, ay=np.ones(n_dmps) * 10.0, dt=dt)
rl.load_weight(load_file_name)
traj_init = rl.predict().y
track = rl.rollout()
print(rl.w)
x = np.linspace(0, 1, len(traj_init[0][:, 0]))
plt.scatter(x, track.y[0][:, 0], c='b', label='random')
plt.scatter(x, track.y[1][:, 0], c='b')
plt.scatter(x, track.y[2][:, 0], c='b')
plt.scatter(x, track.y[3][:, 0], c='b')
plt.scatter(x, track.y[4][:, 0], c='b')
plt.scatter(x, traj_init[0][:, 0], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('raw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 1], c='b', label='random')
plt.scatter(x, track.y[1][:, 1], c='b')
plt.scatter(x, track.y[2][:, 1], c='b')
plt.scatter(x, track.y[3][:, 1], c='b')
plt.scatter(x, track.y[4][:, 1], c='b')
plt.scatter(x, traj_init[0][:, 1], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('yaw (rad)')
plt.legend(loc=4)
plt.show()
plt.scatter(x, track.y[0][:, 2], c='b', label='random')
plt.scatter(x, track.y[1][:, 2], c='b')
plt.scatter(x, track.y[2][:, 2], c='b')
plt.scatter(x, track.y[3][:, 2], c='b')
plt.scatter(x, track.y[4][:, 2], c='b')
plt.scatter(x, traj_init[0][:, 2], c='r', label='initial')
plt.xlabel('time(s)')
plt.ylabel('pitch (rad)')
plt.legend(loc=4)
plt.show()
<|reserved_special_token_1|>
#!/usr/bin/env python3
import numpy as np
from DMP.PIDMP import RLDMPs
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
np.random.seed(50)
dmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])
dmp_goal = np.array([-1.50848603, 0.0591503 , 1.44347592])
load_file_name = "w_0_2_right_3_100_1000.0_0.01_4"
#load_file_name = raw_input('file name: ')
load_file_name_list = load_file_name.split('_')
### learning ep
ep = int(load_file_name_list[1])
### pouring number of ball to the other tube
numofball = int(load_file_name_list[2])
### which arm do the pouring motion
pour_arm = load_file_name_list[3]
n_dmps = int(load_file_name_list[4])
n_bfs = int(load_file_name_list[5])
decay = float(load_file_name_list[6])
dt = float(load_file_name_list[7])
### initial DMP
rl = RLDMPs(n_dmps = n_dmps , n_bfs = n_bfs , decay = decay, y0 = dmp_y0 , goal = dmp_goal,ay=np.ones(n_dmps)*10.0,dt = dt)
rl.load_weight(load_file_name)
traj_init = rl.predict().y
track = rl.rollout()
print(rl.w)
x = np.linspace(0,1,len(traj_init[0][:,0]))
plt.scatter(x,track.y[0][:,0],c='b',label="random")
plt.scatter(x,track.y[1][:,0],c='b')
plt.scatter(x,track.y[2][:,0],c='b')
plt.scatter(x,track.y[3][:,0],c='b')
plt.scatter(x,track.y[4][:,0],c='b')
plt.scatter(x,traj_init[0][:,0],c='r',label="initial")
plt.xlabel("time(s)")
plt.ylabel("raw (rad)")
plt.legend(loc = 4)
plt.show()
plt.scatter(x,track.y[0][:,1],c='b',label="random")
plt.scatter(x,track.y[1][:,1],c='b')
plt.scatter(x,track.y[2][:,1],c='b')
plt.scatter(x,track.y[3][:,1],c='b')
plt.scatter(x,track.y[4][:,1],c='b')
plt.scatter(x,traj_init[0][:,1],c='r',label="initial")
plt.xlabel("time(s)")
plt.ylabel("yaw (rad)")
plt.legend(loc = 4)
plt.show()
plt.scatter(x,track.y[0][:,2],c='b',label="random")
plt.scatter(x,track.y[1][:,2],c='b')
plt.scatter(x,track.y[2][:,2],c='b')
plt.scatter(x,track.y[3][:,2],c='b')
plt.scatter(x,track.y[4][:,2],c='b')
plt.scatter(x,traj_init[0][:,2],c='r',label="initial")
plt.xlabel("time(s)")
plt.ylabel("pitch (rad)")
plt.legend(loc = 4)
plt.show()
|
flexible
|
{
"blob_id": "5e6bbb10ec82e566c749dd4d794eabd2e8f7a648",
"index": 4488,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(50)\n<mask token>\nrl.load_weight(load_file_name)\n<mask token>\nprint(rl.w)\n<mask token>\nplt.scatter(x, track.y[0][:, 0], c='b', label='random')\nplt.scatter(x, track.y[1][:, 0], c='b')\nplt.scatter(x, track.y[2][:, 0], c='b')\nplt.scatter(x, track.y[3][:, 0], c='b')\nplt.scatter(x, track.y[4][:, 0], c='b')\nplt.scatter(x, traj_init[0][:, 0], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('raw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 1], c='b', label='random')\nplt.scatter(x, track.y[1][:, 1], c='b')\nplt.scatter(x, track.y[2][:, 1], c='b')\nplt.scatter(x, track.y[3][:, 1], c='b')\nplt.scatter(x, track.y[4][:, 1], c='b')\nplt.scatter(x, traj_init[0][:, 1], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('yaw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 2], c='b', label='random')\nplt.scatter(x, track.y[1][:, 2], c='b')\nplt.scatter(x, track.y[2][:, 2], c='b')\nplt.scatter(x, track.y[3][:, 2], c='b')\nplt.scatter(x, track.y[4][:, 2], c='b')\nplt.scatter(x, traj_init[0][:, 2], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('pitch (rad)')\nplt.legend(loc=4)\nplt.show()\n",
"step-3": "<mask token>\nnp.random.seed(50)\ndmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])\ndmp_goal = np.array([-1.50848603, 0.0591503, 1.44347592])\nload_file_name = 'w_0_2_right_3_100_1000.0_0.01_4'\nload_file_name_list = load_file_name.split('_')\nep = int(load_file_name_list[1])\nnumofball = int(load_file_name_list[2])\npour_arm = load_file_name_list[3]\nn_dmps = int(load_file_name_list[4])\nn_bfs = int(load_file_name_list[5])\ndecay = float(load_file_name_list[6])\ndt = float(load_file_name_list[7])\nrl = RLDMPs(n_dmps=n_dmps, n_bfs=n_bfs, decay=decay, y0=dmp_y0, goal=\n dmp_goal, ay=np.ones(n_dmps) * 10.0, dt=dt)\nrl.load_weight(load_file_name)\ntraj_init = rl.predict().y\ntrack = rl.rollout()\nprint(rl.w)\nx = np.linspace(0, 1, len(traj_init[0][:, 0]))\nplt.scatter(x, track.y[0][:, 0], c='b', label='random')\nplt.scatter(x, track.y[1][:, 0], c='b')\nplt.scatter(x, track.y[2][:, 0], c='b')\nplt.scatter(x, track.y[3][:, 0], c='b')\nplt.scatter(x, track.y[4][:, 0], c='b')\nplt.scatter(x, traj_init[0][:, 0], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('raw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 1], c='b', label='random')\nplt.scatter(x, track.y[1][:, 1], c='b')\nplt.scatter(x, track.y[2][:, 1], c='b')\nplt.scatter(x, track.y[3][:, 1], c='b')\nplt.scatter(x, track.y[4][:, 1], c='b')\nplt.scatter(x, traj_init[0][:, 1], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('yaw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 2], c='b', label='random')\nplt.scatter(x, track.y[1][:, 2], c='b')\nplt.scatter(x, track.y[2][:, 2], c='b')\nplt.scatter(x, track.y[3][:, 2], c='b')\nplt.scatter(x, track.y[4][:, 2], c='b')\nplt.scatter(x, traj_init[0][:, 2], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('pitch (rad)')\nplt.legend(loc=4)\nplt.show()\n",
"step-4": "import numpy as np\nfrom DMP.PIDMP import RLDMPs\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nnp.random.seed(50)\ndmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])\ndmp_goal = np.array([-1.50848603, 0.0591503, 1.44347592])\nload_file_name = 'w_0_2_right_3_100_1000.0_0.01_4'\nload_file_name_list = load_file_name.split('_')\nep = int(load_file_name_list[1])\nnumofball = int(load_file_name_list[2])\npour_arm = load_file_name_list[3]\nn_dmps = int(load_file_name_list[4])\nn_bfs = int(load_file_name_list[5])\ndecay = float(load_file_name_list[6])\ndt = float(load_file_name_list[7])\nrl = RLDMPs(n_dmps=n_dmps, n_bfs=n_bfs, decay=decay, y0=dmp_y0, goal=\n dmp_goal, ay=np.ones(n_dmps) * 10.0, dt=dt)\nrl.load_weight(load_file_name)\ntraj_init = rl.predict().y\ntrack = rl.rollout()\nprint(rl.w)\nx = np.linspace(0, 1, len(traj_init[0][:, 0]))\nplt.scatter(x, track.y[0][:, 0], c='b', label='random')\nplt.scatter(x, track.y[1][:, 0], c='b')\nplt.scatter(x, track.y[2][:, 0], c='b')\nplt.scatter(x, track.y[3][:, 0], c='b')\nplt.scatter(x, track.y[4][:, 0], c='b')\nplt.scatter(x, traj_init[0][:, 0], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('raw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 1], c='b', label='random')\nplt.scatter(x, track.y[1][:, 1], c='b')\nplt.scatter(x, track.y[2][:, 1], c='b')\nplt.scatter(x, track.y[3][:, 1], c='b')\nplt.scatter(x, track.y[4][:, 1], c='b')\nplt.scatter(x, traj_init[0][:, 1], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('yaw (rad)')\nplt.legend(loc=4)\nplt.show()\nplt.scatter(x, track.y[0][:, 2], c='b', label='random')\nplt.scatter(x, track.y[1][:, 2], c='b')\nplt.scatter(x, track.y[2][:, 2], c='b')\nplt.scatter(x, track.y[3][:, 2], c='b')\nplt.scatter(x, track.y[4][:, 2], c='b')\nplt.scatter(x, traj_init[0][:, 2], c='r', label='initial')\nplt.xlabel('time(s)')\nplt.ylabel('pitch (rad)')\nplt.legend(loc=4)\nplt.show()\n",
"step-5": "#!/usr/bin/env python3\nimport numpy as np\nfrom DMP.PIDMP import RLDMPs\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\n\nnp.random.seed(50)\ndmp_y0 = np.array([-1.52017496, 0.04908739, 1.41433029])\ndmp_goal = np.array([-1.50848603, 0.0591503 , 1.44347592])\n \n\nload_file_name = \"w_0_2_right_3_100_1000.0_0.01_4\"\n#load_file_name = raw_input('file name: ')\nload_file_name_list = load_file_name.split('_')\n### learning ep\nep = int(load_file_name_list[1])\n### pouring number of ball to the other tube\nnumofball = int(load_file_name_list[2])\n### which arm do the pouring motion\npour_arm = load_file_name_list[3]\nn_dmps = int(load_file_name_list[4])\nn_bfs = int(load_file_name_list[5])\ndecay = float(load_file_name_list[6])\ndt = float(load_file_name_list[7])\n\n### initial DMP\nrl = RLDMPs(n_dmps = n_dmps , n_bfs = n_bfs , decay = decay, y0 = dmp_y0 , goal = dmp_goal,ay=np.ones(n_dmps)*10.0,dt = dt)\n\nrl.load_weight(load_file_name)\n\ntraj_init = rl.predict().y\ntrack = rl.rollout()\n\nprint(rl.w)\n\nx = np.linspace(0,1,len(traj_init[0][:,0]))\n\nplt.scatter(x,track.y[0][:,0],c='b',label=\"random\")\nplt.scatter(x,track.y[1][:,0],c='b')\nplt.scatter(x,track.y[2][:,0],c='b')\nplt.scatter(x,track.y[3][:,0],c='b')\nplt.scatter(x,track.y[4][:,0],c='b')\nplt.scatter(x,traj_init[0][:,0],c='r',label=\"initial\")\nplt.xlabel(\"time(s)\")\nplt.ylabel(\"raw (rad)\")\nplt.legend(loc = 4)\nplt.show()\n\n\nplt.scatter(x,track.y[0][:,1],c='b',label=\"random\")\nplt.scatter(x,track.y[1][:,1],c='b')\nplt.scatter(x,track.y[2][:,1],c='b')\nplt.scatter(x,track.y[3][:,1],c='b')\nplt.scatter(x,track.y[4][:,1],c='b')\nplt.scatter(x,traj_init[0][:,1],c='r',label=\"initial\")\nplt.xlabel(\"time(s)\")\nplt.ylabel(\"yaw (rad)\")\nplt.legend(loc = 
4)\nplt.show()\n\n\nplt.scatter(x,track.y[0][:,2],c='b',label=\"random\")\nplt.scatter(x,track.y[1][:,2],c='b')\nplt.scatter(x,track.y[2][:,2],c='b')\nplt.scatter(x,track.y[3][:,2],c='b')\nplt.scatter(x,track.y[4][:,2],c='b')\nplt.scatter(x,traj_init[0][:,2],c='r',label=\"initial\")\n\nplt.xlabel(\"time(s)\")\nplt.ylabel(\"pitch (rad)\")\nplt.legend(loc = 4)\nplt.show()\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
catogory = np.array([50, 30, 40, 20])
data = np.array([[20, 50, 10, 15, 20], [30, 40, 20, 65, 35], [75, 30, 42,
70, 45], [40, 25, 35, 22, 55]])
print(catogory)
print(data)
print(catogory.dot(data))
print(data.T.dot(catogory))
|
normal
|
{
"blob_id": "e4b49faaad648c6e85274abb18f994083a74013d",
"index": 7160,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(catogory)\nprint(data)\nprint(catogory.dot(data))\nprint(data.T.dot(catogory))\n",
"step-3": "<mask token>\ncatogory = np.array([50, 30, 40, 20])\ndata = np.array([[20, 50, 10, 15, 20], [30, 40, 20, 65, 35], [75, 30, 42, \n 70, 45], [40, 25, 35, 22, 55]])\nprint(catogory)\nprint(data)\nprint(catogory.dot(data))\nprint(data.T.dot(catogory))\n",
"step-4": "import numpy as np\ncatogory = np.array([50, 30, 40, 20])\ndata = np.array([[20, 50, 10, 15, 20], [30, 40, 20, 65, 35], [75, 30, 42, \n 70, 45], [40, 25, 35, 22, 55]])\nprint(catogory)\nprint(data)\nprint(catogory.dot(data))\nprint(data.T.dot(catogory))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def range_func(measures):
scores = []
for entry in measures:
try:
curr = int(entry[1])
except:
curr = None
if curr is not None:
scores.append(curr)
if len(scores) < 1:
return 0
return max(scores) - min(scores)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def range_func(measures):
scores = []
for entry in measures:
try:
curr = int(entry[1])
except:
curr = None
if curr is not None:
scores.append(curr)
if len(scores) < 1:
return 0
return max(scores) - min(scores)
<|reserved_special_token_0|>
print(top)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sc = SparkContext('local', 'weblog app')
effective_care = sc.textFile('file:///data/exercise1/effective_care').map(
lambda l: l.encode().split(',')).map(lambda x: (x[0], x[1:]))
procedure_care = effective_care.map(lambda p: (p[1][1], [p[0], p[1][2]]))
procedure_care_grouped = procedure_care.groupByKey()
def range_func(measures):
scores = []
for entry in measures:
try:
curr = int(entry[1])
except:
curr = None
if curr is not None:
scores.append(curr)
if len(scores) < 1:
return 0
return max(scores) - min(scores)
measure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda
l: l.encode().split(',')).map(lambda x: (x[1], x[0]))
procedure_score_range = procedure_care_grouped.map(lambda p: (p[0],
range_func(p[1]))).join(measure_dates)
sorted_ranges = procedure_score_range.sortBy(lambda x: x[1], False)
top = sorted_ranges.take(10)
print(top)
<|reserved_special_token_1|>
from pyspark import SparkContext
from pyspark.sql import SQLContext
from pyspark.sql.types import *
sc = SparkContext('local', 'weblog app')
effective_care = sc.textFile('file:///data/exercise1/effective_care').map(
lambda l: l.encode().split(',')).map(lambda x: (x[0], x[1:]))
procedure_care = effective_care.map(lambda p: (p[1][1], [p[0], p[1][2]]))
procedure_care_grouped = procedure_care.groupByKey()
def range_func(measures):
scores = []
for entry in measures:
try:
curr = int(entry[1])
except:
curr = None
if curr is not None:
scores.append(curr)
if len(scores) < 1:
return 0
return max(scores) - min(scores)
measure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda
l: l.encode().split(',')).map(lambda x: (x[1], x[0]))
procedure_score_range = procedure_care_grouped.map(lambda p: (p[0],
range_func(p[1]))).join(measure_dates)
sorted_ranges = procedure_score_range.sortBy(lambda x: x[1], False)
top = sorted_ranges.take(10)
print(top)
<|reserved_special_token_1|>
from pyspark import SparkContext
from pyspark.sql import SQLContext
from pyspark.sql.types import *
sc = SparkContext("local", "weblog app")
effective_care = sc.textFile('file:///data/exercise1/effective_care').map(lambda l:l.encode().split(',')).map(lambda x: (x[0], x[1:]))
procedure_care = effective_care.map(lambda p:(p[1][1], [p[0], p[1][2]]))
procedure_care_grouped = procedure_care.groupByKey()
def range_func(measures):
scores = []
for entry in measures:
try:
curr = int(entry[1])
except:
curr = None
if curr is not None:
scores.append(curr)
if len(scores) < 1:
return 0
return max(scores) - min(scores)
measure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda l:l.encode().split(',')).map(lambda x: (x[1], x[0]))
procedure_score_range = procedure_care_grouped.map(lambda p:(p[0], range_func(p[1]))).join(measure_dates)
sorted_ranges = procedure_score_range.sortBy(lambda x:x[1], False)
top = sorted_ranges.take(10)
print(top)
|
flexible
|
{
"blob_id": "4c60fd123f591bf2a88ca0affe14a3c3ec0d3cf6",
"index": 60,
"step-1": "<mask token>\n\n\ndef range_func(measures):\n scores = []\n for entry in measures:\n try:\n curr = int(entry[1])\n except:\n curr = None\n if curr is not None:\n scores.append(curr)\n if len(scores) < 1:\n return 0\n return max(scores) - min(scores)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef range_func(measures):\n scores = []\n for entry in measures:\n try:\n curr = int(entry[1])\n except:\n curr = None\n if curr is not None:\n scores.append(curr)\n if len(scores) < 1:\n return 0\n return max(scores) - min(scores)\n\n\n<mask token>\nprint(top)\n",
"step-3": "<mask token>\nsc = SparkContext('local', 'weblog app')\neffective_care = sc.textFile('file:///data/exercise1/effective_care').map(\n lambda l: l.encode().split(',')).map(lambda x: (x[0], x[1:]))\nprocedure_care = effective_care.map(lambda p: (p[1][1], [p[0], p[1][2]]))\nprocedure_care_grouped = procedure_care.groupByKey()\n\n\ndef range_func(measures):\n scores = []\n for entry in measures:\n try:\n curr = int(entry[1])\n except:\n curr = None\n if curr is not None:\n scores.append(curr)\n if len(scores) < 1:\n return 0\n return max(scores) - min(scores)\n\n\nmeasure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda\n l: l.encode().split(',')).map(lambda x: (x[1], x[0]))\nprocedure_score_range = procedure_care_grouped.map(lambda p: (p[0],\n range_func(p[1]))).join(measure_dates)\nsorted_ranges = procedure_score_range.sortBy(lambda x: x[1], False)\ntop = sorted_ranges.take(10)\nprint(top)\n",
"step-4": "from pyspark import SparkContext\nfrom pyspark.sql import SQLContext\nfrom pyspark.sql.types import *\nsc = SparkContext('local', 'weblog app')\neffective_care = sc.textFile('file:///data/exercise1/effective_care').map(\n lambda l: l.encode().split(',')).map(lambda x: (x[0], x[1:]))\nprocedure_care = effective_care.map(lambda p: (p[1][1], [p[0], p[1][2]]))\nprocedure_care_grouped = procedure_care.groupByKey()\n\n\ndef range_func(measures):\n scores = []\n for entry in measures:\n try:\n curr = int(entry[1])\n except:\n curr = None\n if curr is not None:\n scores.append(curr)\n if len(scores) < 1:\n return 0\n return max(scores) - min(scores)\n\n\nmeasure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda\n l: l.encode().split(',')).map(lambda x: (x[1], x[0]))\nprocedure_score_range = procedure_care_grouped.map(lambda p: (p[0],\n range_func(p[1]))).join(measure_dates)\nsorted_ranges = procedure_score_range.sortBy(lambda x: x[1], False)\ntop = sorted_ranges.take(10)\nprint(top)\n",
"step-5": "from pyspark import SparkContext\nfrom pyspark.sql import SQLContext\nfrom pyspark.sql.types import *\nsc = SparkContext(\"local\", \"weblog app\")\n\neffective_care = sc.textFile('file:///data/exercise1/effective_care').map(lambda l:l.encode().split(',')).map(lambda x: (x[0], x[1:]))\nprocedure_care = effective_care.map(lambda p:(p[1][1], [p[0], p[1][2]]))\nprocedure_care_grouped = procedure_care.groupByKey()\n\ndef range_func(measures):\n\tscores = []\n\tfor entry in measures:\n\t\ttry:\n\t\t\tcurr = int(entry[1])\n\t\texcept:\n\t\t\tcurr = None\n\t\tif curr is not None:\n\t\t\tscores.append(curr)\n\tif len(scores) < 1:\n\t\treturn 0\n\treturn max(scores) - min(scores)\n\nmeasure_dates = sc.textFile('file:///data/exercise1/measure_dates').map(lambda l:l.encode().split(',')).map(lambda x: (x[1], x[0]))\nprocedure_score_range = procedure_care_grouped.map(lambda p:(p[0], range_func(p[1]))).join(measure_dates)\nsorted_ranges = procedure_score_range.sortBy(lambda x:x[1], False)\ntop = sorted_ranges.take(10)\nprint(top)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
<|reserved_special_token_0|>
def main():
for i in range(3650):
send(i)
time.sleep(5)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
def send(i):
myfriend = bot.friends().search('微信好友昵称')[0]
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
bot = Bot(cache_path='wxpy.pkl')
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
def send(i):
myfriend = bot.friends().search('微信好友昵称')[0]
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import time
from wxpy import *
bot = Bot(cache_path='wxpy.pkl')
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
def send(i):
myfriend = bot.friends().search('微信好友昵称')[0]
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "a7d11f130e0d5d6c9b4ac7c5d3a804fb9f79b943",
"index": 2284,
"step-1": "<mask token>\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\n<mask token>\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nbot = Bot(cache_path='wxpy.pkl')\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import time\nfrom wxpy import *\nbot = Bot(cache_path='wxpy.pkl')\n\n\ndef get(i):\n with open('晚安.txt', 'r', encoding='utf-8') as f:\n line = f.readlines()[i]\n return line\n\n\ndef send(i):\n myfriend = bot.friends().search('微信好友昵称')[0]\n myfriend.send(get(i))\n i += 1\n\n\ndef main():\n for i in range(3650):\n send(i)\n time.sleep(5)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": null,
"step-ids": [
2,
4,
5,
6
]
}
|
[
2,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for _ in range(t):
n, m = map(int, input().split())
rows = [0] * n
a_column = list()
for r in range(n):
tmp = list(input().split())
rows[r] = tmp
a_column.append(tmp[0])
sorted_a_column = sorted(a_column)
found = False
for c in range(m):
if not found:
tmp_c = list(input().split())
if sorted(tmp_c) == sorted_a_column:
found = True
output = str()
for num in tmp_c:
index = a_column.index(num)
output += ' '.join(rows[index])
output += '\n'
print(output, end='')
else:
stdin.__next__()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
t = int(input())
for _ in range(t):
n, m = map(int, input().split())
rows = [0] * n
a_column = list()
for r in range(n):
tmp = list(input().split())
rows[r] = tmp
a_column.append(tmp[0])
sorted_a_column = sorted(a_column)
found = False
for c in range(m):
if not found:
tmp_c = list(input().split())
if sorted(tmp_c) == sorted_a_column:
found = True
output = str()
for num in tmp_c:
index = a_column.index(num)
output += ' '.join(rows[index])
output += '\n'
print(output, end='')
else:
stdin.__next__()
<|reserved_special_token_1|>
from sys import stdin, stdout
t = int(input())
for _ in range(t):
n, m = map(int, input().split())
rows = [0] * n
a_column = list()
for r in range(n):
tmp = list(input().split())
rows[r] = tmp
a_column.append(tmp[0])
sorted_a_column = sorted(a_column)
found = False
for c in range(m):
if not found:
tmp_c = list(input().split())
if sorted(tmp_c) == sorted_a_column:
found = True
output = str()
for num in tmp_c:
index = a_column.index(num)
output += ' '.join(rows[index])
output += '\n'
print(output, end='')
else:
stdin.__next__()
<|reserved_special_token_1|>
# B. A New Technique
# TLE (Time limit exceeded)
from sys import stdin, stdout
t = int(input())
for _ in range(t):
n, m = map(int, input().split())
rows = [0] * n
a_column = list()
for r in range(n):
tmp = list(input().split())
rows[r] = tmp
a_column.append(tmp[0])
sorted_a_column = sorted(a_column)
found = False
for c in range(m):
if not found:
tmp_c = list(input().split())
if sorted(tmp_c) == sorted_a_column:
found = True
output = str()
for num in tmp_c:
index = a_column.index(num)
output += ' '.join(rows[index])
output += '\n'
print(output, end='')
else:
stdin.__next__()
|
flexible
|
{
"blob_id": "9004314951f77b14bab1aba9ae93eb49c8197a8d",
"index": 4409,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n",
"step-3": "<mask token>\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n",
"step-4": "from sys import stdin, stdout\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n rows = [0] * n\n a_column = list()\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n",
"step-5": "# B. A New Technique\n# TLE (Time limit exceeded)\n\nfrom sys import stdin, stdout\n\nt = int(input())\nfor _ in range(t):\n n, m = map(int, input().split())\n\n rows = [0] * n\n\n a_column = list()\n\n for r in range(n):\n tmp = list(input().split())\n rows[r] = tmp\n a_column.append(tmp[0])\n sorted_a_column = sorted(a_column)\n\n found = False\n for c in range(m):\n if not found:\n tmp_c = list(input().split())\n if sorted(tmp_c) == sorted_a_column:\n found = True\n output = str()\n for num in tmp_c:\n index = a_column.index(num)\n output += ' '.join(rows[index])\n output += '\\n'\n print(output, end='')\n else:\n stdin.__next__()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5, 9, 10, 11]
print(merge_sort(a))
print(count)
hash(i)
<|reserved_special_token_1|>
count = 0
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count += len(a[j:])
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists) // 2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5, 9, 10, 11]
print(merge_sort(a))
print(count)
hash(i)
<|reserved_special_token_1|>
count=0
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count+=(len(a[j:]))
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
# count += h+1
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists)//2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
a = [7, 6, 5,9, 10, 11]
print(merge_sort(a))
print(count)
hash(i)
|
flexible
|
{
"blob_id": "cf3b66a635c6549553af738f263b035217e75a7a",
"index": 903,
"step-1": "<mask token>\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5, 9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)\n",
"step-4": "count = 0\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5, 9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)\n",
"step-5": "count=0\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count+=(len(a[j:]))\n c.append(b[h])\n h += 1\n\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n # count += h+1\n\n return c\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists)//2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5,9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class DjangoHandler(tornado.web.RequestHandler):
async def reroute(self):
http = tornado.httpclient.AsyncHTTPClient()
new_request = copy.deepcopy(self.request)
url_obj = copy.urlparse(new_request.url)
new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'
return await http.fetch(new_request)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DjangoHandler(tornado.web.RequestHandler):
async def reroute(self):
http = tornado.httpclient.AsyncHTTPClient()
new_request = copy.deepcopy(self.request)
url_obj = copy.urlparse(new_request.url)
new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'
return await http.fetch(new_request)
get = reroute
post = reroute
<|reserved_special_token_0|>
application.listen(80)
tornado.ioloop.IOLoop.current().start()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DjangoHandler(tornado.web.RequestHandler):
async def reroute(self):
http = tornado.httpclient.AsyncHTTPClient()
new_request = copy.deepcopy(self.request)
url_obj = copy.urlparse(new_request.url)
new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'
return await http.fetch(new_request)
get = reroute
post = reroute
application = tornado.web.Application([('/', DjangoHandler)])
application.listen(80)
tornado.ioloop.IOLoop.current().start()
<|reserved_special_token_1|>
import tornado
import copy
class DjangoHandler(tornado.web.RequestHandler):
async def reroute(self):
http = tornado.httpclient.AsyncHTTPClient()
new_request = copy.deepcopy(self.request)
url_obj = copy.urlparse(new_request.url)
new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'
return await http.fetch(new_request)
get = reroute
post = reroute
application = tornado.web.Application([('/', DjangoHandler)])
application.listen(80)
tornado.ioloop.IOLoop.current().start()
<|reserved_special_token_1|>
import tornado
import copy
class DjangoHandler(tornado.web.RequestHandler):
async def reroute(self):
http = tornado.httpclient.AsyncHTTPClient()
new_request = copy.deepcopy(self.request)
url_obj = copy.urlparse(new_request.url)
new_request.url = f"{url_obj.scheme}://localhost:9000{url_obj.path}"
return await http.fetch(new_request)
get = reroute
post = reroute
application = tornado.web.Application([
# (r'/chat', WebsocketChatHandler),
(r'/', DjangoHandler),
])
application.listen(80)
tornado.ioloop.IOLoop.current().start()
|
flexible
|
{
"blob_id": "6960fc6d949512ffc783b085041f86cb791160a3",
"index": 1500,
"step-1": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\n<mask token>\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-3": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([('/', DjangoHandler)])\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-4": "import tornado\nimport copy\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([('/', DjangoHandler)])\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-5": "import tornado\nimport copy\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f\"{url_obj.scheme}://localhost:9000{url_obj.path}\"\n\n return await http.fetch(new_request)\n\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([\n # (r'/chat', WebsocketChatHandler),\n (r'/', DjangoHandler),\n])\napplication.listen(80)\n\ntornado.ioloop.IOLoop.current().start()\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__all__ = ['ISearcher']
<|reserved_special_token_1|>
from .isearch import ISearcher
__all__ = ['ISearcher']
|
flexible
|
{
"blob_id": "13e2f474294edb7c78bd81456097d1389e6a0f1b",
"index": 5003,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['ISearcher']\n",
"step-3": "from .isearch import ISearcher\n__all__ = ['ISearcher']\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#-*- coding = utf-8-*-
#@Time : 2020/6/26 11:02
#@Author :Ella
#@File :app.py
#@Software : PyCharm
import time
import datetime
from flask import Flask,render_template,request #render_template渲染模板
app = Flask(__name__) #初始化的对象
#路由解析,通过用户访问的路径,匹配想要的函数
@app.route('/')
def hello_world():
return '你好'
#通过访问路径,获取用户的字符串参数
@app.route('/test1/<name>')
def test1(name):
return '你好,%s'%name
#通过访问路径,获取用户的整形参数 此外,还有float类型
@app.route('/test2/<int:id>')
def test2(id):
return '你好,%d'%id
#返回给用户渲染后的网页文件
# @app.route('/index1')
# def index1():
# return render_template("index.html")
#向页面传递变量
@app.route('/index1')
def index2():
time = datetime.date.today() #普通变量
name = ['小新','小英','小红'] #列表类型
task = {"任务":"打扫卫生","时间":"3小时"} #字典类型
return render_template("index.html",var = time,list = name,task = task)
#表单提交
@app.route('/test/register')
def register():
return render_template("test/register.html")
#接受表单提交的路由,需要指定methods为post
@app.route('/result',methods = ['POST','GET'])
def result():
if request.method == 'POST':
result = request.form
return render_template("test/result.html",result = result)
if __name__ == '__main__':
app.run(debug=True)
|
normal
|
{
"blob_id": "d68bd9c90a106a9eac767607ad77bdd84d0f18d2",
"index": 1006,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef hello_world():\n return '你好'\n\n\n@app.route('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\n@app.route('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\n<mask token>\n\n\n@app.route('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\n@app.route('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\ndef hello_world():\n return '你好'\n\n\n@app.route('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\n@app.route('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\n@app.route('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\n@app.route('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\n@app.route('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@app.route('/')\ndef hello_world():\n return '你好'\n\n\n@app.route('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\n@app.route('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\n@app.route('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\n@app.route('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\n@app.route('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "import time\nimport datetime\nfrom flask import Flask, render_template, request\napp = Flask(__name__)\n\n\n@app.route('/')\ndef hello_world():\n return '你好'\n\n\n@app.route('/test1/<name>')\ndef test1(name):\n return '你好,%s' % name\n\n\n@app.route('/test2/<int:id>')\ndef test2(id):\n return '你好,%d' % id\n\n\n@app.route('/index1')\ndef index2():\n time = datetime.date.today()\n name = ['小新', '小英', '小红']\n task = {'任务': '打扫卫生', '时间': '3小时'}\n return render_template('index.html', var=time, list=name, task=task)\n\n\n@app.route('/test/register')\ndef register():\n return render_template('test/register.html')\n\n\n@app.route('/result', methods=['POST', 'GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template('test/result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "#-*- coding = utf-8-*-\n#@Time : 2020/6/26 11:02\n#@Author :Ella\n#@File :app.py\n#@Software : PyCharm\n\nimport time\nimport datetime\n\nfrom flask import Flask,render_template,request #render_template渲染模板\napp = Flask(__name__) #初始化的对象\n\n#路由解析,通过用户访问的路径,匹配想要的函数\n@app.route('/')\ndef hello_world():\n return '你好'\n\n#通过访问路径,获取用户的字符串参数\n@app.route('/test1/<name>')\ndef test1(name):\n return '你好,%s'%name\n\n#通过访问路径,获取用户的整形参数 此外,还有float类型\n@app.route('/test2/<int:id>')\ndef test2(id):\n return '你好,%d'%id\n\n#返回给用户渲染后的网页文件\n# @app.route('/index1')\n# def index1():\n# return render_template(\"index.html\")\n\n#向页面传递变量\n@app.route('/index1')\ndef index2():\n time = datetime.date.today() #普通变量\n name = ['小新','小英','小红'] #列表类型\n task = {\"任务\":\"打扫卫生\",\"时间\":\"3小时\"} #字典类型\n return render_template(\"index.html\",var = time,list = name,task = task)\n\n#表单提交\n@app.route('/test/register')\ndef register():\n return render_template(\"test/register.html\")\n\n#接受表单提交的路由,需要指定methods为post\n@app.route('/result',methods = ['POST','GET'])\ndef result():\n if request.method == 'POST':\n result = request.form\n return render_template(\"test/result.html\",result = result)\n\nif __name__ == '__main__':\n app.run(debug=True)",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
# Copyright 2017 Klarna AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import base64
import json
from django.test import TestCase, override_settings
from django.conf import settings
from django.core import management
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.auth.hashers import make_password
from django_rethink.connection import r
from django_rethink.serializers import *
class TestSerializer(RethinkSerializer):
id = serializers.CharField(required=False, read_only=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test'
indices = [
('permissions_read', r.row['permissions']['read']),
('permissions_write', r.row['permissions']['write']),
('permissions_create', r.row['permissions']['create']),
]
class TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_reviewed'
indices = [
('permissions_read', r.row['permissions']['read']),
('permissions_write', r.row['permissions']['write']),
('permissions_create', r.row['permissions']['create']),
]
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
permissions = PermissionsSerializer()
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_permissions'
indices = [
('permissions_read', r.row['permissions']['read']),
('permissions_write', r.row['permissions']['write']),
('permissions_create', r.row['permissions']['create']),
]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
id = serializers.CharField(required=False, read_only=True)
field1 = serializers.CharField(required=True)
user = serializers.CharField(required=True)
class Meta(RethinkSerializer.Meta):
table_name = 'django_rethink_test_history_has_read_permission'
def has_read_permission(self, user):
return self.instance['user'] == user.username
@override_settings(
RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB', 'django_rethinkci'),
)
class APITests(TestCase):
@classmethod
def setUpClass(cls):
super(APITests, cls).setUpClass()
cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.RETHINK_DB_PORT)
try:
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
except:
pass
r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
cls.conn.db = settings.RETHINK_DB_DB
management.call_command('syncrethinkdb', verbosity=0)
@classmethod
def tearDownClass(cls):
r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
super(APITests, cls).tearDownClass()
def tearDown(self):
for t in [
"django_rethink_test",
"history",
"django_rethink_test_reviewed",
"django_rethink_test_history_permissions",
"django_rethink_test_history_has_read_permission",
]:
r.table(t).delete().run(self.conn)
super(APITests, self).tearDown()
def create_user(self, username='tester', password='tester', is_superuser=True, groups=[], **kwargs):
user = get_user_model().objects.create(
username=username,
password=make_password(password),
is_superuser=is_superuser,
**kwargs
)
for name in groups:
group, created = Group.objects.get_or_create(name=name)
user.groups.add(group)
auth = "Basic %s" % (base64.b64encode(("%s:%s" % (username, password)).encode("ascii")).decode("ascii"))
return user, auth
def test_history_no_type(self):
super_user, super_auth = self.create_user()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
HTTP_AUTHORIZATION=super_auth
)
self.assertEqual(response.status_code, 404)
def test_history_with_permissions(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])
serializer = TestHistoryPermissionsSerializer(None,
data={'field1': 'test1', 'user': luser.username,
'permissions': {'write': ['group1']}},
context={'username': luser.username}
)
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryPermissionsSerializer(None,
data={'field1': 'test2', 'user': super_user.username,
'permissions': {'write': []}},
context={'username': super_user.username}
)
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
'pk': test1['id'],
}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
'pk': test2['id'],
}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
def test_history_with_has_read_permission(self):
super_user, super_auth = self.create_user()
luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])
serializer = TestHistoryHasReadPermissionSerializer(None,
data={'field1': 'test1', 'user': luser.username},
context={'username': luser.username}
)
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(test1,
data={'field1': 'test1.1'}, partial=True,
context={'username': luser.username},
)
serializer.is_valid(raise_exception=True)
test1 = serializer.save()
serializer = TestHistoryHasReadPermissionSerializer(None,
data={'field1': 'test2', 'user': super_user.username},
context={'username': super_user.username}
)
serializer.is_valid(raise_exception=True)
test2 = serializer.save()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
'pk': test1['id'],
}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 2)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
'pk': test2['id'],
}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
'pk': test2['id'],
}), HTTP_AUTHORIZATION=super_auth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
serializer = TestHistoryHasReadPermissionSerializer(test1,
context={'username': luser.username},
)
serializer.delete()
response = self.client.get(reverse('django_rethink:history_list',
kwargs={
'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
'pk': test1['id'],
}), HTTP_AUTHORIZATION=lauth)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 3)
|
normal
|
{
"blob_id": "d5d12e2269b343dde78534eddf2cce06759eb264",
"index": 9128,
"step-1": "<mask token>\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': 
{'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), 
HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-2": "<mask token>\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n 
for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = 
TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-3": "<mask token>\n\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n 
for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n 
self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n 
self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-4": "from __future__ import absolute_import\nimport os\nimport base64\nimport json\nfrom django.test import TestCase, override_settings\nfrom django.conf import settings\nfrom django.core import management\nfrom django.urls import reverse\nfrom django.contrib.auth import get_user_model\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.hashers import make_password\nfrom django_rethink.connection import r\nfrom django_rethink.serializers import *\n\n\nclass TestSerializer(RethinkSerializer):\n id = serializers.CharField(required=False, read_only=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [('permissions_read', r.row['permissions']['read']), (\n 'permissions_write', r.row['permissions']['write']), (\n 'permissions_create', r.row['permissions']['create'])]\n\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, 
read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n\n\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n\n@override_settings(RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB',\n 'django_rethinkci'))\nclass APITests(TestCase):\n\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.\n RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in ['django_rethink_test', 'history',\n 'django_rethink_test_reviewed',\n 'django_rethink_test_history_permissions',\n 'django_rethink_test_history_has_read_permission']:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester',\n is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(username=username, password=\n make_password(password), is_superuser=is_superuser, **kwargs)\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = 'Basic %s' % base64.b64encode(('%s:%s' % (username, password\n )).encode('ascii')).decode('ascii')\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 404)\n\n def 
test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test1', 'user': luser.username, 'permissions': {'write': [\n 'group1']}}, context={'username': luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryPermissionsSerializer(None, data={'field1':\n 'test2', 'user': super_user.username, 'permissions': {'write':\n []}}, context={'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryPermissionsSerializer.Meta.\n table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=\n False, groups=['group1'])\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test1', 'user': luser.username}, context={'username':\n luser.username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1, data={\n 'field1': 'test1.1'}, partial=True, context={'username': luser.\n username})\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(None, data={\n 'field1': 'test2', 'user': super_user.username}, 
context={\n 'username': super_user.username})\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test2['id']}), HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n serializer = TestHistoryHasReadPermissionSerializer(test1, context=\n {'username': luser.username})\n serializer.delete()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': TestHistoryHasReadPermissionSerializer.\n Meta.table_name, 'pk': test1['id']}), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-5": "# Copyright 2017 Klarna AB\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nimport os\nimport base64\nimport json\nfrom django.test import TestCase, override_settings\nfrom django.conf import settings\nfrom django.core import management\nfrom django.urls import reverse\nfrom django.contrib.auth import get_user_model\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.hashers import make_password\n\nfrom django_rethink.connection import r\nfrom django_rethink.serializers import *\n\nclass TestSerializer(RethinkSerializer):\n id = serializers.CharField(required=False, read_only=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_reviewed'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass 
TestHistoryPermissionsSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n permissions = PermissionsSerializer()\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_permissions'\n indices = [\n ('permissions_read', r.row['permissions']['read']),\n ('permissions_write', r.row['permissions']['write']),\n ('permissions_create', r.row['permissions']['create']),\n ]\n\nclass TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):\n id = serializers.CharField(required=False, read_only=True)\n field1 = serializers.CharField(required=True)\n user = serializers.CharField(required=True)\n class Meta(RethinkSerializer.Meta):\n table_name = 'django_rethink_test_history_has_read_permission'\n def has_read_permission(self, user):\n return self.instance['user'] == user.username\n\n@override_settings(\n RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB', 'django_rethinkci'),\n)\nclass APITests(TestCase):\n @classmethod\n def setUpClass(cls):\n super(APITests, cls).setUpClass()\n cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.RETHINK_DB_PORT)\n try:\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n except:\n pass\n r.db_create(settings.RETHINK_DB_DB).run(cls.conn)\n cls.conn.db = settings.RETHINK_DB_DB\n management.call_command('syncrethinkdb', verbosity=0)\n\n @classmethod\n def tearDownClass(cls):\n r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)\n super(APITests, cls).tearDownClass()\n\n def tearDown(self):\n for t in [\n \"django_rethink_test\",\n \"history\",\n \"django_rethink_test_reviewed\",\n \"django_rethink_test_history_permissions\",\n \"django_rethink_test_history_has_read_permission\",\n ]:\n r.table(t).delete().run(self.conn)\n super(APITests, self).tearDown()\n\n def create_user(self, username='tester', password='tester', is_superuser=True, groups=[], **kwargs):\n user = get_user_model().objects.create(\n 
username=username,\n password=make_password(password),\n is_superuser=is_superuser,\n **kwargs\n )\n for name in groups:\n group, created = Group.objects.get_or_create(name=name)\n user.groups.add(group)\n auth = \"Basic %s\" % (base64.b64encode((\"%s:%s\" % (username, password)).encode(\"ascii\")).decode(\"ascii\"))\n return user, auth\n\n def test_history_no_type(self):\n super_user, super_auth = self.create_user()\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),\n HTTP_AUTHORIZATION=super_auth\n )\n self.assertEqual(response.status_code, 404)\n\n def test_history_with_permissions(self):\n super_user, super_auth = self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])\n\n serializer = TestHistoryPermissionsSerializer(None,\n data={'field1': 'test1', 'user': luser.username,\n 'permissions': {'write': ['group1']}},\n context={'username': luser.username}\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n\n serializer = TestHistoryPermissionsSerializer(None,\n data={'field1': 'test2', 'user': super_user.username,\n 'permissions': {'write': []}},\n context={'username': super_user.username}\n )\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryPermissionsSerializer.Meta.table_name,\n 'pk': test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryPermissionsSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n def test_history_with_has_read_permission(self):\n super_user, super_auth = 
self.create_user()\n luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])\n\n serializer = TestHistoryHasReadPermissionSerializer(None,\n data={'field1': 'test1', 'user': luser.username},\n context={'username': luser.username}\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n serializer = TestHistoryHasReadPermissionSerializer(test1,\n data={'field1': 'test1.1'}, partial=True,\n context={'username': luser.username},\n )\n serializer.is_valid(raise_exception=True)\n test1 = serializer.save()\n\n serializer = TestHistoryHasReadPermissionSerializer(None,\n data={'field1': 'test2', 'user': super_user.username},\n context={'username': super_user.username}\n )\n serializer.is_valid(raise_exception=True)\n test2 = serializer.save()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 2)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 403)\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': test2['id'],\n }), HTTP_AUTHORIZATION=super_auth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 1)\n\n serializer = TestHistoryHasReadPermissionSerializer(test1,\n context={'username': luser.username},\n )\n serializer.delete()\n\n response = self.client.get(reverse('django_rethink:history_list',\n kwargs={\n 'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,\n 'pk': 
test1['id'],\n }), HTTP_AUTHORIZATION=lauth)\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(len(data), 3)\n",
"step-ids": [
8,
12,
14,
18,
19
]
}
|
[
8,
12,
14,
18,
19
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .variational_legacy import *
|
normal
|
{
"blob_id": "ea07cb640e76ced8be92b55ee14e1d3058e073c9",
"index": 845,
"step-1": "<mask token>\n",
"step-2": "from .variational_legacy import *\n",
"step-3": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom .variational_legacy import *\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from test.framework import TestCase
from test.mock import Mock
from package.util.svnutil import ReleaseXmlParser, Release
import time
class SvnUtilTests(TestCase):
    """Tests for ReleaseXmlParser: parsing `svn list --xml` output into Release objects."""

    def setUp(self):
        # Releases we expect the parser to extract from the sample XML fixture
        # (module-level ``xml``).  Per this expected list, only the two tags that
        # follow the <TYPE><version> naming scheme (BETA1.1.0, STABLE0.4.9) are
        # returned; plain tag names such as "1.0.1b" and "RELEASE_1_0_0b" are
        # apparently filtered out by the parser.
        r1 = Release()
        r1.name = 'BETA1.1.0'
        r1.type = 'BETA'
        r1.version = '1.1.0'
        # Matches the <date> of the BETA1.1.0 entry, truncated to whole seconds.
        r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')

        r2 = Release()
        r2.name = 'STABLE0.4.9'
        r2.type = 'STABLE'
        r2.version = '0.4.9'
        r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')

        self.expected = [r1, r2]

    def testXmlLoad(self):
        """ XML from svn list should be parsed correctly into releases"""
        loader = ReleaseXmlParser(text=xml)
        releases = loader.get_releases()
        self.assertTrue(releases, 'No release loaded')
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual(self.expected, releases, "Releases not loaded correctly")
# Sample `svn list --xml` output used as the test fixture for SvnUtilTests.
# It lists four tag directories; per SvnUtilTests.setUp, only BETA1.1.0 and
# STABLE0.4.9 are expected to be parsed into Release objects.
xml = """<?xml version="1.0"?>
<lists>
<list
path="svn://localhost/tools/packagehelper/tags">
<entry
kind="dir">
<name>1.0.1b</name>
<commit
revision="39">
<author>daniel</author>
<date>2009-04-07T05:59:19.743486Z</date>
</commit>
</entry>
<entry
kind="dir">
<name>BETA1.1.0</name>
<commit
revision="43">
<author>dsaran</author>
<date>2009-04-21T23:22:03.748373Z</date>
</commit>
</entry>
<entry
kind="dir">
<name>RELEASE_1_0_0b</name>
<commit
revision="37">
<author>daniel</author>
<date>2009-04-06T17:49:17.446056Z</date>
</commit>
</entry>
<entry
kind="dir">
<name>STABLE0.4.9</name>
<commit
revision="3">
<date>2009-01-07T22:58:31.000000Z</date>
</commit>
</entry>
</list>
</lists>"""
|
normal
|
{
"blob_id": "9c320db85ca1a9df6b91f6bb062e4d5c3d94ee91",
"index": 9516,
"step-1": "<mask token>\n\n\nclass SvnUtilTests(TestCase):\n\n def setUp(self):\n r1 = Release()\n r1.name = 'BETA1.1.0'\n r1.type = 'BETA'\n r1.version = '1.1.0'\n r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')\n r2 = Release()\n r2.name = 'STABLE0.4.9'\n r2.type = 'STABLE'\n r2.version = '0.4.9'\n r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')\n self.expected = [r1, r2]\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SvnUtilTests(TestCase):\n\n def setUp(self):\n r1 = Release()\n r1.name = 'BETA1.1.0'\n r1.type = 'BETA'\n r1.version = '1.1.0'\n r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')\n r2 = Release()\n r2.name = 'STABLE0.4.9'\n r2.type = 'STABLE'\n r2.version = '0.4.9'\n r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')\n self.expected = [r1, r2]\n\n def testXmlLoad(self):\n \"\"\" XML from svn list should be parsed correctly into releases\"\"\"\n loader = ReleaseXmlParser(text=xml)\n releases = loader.get_releases()\n self.assertTrue(releases, 'No release loaded')\n self.assertEquals(self.expected, releases,\n 'Releases not loaded correctly')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SvnUtilTests(TestCase):\n\n def setUp(self):\n r1 = Release()\n r1.name = 'BETA1.1.0'\n r1.type = 'BETA'\n r1.version = '1.1.0'\n r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')\n r2 = Release()\n r2.name = 'STABLE0.4.9'\n r2.type = 'STABLE'\n r2.version = '0.4.9'\n r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')\n self.expected = [r1, r2]\n\n def testXmlLoad(self):\n \"\"\" XML from svn list should be parsed correctly into releases\"\"\"\n loader = ReleaseXmlParser(text=xml)\n releases = loader.get_releases()\n self.assertTrue(releases, 'No release loaded')\n self.assertEquals(self.expected, releases,\n 'Releases not loaded correctly')\n\n\nxml = \"\"\"<?xml version=\"1.0\"?>\n<lists>\n<list\n path=\"svn://localhost/tools/packagehelper/tags\">\n<entry\n kind=\"dir\">\n<name>1.0.1b</name>\n<commit\n revision=\"39\">\n<author>daniel</author>\n<date>2009-04-07T05:59:19.743486Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>BETA1.1.0</name>\n<commit\n revision=\"43\">\n<author>dsaran</author>\n<date>2009-04-21T23:22:03.748373Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>RELEASE_1_0_0b</name>\n<commit\n revision=\"37\">\n<author>daniel</author>\n<date>2009-04-06T17:49:17.446056Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>STABLE0.4.9</name>\n<commit\n revision=\"3\">\n<date>2009-01-07T22:58:31.000000Z</date>\n</commit>\n</entry>\n</list>\n</lists>\"\"\"\n",
"step-4": "from test.framework import TestCase\nfrom test.mock import Mock\nfrom package.util.svnutil import ReleaseXmlParser, Release\nimport time\n\n\nclass SvnUtilTests(TestCase):\n\n def setUp(self):\n r1 = Release()\n r1.name = 'BETA1.1.0'\n r1.type = 'BETA'\n r1.version = '1.1.0'\n r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')\n r2 = Release()\n r2.name = 'STABLE0.4.9'\n r2.type = 'STABLE'\n r2.version = '0.4.9'\n r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')\n self.expected = [r1, r2]\n\n def testXmlLoad(self):\n \"\"\" XML from svn list should be parsed correctly into releases\"\"\"\n loader = ReleaseXmlParser(text=xml)\n releases = loader.get_releases()\n self.assertTrue(releases, 'No release loaded')\n self.assertEquals(self.expected, releases,\n 'Releases not loaded correctly')\n\n\nxml = \"\"\"<?xml version=\"1.0\"?>\n<lists>\n<list\n path=\"svn://localhost/tools/packagehelper/tags\">\n<entry\n kind=\"dir\">\n<name>1.0.1b</name>\n<commit\n revision=\"39\">\n<author>daniel</author>\n<date>2009-04-07T05:59:19.743486Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>BETA1.1.0</name>\n<commit\n revision=\"43\">\n<author>dsaran</author>\n<date>2009-04-21T23:22:03.748373Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>RELEASE_1_0_0b</name>\n<commit\n revision=\"37\">\n<author>daniel</author>\n<date>2009-04-06T17:49:17.446056Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>STABLE0.4.9</name>\n<commit\n revision=\"3\">\n<date>2009-01-07T22:58:31.000000Z</date>\n</commit>\n</entry>\n</list>\n</lists>\"\"\"\n",
"step-5": "from test.framework import TestCase\nfrom test.mock import Mock\nfrom package.util.svnutil import ReleaseXmlParser, Release\nimport time\n\nclass SvnUtilTests(TestCase):\n def setUp(self):\n r1 = Release()\n r1.name = 'BETA1.1.0'\n r1.type = 'BETA'\n r1.version = '1.1.0'\n r1.date = time.strptime('2009-04-21 23:22:03', '%Y-%m-%d %H:%M:%S')\n\n r2 = Release()\n r2.name = 'STABLE0.4.9'\n r2.type = 'STABLE'\n r2.version = '0.4.9'\n r2.date = time.strptime('2009-01-07 22:58:31', '%Y-%m-%d %H:%M:%S')\n\n self.expected = [r1, r2]\n\n def testXmlLoad(self):\n \"\"\" XML from svn list should be parsed correctly into releases\"\"\"\n loader = ReleaseXmlParser(text=xml)\n releases = loader.get_releases()\n\n self.assertTrue(releases, 'No release loaded')\n self.assertEquals(self.expected, releases, \"Releases not loaded correctly\")\n\n\nxml = \"\"\"<?xml version=\"1.0\"?>\n<lists>\n<list\n path=\"svn://localhost/tools/packagehelper/tags\">\n<entry\n kind=\"dir\">\n<name>1.0.1b</name>\n<commit\n revision=\"39\">\n<author>daniel</author>\n<date>2009-04-07T05:59:19.743486Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>BETA1.1.0</name>\n<commit\n revision=\"43\">\n<author>dsaran</author>\n<date>2009-04-21T23:22:03.748373Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>RELEASE_1_0_0b</name>\n<commit\n revision=\"37\">\n<author>daniel</author>\n<date>2009-04-06T17:49:17.446056Z</date>\n</commit>\n</entry>\n<entry\n kind=\"dir\">\n<name>STABLE0.4.9</name>\n<commit\n revision=\"3\">\n<date>2009-01-07T22:58:31.000000Z</date>\n</commit>\n</entry>\n</list>\n</lists>\"\"\"\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import markdown
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class MovieRankings(models.Model):
    """
    Various movie ranking charts (each Movie may reference one via its
    ``movierankings`` foreign key).
    """
    # Human-readable name of the ranking chart.
    name = models.CharField(max_length=100)
    def __unicode__(self):
        return self.name
class Movie(models.Model):
    """
    Database table for movies.

    Aggregates data scraped from Douban, IMDb and dytt ("Movie Heaven",
    see the dyttsearch/dyttdetail fields), plus this site's own stats.
    """
    movie_name = models.CharField(max_length=64, blank=True)
    # Douban page link; may be null, and the field may be left unset.
    douban_link = models.CharField(max_length=256, null=True, blank=True)
    # Douban rating.
    douban_score = models.CharField(max_length=64, null=True, blank=True)
    # Number of Douban raters.
    douban_counter = models.PositiveIntegerField(default=0, blank=True)
    # IMDb page link.
    imdb_link = models.CharField(max_length=256, null=True, blank=True)
    # IMDb rating.
    imdb_score = models.CharField(max_length=64, null=True, blank=True)
    # Number of IMDb raters.
    imdb_counter = models.PositiveIntegerField(default=0, blank=True)
    # Link on this site.
    nomovie_link = models.CharField(max_length=256, null=True, blank=True)
    # Rating on this site.
    nomovie_score = models.CharField(max_length=64, null=True, blank=True)
    # Number of raters on this site.
    nomovie_counter = models.PositiveIntegerField(default=0, blank=True)
    # Country of release.
    country = models.CharField(max_length=64, null=True, blank=True)
    # Release date.
    dateyear = models.CharField(max_length=64, null=True, blank=True)
    # Leading actors.
    actor = models.CharField(max_length=256, null=True, blank=True)
    # Director.
    director = models.CharField(max_length=256, null=True, blank=True)
    # Movie genre.
    style = models.CharField(max_length=64, null=True, blank=True)
    # Playback address of the movie.
    movie_address = models.CharField(max_length=256, null=True, blank=True)
    # Download link of the movie.
    download_link = models.CharField(max_length=256, null=True, blank=True)
    # Number of times the movie was played on this site.
    counter = models.PositiveIntegerField(default=0, blank=True)
    # Source of the movie record:
    # 0: Douban top 250   1: IMDb top 250   2: regular Douban   3: regular IMDb
    # 4: present on both Douban and IMDb   5: added by a user
    original = models.CharField(max_length=256, null=True, blank=True)
    # Review status -- 1: approved   0: rejected   2: under review
    status = models.IntegerField(null=True, blank=True)
    # Path where the image is saved.
    image = models.CharField(max_length=256, null=True, blank=True)
    # Time the scraped movie was inserted into the database.
    spidertime = models.DateTimeField(auto_now_add=True, null=True)
    # About the movie.
    aboutmovie = models.CharField(max_length=256, null=True, blank=True)
    # Movie language.
    language = models.CharField(max_length=64, null=True, blank=True)
    # dytt ("Movie Heaven") search URL.
    dyttsearch = models.CharField(max_length=256, null=True, blank=True)
    # dytt ("Movie Heaven") movie detail page URL.
    dyttdetail = models.CharField(max_length=256, null=True, blank=True)
    # Ranking chart this movie belongs to, if any.
    movierankings = models.ForeignKey(MovieRankings, null=True, blank=True)
    def __unicode__(self):
        return self.movie_name
    # TODO: implement get_comments() to return the comments attached to this movie.
class MovieHistory(models.Model):
    """
    A user's recorded interaction with a movie: watched, favourited,
    or recommended (see ``marked``).
    """
    # The user who interacted with the movie.
    # One user maps to many MovieHistory rows (may watch many movies).
    user = models.ForeignKey(User)
    # The movie that was watched.
    movie = models.ForeignKey(Movie)
    # When the interaction happened; set automatically on creation.
    date = models.DateTimeField(auto_now_add=True)
    # 0: the user watched the movie, 1: favourited, 2: recommended.
    marked = models.IntegerField(blank=True, null=True)
    def __unicode__(self):
        return "{%s}--{%s}" % (self.user.username, self.movie.movie_name)
|
normal
|
{
"blob_id": "449ae193f8817d4ee2fe67eadf72d9c19b2c5e53",
"index": 1319,
"step-1": "<mask token>\n\n\nclass MovieRankings(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Movie(models.Model):\n \"\"\"\n 电影的数据库表格\n \"\"\"\n movie_name = models.CharField(max_length=64, blank=True)\n douban_link = models.CharField(max_length=256, null=True, blank=True)\n douban_score = models.CharField(max_length=64, null=True, blank=True)\n douban_counter = models.PositiveIntegerField(default=0, blank=True)\n imdb_link = models.CharField(max_length=256, null=True, blank=True)\n imdb_score = models.CharField(max_length=64, null=True, blank=True)\n imdb_counter = models.PositiveIntegerField(default=0, blank=True)\n nomovie_link = models.CharField(max_length=256, null=True, blank=True)\n nomovie_score = models.CharField(max_length=64, null=True, blank=True)\n nomovie_counter = models.PositiveIntegerField(default=0, blank=True)\n country = models.CharField(max_length=64, null=True, blank=True)\n dateyear = models.CharField(max_length=64, null=True, blank=True)\n actor = models.CharField(max_length=256, null=True, blank=True)\n director = models.CharField(max_length=256, null=True, blank=True)\n style = models.CharField(max_length=64, null=True, blank=True)\n movie_address = models.CharField(max_length=256, null=True, blank=True)\n download_link = models.CharField(max_length=256, null=True, blank=True)\n counter = models.PositiveIntegerField(default=0, blank=True)\n original = models.CharField(max_length=256, null=True, blank=True)\n status = models.IntegerField(null=True, blank=True)\n image = models.CharField(max_length=256, null=True, blank=True)\n spidertime = models.DateTimeField(auto_now_add=True, null=True)\n aboutmovie = models.CharField(max_length=256, null=True, blank=True)\n language = models.CharField(max_length=64, null=True, blank=True)\n dyttsearch = models.CharField(max_length=256, null=True, blank=True)\n dyttdetail = models.CharField(max_length=256, null=True, blank=True)\n movierankings = 
models.ForeignKey(MovieRankings, null=True, blank=True)\n\n def __unicode__(self):\n return self.movie_name\n\n\nclass MovieHistory(models.Model):\n user = models.ForeignKey(User)\n movie = models.ForeignKey(Movie)\n date = models.DateTimeField(auto_now_add=True)\n marked = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return '{%s}--{%s}' % (self.user.username, self.movie.movie_name)\n",
"step-2": "<mask token>\n\n\nclass MovieRankings(models.Model):\n <mask token>\n name = models.CharField(max_length=100)\n\n def __unicode__(self):\n return self.name\n\n\nclass Movie(models.Model):\n \"\"\"\n 电影的数据库表格\n \"\"\"\n movie_name = models.CharField(max_length=64, blank=True)\n douban_link = models.CharField(max_length=256, null=True, blank=True)\n douban_score = models.CharField(max_length=64, null=True, blank=True)\n douban_counter = models.PositiveIntegerField(default=0, blank=True)\n imdb_link = models.CharField(max_length=256, null=True, blank=True)\n imdb_score = models.CharField(max_length=64, null=True, blank=True)\n imdb_counter = models.PositiveIntegerField(default=0, blank=True)\n nomovie_link = models.CharField(max_length=256, null=True, blank=True)\n nomovie_score = models.CharField(max_length=64, null=True, blank=True)\n nomovie_counter = models.PositiveIntegerField(default=0, blank=True)\n country = models.CharField(max_length=64, null=True, blank=True)\n dateyear = models.CharField(max_length=64, null=True, blank=True)\n actor = models.CharField(max_length=256, null=True, blank=True)\n director = models.CharField(max_length=256, null=True, blank=True)\n style = models.CharField(max_length=64, null=True, blank=True)\n movie_address = models.CharField(max_length=256, null=True, blank=True)\n download_link = models.CharField(max_length=256, null=True, blank=True)\n counter = models.PositiveIntegerField(default=0, blank=True)\n original = models.CharField(max_length=256, null=True, blank=True)\n status = models.IntegerField(null=True, blank=True)\n image = models.CharField(max_length=256, null=True, blank=True)\n spidertime = models.DateTimeField(auto_now_add=True, null=True)\n aboutmovie = models.CharField(max_length=256, null=True, blank=True)\n language = models.CharField(max_length=64, null=True, blank=True)\n dyttsearch = models.CharField(max_length=256, null=True, blank=True)\n dyttdetail = models.CharField(max_length=256, null=True, 
blank=True)\n movierankings = models.ForeignKey(MovieRankings, null=True, blank=True)\n\n def __unicode__(self):\n return self.movie_name\n\n\nclass MovieHistory(models.Model):\n user = models.ForeignKey(User)\n movie = models.ForeignKey(Movie)\n date = models.DateTimeField(auto_now_add=True)\n marked = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return '{%s}--{%s}' % (self.user.username, self.movie.movie_name)\n",
"step-3": "<mask token>\n\n\nclass MovieRankings(models.Model):\n \"\"\"\n 各种电影排行榜.\n \"\"\"\n name = models.CharField(max_length=100)\n\n def __unicode__(self):\n return self.name\n\n\nclass Movie(models.Model):\n \"\"\"\n 电影的数据库表格\n \"\"\"\n movie_name = models.CharField(max_length=64, blank=True)\n douban_link = models.CharField(max_length=256, null=True, blank=True)\n douban_score = models.CharField(max_length=64, null=True, blank=True)\n douban_counter = models.PositiveIntegerField(default=0, blank=True)\n imdb_link = models.CharField(max_length=256, null=True, blank=True)\n imdb_score = models.CharField(max_length=64, null=True, blank=True)\n imdb_counter = models.PositiveIntegerField(default=0, blank=True)\n nomovie_link = models.CharField(max_length=256, null=True, blank=True)\n nomovie_score = models.CharField(max_length=64, null=True, blank=True)\n nomovie_counter = models.PositiveIntegerField(default=0, blank=True)\n country = models.CharField(max_length=64, null=True, blank=True)\n dateyear = models.CharField(max_length=64, null=True, blank=True)\n actor = models.CharField(max_length=256, null=True, blank=True)\n director = models.CharField(max_length=256, null=True, blank=True)\n style = models.CharField(max_length=64, null=True, blank=True)\n movie_address = models.CharField(max_length=256, null=True, blank=True)\n download_link = models.CharField(max_length=256, null=True, blank=True)\n counter = models.PositiveIntegerField(default=0, blank=True)\n original = models.CharField(max_length=256, null=True, blank=True)\n status = models.IntegerField(null=True, blank=True)\n image = models.CharField(max_length=256, null=True, blank=True)\n spidertime = models.DateTimeField(auto_now_add=True, null=True)\n aboutmovie = models.CharField(max_length=256, null=True, blank=True)\n language = models.CharField(max_length=64, null=True, blank=True)\n dyttsearch = models.CharField(max_length=256, null=True, blank=True)\n dyttdetail = models.CharField(max_length=256, 
null=True, blank=True)\n movierankings = models.ForeignKey(MovieRankings, null=True, blank=True)\n\n def __unicode__(self):\n return self.movie_name\n\n\nclass MovieHistory(models.Model):\n user = models.ForeignKey(User)\n movie = models.ForeignKey(Movie)\n date = models.DateTimeField(auto_now_add=True)\n marked = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return '{%s}--{%s}' % (self.user.username, self.movie.movie_name)\n",
"step-4": "from __future__ import unicode_literals\nimport markdown\nfrom django.db import models\nfrom django.contrib.auth.models import User\nfrom datetime import datetime\n\n\nclass MovieRankings(models.Model):\n \"\"\"\n 各种电影排行榜.\n \"\"\"\n name = models.CharField(max_length=100)\n\n def __unicode__(self):\n return self.name\n\n\nclass Movie(models.Model):\n \"\"\"\n 电影的数据库表格\n \"\"\"\n movie_name = models.CharField(max_length=64, blank=True)\n douban_link = models.CharField(max_length=256, null=True, blank=True)\n douban_score = models.CharField(max_length=64, null=True, blank=True)\n douban_counter = models.PositiveIntegerField(default=0, blank=True)\n imdb_link = models.CharField(max_length=256, null=True, blank=True)\n imdb_score = models.CharField(max_length=64, null=True, blank=True)\n imdb_counter = models.PositiveIntegerField(default=0, blank=True)\n nomovie_link = models.CharField(max_length=256, null=True, blank=True)\n nomovie_score = models.CharField(max_length=64, null=True, blank=True)\n nomovie_counter = models.PositiveIntegerField(default=0, blank=True)\n country = models.CharField(max_length=64, null=True, blank=True)\n dateyear = models.CharField(max_length=64, null=True, blank=True)\n actor = models.CharField(max_length=256, null=True, blank=True)\n director = models.CharField(max_length=256, null=True, blank=True)\n style = models.CharField(max_length=64, null=True, blank=True)\n movie_address = models.CharField(max_length=256, null=True, blank=True)\n download_link = models.CharField(max_length=256, null=True, blank=True)\n counter = models.PositiveIntegerField(default=0, blank=True)\n original = models.CharField(max_length=256, null=True, blank=True)\n status = models.IntegerField(null=True, blank=True)\n image = models.CharField(max_length=256, null=True, blank=True)\n spidertime = models.DateTimeField(auto_now_add=True, null=True)\n aboutmovie = models.CharField(max_length=256, null=True, blank=True)\n language = 
models.CharField(max_length=64, null=True, blank=True)\n dyttsearch = models.CharField(max_length=256, null=True, blank=True)\n dyttdetail = models.CharField(max_length=256, null=True, blank=True)\n movierankings = models.ForeignKey(MovieRankings, null=True, blank=True)\n\n def __unicode__(self):\n return self.movie_name\n\n\nclass MovieHistory(models.Model):\n user = models.ForeignKey(User)\n movie = models.ForeignKey(Movie)\n date = models.DateTimeField(auto_now_add=True)\n marked = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return '{%s}--{%s}' % (self.user.username, self.movie.movie_name)\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nimport markdown\n\nfrom django.db import models\nfrom django.contrib.auth.models import User\n\nfrom datetime import datetime\n\nclass MovieRankings(models.Model):\n \"\"\"\n 各种电影排行榜.\n \"\"\"\n name = models.CharField(max_length=100)\n def __unicode__(self):\n return self.name\n\nclass Movie(models.Model):\n \"\"\"\n 电影的数据库表格\n \"\"\"\n movie_name = models.CharField(max_length=64, blank=True)\n # 豆瓣链接,值可以是null,也可以不填这个字段.\n douban_link = models.CharField(max_length=256, null=True, blank=True)\n # 豆瓣评分.\n douban_score = models.CharField(max_length=64, null=True, blank=True)\n # 豆瓣评分人数.\n douban_counter = models.PositiveIntegerField(default=0, blank=True)\n # Imdb链接.\n imdb_link = models.CharField(max_length=256, null=True, blank=True)\n # Imdb评分.\n imdb_score = models.CharField(max_length=64, null=True, blank=True)\n # Imdb评分人数.\n imdb_counter = models.PositiveIntegerField(default=0, blank=True)\n # 网站中的链接.\n nomovie_link = models.CharField(max_length=256, null=True, blank=True)\n # 网站中评分.\n nomovie_score = models.CharField(max_length=64, null=True, blank=True)\n # 网站中评分人数.\n nomovie_counter = models.PositiveIntegerField(default=0, blank=True)\n # 上映国家.\n country = models.CharField(max_length=64, null=True, blank=True)\n # 上映日期.\n dateyear = models.CharField(max_length=64, null=True, blank=True)\n # 主演.\n actor = models.CharField(max_length=256, null=True, blank=True)\n # 导演.\n director = models.CharField(max_length=256, null=True, blank=True)\n # 电影类型.\n style = models.CharField(max_length=64, null=True, blank=True)\n # 电影播放地址.\n movie_address = models.CharField(max_length=256, null=True, blank=True)\n # 电影下载链接.\n download_link = models.CharField(max_length=256, null=True, blank=True)\n # 电影在本网站的播放次数.\n counter = models.PositiveIntegerField(default=0, blank=True)\n # 电影来源,\n # 0:表示豆瓣top250 1:表示imdbtop250 2:表示普通豆瓣 3:表示普通imdb \n # 4:表示在豆瓣和imdb中都存在 5表示:用户自添加\n original = 
models.CharField(max_length=256, null=True, blank=True)\n # 1:表示通过 0:表示未通过 2:表示审核中\n status = models.IntegerField(null=True, blank=True)\n # 图片保存地址\n image = models.CharField(max_length=256, null=True, blank=True)\n # 爬取电影入库时间\n spidertime = models.DateTimeField(auto_now_add=True, null=True)\n # 关于电影\n aboutmovie = models.CharField(max_length=256, null=True, blank=True)\n # 电影语言\n language = models.CharField(max_length=64, null=True, blank=True)\n # 电影天堂搜索地址\n dyttsearch = models.CharField(max_length=256, null=True, blank=True)\n # 电影天堂搜索电影详情页面\n dyttdetail = models.CharField(max_length=256, null=True, blank=True)\n movierankings = models.ForeignKey(MovieRankings, null=True, blank=True)\n\n def __unicode__(self):\n return self.movie_name\n\n # def get_comments(self):\n\nclass MovieHistory(models.Model):\n # 观看的用户.\n # 用户一对多MovieHistory,可以看多个电影.\n user = models.ForeignKey(User)\n # 观看的电影.\n movie = models.ForeignKey(Movie)\n # 观看的时间.\n date = models.DateTimeField(auto_now_add=True)\n # 0表示用户观看了该电影,1表示收藏,2表示推荐.\n marked = models.IntegerField(blank=True, null=True)\n \n def __unicode__(self):\n return \"{%s}--{%s}\" % (self.user.username, self.movie.movie_name)\n\n\n",
"step-ids": [
8,
10,
11,
12,
13
]
}
|
[
8,
10,
11,
12,
13
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__author__ = 'ghou'
<|reserved_special_token_0|>
bGameValid = True
dAskUserInfo = {}
gAccMode = 0
gWhiteTestResourceVersion = None
gInvalidClientVersion = None
<|reserved_special_token_1|>
__author__ = 'ghou'
from datetime import datetime
bGameValid = True
dAskUserInfo = {}
gAccMode = 0
gWhiteTestResourceVersion = None
gInvalidClientVersion = None
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'ghou'
from datetime import datetime
bGameValid = True
dAskUserInfo = {}
gAccMode = 0
#============UserSyncResource2.py===================
#============前端资源热更白名单测试功能================
#============去读配置表config.xml==================
#============大于配置标号的热更内容只有白名单可见=======
gWhiteTestResourceVersion = None
#============评审版本热更过滤========================
#============去读配置表config.xml==================
#============等于配置标号的热更内容都不可见=============
gInvalidClientVersion = None # 非法的客户端版本号
|
flexible
|
{
"blob_id": "2e075c3ee6b245b1ffd0bb8c4e205199f794da76",
"index": 5725,
"step-1": "<mask token>\n",
"step-2": "__author__ = 'ghou'\n<mask token>\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\ngWhiteTestResourceVersion = None\ngInvalidClientVersion = None\n",
"step-3": "__author__ = 'ghou'\nfrom datetime import datetime\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\ngWhiteTestResourceVersion = None\ngInvalidClientVersion = None\n",
"step-4": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\n__author__ = 'ghou'\n\nfrom datetime import datetime\n\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\n\n\n\n#============UserSyncResource2.py===================\n\n#============前端资源热更白名单测试功能================\n#============去读配置表config.xml==================\n#============大于配置标号的热更内容只有白名单可见=======\ngWhiteTestResourceVersion = None\n\n#============评审版本热更过滤========================\n#============去读配置表config.xml==================\n#============等于配置标号的热更内容都不可见=============\ngInvalidClientVersion = None # 非法的客户端版本号",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('backend', '0001_initial')]
operations = [migrations.AddField(model_name='company', name=
'coordinates', field=django.contrib.gis.db.models.fields.PointField
(blank=True, null=True, srid=4326)), migrations.AlterField(
model_name='company', name='founded_at', field=models.IntegerField())]
<|reserved_special_token_1|>
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('backend', '0001_initial')]
operations = [migrations.AddField(model_name='company', name=
'coordinates', field=django.contrib.gis.db.models.fields.PointField
(blank=True, null=True, srid=4326)), migrations.AlterField(
model_name='company', name='founded_at', field=models.IntegerField())]
<|reserved_special_token_1|>
# Generated by Django 2.2.6 on 2019-12-08 22:18
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('backend', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='company',
name='coordinates',
field=django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326),
),
migrations.AlterField(
model_name='company',
name='founded_at',
field=models.IntegerField(),
),
]
|
flexible
|
{
"blob_id": "9af71eaf8f6f4daacdc1def7b8c5b29e6bac6b46",
"index": 4897,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('backend', '0001_initial')]\n operations = [migrations.AddField(model_name='company', name=\n 'coordinates', field=django.contrib.gis.db.models.fields.PointField\n (blank=True, null=True, srid=4326)), migrations.AlterField(\n model_name='company', name='founded_at', field=models.IntegerField())]\n",
"step-4": "import django.contrib.gis.db.models.fields\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('backend', '0001_initial')]\n operations = [migrations.AddField(model_name='company', name=\n 'coordinates', field=django.contrib.gis.db.models.fields.PointField\n (blank=True, null=True, srid=4326)), migrations.AlterField(\n model_name='company', name='founded_at', field=models.IntegerField())]\n",
"step-5": "# Generated by Django 2.2.6 on 2019-12-08 22:18\n\nimport django.contrib.gis.db.models.fields\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('backend', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='company',\n name='coordinates',\n field=django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326),\n ),\n migrations.AlterField(\n model_name='company',\n name='founded_at',\n field=models.IntegerField(),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from .kahfm_batch import KaHFMBatch
|
normal
|
{
"blob_id": "8e317d4d8ae8dc3d692d237e7e0abfaf37aecbb6",
"index": 7017,
"step-1": "<mask token>\n",
"step-2": "from .kahfm_batch import KaHFMBatch\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# coding:utf-8
class Solution:
def searchInsert(self, nums, target: int):
n = len(nums)
left = 0
right = n - 1
# 返回大于等于target的第一个索引则用left,否则用right
while left <= right:
mid = (left + right) // 2
if nums[mid] == target:
return mid
if nums[mid] < target:
left = mid + 1
else:
right = mid - 1
return left
# 如果写两个分支
def searchInsert01(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
left = 0
right = size
while left < right:
mid = left + (right - left) // 2
# 此处中位数小于目标值则排除掉,否则得包含中位数
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
# 如果写两个分支
# 范围为[0,size-1]
def searchInsert02(self, nums, target: int):
size = len(nums)
if size == 0:
return 0
if nums[-1] < target:
return size
left = 0
right = size - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else: # >=
right = mid
return left
if __name__ == '__main__':
nums = [1, 3, 5, 6]
target = 7
s = Solution()
# print(s.searchInsert(nums, target))
print(s.searchInsert01(nums, target))
|
normal
|
{
"blob_id": "9ec1cca08fac2fd976c1f596f7d340befc4eb339",
"index": 2020,
"step-1": "class Solution:\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n <mask token>\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\n<mask token>\n",
"step-4": "class Solution:\n\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid\n return left\n\n\nif __name__ == '__main__':\n nums = [1, 3, 5, 6]\n target = 7\n s = Solution()\n print(s.searchInsert01(nums, target))\n",
"step-5": "# coding:utf-8\nclass Solution:\n def searchInsert(self, nums, target: int):\n n = len(nums)\n left = 0\n right = n - 1\n # 返回大于等于target的第一个索引则用left,否则用right\n while left <= right:\n mid = (left + right) // 2\n if nums[mid] == target:\n return mid\n if nums[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n # 如果写两个分支\n def searchInsert01(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n left = 0\n right = size\n while left < right:\n mid = left + (right - left) // 2\n # 此处中位数小于目标值则排除掉,否则得包含中位数\n if nums[mid] < target:\n left = mid + 1\n else: # >=\n right = mid\n return left\n\n # 如果写两个分支\n # 范围为[0,size-1]\n def searchInsert02(self, nums, target: int):\n size = len(nums)\n if size == 0:\n return 0\n if nums[-1] < target:\n return size\n left = 0\n right = size - 1\n while left < right:\n mid = left + (right - left) // 2\n if nums[mid] < target:\n left = mid + 1\n else: # >=\n right = mid\n return left\n\n\nif __name__ == '__main__':\n nums = [1, 3, 5, 6]\n target = 7\n s = Solution()\n # print(s.searchInsert(nums, target))\n print(s.searchInsert01(nums, target))\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_station_coords(station):
if station == 'steaks':
coords = [0.4625, 0.375, 0.14]
elif station == 'griddle':
coords = [0.73, 0.375, 0.05]
elif station == 'steak_flip_initial':
coords = [0.73, 0.7, 0.166]
elif station == 'steak_flip_done':
coords = [0.73, 0.33, 0.166]
elif station == 'steak_flip_drop':
coords = [0.73, 0.6, 0.05]
elif station == 'plate':
coords = [1.11, 0.35, 0.1]
elif station == 'oil':
coords = [0.9, 0.375, 0.08]
else:
coords = [0.0, 0.0, 0.0]
return coords
<|reserved_special_token_1|>
def get_attachment_station_coords(station):
if station == 'gripper1':
coords = [0.48, 0.05, 0.161]
elif station == 'gripper2':
coords = [0.28, 0.05, 0.13]
elif station == 'syringe':
coords = [0.405, 0.745, 0.213]
else:
coords = [0.0, 0.0, 0.0]
return coords
def get_station_coords(station):
if station == 'steaks':
coords = [0.4625, 0.375, 0.14]
elif station == 'griddle':
coords = [0.73, 0.375, 0.05]
elif station == 'steak_flip_initial':
coords = [0.73, 0.7, 0.166]
elif station == 'steak_flip_done':
coords = [0.73, 0.33, 0.166]
elif station == 'steak_flip_drop':
coords = [0.73, 0.6, 0.05]
elif station == 'plate':
coords = [1.11, 0.35, 0.1]
elif station == 'oil':
coords = [0.9, 0.375, 0.08]
else:
coords = [0.0, 0.0, 0.0]
return coords
<|reserved_special_token_1|>
#!/usr/bin/env python
def get_attachment_station_coords(station):
if (station == "gripper1"):
coords = [0.48, 0.05, 0.161]
elif (station == "gripper2"):
coords = [0.28, 0.05, 0.13]
elif (station == "syringe"):
coords = [0.405, 0.745, 0.213]
else:
coords = [0.0, 0.0, 0.0]
# Move the gantry to the coordinates
return coords
def get_station_coords(station):
if(station == "steaks"):
coords= [0.4625, 0.375, 0.14]
elif(station == "griddle"):
coords = [0.73, 0.375, 0.05]
elif(station == "steak_flip_initial"):
coords = [0.73, 0.7, 0.166]
elif(station == "steak_flip_done"):
coords = [0.73, 0.33, 0.166]
elif(station == "steak_flip_drop"):
coords = [0.73, 0.6, 0.05]
elif(station == "plate"):
# coords = [1.11, 0.75, 0.1]
coords = [1.11, 0.35, 0.1]
elif(station == "oil"):
coords = [0.9, 0.375, 0.08]
else:
coords = [0.0, 0.0, 0.0]
return coords
|
flexible
|
{
"blob_id": "86c03fa85ac405a148be13325efeaaf691d9ec26",
"index": 5223,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_station_coords(station):\n if station == 'steaks':\n coords = [0.4625, 0.375, 0.14]\n elif station == 'griddle':\n coords = [0.73, 0.375, 0.05]\n elif station == 'steak_flip_initial':\n coords = [0.73, 0.7, 0.166]\n elif station == 'steak_flip_done':\n coords = [0.73, 0.33, 0.166]\n elif station == 'steak_flip_drop':\n coords = [0.73, 0.6, 0.05]\n elif station == 'plate':\n coords = [1.11, 0.35, 0.1]\n elif station == 'oil':\n coords = [0.9, 0.375, 0.08]\n else:\n coords = [0.0, 0.0, 0.0]\n return coords\n",
"step-3": "def get_attachment_station_coords(station):\n if station == 'gripper1':\n coords = [0.48, 0.05, 0.161]\n elif station == 'gripper2':\n coords = [0.28, 0.05, 0.13]\n elif station == 'syringe':\n coords = [0.405, 0.745, 0.213]\n else:\n coords = [0.0, 0.0, 0.0]\n return coords\n\n\ndef get_station_coords(station):\n if station == 'steaks':\n coords = [0.4625, 0.375, 0.14]\n elif station == 'griddle':\n coords = [0.73, 0.375, 0.05]\n elif station == 'steak_flip_initial':\n coords = [0.73, 0.7, 0.166]\n elif station == 'steak_flip_done':\n coords = [0.73, 0.33, 0.166]\n elif station == 'steak_flip_drop':\n coords = [0.73, 0.6, 0.05]\n elif station == 'plate':\n coords = [1.11, 0.35, 0.1]\n elif station == 'oil':\n coords = [0.9, 0.375, 0.08]\n else:\n coords = [0.0, 0.0, 0.0]\n return coords\n",
"step-4": "#!/usr/bin/env python\n\ndef get_attachment_station_coords(station):\n if (station == \"gripper1\"):\n coords = [0.48, 0.05, 0.161]\n elif (station == \"gripper2\"):\n coords = [0.28, 0.05, 0.13]\n elif (station == \"syringe\"):\n coords = [0.405, 0.745, 0.213]\n else:\n coords = [0.0, 0.0, 0.0]\n # Move the gantry to the coordinates\n return coords\n\ndef get_station_coords(station):\n if(station == \"steaks\"):\n coords= [0.4625, 0.375, 0.14]\n elif(station == \"griddle\"):\n coords = [0.73, 0.375, 0.05]\n elif(station == \"steak_flip_initial\"):\n coords = [0.73, 0.7, 0.166]\n elif(station == \"steak_flip_done\"):\n coords = [0.73, 0.33, 0.166]\n elif(station == \"steak_flip_drop\"):\n coords = [0.73, 0.6, 0.05]\n elif(station == \"plate\"):\n # coords = [1.11, 0.75, 0.1]\n coords = [1.11, 0.35, 0.1]\n elif(station == \"oil\"):\n coords = [0.9, 0.375, 0.08]\n else:\n coords = [0.0, 0.0, 0.0]\n return coords",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import MySQLdb
import settings
import redis
import socket
import fcntl
import struct
import datetime
db = MySQLdb.connect(settings.host, settings.user, settings.pwd, settings.db)
cursor = db.cursor()
def connect_mysql():
try:
db.ping()
except:
db = MySQLdb.connect(settings.host, settings.user, settings.pwd, settings.db)
def init_database(table, sql):
cursor.execute("DROP TABLE IF EXISTS %s" % table)
cursor.execute(sql)
print "init %s successful" % table
def insert_data(sql):
connect_mysql()
try:
cursor = db.cursor()
cursor.execute(sql)
db.commit()
except:
print "execute %s error" % sql
db.rollback()
def set_tags_from_result():
sql = "select WIDTH,DROP_RATE,MEMORY,CPU,SERVICE,THREAD_NUM,FRECURENT,R100 from result"
devide = [125.0, 1, 100.0, 100.0, 1000.0, 20.0, 1, 1]
result = [1,2,3,4,5,6,7,8]
try:
cursor.execute(sql)
results = cursor.fetchall()
for element in results:
for i in range(len(element)):
result[i] = element[i]/devide[i]
sql = "insert into tags (WIDTH,DROP_RATE,MEMORY,CPU,SERVICE,THREAD_NUM,FRECURENT,R100) values('%f', '%f', '%f', '%f', '%f', '%f', '%f', '%f')" % (result[0], result[1], result[2],result[3], result[4], result[5], result[6], result[7])
insert_data(sql)
except Exception as msg:
print "select from result error"
print msg
print str(msg)
db.close()
if __name__ == '__main__':
table = 'tags'
sql = """CREATE TABLE %s (
WIDTH FLOAT(3,2),
DROP_RATE FLOAT,
MEMORY FLOAT(3,2),
CPU FLOAT(3,2),
SERVICE FLOAT(3,2),
THREAD_NUM FLOAT,
FRECURENT FLOAT,
R100 FLOAT(2, 1))""" % table
init_database(table, sql)
set_tags_from_result()
|
normal
|
{
"blob_id": "b46b9b086fc089e24cb39a0c2c4ac252591b2190",
"index": 1540,
"step-1": "import MySQLdb\nimport settings\nimport redis\nimport socket\nimport fcntl\nimport struct\nimport datetime\n\n\ndb = MySQLdb.connect(settings.host, settings.user, settings.pwd, settings.db)\ncursor = db.cursor()\ndef connect_mysql():\n\ttry: \n db.ping()\n except: \n db = MySQLdb.connect(settings.host, settings.user, settings.pwd, settings.db)\n\ndef init_database(table, sql):\n\tcursor.execute(\"DROP TABLE IF EXISTS %s\" % table)\n cursor.execute(sql)\n\tprint \"init %s successful\" % table\n\ndef insert_data(sql):\n connect_mysql()\n\ttry:\n cursor = db.cursor()\n cursor.execute(sql)\n db.commit()\n except:\n print \"execute %s error\" % sql\n db.rollback()\n\n\ndef set_tags_from_result():\n\tsql = \"select WIDTH,DROP_RATE,MEMORY,CPU,SERVICE,THREAD_NUM,FRECURENT,R100 from result\"\n\tdevide = [125.0, 1, 100.0, 100.0, 1000.0, 20.0, 1, 1]\n\tresult = [1,2,3,4,5,6,7,8]\n\ttry:\n\t\tcursor.execute(sql)\n\t\tresults = cursor.fetchall()\n\t\tfor element in results:\n\t\t\tfor i in range(len(element)):\n\t\t\t\tresult[i] = element[i]/devide[i]\n\t\t\tsql = \"insert into tags (WIDTH,DROP_RATE,MEMORY,CPU,SERVICE,THREAD_NUM,FRECURENT,R100) values('%f', '%f', '%f', '%f', '%f', '%f', '%f', '%f')\" % (result[0], result[1], result[2],result[3], result[4], result[5], result[6], result[7])\n\t\t\tinsert_data(sql)\n\texcept Exception as msg:\n \tprint \"select from result error\"\n\t print msg\n print str(msg)\n\tdb.close()\nif __name__ == '__main__':\n\ttable = 'tags'\n\tsql = \"\"\"CREATE TABLE %s (\n WIDTH FLOAT(3,2),\n DROP_RATE FLOAT,\n MEMORY FLOAT(3,2),\n CPU FLOAT(3,2),\n SERVICE FLOAT(3,2),\n THREAD_NUM FLOAT,\n FRECURENT FLOAT,\n R100 FLOAT(2, 1))\"\"\" % table\n\tinit_database(table, sql)\n\tset_tags_from_result()\n\t\n\t\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class IGENMessage(object):
def __init__(self):
self.serial = None
self.temperature = None
self.pv1 = 0
self.pv2 = 0
self.pv3 = 0
self.pa1 = 0
self.pa2 = 0
self.pa3 = 0
self.ov1 = 0
self.ov2 = 0
self.ov3 = 0
self.oa1 = 0
self.oa2 = 0
self.oa3 = 0
self.oHz = 0
self.op1 = 0
self.op2 = 0
self.op3 = 0
self.energy_today = None
self.energy_overall = None
self.operational_hours = None
<|reserved_special_token_0|>
def outputs(self):
return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.
op2), (self.ov3, self.oa3, self.op3)]
def inputs(self):
return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self
.pa3)]
def report(self):
print('Logger: {}'.format(self.serial))
print('Temperature: {} degrees celcius'.format(self.temperature))
print()
print('Inputs: ')
print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))
print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))
print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))
print()
print('Outputs: ({} Hz)'.format(self.oHz))
print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,
self.oa1, self.op1))
print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,
self.oa2, self.op2))
print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,
self.oa3, self.op3))
print()
print('Energy today: {:8.1f} kWh'.format(self.energy_today))
print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))
print('Operational hours: {}'.format(self.operational_hours))
def __repr__(self):
total_power = self.op1 + self.op2 + self.op3
return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,
self.energy_today)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class IGENMessage(object):
def __init__(self):
self.serial = None
self.temperature = None
self.pv1 = 0
self.pv2 = 0
self.pv3 = 0
self.pa1 = 0
self.pa2 = 0
self.pa3 = 0
self.ov1 = 0
self.ov2 = 0
self.ov3 = 0
self.oa1 = 0
self.oa2 = 0
self.oa3 = 0
self.oHz = 0
self.op1 = 0
self.op2 = 0
self.op3 = 0
self.energy_today = None
self.energy_overall = None
self.operational_hours = None
@classmethod
def from_bytes(cls, data):
if len(data) != 103:
raise Exception('Packet should be exactly 103 bytes')
self = cls()
parsed = struct.unpack(
'!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)
self.serial = parsed[0].decode('ascii')
self.temperature = parsed[1] / 10
self.pv1 = parsed[2] / 10
self.pv2 = parsed[3] / 10
self.pv3 = parsed[4] / 10
self.pa1 = parsed[5] / 10
self.pa2 = parsed[6] / 10
self.pa3 = parsed[7] / 10
self.oa1 = parsed[8] / 10
self.oa2 = parsed[9] / 10
self.oa3 = parsed[10] / 10
self.ov1 = parsed[11] / 10
self.ov2 = parsed[12] / 10
self.ov3 = parsed[13] / 10
self.oHz = parsed[14] / 100
self.op1 = parsed[15]
self.op2 = parsed[16]
self.op3 = parsed[17]
self.energy_today = parsed[18] / 100
self.energy_overall = parsed[19] / 10
self.operational_hours = parsed[20]
return self
def outputs(self):
return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.
op2), (self.ov3, self.oa3, self.op3)]
def inputs(self):
return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self
.pa3)]
def report(self):
print('Logger: {}'.format(self.serial))
print('Temperature: {} degrees celcius'.format(self.temperature))
print()
print('Inputs: ')
print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))
print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))
print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))
print()
print('Outputs: ({} Hz)'.format(self.oHz))
print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,
self.oa1, self.op1))
print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,
self.oa2, self.op2))
print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,
self.oa3, self.op3))
print()
print('Energy today: {:8.1f} kWh'.format(self.energy_today))
print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))
print('Operational hours: {}'.format(self.operational_hours))
def __repr__(self):
total_power = self.op1 + self.op2 + self.op3
return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,
self.energy_today)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def parse(message):
return IGENMessage.from_bytes(message)
class IGENMessage(object):
def __init__(self):
self.serial = None
self.temperature = None
self.pv1 = 0
self.pv2 = 0
self.pv3 = 0
self.pa1 = 0
self.pa2 = 0
self.pa3 = 0
self.ov1 = 0
self.ov2 = 0
self.ov3 = 0
self.oa1 = 0
self.oa2 = 0
self.oa3 = 0
self.oHz = 0
self.op1 = 0
self.op2 = 0
self.op3 = 0
self.energy_today = None
self.energy_overall = None
self.operational_hours = None
@classmethod
def from_bytes(cls, data):
if len(data) != 103:
raise Exception('Packet should be exactly 103 bytes')
self = cls()
parsed = struct.unpack(
'!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)
self.serial = parsed[0].decode('ascii')
self.temperature = parsed[1] / 10
self.pv1 = parsed[2] / 10
self.pv2 = parsed[3] / 10
self.pv3 = parsed[4] / 10
self.pa1 = parsed[5] / 10
self.pa2 = parsed[6] / 10
self.pa3 = parsed[7] / 10
self.oa1 = parsed[8] / 10
self.oa2 = parsed[9] / 10
self.oa3 = parsed[10] / 10
self.ov1 = parsed[11] / 10
self.ov2 = parsed[12] / 10
self.ov3 = parsed[13] / 10
self.oHz = parsed[14] / 100
self.op1 = parsed[15]
self.op2 = parsed[16]
self.op3 = parsed[17]
self.energy_today = parsed[18] / 100
self.energy_overall = parsed[19] / 10
self.operational_hours = parsed[20]
return self
def outputs(self):
return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.
op2), (self.ov3, self.oa3, self.op3)]
def inputs(self):
return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self
.pa3)]
def report(self):
print('Logger: {}'.format(self.serial))
print('Temperature: {} degrees celcius'.format(self.temperature))
print()
print('Inputs: ')
print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))
print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))
print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))
print()
print('Outputs: ({} Hz)'.format(self.oHz))
print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,
self.oa1, self.op1))
print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,
self.oa2, self.op2))
print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,
self.oa3, self.op3))
print()
print('Energy today: {:8.1f} kWh'.format(self.energy_today))
print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))
print('Operational hours: {}'.format(self.operational_hours))
def __repr__(self):
total_power = self.op1 + self.op2 + self.op3
return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,
self.energy_today)
<|reserved_special_token_1|>
import struct
def parse(message):
return IGENMessage.from_bytes(message)
class IGENMessage(object):
def __init__(self):
self.serial = None
self.temperature = None
self.pv1 = 0
self.pv2 = 0
self.pv3 = 0
self.pa1 = 0
self.pa2 = 0
self.pa3 = 0
self.ov1 = 0
self.ov2 = 0
self.ov3 = 0
self.oa1 = 0
self.oa2 = 0
self.oa3 = 0
self.oHz = 0
self.op1 = 0
self.op2 = 0
self.op3 = 0
self.energy_today = None
self.energy_overall = None
self.operational_hours = None
@classmethod
def from_bytes(cls, data):
if len(data) != 103:
raise Exception('Packet should be exactly 103 bytes')
self = cls()
parsed = struct.unpack(
'!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)
self.serial = parsed[0].decode('ascii')
self.temperature = parsed[1] / 10
self.pv1 = parsed[2] / 10
self.pv2 = parsed[3] / 10
self.pv3 = parsed[4] / 10
self.pa1 = parsed[5] / 10
self.pa2 = parsed[6] / 10
self.pa3 = parsed[7] / 10
self.oa1 = parsed[8] / 10
self.oa2 = parsed[9] / 10
self.oa3 = parsed[10] / 10
self.ov1 = parsed[11] / 10
self.ov2 = parsed[12] / 10
self.ov3 = parsed[13] / 10
self.oHz = parsed[14] / 100
self.op1 = parsed[15]
self.op2 = parsed[16]
self.op3 = parsed[17]
self.energy_today = parsed[18] / 100
self.energy_overall = parsed[19] / 10
self.operational_hours = parsed[20]
return self
def outputs(self):
return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.
op2), (self.ov3, self.oa3, self.op3)]
def inputs(self):
return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self
.pa3)]
def report(self):
print('Logger: {}'.format(self.serial))
print('Temperature: {} degrees celcius'.format(self.temperature))
print()
print('Inputs: ')
print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))
print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))
print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))
print()
print('Outputs: ({} Hz)'.format(self.oHz))
print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,
self.oa1, self.op1))
print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,
self.oa2, self.op2))
print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,
self.oa3, self.op3))
print()
print('Energy today: {:8.1f} kWh'.format(self.energy_today))
print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))
print('Operational hours: {}'.format(self.operational_hours))
def __repr__(self):
total_power = self.op1 + self.op2 + self.op3
return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,
self.energy_today)
<|reserved_special_token_1|>
import struct
def parse(message):
return IGENMessage.from_bytes(message)
class IGENMessage(object):
def __init__(self):
self.serial = None
self.temperature = None
self.pv1 = 0
self.pv2 = 0
self.pv3 = 0
self.pa1 = 0
self.pa2 = 0
self.pa3 = 0
self.ov1 = 0
self.ov2 = 0
self.ov3 = 0
self.oa1 = 0
self.oa2 = 0
self.oa3 = 0
self.oHz = 0
self.op1 = 0
self.op2 = 0
self.op3 = 0
self.energy_today = None
self.energy_overall = None
self.operational_hours = None
@classmethod
def from_bytes(cls, data):
if len(data) != 103:
raise Exception('Packet should be exactly 103 bytes')
self = cls()
parsed = struct.unpack('!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)
self.serial = parsed[0].decode('ascii')
self.temperature = parsed[1] / 10
self.pv1 = parsed[2] / 10
self.pv2 = parsed[3] / 10
self.pv3 = parsed[4] / 10
self.pa1 = parsed[5] / 10
self.pa2 = parsed[6] / 10
self.pa3 = parsed[7] / 10
self.oa1 = parsed[8] / 10
self.oa2 = parsed[9] / 10
self.oa3 = parsed[10] / 10
self.ov1 = parsed[11] / 10
self.ov2 = parsed[12] / 10
self.ov3 = parsed[13] / 10
self.oHz = parsed[14] / 100
self.op1 = parsed[15]
self.op2 = parsed[16]
self.op3 = parsed[17]
self.energy_today = parsed[18] / 100
self.energy_overall = parsed[19] / 10
self.operational_hours = parsed[20]
return self
def outputs(self):
return [
(self.ov1, self.oa1, self.op1),
(self.ov2, self.oa2, self.op2),
(self.ov3, self.oa3, self.op3)
]
def inputs(self):
return [
(self.pv1, self.pa1),
(self.pv2, self.pa2),
(self.pv3, self.pa3)
]
def report(self):
print("Logger: {}".format(self.serial))
print("Temperature: {} degrees celcius".format(self.temperature))
print()
print("Inputs: ")
print(" Channel 1: {:6.2f} V {:5.2f} A".format(self.pv1, self.pa1))
print(" Channel 2: {:6.2f} V {:5.2f} A".format(self.pv2, self.pa2))
print(" Channel 3: {:6.2f} V {:5.2f} A".format(self.pv3, self.pa3))
print()
print("Outputs: ({} Hz)".format(self.oHz))
print(" L1: {:6.2f} V {:5.2f} A {:5.0f} W".format(self.ov1, self.oa1, self.op1))
print(" L2: {:6.2f} V {:5.2f} A {:5.0f} W".format(self.ov2, self.oa2, self.op2))
print(" L3: {:6.2f} V {:5.2f} A {:5.0f} W".format(self.ov3, self.oa3, self.op3))
print()
print("Energy today: {:8.1f} kWh".format(self.energy_today))
print("Energy overall: {:8.1f} kWh".format(self.energy_overall))
print("Operational hours: {}".format(self.operational_hours))
def __repr__(self):
total_power = self.op1 + self.op2 + self.op3
return "<IGENMessage {} watt ({} kWh today)>".format(total_power, self.energy_today)
|
flexible
|
{
"blob_id": "5df42a024e1edbe5cc977a814efe580db04b8b76",
"index": 2386,
"step-1": "<mask token>\n\n\nclass IGENMessage(object):\n\n def __init__(self):\n self.serial = None\n self.temperature = None\n self.pv1 = 0\n self.pv2 = 0\n self.pv3 = 0\n self.pa1 = 0\n self.pa2 = 0\n self.pa3 = 0\n self.ov1 = 0\n self.ov2 = 0\n self.ov3 = 0\n self.oa1 = 0\n self.oa2 = 0\n self.oa3 = 0\n self.oHz = 0\n self.op1 = 0\n self.op2 = 0\n self.op3 = 0\n self.energy_today = None\n self.energy_overall = None\n self.operational_hours = None\n <mask token>\n\n def outputs(self):\n return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.\n op2), (self.ov3, self.oa3, self.op3)]\n\n def inputs(self):\n return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self\n .pa3)]\n\n def report(self):\n print('Logger: {}'.format(self.serial))\n print('Temperature: {} degrees celcius'.format(self.temperature))\n print()\n print('Inputs: ')\n print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))\n print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))\n print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))\n print()\n print('Outputs: ({} Hz)'.format(self.oHz))\n print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,\n self.oa1, self.op1))\n print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,\n self.oa2, self.op2))\n print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,\n self.oa3, self.op3))\n print()\n print('Energy today: {:8.1f} kWh'.format(self.energy_today))\n print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))\n print('Operational hours: {}'.format(self.operational_hours))\n\n def __repr__(self):\n total_power = self.op1 + self.op2 + self.op3\n return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,\n self.energy_today)\n",
"step-2": "<mask token>\n\n\nclass IGENMessage(object):\n\n def __init__(self):\n self.serial = None\n self.temperature = None\n self.pv1 = 0\n self.pv2 = 0\n self.pv3 = 0\n self.pa1 = 0\n self.pa2 = 0\n self.pa3 = 0\n self.ov1 = 0\n self.ov2 = 0\n self.ov3 = 0\n self.oa1 = 0\n self.oa2 = 0\n self.oa3 = 0\n self.oHz = 0\n self.op1 = 0\n self.op2 = 0\n self.op3 = 0\n self.energy_today = None\n self.energy_overall = None\n self.operational_hours = None\n\n @classmethod\n def from_bytes(cls, data):\n if len(data) != 103:\n raise Exception('Packet should be exactly 103 bytes')\n self = cls()\n parsed = struct.unpack(\n '!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)\n self.serial = parsed[0].decode('ascii')\n self.temperature = parsed[1] / 10\n self.pv1 = parsed[2] / 10\n self.pv2 = parsed[3] / 10\n self.pv3 = parsed[4] / 10\n self.pa1 = parsed[5] / 10\n self.pa2 = parsed[6] / 10\n self.pa3 = parsed[7] / 10\n self.oa1 = parsed[8] / 10\n self.oa2 = parsed[9] / 10\n self.oa3 = parsed[10] / 10\n self.ov1 = parsed[11] / 10\n self.ov2 = parsed[12] / 10\n self.ov3 = parsed[13] / 10\n self.oHz = parsed[14] / 100\n self.op1 = parsed[15]\n self.op2 = parsed[16]\n self.op3 = parsed[17]\n self.energy_today = parsed[18] / 100\n self.energy_overall = parsed[19] / 10\n self.operational_hours = parsed[20]\n return self\n\n def outputs(self):\n return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.\n op2), (self.ov3, self.oa3, self.op3)]\n\n def inputs(self):\n return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self\n .pa3)]\n\n def report(self):\n print('Logger: {}'.format(self.serial))\n print('Temperature: {} degrees celcius'.format(self.temperature))\n print()\n print('Inputs: ')\n print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))\n print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))\n print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, self.pa3))\n print()\n print('Outputs: ({} Hz)'.format(self.oHz))\n 
print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,\n self.oa1, self.op1))\n print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,\n self.oa2, self.op2))\n print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,\n self.oa3, self.op3))\n print()\n print('Energy today: {:8.1f} kWh'.format(self.energy_today))\n print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))\n print('Operational hours: {}'.format(self.operational_hours))\n\n def __repr__(self):\n total_power = self.op1 + self.op2 + self.op3\n return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,\n self.energy_today)\n",
"step-3": "<mask token>\n\n\ndef parse(message):\n return IGENMessage.from_bytes(message)\n\n\nclass IGENMessage(object):\n\n def __init__(self):\n self.serial = None\n self.temperature = None\n self.pv1 = 0\n self.pv2 = 0\n self.pv3 = 0\n self.pa1 = 0\n self.pa2 = 0\n self.pa3 = 0\n self.ov1 = 0\n self.ov2 = 0\n self.ov3 = 0\n self.oa1 = 0\n self.oa2 = 0\n self.oa3 = 0\n self.oHz = 0\n self.op1 = 0\n self.op2 = 0\n self.op3 = 0\n self.energy_today = None\n self.energy_overall = None\n self.operational_hours = None\n\n @classmethod\n def from_bytes(cls, data):\n if len(data) != 103:\n raise Exception('Packet should be exactly 103 bytes')\n self = cls()\n parsed = struct.unpack(\n '!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)\n self.serial = parsed[0].decode('ascii')\n self.temperature = parsed[1] / 10\n self.pv1 = parsed[2] / 10\n self.pv2 = parsed[3] / 10\n self.pv3 = parsed[4] / 10\n self.pa1 = parsed[5] / 10\n self.pa2 = parsed[6] / 10\n self.pa3 = parsed[7] / 10\n self.oa1 = parsed[8] / 10\n self.oa2 = parsed[9] / 10\n self.oa3 = parsed[10] / 10\n self.ov1 = parsed[11] / 10\n self.ov2 = parsed[12] / 10\n self.ov3 = parsed[13] / 10\n self.oHz = parsed[14] / 100\n self.op1 = parsed[15]\n self.op2 = parsed[16]\n self.op3 = parsed[17]\n self.energy_today = parsed[18] / 100\n self.energy_overall = parsed[19] / 10\n self.operational_hours = parsed[20]\n return self\n\n def outputs(self):\n return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.\n op2), (self.ov3, self.oa3, self.op3)]\n\n def inputs(self):\n return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self\n .pa3)]\n\n def report(self):\n print('Logger: {}'.format(self.serial))\n print('Temperature: {} degrees celcius'.format(self.temperature))\n print()\n print('Inputs: ')\n print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))\n print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))\n print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, 
self.pa3))\n print()\n print('Outputs: ({} Hz)'.format(self.oHz))\n print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,\n self.oa1, self.op1))\n print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,\n self.oa2, self.op2))\n print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,\n self.oa3, self.op3))\n print()\n print('Energy today: {:8.1f} kWh'.format(self.energy_today))\n print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))\n print('Operational hours: {}'.format(self.operational_hours))\n\n def __repr__(self):\n total_power = self.op1 + self.op2 + self.op3\n return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,\n self.energy_today)\n",
"step-4": "import struct\n\n\ndef parse(message):\n return IGENMessage.from_bytes(message)\n\n\nclass IGENMessage(object):\n\n def __init__(self):\n self.serial = None\n self.temperature = None\n self.pv1 = 0\n self.pv2 = 0\n self.pv3 = 0\n self.pa1 = 0\n self.pa2 = 0\n self.pa3 = 0\n self.ov1 = 0\n self.ov2 = 0\n self.ov3 = 0\n self.oa1 = 0\n self.oa2 = 0\n self.oa3 = 0\n self.oHz = 0\n self.op1 = 0\n self.op2 = 0\n self.op3 = 0\n self.energy_today = None\n self.energy_overall = None\n self.operational_hours = None\n\n @classmethod\n def from_bytes(cls, data):\n if len(data) != 103:\n raise Exception('Packet should be exactly 103 bytes')\n self = cls()\n parsed = struct.unpack(\n '!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)\n self.serial = parsed[0].decode('ascii')\n self.temperature = parsed[1] / 10\n self.pv1 = parsed[2] / 10\n self.pv2 = parsed[3] / 10\n self.pv3 = parsed[4] / 10\n self.pa1 = parsed[5] / 10\n self.pa2 = parsed[6] / 10\n self.pa3 = parsed[7] / 10\n self.oa1 = parsed[8] / 10\n self.oa2 = parsed[9] / 10\n self.oa3 = parsed[10] / 10\n self.ov1 = parsed[11] / 10\n self.ov2 = parsed[12] / 10\n self.ov3 = parsed[13] / 10\n self.oHz = parsed[14] / 100\n self.op1 = parsed[15]\n self.op2 = parsed[16]\n self.op3 = parsed[17]\n self.energy_today = parsed[18] / 100\n self.energy_overall = parsed[19] / 10\n self.operational_hours = parsed[20]\n return self\n\n def outputs(self):\n return [(self.ov1, self.oa1, self.op1), (self.ov2, self.oa2, self.\n op2), (self.ov3, self.oa3, self.op3)]\n\n def inputs(self):\n return [(self.pv1, self.pa1), (self.pv2, self.pa2), (self.pv3, self\n .pa3)]\n\n def report(self):\n print('Logger: {}'.format(self.serial))\n print('Temperature: {} degrees celcius'.format(self.temperature))\n print()\n print('Inputs: ')\n print(' Channel 1: {:6.2f} V {:5.2f} A'.format(self.pv1, self.pa1))\n print(' Channel 2: {:6.2f} V {:5.2f} A'.format(self.pv2, self.pa2))\n print(' Channel 3: {:6.2f} V {:5.2f} A'.format(self.pv3, 
self.pa3))\n print()\n print('Outputs: ({} Hz)'.format(self.oHz))\n print(' L1: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov1,\n self.oa1, self.op1))\n print(' L2: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov2,\n self.oa2, self.op2))\n print(' L3: {:6.2f} V {:5.2f} A {:5.0f} W'.format(self.ov3,\n self.oa3, self.op3))\n print()\n print('Energy today: {:8.1f} kWh'.format(self.energy_today))\n print('Energy overall: {:8.1f} kWh'.format(self.energy_overall))\n print('Operational hours: {}'.format(self.operational_hours))\n\n def __repr__(self):\n total_power = self.op1 + self.op2 + self.op3\n return '<IGENMessage {} watt ({} kWh today)>'.format(total_power,\n self.energy_today)\n",
"step-5": "import struct\n\n\ndef parse(message):\n return IGENMessage.from_bytes(message)\n\n\nclass IGENMessage(object):\n def __init__(self):\n self.serial = None\n self.temperature = None\n self.pv1 = 0\n self.pv2 = 0\n self.pv3 = 0\n self.pa1 = 0\n self.pa2 = 0\n self.pa3 = 0\n self.ov1 = 0\n self.ov2 = 0\n self.ov3 = 0\n self.oa1 = 0\n self.oa2 = 0\n self.oa3 = 0\n self.oHz = 0\n self.op1 = 0\n self.op2 = 0\n self.op3 = 0\n self.energy_today = None\n self.energy_overall = None\n self.operational_hours = None\n\n @classmethod\n def from_bytes(cls, data):\n if len(data) != 103:\n raise Exception('Packet should be exactly 103 bytes')\n\n self = cls()\n\n parsed = struct.unpack('!17x 14s H HHH HHH HHH HHH H HHH 4x H 2x H 2x H 24x', data)\n self.serial = parsed[0].decode('ascii')\n\n self.temperature = parsed[1] / 10\n\n self.pv1 = parsed[2] / 10\n self.pv2 = parsed[3] / 10\n self.pv3 = parsed[4] / 10\n\n self.pa1 = parsed[5] / 10\n self.pa2 = parsed[6] / 10\n self.pa3 = parsed[7] / 10\n\n self.oa1 = parsed[8] / 10\n self.oa2 = parsed[9] / 10\n self.oa3 = parsed[10] / 10\n\n self.ov1 = parsed[11] / 10\n self.ov2 = parsed[12] / 10\n self.ov3 = parsed[13] / 10\n\n self.oHz = parsed[14] / 100\n\n self.op1 = parsed[15]\n self.op2 = parsed[16]\n self.op3 = parsed[17]\n\n self.energy_today = parsed[18] / 100\n self.energy_overall = parsed[19] / 10\n\n self.operational_hours = parsed[20]\n\n return self\n\n def outputs(self):\n return [\n (self.ov1, self.oa1, self.op1),\n (self.ov2, self.oa2, self.op2),\n (self.ov3, self.oa3, self.op3)\n ]\n\n def inputs(self):\n return [\n (self.pv1, self.pa1),\n (self.pv2, self.pa2),\n (self.pv3, self.pa3)\n ]\n\n def report(self):\n print(\"Logger: {}\".format(self.serial))\n print(\"Temperature: {} degrees celcius\".format(self.temperature))\n print()\n print(\"Inputs: \")\n print(\" Channel 1: {:6.2f} V {:5.2f} A\".format(self.pv1, self.pa1))\n print(\" Channel 2: {:6.2f} V {:5.2f} A\".format(self.pv2, self.pa2))\n print(\" Channel 
3: {:6.2f} V {:5.2f} A\".format(self.pv3, self.pa3))\n print()\n print(\"Outputs: ({} Hz)\".format(self.oHz))\n print(\" L1: {:6.2f} V {:5.2f} A {:5.0f} W\".format(self.ov1, self.oa1, self.op1))\n print(\" L2: {:6.2f} V {:5.2f} A {:5.0f} W\".format(self.ov2, self.oa2, self.op2))\n print(\" L3: {:6.2f} V {:5.2f} A {:5.0f} W\".format(self.ov3, self.oa3, self.op3))\n print()\n print(\"Energy today: {:8.1f} kWh\".format(self.energy_today))\n print(\"Energy overall: {:8.1f} kWh\".format(self.energy_overall))\n print(\"Operational hours: {}\".format(self.operational_hours))\n\n def __repr__(self):\n total_power = self.op1 + self.op2 + self.op3\n return \"<IGENMessage {} watt ({} kWh today)>\".format(total_power, self.energy_today)\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
initial = True
dependencies = [('auth', '0008_alter_user_username_max_length')]
operations = [migrations.CreateModel(name='Member', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('username', models.CharField(
max_length=50, unique=True, verbose_name='用户名')), ('password',
models.CharField(max_length=200, verbose_name='密码')), ('email',
models.EmailField(blank=True, max_length=254, null=True, unique=
True, verbose_name='电子邮件')), ('phone', models.CharField(blank=True,
max_length=20, null=True, unique=True, verbose_name='phone')), (
'gender', models.SmallIntegerField(choices=[(0, 'unset'), (1,
'male'), (2, 'female')], default=0, null=True, verbose_name=
'gender')), ('real_name', models.CharField(blank=True, max_length=
100, null=True, verbose_name='real name')), ('birth_of_date',
models.DateField(null=True, verbose_name='birth of date')), (
'is_superuser', models.BooleanField(default=False, verbose_name=
'whether super user or not')), ('is_staff', models.BooleanField(
default=False, verbose_name='whether enter backend or not')), (
'last_login', models.DateTimeField(null=True, verbose_name=
'last login datetime')), ('create', models.DateTimeField(
auto_now_add=True, verbose_name='create datetime')), ('modify',
models.DateTimeField(auto_now=True, verbose_name='modify datetime')
), ('groups', models.ManyToManyField(blank=True, help_text=
'The groups this user belongs to. A user will get all permissions granted to each of their groups.'
, related_name='user_set', related_query_name='user', to=
'auth.Group', verbose_name='groups')), ('user_permissions', models.
ManyToManyField(blank=True, help_text=
'Specific permissions for this user.', related_name='user_set',
related_query_name='user', to='auth.Permission', verbose_name=
'user permissions'))], options={'db_table': 'member'})]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [('auth', '0008_alter_user_username_max_length')]
operations = [migrations.CreateModel(name='Member', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('username', models.CharField(
max_length=50, unique=True, verbose_name='用户名')), ('password',
models.CharField(max_length=200, verbose_name='密码')), ('email',
models.EmailField(blank=True, max_length=254, null=True, unique=
True, verbose_name='电子邮件')), ('phone', models.CharField(blank=True,
max_length=20, null=True, unique=True, verbose_name='phone')), (
'gender', models.SmallIntegerField(choices=[(0, 'unset'), (1,
'male'), (2, 'female')], default=0, null=True, verbose_name=
'gender')), ('real_name', models.CharField(blank=True, max_length=
100, null=True, verbose_name='real name')), ('birth_of_date',
models.DateField(null=True, verbose_name='birth of date')), (
'is_superuser', models.BooleanField(default=False, verbose_name=
'whether super user or not')), ('is_staff', models.BooleanField(
default=False, verbose_name='whether enter backend or not')), (
'last_login', models.DateTimeField(null=True, verbose_name=
'last login datetime')), ('create', models.DateTimeField(
auto_now_add=True, verbose_name='create datetime')), ('modify',
models.DateTimeField(auto_now=True, verbose_name='modify datetime')
), ('groups', models.ManyToManyField(blank=True, help_text=
'The groups this user belongs to. A user will get all permissions granted to each of their groups.'
, related_name='user_set', related_query_name='user', to=
'auth.Group', verbose_name='groups')), ('user_permissions', models.
ManyToManyField(blank=True, help_text=
'Specific permissions for this user.', related_name='user_set',
related_query_name='user', to='auth.Permission', verbose_name=
'user permissions'))], options={'db_table': 'member'})]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-18 07:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=50, unique=True, verbose_name='\u7528\u6237\u540d')),
('password', models.CharField(max_length=200, verbose_name='\u5bc6\u7801')),
('email', models.EmailField(blank=True, max_length=254, null=True, unique=True, verbose_name='\u7535\u5b50\u90ae\u4ef6')),
('phone', models.CharField(blank=True, max_length=20, null=True, unique=True, verbose_name='phone')),
('gender', models.SmallIntegerField(choices=[(0, 'unset'), (1, 'male'), (2, 'female')], default=0, null=True, verbose_name='gender')),
('real_name', models.CharField(blank=True, max_length=100, null=True, verbose_name='real name')),
('birth_of_date', models.DateField(null=True, verbose_name='birth of date')),
('is_superuser', models.BooleanField(default=False, verbose_name='whether super user or not')),
('is_staff', models.BooleanField(default=False, verbose_name='whether enter backend or not')),
('last_login', models.DateTimeField(null=True, verbose_name='last login datetime')),
('create', models.DateTimeField(auto_now_add=True, verbose_name='create datetime')),
('modify', models.DateTimeField(auto_now=True, verbose_name='modify datetime')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'db_table': 'member',
},
),
]
|
flexible
|
{
"blob_id": "ab343f88c84d45cf90bddd52623362f047c72d3c",
"index": 5754,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0008_alter_user_username_max_length')]\n operations = [migrations.CreateModel(name='Member', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('username', models.CharField(\n max_length=50, unique=True, verbose_name='用户名')), ('password',\n models.CharField(max_length=200, verbose_name='密码')), ('email',\n models.EmailField(blank=True, max_length=254, null=True, unique=\n True, verbose_name='电子邮件')), ('phone', models.CharField(blank=True,\n max_length=20, null=True, unique=True, verbose_name='phone')), (\n 'gender', models.SmallIntegerField(choices=[(0, 'unset'), (1,\n 'male'), (2, 'female')], default=0, null=True, verbose_name=\n 'gender')), ('real_name', models.CharField(blank=True, max_length=\n 100, null=True, verbose_name='real name')), ('birth_of_date',\n models.DateField(null=True, verbose_name='birth of date')), (\n 'is_superuser', models.BooleanField(default=False, verbose_name=\n 'whether super user or not')), ('is_staff', models.BooleanField(\n default=False, verbose_name='whether enter backend or not')), (\n 'last_login', models.DateTimeField(null=True, verbose_name=\n 'last login datetime')), ('create', models.DateTimeField(\n auto_now_add=True, verbose_name='create datetime')), ('modify',\n models.DateTimeField(auto_now=True, verbose_name='modify datetime')\n ), ('groups', models.ManyToManyField(blank=True, help_text=\n 'The groups this user belongs to. A user will get all permissions granted to each of their groups.'\n , related_name='user_set', related_query_name='user', to=\n 'auth.Group', verbose_name='groups')), ('user_permissions', models.\n ManyToManyField(blank=True, help_text=\n 'Specific permissions for this user.', related_name='user_set',\n related_query_name='user', to='auth.Permission', verbose_name=\n 'user permissions'))], options={'db_table': 'member'})]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0008_alter_user_username_max_length')]\n operations = [migrations.CreateModel(name='Member', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('username', models.CharField(\n max_length=50, unique=True, verbose_name='用户名')), ('password',\n models.CharField(max_length=200, verbose_name='密码')), ('email',\n models.EmailField(blank=True, max_length=254, null=True, unique=\n True, verbose_name='电子邮件')), ('phone', models.CharField(blank=True,\n max_length=20, null=True, unique=True, verbose_name='phone')), (\n 'gender', models.SmallIntegerField(choices=[(0, 'unset'), (1,\n 'male'), (2, 'female')], default=0, null=True, verbose_name=\n 'gender')), ('real_name', models.CharField(blank=True, max_length=\n 100, null=True, verbose_name='real name')), ('birth_of_date',\n models.DateField(null=True, verbose_name='birth of date')), (\n 'is_superuser', models.BooleanField(default=False, verbose_name=\n 'whether super user or not')), ('is_staff', models.BooleanField(\n default=False, verbose_name='whether enter backend or not')), (\n 'last_login', models.DateTimeField(null=True, verbose_name=\n 'last login datetime')), ('create', models.DateTimeField(\n auto_now_add=True, verbose_name='create datetime')), ('modify',\n models.DateTimeField(auto_now=True, verbose_name='modify datetime')\n ), ('groups', models.ManyToManyField(blank=True, help_text=\n 'The groups this user belongs to. 
A user will get all permissions granted to each of their groups.'\n , related_name='user_set', related_query_name='user', to=\n 'auth.Group', verbose_name='groups')), ('user_permissions', models.\n ManyToManyField(blank=True, help_text=\n 'Specific permissions for this user.', related_name='user_set',\n related_query_name='user', to='auth.Permission', verbose_name=\n 'user permissions'))], options={'db_table': 'member'})]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.6 on 2017-10-18 07:31\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('auth', '0008_alter_user_username_max_length'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Member',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('username', models.CharField(max_length=50, unique=True, verbose_name='\\u7528\\u6237\\u540d')),\n ('password', models.CharField(max_length=200, verbose_name='\\u5bc6\\u7801')),\n ('email', models.EmailField(blank=True, max_length=254, null=True, unique=True, verbose_name='\\u7535\\u5b50\\u90ae\\u4ef6')),\n ('phone', models.CharField(blank=True, max_length=20, null=True, unique=True, verbose_name='phone')),\n ('gender', models.SmallIntegerField(choices=[(0, 'unset'), (1, 'male'), (2, 'female')], default=0, null=True, verbose_name='gender')),\n ('real_name', models.CharField(blank=True, max_length=100, null=True, verbose_name='real name')),\n ('birth_of_date', models.DateField(null=True, verbose_name='birth of date')),\n ('is_superuser', models.BooleanField(default=False, verbose_name='whether super user or not')),\n ('is_staff', models.BooleanField(default=False, verbose_name='whether enter backend or not')),\n ('last_login', models.DateTimeField(null=True, verbose_name='last login datetime')),\n ('create', models.DateTimeField(auto_now_add=True, verbose_name='create datetime')),\n ('modify', models.DateTimeField(auto_now=True, verbose_name='modify datetime')),\n ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),\n ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),\n ],\n options={\n 'db_table': 'member',\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
#MTU Server
from config import *
from pymodbus.client.sync import ModbusTcpClient
import time
import numpy as np
import logging
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
import matplotlib.animation as anim
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.INFO)
opc1_client = ModbusTcpClient(OPC1_IP, OPC1_PORT)
opc1_client.connect()
opc2_client = ModbusTcpClient(OPC2_IP, OPC2_PORT)
opc2_client.connect()
t = time.time()
Data = []
pca = PCA(n_components = 3)
Data = np.load("data.npy")
pca.fit(Data)
print "PCA Built"
t = time.time()
i = 0
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
y = []
def update(i):
#Read registers from the specific zone
l1 = float(opc1_client.read_holding_registers(L1, 1).registers[0])
l2 = float(opc2_client.read_holding_registers(L2, 1).registers[0])
t1 = float(opc1_client.read_holding_registers(T1, 1).registers[0])
t2 = float(opc2_client.read_holding_registers(T2, 1).registers[0])
v1 = opc2_client.read_holding_registers(V1, 1).registers[0]
v2 = opc1_client.read_holding_registers(V2, 1).registers[0]
p = opc2_client.read_holding_registers( P, 1).registers[0]
f1 = opc2_client.read_holding_registers(F1, 1).registers[0]
f2 = opc1_client.read_holding_registers(F2, 1).registers[0]
f3 = opc2_client.read_holding_registers(F3, 1).registers[0]
h = opc1_client.read_holding_registers( H, 1).registers[0]
v = np.array([[l1, l2, t1, t2, v1, v2, p, f1, f2, f3, h]])
v_transform = pca.transform(v)
y.append(v_transform[0])
x = range(len(y))
ax.clear()
ax.set_title("PCA Based Detection (Top 3 Scores).")
ax.set_ylabel("Top 3 scores")
ax.set_xlabel("Evaluation Points")
ax.set_xlim([0, 1.5*len(y)])
ax.plot(x, y, "x")
print "Simulation will start when the time is 0, 25, 50 ,75"
to = 0
while 1:
toot = int(time.time())%100
if to == toot - 1:
print toot
to = toot
# print to
if to == 0 or to == 25 or to == 50 or to == 75:
break
a = anim.FuncAnimation(fig, update, frames=int(SIM_TIME/SIM_STEP), interval=int(1000*SIM_STEP), repeat=False)
plt.show()
|
normal
|
{
"blob_id": "572a9da5edcff3ff5ca0a37f982432f9712dc58c",
"index": 9279,
"step-1": "#!/usr/bin/python\n#MTU Server\nfrom config import *\nfrom pymodbus.client.sync import ModbusTcpClient\nimport time\nimport numpy as np\nimport logging\nfrom sklearn.decomposition import PCA\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as anim\n\nlogging.basicConfig()\nlog = logging.getLogger()\nlog.setLevel(logging.INFO)\n\nopc1_client = ModbusTcpClient(OPC1_IP, OPC1_PORT)\nopc1_client.connect()\nopc2_client = ModbusTcpClient(OPC2_IP, OPC2_PORT)\nopc2_client.connect()\n\nt = time.time()\n\nData = []\npca = PCA(n_components = 3)\nData = np.load(\"data.npy\")\npca.fit(Data)\nprint \"PCA Built\"\nt = time.time()\ni = 0\nfig = plt.figure()\nax = fig.add_subplot(1,1,1)\ny = []\n\ndef update(i):\n #Read registers from the specific zone\n l1 = float(opc1_client.read_holding_registers(L1, 1).registers[0])\n l2 = float(opc2_client.read_holding_registers(L2, 1).registers[0])\n t1 = float(opc1_client.read_holding_registers(T1, 1).registers[0])\n t2 = float(opc2_client.read_holding_registers(T2, 1).registers[0])\n v1 = opc2_client.read_holding_registers(V1, 1).registers[0]\n v2 = opc1_client.read_holding_registers(V2, 1).registers[0]\n p = opc2_client.read_holding_registers( P, 1).registers[0]\n f1 = opc2_client.read_holding_registers(F1, 1).registers[0]\n f2 = opc1_client.read_holding_registers(F2, 1).registers[0]\n f3 = opc2_client.read_holding_registers(F3, 1).registers[0]\n h = opc1_client.read_holding_registers( H, 1).registers[0]\n\n v = np.array([[l1, l2, t1, t2, v1, v2, p, f1, f2, f3, h]])\n v_transform = pca.transform(v)\n y.append(v_transform[0])\n x = range(len(y))\n ax.clear()\n ax.set_title(\"PCA Based Detection (Top 3 Scores).\")\n ax.set_ylabel(\"Top 3 scores\")\n ax.set_xlabel(\"Evaluation Points\")\n ax.set_xlim([0, 1.5*len(y)])\n ax.plot(x, y, \"x\")\n\n\nprint \"Simulation will start when the time is 0, 25, 50 ,75\"\nto = 0\nwhile 1:\n toot = int(time.time())%100\n if to == toot - 1:\n print toot\n to = toot\n # print to\n if to 
== 0 or to == 25 or to == 50 or to == 75:\n break\n\na = anim.FuncAnimation(fig, update, frames=int(SIM_TIME/SIM_STEP), interval=int(1000*SIM_STEP), repeat=False)\nplt.show()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
print(sum([int(d) for d in str(pow(2, 1000))]))
|
normal
|
{
"blob_id": "fc0c8deb3a5a57934c9e707911c352af55100c3c",
"index": 3533,
"step-1": "<mask token>\n",
"step-2": "print(sum([int(d) for d in str(pow(2, 1000))]))\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
#! /usr/bin/env python3
from PIL import Image
from imtools import *
import os
cwd = os.getcwd()
filelist = get_imlist(os.getcwd())
print(filelist)
for infile in filelist:
outfile = os.path.splitext(infile)[0] + ".jpg"
if infile != outfile:
try:
Image.open(infile).save(outfile)
except IOError:
print("cannot convert", infile)
|
normal
|
{
"blob_id": "31416f1ba9f3c44a7aa740365e05b5db49e70444",
"index": 9106,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(filelist)\nfor infile in filelist:\n outfile = os.path.splitext(infile)[0] + '.jpg'\n if infile != outfile:\n try:\n Image.open(infile).save(outfile)\n except IOError:\n print('cannot convert', infile)\n",
"step-3": "<mask token>\ncwd = os.getcwd()\nfilelist = get_imlist(os.getcwd())\nprint(filelist)\nfor infile in filelist:\n outfile = os.path.splitext(infile)[0] + '.jpg'\n if infile != outfile:\n try:\n Image.open(infile).save(outfile)\n except IOError:\n print('cannot convert', infile)\n",
"step-4": "from PIL import Image\nfrom imtools import *\nimport os\ncwd = os.getcwd()\nfilelist = get_imlist(os.getcwd())\nprint(filelist)\nfor infile in filelist:\n outfile = os.path.splitext(infile)[0] + '.jpg'\n if infile != outfile:\n try:\n Image.open(infile).save(outfile)\n except IOError:\n print('cannot convert', infile)\n",
"step-5": "#! /usr/bin/env python3\n\nfrom PIL import Image\nfrom imtools import *\nimport os\n\ncwd = os.getcwd()\n\nfilelist = get_imlist(os.getcwd())\n\nprint(filelist)\n\nfor infile in filelist:\n\toutfile = os.path.splitext(infile)[0] + \".jpg\"\n\tif infile != outfile:\n\t\ttry:\n\t\t\tImage.open(infile).save(outfile)\n\t\texcept IOError:\n\t\t\tprint(\"cannot convert\", infile)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class RetModel(object):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class RetModel(object):
def __init__(self, code=0, message='success', data=None):
self.code = code
self.msg = message
self.data = data
<|reserved_special_token_1|>
class RetModel(object):
def __init__(self, code = 0, message = "success", data = None):
self.code = code
self.msg = message
self.data = data
|
flexible
|
{
"blob_id": "ec395b93cecf8431fd0df1aa0151ebd32244c367",
"index": 4941,
"step-1": "<mask token>\n",
"step-2": "class RetModel(object):\n <mask token>\n",
"step-3": "class RetModel(object):\n\n def __init__(self, code=0, message='success', data=None):\n self.code = code\n self.msg = message\n self.data = data\n",
"step-4": "\r\nclass RetModel(object):\r\n def __init__(self, code = 0, message = \"success\", data = None):\r\n self.code = code\r\n self.msg = message\r\n self.data = data\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding:utf-8 -*-
#
from django.core.paginator import Paginator
def pagination(request, queryset, display_amount=15, after_range_num=5, bevor_range_num=4):
# 按参数分页
paginator = Paginator(queryset, display_amount)
try:
# 得到request中的page参数
page = int(request.GET['page'])
except:
# 默认为1
page = 1
try:
# 尝试获得分页列表
objects = paginator.page(page)
# 如果页数不存在
except paginator.EmptyPage:
# 获得最后一页
objects = paginator.page(paginator.num_pages)
# 如果不是一个整数
except:
# 获得第一页
objects = paginator.page(1)
# 根据参数配置导航显示范围
if page >= after_range_num:
page_range = paginator.page_range[page-after_range_num:page+bevor_range_num]
else:
page_range = paginator.page_range[0:page+bevor_range_num]
return objects, page_range
|
normal
|
{
"blob_id": "7a2b33d1763e66335c6a72a35082e20725cab03d",
"index": 3318,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef pagination(request, queryset, display_amount=15, after_range_num=5,\n bevor_range_num=4):\n paginator = Paginator(queryset, display_amount)\n try:\n page = int(request.GET['page'])\n except:\n page = 1\n try:\n objects = paginator.page(page)\n except paginator.EmptyPage:\n objects = paginator.page(paginator.num_pages)\n except:\n objects = paginator.page(1)\n if page >= after_range_num:\n page_range = paginator.page_range[page - after_range_num:page +\n bevor_range_num]\n else:\n page_range = paginator.page_range[0:page + bevor_range_num]\n return objects, page_range\n",
"step-3": "from django.core.paginator import Paginator\n\n\ndef pagination(request, queryset, display_amount=15, after_range_num=5,\n bevor_range_num=4):\n paginator = Paginator(queryset, display_amount)\n try:\n page = int(request.GET['page'])\n except:\n page = 1\n try:\n objects = paginator.page(page)\n except paginator.EmptyPage:\n objects = paginator.page(paginator.num_pages)\n except:\n objects = paginator.page(1)\n if page >= after_range_num:\n page_range = paginator.page_range[page - after_range_num:page +\n bevor_range_num]\n else:\n page_range = paginator.page_range[0:page + bevor_range_num]\n return objects, page_range\n",
"step-4": "# -*- coding:utf-8 -*-\n#\nfrom django.core.paginator import Paginator\n\ndef pagination(request, queryset, display_amount=15, after_range_num=5, bevor_range_num=4):\n # 按参数分页\n paginator = Paginator(queryset, display_amount)\n try:\n # 得到request中的page参数\n page = int(request.GET['page'])\n except:\n # 默认为1\n page = 1\n try:\n # 尝试获得分页列表\n objects = paginator.page(page)\n # 如果页数不存在\n except paginator.EmptyPage:\n # 获得最后一页\n objects = paginator.page(paginator.num_pages)\n # 如果不是一个整数\n except:\n # 获得第一页\n objects = paginator.page(1)\n # 根据参数配置导航显示范围\n if page >= after_range_num:\n page_range = paginator.page_range[page-after_range_num:page+bevor_range_num]\n else:\n page_range = paginator.page_range[0:page+bevor_range_num]\n return objects, page_range\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
app.run_server()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = dash.Dash()
app.layout = html.Div(children=[html.Label('Dropdowm'), dcc.Dropdown(id=
'my-dropdown', options=[{'label': 'İstanbul', 'value': 34}, {'label':
'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], multi=True,
value=34, searchable=True), html.Label('Radio'), dcc.RadioItems(id=
'my-radio', options=[{'label': 'İstanbul', 'value': 34}, {'label':
'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], value=34),
html.Label('Slider'), dcc.Slider(id='my-slider', min=0, max=20, step=
0.5, value=10, marks={i: i for i in range(0, 21)})])
if __name__ == '__main__':
app.run_server()
<|reserved_special_token_1|>
import dash
import dash_core_components as dcc
import dash_html_components as html
app = dash.Dash()
app.layout = html.Div(children=[html.Label('Dropdowm'), dcc.Dropdown(id=
'my-dropdown', options=[{'label': 'İstanbul', 'value': 34}, {'label':
'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], multi=True,
value=34, searchable=True), html.Label('Radio'), dcc.RadioItems(id=
'my-radio', options=[{'label': 'İstanbul', 'value': 34}, {'label':
'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], value=34),
html.Label('Slider'), dcc.Slider(id='my-slider', min=0, max=20, step=
0.5, value=10, marks={i: i for i in range(0, 21)})])
if __name__ == '__main__':
app.run_server()
<|reserved_special_token_1|>
import dash
import dash_core_components as dcc
import dash_html_components as html
app = dash.Dash()
app.layout = html.Div(
children=[
html.Label('Dropdowm'),
dcc.Dropdown(
id='my-dropdown',
options=[
{'label': 'İstanbul', 'value': 34}, # seçeneleri dict tutan liste olarak veririz
{'label': 'Ankara', 'value': 6},
{'label': 'Erzurum', 'value': 25},
],
multi=True,
value=34,
searchable=True,
),
html.Label('Radio'),
dcc.RadioItems(
id='my-radio',
options=[
{'label': 'İstanbul', 'value': 34},
{'label': 'Ankara', 'value': 6},
{'label': 'Erzurum', 'value': 25},
],
value=34,
),
html.Label('Slider'),
dcc.Slider(
id='my-slider',
min=0,
max=20,
step=0.5,
value=10,
marks={i: i for i in range(0, 21)}
),
]
)
if __name__ == '__main__':
app.run_server()
|
flexible
|
{
"blob_id": "443bf59bc3c5ed2114f0c276aa7134ff5bf7fb64",
"index": 7264,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n app.run_server()\n",
"step-3": "<mask token>\napp = dash.Dash()\napp.layout = html.Div(children=[html.Label('Dropdowm'), dcc.Dropdown(id=\n 'my-dropdown', options=[{'label': 'İstanbul', 'value': 34}, {'label':\n 'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], multi=True,\n value=34, searchable=True), html.Label('Radio'), dcc.RadioItems(id=\n 'my-radio', options=[{'label': 'İstanbul', 'value': 34}, {'label':\n 'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], value=34),\n html.Label('Slider'), dcc.Slider(id='my-slider', min=0, max=20, step=\n 0.5, value=10, marks={i: i for i in range(0, 21)})])\nif __name__ == '__main__':\n app.run_server()\n",
"step-4": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\napp = dash.Dash()\napp.layout = html.Div(children=[html.Label('Dropdowm'), dcc.Dropdown(id=\n 'my-dropdown', options=[{'label': 'İstanbul', 'value': 34}, {'label':\n 'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], multi=True,\n value=34, searchable=True), html.Label('Radio'), dcc.RadioItems(id=\n 'my-radio', options=[{'label': 'İstanbul', 'value': 34}, {'label':\n 'Ankara', 'value': 6}, {'label': 'Erzurum', 'value': 25}], value=34),\n html.Label('Slider'), dcc.Slider(id='my-slider', min=0, max=20, step=\n 0.5, value=10, marks={i: i for i in range(0, 21)})])\nif __name__ == '__main__':\n app.run_server()\n",
"step-5": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\napp = dash.Dash()\n\napp.layout = html.Div(\n children=[\n html.Label('Dropdowm'),\n dcc.Dropdown(\n id='my-dropdown',\n options=[\n {'label': 'İstanbul', 'value': 34}, # seçeneleri dict tutan liste olarak veririz\n {'label': 'Ankara', 'value': 6},\n {'label': 'Erzurum', 'value': 25},\n ],\n multi=True,\n value=34,\n searchable=True,\n ),\n html.Label('Radio'),\n dcc.RadioItems(\n id='my-radio',\n options=[\n {'label': 'İstanbul', 'value': 34},\n {'label': 'Ankara', 'value': 6},\n {'label': 'Erzurum', 'value': 25},\n ],\n value=34,\n ),\n html.Label('Slider'),\n dcc.Slider(\n id='my-slider',\n min=0,\n max=20,\n step=0.5,\n value=10,\n marks={i: i for i in range(0, 21)}\n ),\n ]\n)\n\nif __name__ == '__main__':\n app.run_server()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Aceptar_letras_wizard(models.TransientModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@api.multi
def aceptar_letras(self):
active_ids = self.env.context.get('active_ids', []) or []
records = self.env['letra_cambio.letra'].browse(active_ids)
self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Aceptar_letras_wizard(models.TransientModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _get_letras(self):
if self.env.context and self.env.context.get('active_ids'):
return self.env.context.get('active_ids')
return []
<|reserved_special_token_0|>
@api.multi
def aceptar_letras(self):
active_ids = self.env.context.get('active_ids', []) or []
records = self.env['letra_cambio.letra'].browse(active_ids)
self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Aceptar_letras_wizard(models.TransientModel):
_name = 'aceptar_letras_wizard'
_description = 'Aceptar letras'
def _get_letras(self):
if self.env.context and self.env.context.get('active_ids'):
return self.env.context.get('active_ids')
return []
letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras,
string='Letras')
@api.multi
def aceptar_letras(self):
active_ids = self.env.context.get('active_ids', []) or []
records = self.env['letra_cambio.letra'].browse(active_ids)
self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')
<|reserved_special_token_1|>
from odoo import models, fields, api
class Aceptar_letras_wizard(models.TransientModel):
_name = 'aceptar_letras_wizard'
_description = 'Aceptar letras'
def _get_letras(self):
if self.env.context and self.env.context.get('active_ids'):
return self.env.context.get('active_ids')
return []
letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras,
string='Letras')
@api.multi
def aceptar_letras(self):
active_ids = self.env.context.get('active_ids', []) or []
records = self.env['letra_cambio.letra'].browse(active_ids)
self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')
<|reserved_special_token_1|>
from odoo import models, fields, api
class Aceptar_letras_wizard(models.TransientModel):
_name = 'aceptar_letras_wizard'
_description = "Aceptar letras"
def _get_letras(self):
if self.env.context and self.env.context.get('active_ids'):
return self.env.context.get('active_ids')
return []
letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras, string='Letras')
@api.multi
def aceptar_letras(self):
active_ids = self.env.context.get('active_ids', []) or []
records = self.env['letra_cambio.letra'].browse(active_ids)
self.env['letra_cambio.letra'].cambiar_estado_all(records, "ACE")
|
flexible
|
{
"blob_id": "4ad3390f8f2c92f35acde507be7a7b713af997f2",
"index": 5092,
"step-1": "<mask token>\n\n\nclass Aceptar_letras_wizard(models.TransientModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @api.multi\n def aceptar_letras(self):\n active_ids = self.env.context.get('active_ids', []) or []\n records = self.env['letra_cambio.letra'].browse(active_ids)\n self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')\n",
"step-2": "<mask token>\n\n\nclass Aceptar_letras_wizard(models.TransientModel):\n <mask token>\n <mask token>\n\n def _get_letras(self):\n if self.env.context and self.env.context.get('active_ids'):\n return self.env.context.get('active_ids')\n return []\n <mask token>\n\n @api.multi\n def aceptar_letras(self):\n active_ids = self.env.context.get('active_ids', []) or []\n records = self.env['letra_cambio.letra'].browse(active_ids)\n self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')\n",
"step-3": "<mask token>\n\n\nclass Aceptar_letras_wizard(models.TransientModel):\n _name = 'aceptar_letras_wizard'\n _description = 'Aceptar letras'\n\n def _get_letras(self):\n if self.env.context and self.env.context.get('active_ids'):\n return self.env.context.get('active_ids')\n return []\n letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras,\n string='Letras')\n\n @api.multi\n def aceptar_letras(self):\n active_ids = self.env.context.get('active_ids', []) or []\n records = self.env['letra_cambio.letra'].browse(active_ids)\n self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')\n",
"step-4": "from odoo import models, fields, api\n\n\nclass Aceptar_letras_wizard(models.TransientModel):\n _name = 'aceptar_letras_wizard'\n _description = 'Aceptar letras'\n\n def _get_letras(self):\n if self.env.context and self.env.context.get('active_ids'):\n return self.env.context.get('active_ids')\n return []\n letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras,\n string='Letras')\n\n @api.multi\n def aceptar_letras(self):\n active_ids = self.env.context.get('active_ids', []) or []\n records = self.env['letra_cambio.letra'].browse(active_ids)\n self.env['letra_cambio.letra'].cambiar_estado_all(records, 'ACE')\n",
"step-5": "from odoo import models, fields, api\n\n\nclass Aceptar_letras_wizard(models.TransientModel):\n _name = 'aceptar_letras_wizard'\n _description = \"Aceptar letras\"\n\n def _get_letras(self):\n if self.env.context and self.env.context.get('active_ids'):\n return self.env.context.get('active_ids')\n return []\n\n\n letra_ids = fields.Many2many('letra_cambio.letra', default=_get_letras, string='Letras')\n\n @api.multi\n def aceptar_letras(self):\n active_ids = self.env.context.get('active_ids', []) or []\n records = self.env['letra_cambio.letra'].browse(active_ids)\n self.env['letra_cambio.letra'].cambiar_estado_all(records, \"ACE\")\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MainWindow(Ui_MainWindow, QtCore.QObject):
def __init__(self, qmain_window):
super().__init__()
self.setupUi(qmain_window)
self._proxy_model = CustomSortFilterProxyModel(self)
self._model = TreeModel(model_filename)
self._proxy_model.setSourceModel(self._model)
self.treeView.setModel(self._proxy_model)
self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.
ResizeToContents)
self.filterPatternEdit.editingFinished.connect(lambda : self.
_proxy_model.update_filter_pattern(self.filterPatternEdit.text()))
self.filterSyntaxComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_syntax)
self.filterColumnComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_column)
self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.
_proxy_model.update_case_sensitive_filter(state))
self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self
._proxy_model.update_case_sensitive_sort(state))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
model_filename = 'widgets/default.txt'
class MainWindow(Ui_MainWindow, QtCore.QObject):
def __init__(self, qmain_window):
super().__init__()
self.setupUi(qmain_window)
self._proxy_model = CustomSortFilterProxyModel(self)
self._model = TreeModel(model_filename)
self._proxy_model.setSourceModel(self._model)
self.treeView.setModel(self._proxy_model)
self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.
ResizeToContents)
self.filterPatternEdit.editingFinished.connect(lambda : self.
_proxy_model.update_filter_pattern(self.filterPatternEdit.text()))
self.filterSyntaxComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_syntax)
self.filterColumnComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_column)
self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.
_proxy_model.update_case_sensitive_filter(state))
self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self
._proxy_model.update_case_sensitive_sort(state))
<|reserved_special_token_1|>
from PyQt5 import QtCore, QtWidgets
from .main_window_base import Ui_MainWindow
from .custom_sort_filter_proxy_model import CustomSortFilterProxyModel
from .tree_model import TreeModel
model_filename = 'widgets/default.txt'
class MainWindow(Ui_MainWindow, QtCore.QObject):
def __init__(self, qmain_window):
super().__init__()
self.setupUi(qmain_window)
self._proxy_model = CustomSortFilterProxyModel(self)
self._model = TreeModel(model_filename)
self._proxy_model.setSourceModel(self._model)
self.treeView.setModel(self._proxy_model)
self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.
ResizeToContents)
self.filterPatternEdit.editingFinished.connect(lambda : self.
_proxy_model.update_filter_pattern(self.filterPatternEdit.text()))
self.filterSyntaxComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_syntax)
self.filterColumnComboBox.currentTextChanged.connect(self.
_proxy_model.update_filter_column)
self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.
_proxy_model.update_case_sensitive_filter(state))
self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self
._proxy_model.update_case_sensitive_sort(state))
<|reserved_special_token_1|>
from PyQt5 import QtCore, QtWidgets
from .main_window_base import Ui_MainWindow
from .custom_sort_filter_proxy_model import CustomSortFilterProxyModel
from .tree_model import TreeModel
# Path of the data file the tree model loads its rows from.
model_filename = "widgets/default.txt"
class MainWindow(Ui_MainWindow, QtCore.QObject):
    """Main window: shows a TreeModel through a sortable, filterable proxy."""
    def __init__(self, qmain_window):
        """Set up the generated UI on *qmain_window* and wire the controls.

        A CustomSortFilterProxyModel sits between the TreeModel and the
        tree view, so the filter/sort widgets reshape what is displayed
        without modifying the underlying model.
        """
        super().__init__()
        self.setupUi(qmain_window)
        self._proxy_model = CustomSortFilterProxyModel(self)
        self._model = TreeModel(model_filename)
        self._proxy_model.setSourceModel(self._model)
        self.treeView.setModel(self._proxy_model)
        # Size every column to fit its widest visible item.
        self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
        # Attach slot/signals
        self.filterPatternEdit.editingFinished.connect(
            lambda: self._proxy_model.update_filter_pattern(self.filterPatternEdit.text()))
        self.filterSyntaxComboBox.currentTextChanged.connect(self._proxy_model.update_filter_syntax)
        self.filterColumnComboBox.currentTextChanged.connect(self._proxy_model.update_filter_column)
        self.caseSensitiveFilterCB.stateChanged.connect(
            lambda state: self._proxy_model.update_case_sensitive_filter(state))
        self.caseSensitiveSortingCB.stateChanged.connect(
            lambda state: self._proxy_model.update_case_sensitive_sort(state))
|
flexible
|
{
"blob_id": "7a918518d8c9ff1184a634d1a5c799e735dfbc8a",
"index": 1707,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass MainWindow(Ui_MainWindow, QtCore.QObject):\n\n def __init__(self, qmain_window):\n super().__init__()\n self.setupUi(qmain_window)\n self._proxy_model = CustomSortFilterProxyModel(self)\n self._model = TreeModel(model_filename)\n self._proxy_model.setSourceModel(self._model)\n self.treeView.setModel(self._proxy_model)\n self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.\n ResizeToContents)\n self.filterPatternEdit.editingFinished.connect(lambda : self.\n _proxy_model.update_filter_pattern(self.filterPatternEdit.text()))\n self.filterSyntaxComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_syntax)\n self.filterColumnComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_column)\n self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.\n _proxy_model.update_case_sensitive_filter(state))\n self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self\n ._proxy_model.update_case_sensitive_sort(state))\n",
"step-3": "<mask token>\nmodel_filename = 'widgets/default.txt'\n\n\nclass MainWindow(Ui_MainWindow, QtCore.QObject):\n\n def __init__(self, qmain_window):\n super().__init__()\n self.setupUi(qmain_window)\n self._proxy_model = CustomSortFilterProxyModel(self)\n self._model = TreeModel(model_filename)\n self._proxy_model.setSourceModel(self._model)\n self.treeView.setModel(self._proxy_model)\n self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.\n ResizeToContents)\n self.filterPatternEdit.editingFinished.connect(lambda : self.\n _proxy_model.update_filter_pattern(self.filterPatternEdit.text()))\n self.filterSyntaxComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_syntax)\n self.filterColumnComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_column)\n self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.\n _proxy_model.update_case_sensitive_filter(state))\n self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self\n ._proxy_model.update_case_sensitive_sort(state))\n",
"step-4": "from PyQt5 import QtCore, QtWidgets\nfrom .main_window_base import Ui_MainWindow\nfrom .custom_sort_filter_proxy_model import CustomSortFilterProxyModel\nfrom .tree_model import TreeModel\nmodel_filename = 'widgets/default.txt'\n\n\nclass MainWindow(Ui_MainWindow, QtCore.QObject):\n\n def __init__(self, qmain_window):\n super().__init__()\n self.setupUi(qmain_window)\n self._proxy_model = CustomSortFilterProxyModel(self)\n self._model = TreeModel(model_filename)\n self._proxy_model.setSourceModel(self._model)\n self.treeView.setModel(self._proxy_model)\n self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.\n ResizeToContents)\n self.filterPatternEdit.editingFinished.connect(lambda : self.\n _proxy_model.update_filter_pattern(self.filterPatternEdit.text()))\n self.filterSyntaxComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_syntax)\n self.filterColumnComboBox.currentTextChanged.connect(self.\n _proxy_model.update_filter_column)\n self.caseSensitiveFilterCB.stateChanged.connect(lambda state: self.\n _proxy_model.update_case_sensitive_filter(state))\n self.caseSensitiveSortingCB.stateChanged.connect(lambda state: self\n ._proxy_model.update_case_sensitive_sort(state))\n",
"step-5": "\nfrom PyQt5 import QtCore, QtWidgets\n\nfrom .main_window_base import Ui_MainWindow\nfrom .custom_sort_filter_proxy_model import CustomSortFilterProxyModel\nfrom .tree_model import TreeModel\n\nmodel_filename = \"widgets/default.txt\"\n\n\nclass MainWindow(Ui_MainWindow, QtCore.QObject):\n\n def __init__(self, qmain_window):\n super().__init__()\n self.setupUi(qmain_window)\n\n self._proxy_model = CustomSortFilterProxyModel(self)\n self._model = TreeModel(model_filename)\n self._proxy_model.setSourceModel(self._model)\n self.treeView.setModel(self._proxy_model)\n\n self.treeView.header().setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)\n\n # Attach slot/signals\n self.filterPatternEdit.editingFinished.connect(\n lambda: self._proxy_model.update_filter_pattern(self.filterPatternEdit.text()))\n self.filterSyntaxComboBox.currentTextChanged.connect(self._proxy_model.update_filter_syntax)\n self.filterColumnComboBox.currentTextChanged.connect(self._proxy_model.update_filter_column)\n self.caseSensitiveFilterCB.stateChanged.connect(\n lambda state: self._proxy_model.update_case_sensitive_filter(state))\n self.caseSensitiveSortingCB.stateChanged.connect(\n lambda state: self._proxy_model.update_case_sensitive_sort(state))\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(message)
<|reserved_special_token_1|>
message = 'Hello Python World '
print(message)
<|reserved_special_token_1|>
# message 为定义的变量
message = 'Hello Python World '
print(message)
|
flexible
|
{
"blob_id": "ee5e970f32b1d601f9dc3ab37a5028ce7ff8a32e",
"index": 1368,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(message)\n",
"step-3": "message = 'Hello Python World '\nprint(message)\n",
"step-4": "# message 为定义的变量\r\nmessage = 'Hello Python World '\r\nprint(message)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask import Flask
from flask_ask import Ask, statement, question, session
# import json, requests
import random
# Flask app plus the Flask-Ask extension; Alexa requests are POSTed to "/".
app = Flask(__name__)
ask = Ask(app, "/")
def get_cat_fact():
    """Return one cat fact chosen uniformly at random.

    Bug fix: the original indexed the list with ``random.ranint(...)``, a
    typo for ``random.randint`` that raised AttributeError on every call.
    Replaced with the idiomatic ``random.choice``.
    """
    myFacts = [
        "Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.",
        "The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.",
        "A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.",
        "Tylenol and chocolate are both poisionous to cats.",
        "Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.",
        "It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.",
        "A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.",
        "Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.",
        "A domestic cat can run at speeds of 30 mph.",
        "Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.",
        "A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.",
        "The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.",
        "Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more",
        "Cats see six times better in the dark and at night than humans.",
        "The cat's tail is used to maintain balance.",
        "Cats have 300 million neurons; dogs have about 160 million",
        "Both humans and cats have identical regions in the brain responsible for emotion.",
        "The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.",
        "An adult lion's roar can be heard up to five miles (eight kilometers) away.",
        "You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.",
        "The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).",
        "A cat's normal temperature varies around 101 degrees Fahrenheit.",
        "Unlike other cats, lions have a tuft of hair at the end of their tails.",
        "Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.",
        "The average cat food meal is the equivalent to about five mice.",
        "The first official cat show in the UK was organised at Crystal Palace in 1871.",
        "In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens."
    ]
    # Uniform random selection; avoids the manual randint(0, len-1) indexing.
    return random.choice(myFacts)
@ask.launch
def start_skill():
    """Launch handler: ask the user whether they want to hear a cat fact."""
    return question('Hello there, would you like to hear a cat fact?')
@ask.intent("YesIntent")
def share_headlines():
    """User said yes: respond with a randomly chosen cat fact."""
    return statement('Did you know, ' + get_cat_fact())
@ask.intent("NoIntent")
def no_intent():
    """User declined: say goodbye and end the session."""
    return statement('Ok! Have a wonderful day!')
if __name__ == '__main__':
    # Debug mode enables auto-reload and verbose errors; disable in production.
    app.run(debug=True)
|
normal
|
{
"blob_id": "77971b088a7e076e3bf6d7aa320981a50e7756ce",
"index": 429,
"step-1": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n<mask token>\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! 
Have a wonderful day!'\n return statement(bye_text)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask\nfrom flask_ask import Ask, statement, question, session\nimport random\napp = Flask(__name__)\nask = Ask(app, '/')\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask\nfrom flask_ask import Ask, statement, question, session\n# import json, requests\nimport random\n\n\napp = Flask(__name__)\nask = Ask(app, \"/\")\n\ndef get_cat_fact():\n myFacts = [\n \"Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.\",\n \"The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.\",\n \"A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.\",\n \"Tylenol and chocolate are both poisionous to cats.\",\n \"Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.\",\n \"It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.\",\n \"A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.\",\n \"Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.\",\n \"A domestic cat can run at speeds of 30 mph.\",\n \"Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.\",\n \"A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.\",\n \"The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.\",\n \"Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more\",\n \"Cats see six times better in the dark and at night than humans.\",\n \"The cat's tail is used to maintain balance.\",\n \"Cats have 300 million neurons; dogs have about 160 million\",\n \"Both humans and cats have identical regions in the brain responsible for emotion.\",\n \"The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.\",\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\",\n \"You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.\",\n \"The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).\",\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n \"Unlike other cats, lions have a tuft of hair at the end of their tails.\",\n \"Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.\",\n \"The average cat food meal is the equivalent to about five mice.\",\n \"The first official cat show in the UK was organised at Crystal Palace in 1871.\",\n \"In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.\"\n ]\n fact = myFacts[random.ranint(0,len(myFacts)-1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n@ask.intent(\"YesIntent\")\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n@ask.intent(\"NoIntent\")\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-ids": [
3,
4,
5,
7,
8
]
}
|
[
3,
4,
5,
7,
8
] |
import numpy as np
from input_parameters.program_constants import ITERATIONS_NUM, TIMESTEPS_NUMB
def init_zero_arrays(iterations_num=None, timesteps_num=None):
    """Allocate the zero-initialised (iterations x timesteps) state arrays.

    Parameters
    ----------
    iterations_num, timesteps_num : int, optional
        Array dimensions.  Default to the configured ITERATIONS_NUM and
        TIMESTEPS_NUMB so existing callers are unchanged; passing explicit
        sizes generalises the helper (and makes it unit-testable).

    Returns
    -------
    tuple of numpy.ndarray
        14 arrays, in the original order: radius, dot_radius,
        dotdot_radius, delta_radius, mass_out, total_mass, dot_mass,
        dot_rt, time, dot_time, luminosity_AGN, pressure_contact,
        pressure_outer, bulge_mass.
    """
    if iterations_num is None:
        iterations_num = ITERATIONS_NUM
    if timesteps_num is None:
        timesteps_num = TIMESTEPS_NUMB
    shape = (iterations_num, timesteps_num)  # hoisted: identical for every array

    radius_arr = np.zeros(shape)
    dot_radius_arr = np.zeros(shape)
    dotdot_radius_arr = np.zeros(shape)
    delta_radius_arr = np.zeros(shape)
    mass_out_arr = np.zeros(shape)
    total_mass_arr = np.zeros(shape)
    dot_mass_arr = np.zeros(shape)
    dot_rt_arr = np.zeros(shape)
    time_arr = np.zeros(shape)
    dot_time_arr = np.zeros(shape)
    luminosity_AGN_arr = np.zeros(shape)
    pressure_contact_arr = np.zeros(shape)  # pressures at contact discontinuity
    pressure_outer_arr = np.zeros(shape)  # pressures at outer shock
    bulge_mass_arr = np.zeros(shape)

    return radius_arr, dot_radius_arr, dotdot_radius_arr, delta_radius_arr, \
        mass_out_arr, total_mass_arr, dot_mass_arr, dot_rt_arr, time_arr, \
        dot_time_arr, luminosity_AGN_arr, pressure_contact_arr, \
        pressure_outer_arr, bulge_mass_arr
|
normal
|
{
"blob_id": "e652196f9c74be6f05c6148de152996e449670ea",
"index": 3059,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef init_zero_arrays():\n radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dotdot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n delta_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n mass_out_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n total_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_rt_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n luminosity_AGN_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n pressure_contact_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n pressure_outer_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n bulge_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n return (radius_arr, dot_radius_arr, dotdot_radius_arr, delta_radius_arr,\n mass_out_arr, total_mass_arr, dot_mass_arr, dot_rt_arr, time_arr,\n dot_time_arr, luminosity_AGN_arr, pressure_contact_arr,\n pressure_outer_arr, bulge_mass_arr)\n",
"step-3": "import numpy as np\nfrom input_parameters.program_constants import ITERATIONS_NUM, TIMESTEPS_NUMB\n\n\ndef init_zero_arrays():\n radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dotdot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n delta_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n mass_out_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n total_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_rt_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n luminosity_AGN_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n pressure_contact_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n pressure_outer_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n bulge_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n return (radius_arr, dot_radius_arr, dotdot_radius_arr, delta_radius_arr,\n mass_out_arr, total_mass_arr, dot_mass_arr, dot_rt_arr, time_arr,\n dot_time_arr, luminosity_AGN_arr, pressure_contact_arr,\n pressure_outer_arr, bulge_mass_arr)\n",
"step-4": "import numpy as np\n\nfrom input_parameters.program_constants import ITERATIONS_NUM, TIMESTEPS_NUMB\n\n\ndef init_zero_arrays():\n radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dotdot_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n delta_radius_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n mass_out_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n total_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_rt_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n dot_time_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n luminosity_AGN_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n pressure_contact_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB)) # ;array to hold information about pressures at contact discontinuity\n pressure_outer_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB)) # ;array to hold information about pressures at outer shock\n bulge_mass_arr = np.zeros((ITERATIONS_NUM, TIMESTEPS_NUMB))\n\n return radius_arr, dot_radius_arr, dotdot_radius_arr, delta_radius_arr, mass_out_arr, total_mass_arr, dot_mass_arr, \\\n dot_rt_arr, time_arr, dot_time_arr, luminosity_AGN_arr, pressure_contact_arr, pressure_outer_arr, bulge_mass_arr\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
held-karp.py
Implementation of the Bellman-Held-Karp Algorithm to exactly solve TSPs,
requiring no external dependencies.
Includes a purely recursive implementation, as well as both top-down and
bottom-up dynamic programming approaches.
'''
import sys
def held_karp_recursive(distance_matrix):
    '''
    Solve the TSP exactly with the Bellman-Held-Karp recursion.

    Given the adjacency (distance) matrix of a complete graph, return a
    tuple ``(cost, path)`` where ``cost`` is the length of the minimum
    Hamiltonian cycle starting and ending at city 0 and ``path`` is the
    corresponding sequence of city indices.
    '''
    d = distance_matrix
    n = len(d)

    def f(i, visited, path_so_far):
        '''
        Minimum-cost route from city i back to city 0 that passes through
        every city not yet marked in the ``visited`` bitmask (bit j set
        means city j has been visited).  ``path_so_far`` is the route taken
        from city 0 up to and including city i; it is never mutated, only
        extended via concatenation, so sibling recursive calls stay
        independent.

        Recurrence:
            f(i, visited) = min_{j unvisited} ( d[i][j] + f(j, visited | (1<<j)) )
        '''
        # Base case: every city has been visited, close the cycle at 0.
        if visited == (1 << n) - 1:
            return d[i][0], path_so_far + [0]

        min_dist = float('inf')
        min_path = None
        # Try every still-unvisited city as the next stop.
        for j in range(n):
            if not (1 << j) & visited:
                dist_from_j, path_with_j = \
                    f(j, visited | (1 << j), path_so_far + [j])
                dist_with_j = d[i][j] + dist_from_j
                if dist_with_j < min_dist:
                    min_dist = dist_with_j
                    min_path = path_with_j
        return min_dist, min_path

    # Start at city 0 with only city 0 marked visited.  (Starting from an
    # empty mask would allow the tour to revisit city 0 mid-route, which
    # only happens to be harmless for metric instances.)
    return f(0, 1, [0])
def held_karp_topdown(distance_matrix):
    '''
    Bellman-Held-Karp with top-down memoization.

    Same recursion as ``held_karp_recursive`` but caches subproblem
    results in a dp table so each (city, visited-set) state is solved at
    most once.  Returns ``(cost, path)`` for the optimal tour starting
    and ending at city 0.
    '''
    d = distance_matrix
    n = len(d)

    # dp[i][visited]   : min cost from city i back to 0 through all cities
    #                    not set in `visited` (n x 2**n states).
    # child[i][visited]: city chosen after i in the optimal solution of
    #                    that state; lets us reconstruct the tour without
    #                    storing whole paths in the memo table (which
    #                    would be expensive).
    dp = [[None] * (2 ** n) for _ in range(n)]
    child = [[None] * (2 ** n) for _ in range(n)]

    def f(i, visited):
        '''
        Memoized minimum cost from city i through all unvisited cities
        and back to city 0.
        '''
        # `is not None` rather than truthiness: a legitimately cached
        # cost of 0.0 must still count as a cache hit.
        if dp[i][visited] is not None:
            return dp[i][visited]
        # Base case: all cities seen, return directly to city 0.
        if visited == (1 << n) - 1:
            dp[i][visited] = d[i][0]
            child[i][visited] = 0
            return d[i][0]
        min_dist = float('inf')
        chosen_j = None
        # Visit every still-unvisited city.
        for j in range(n):
            if not (1 << j) & visited:
                dist_with_j = d[i][j] + f(j, (1 << j) | visited)
                if dist_with_j < min_dist:
                    min_dist = dist_with_j
                    chosen_j = j
        dp[i][visited] = min_dist
        child[i][visited] = chosen_j
        return min_dist

    # The value we are interested in: start at city 0, only 0 visited.
    ans = f(0, 1)

    # Walk the `child` table forward to recover the optimal tour.
    path = [0]
    i, visited = 0, 1
    next_ = child[i][visited]
    while next_ is not None:
        path.append(next_)
        visited |= (1 << next_)
        next_ = child[next_][visited]
    return ans, path
def held_karp_bottomup(distance_matrix):
    '''
    Bellman-Held-Karp computed bottom-up (iteratively).

    Fills the whole dp table from the "all cities visited" base case
    downwards, then reads the answer from dp[0][1].  The optimal path is
    recovered afterwards by walking the table: at each step the next
    city is the one whose subproblem cost accounts exactly for the
    remaining distance.  Returns ``(cost, path)``.
    '''
    d = distance_matrix
    n = len(d)
    full = (1 << n) - 1
    dp = [[None] * (2 ** n) for _ in range(n)]

    # Base case: from any city i with everything visited, go home to 0.
    for i in range(n):
        dp[i][full] = d[i][0]

    # Fill the remaining states in decreasing order of `visited` so that
    # every dp[j][visited | (1 << j)] read below is already computed.
    for visited in reversed(range(full)):
        for i in range(n):
            min_dist = float('inf')
            for j in range(n):
                if not (1 << j) & visited:
                    dist_j = d[i][j] + dp[j][visited | (1 << j)]
                    if dist_j < min_dist:
                        min_dist = dist_j
            dp[i][visited] = min_dist

    ans = dp[0][1]

    # Reconstruct the tour working backwards from the table and the
    # knowledge of the optimal cost: from (city i, visited) the next city
    # j is the one where the cost drop matches d[i][j].
    path = [0]
    i, visited = 0, 1
    cost_from_i = dp[i][visited]
    while visited != full:
        for j in range(n):
            if not visited & (1 << j):
                cost_from_j = dp[j][visited | (1 << j)]
                # Tolerance needed because distances are real-valued.
                if abs((cost_from_i - cost_from_j) - d[i][j]) < 0.001:
                    # j was the city selected in the optimal solution.
                    path.append(j)
                    i, visited = j, visited | (1 << j)
                    cost_from_i = cost_from_j
                    break
    # All cities visited, so close the cycle at 0.
    path.append(0)
    return ans, path
class Vertex:
    '''A point in the 2-D Euclidean plane.'''

    def __init__(self, x, y):
        # Coerce to float so integer coordinates still yield float distances.
        self.x, self.y = float(x), float(y)
def distance(v1, v2):
    '''Euclidean distance between two `Vertex` instances.'''
    dx = v1.x - v2.x
    dy = v1.y - v2.y
    return (dx * dx + dy * dy) ** 0.5
def adjacency_matrix(graph):
    '''
    Build the pairwise-distance (adjacency) matrix for a list of
    `Vertex` instances, treating the graph as complete.

    Entry [i][j] is the Euclidean distance between graph[i] and
    graph[j]; the diagonal is therefore 0.
    '''
    n = len(graph)
    return [[distance(graph[i], graph[j]) for j in range(n)]
            for i in range(n)]
def main():
## Test cases
# g1: (16.0, [0, 2, 1, 3, 0])
g1 = [Vertex(0, 0), Vertex(4, 4), Vertex(4, 0), Vertex(0, 4)]
m1 = adjacency_matrix(g1)
for solver in held_karp_recursive, held_karp_topdown, held_karp_bottomup:
cost, path = solver(m1)
assert cost == 16.0
assert path == [0, 2, 1, 3, 0]
# g2: (15.773387165490545, [0, 3, 1, 2, 4, 0])
g2 = [Vertex(0, 0), Vertex(4, 4), Vertex(0, 3), Vertex(4, 0), Vertex(1, 2)]
m2 = adjacency_matrix(g2)
for solver in held_karp_recursive, held_karp_topdown, held_karp_bottomup:
cost, path = solver(m2)
assert abs(cost - 15.7733871) < 0.001
assert path == [0, 3, 1, 2, 4, 0]
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "3e8fa71c4e23348c6f00fe97729b5717bb6245a1",
"index": 8070,
"step-1": "<mask token>\n\n\ndef held_karp_bottomup(distance_matrix):\n \"\"\"\n In the bottom up implementation, we compute all possible solutions for the\n values `i` and `visited` as in the implementations above, and then\n simply look up the value for f(0,0).\n\n With this approach, we use the dp table, the original `distance_matrix`\n and knowledge of the optimal cost to work backwards in determing what\n the optimal path was.\n \"\"\"\n d = distance_matrix\n n = len(d)\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n for i in xrange(n):\n dp[i][(1 << n) - 1] = d[i][0]\n for visited in reversed(xrange((1 << n) - 1)):\n for i in xrange(n):\n min_dist = sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_j = d[i][j] + dp[j][visited | 1 << j]\n if dist_j < min_dist:\n min_dist = dist_j\n dp[i][visited] = min_dist\n ans = dp[0][1]\n path = [0]\n i, visited = 0, 1\n cost_from_i = dp[i][visited]\n while visited != (1 << n) - 1:\n for j in xrange(n):\n if not visited & 1 << j:\n cost_from_j = dp[j][visited | 1 << j]\n if abs(cost_from_i - cost_from_j - d[i][j]) < 0.001:\n path.append(j)\n i, visited = j, visited | 1 << j\n cost_from_i = cost_from_j\n break\n path.append(0)\n return ans, path\n\n\nclass Vertex:\n \"\"\" Simple implementation of a point in Euclidean space \"\"\"\n\n def __init__(self, x, y):\n self.x = float(x)\n self.y = float(y)\n\n\n<mask token>\n\n\ndef adjacency_matrix(graph):\n \"\"\"\n Construct the corresponding adjacency matrix from a list of verticies in a\n graph, assumed to be a complete graph.\n \"\"\"\n m = [[None for v in graph] for v in graph]\n for i in xrange(len(m)):\n for j in xrange(len(m[i])):\n m[i][j] = distance(graph[i], graph[j])\n return m\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef held_karp_recursive(distance_matrix):\n \"\"\"\n Solution to TSP using the Bellman-Held-Karp Algorithm\n\n Given the adjacency matrix to a corresponding tsp problem, find the\n minimum cost Hamiltonian cycle through the graph, as well as the\n corresponding path\n \"\"\"\n d = distance_matrix\n n = len(d)\n\n def f(i, visited, path_so_far):\n \"\"\"\n Let f(i, visited, path_so_far) be the path of minimum distance from\n city i to city 0, that passes through all remaining unvisited cities in\n `visited`, where visited is a bitmask such that the bit in the jth\n position being 1 represents city j having been visited, and bit j\n being 0 represents city j having not been visited, and `path_so_far` is\n the current path of minimum distance from city 0 up to city i.\n\n Then the solution we want is f(0, 0, []), and the following recursive\n relation holds:\n\n f(i, visited) = min_{j in unvisited} ( d(i,j) + f(j, visited | (1<<j)) )\n\n NOTE: Must be careful not to mutate\n \"\"\"\n if visited == (1 << n) - 1:\n return d[i][0], path_so_far + [0]\n min_dist = sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_from_j, path_with_j = f(j, visited | 1 << j, \n path_so_far + [j])\n dist_with_j = d[i][j] + dist_from_j\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n min_path = path_with_j\n return min_dist, min_path\n return f(0, 0, [])\n\n\ndef held_karp_topdown(distance_matrix):\n \"\"\"\n Above algorithm, but making use of memoization to avoid recomputing\n overlapping subproblems\n \"\"\"\n d = distance_matrix\n n = len(d)\n \"\"\"\n We need a dp table that will store the minimum distances from city i\n to city 0 that passes through all unvisitied cities in the bit mask.\n There are n cities, and 2^n possible binary strings of length n, so our\n table will have dimensions n x 2^n\n\n With this approach, we use another table called 'child' that keeps track\n of the child city of i for each combination of (i, visited), 
and we can\n use this table to obtain the actual Hamiltonian cycle of minimum distance.\n \"\"\"\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n child = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n\n def f(i, visited):\n \"\"\"\n f is defined as in the purely recursive implementation above.\n The only difference here is that we check if the value we are\n looking for is already in the defined dp table, and we do not\n keep track of the path as we go along, as looking up a solution\n for any given value would require having stored the path for\n that solution as well, which would be expensive.\n\n As such, we use the `child` table to keep track of where we\n came from.\n \"\"\"\n if dp[i][visited]:\n return dp[i][visited]\n if visited == (1 << n) - 1:\n dp[i][visited] = d[i][0]\n child[i][visited] = 0\n return d[i][0]\n min_dist = sys.maxint\n chosen_j = None\n for j in xrange(n):\n if not 1 << j & visited:\n dist_with_j = d[i][j] + f(j, 1 << j | visited)\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n chosen_j = j\n dp[i][visited] = min_dist\n child[i][visited] = chosen_j\n return min_dist\n ans = f(0, 1)\n path = [0]\n i, visited = 0, 1\n next_ = child[i][visited]\n while next_ is not None:\n path.append(next_)\n visited |= 1 << next_\n next_ = child[next_][visited]\n return ans, path\n\n\ndef held_karp_bottomup(distance_matrix):\n \"\"\"\n In the bottom up implementation, we compute all possible solutions for the\n values `i` and `visited` as in the implementations above, and then\n simply look up the value for f(0,0).\n\n With this approach, we use the dp table, the original `distance_matrix`\n and knowledge of the optimal cost to work backwards in determing what\n the optimal path was.\n \"\"\"\n d = distance_matrix\n n = len(d)\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n for i in xrange(n):\n dp[i][(1 << n) - 1] = d[i][0]\n for visited in reversed(xrange((1 << n) - 1)):\n for i in xrange(n):\n min_dist = 
sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_j = d[i][j] + dp[j][visited | 1 << j]\n if dist_j < min_dist:\n min_dist = dist_j\n dp[i][visited] = min_dist\n ans = dp[0][1]\n path = [0]\n i, visited = 0, 1\n cost_from_i = dp[i][visited]\n while visited != (1 << n) - 1:\n for j in xrange(n):\n if not visited & 1 << j:\n cost_from_j = dp[j][visited | 1 << j]\n if abs(cost_from_i - cost_from_j - d[i][j]) < 0.001:\n path.append(j)\n i, visited = j, visited | 1 << j\n cost_from_i = cost_from_j\n break\n path.append(0)\n return ans, path\n\n\nclass Vertex:\n \"\"\" Simple implementation of a point in Euclidean space \"\"\"\n\n def __init__(self, x, y):\n self.x = float(x)\n self.y = float(y)\n\n\ndef distance(v1, v2):\n \"\"\" Euclidean distance between two `Vertex` instances \"\"\"\n return ((v1.x - v2.x) ** 2 + (v1.y - v2.y) ** 2) ** 0.5\n\n\ndef adjacency_matrix(graph):\n \"\"\"\n Construct the corresponding adjacency matrix from a list of verticies in a\n graph, assumed to be a complete graph.\n \"\"\"\n m = [[None for v in graph] for v in graph]\n for i in xrange(len(m)):\n for j in xrange(len(m[i])):\n m[i][j] = distance(graph[i], graph[j])\n return m\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef held_karp_recursive(distance_matrix):\n \"\"\"\n Solution to TSP using the Bellman-Held-Karp Algorithm\n\n Given the adjacency matrix to a corresponding tsp problem, find the\n minimum cost Hamiltonian cycle through the graph, as well as the\n corresponding path\n \"\"\"\n d = distance_matrix\n n = len(d)\n\n def f(i, visited, path_so_far):\n \"\"\"\n Let f(i, visited, path_so_far) be the path of minimum distance from\n city i to city 0, that passes through all remaining unvisited cities in\n `visited`, where visited is a bitmask such that the bit in the jth\n position being 1 represents city j having been visited, and bit j\n being 0 represents city j having not been visited, and `path_so_far` is\n the current path of minimum distance from city 0 up to city i.\n\n Then the solution we want is f(0, 0, []), and the following recursive\n relation holds:\n\n f(i, visited) = min_{j in unvisited} ( d(i,j) + f(j, visited | (1<<j)) )\n\n NOTE: Must be careful not to mutate\n \"\"\"\n if visited == (1 << n) - 1:\n return d[i][0], path_so_far + [0]\n min_dist = sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_from_j, path_with_j = f(j, visited | 1 << j, \n path_so_far + [j])\n dist_with_j = d[i][j] + dist_from_j\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n min_path = path_with_j\n return min_dist, min_path\n return f(0, 0, [])\n\n\ndef held_karp_topdown(distance_matrix):\n \"\"\"\n Above algorithm, but making use of memoization to avoid recomputing\n overlapping subproblems\n \"\"\"\n d = distance_matrix\n n = len(d)\n \"\"\"\n We need a dp table that will store the minimum distances from city i\n to city 0 that passes through all unvisitied cities in the bit mask.\n There are n cities, and 2^n possible binary strings of length n, so our\n table will have dimensions n x 2^n\n\n With this approach, we use another table called 'child' that keeps track\n of the child city of i for each combination of (i, visited), 
and we can\n use this table to obtain the actual Hamiltonian cycle of minimum distance.\n \"\"\"\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n child = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n\n def f(i, visited):\n \"\"\"\n f is defined as in the purely recursive implementation above.\n The only difference here is that we check if the value we are\n looking for is already in the defined dp table, and we do not\n keep track of the path as we go along, as looking up a solution\n for any given value would require having stored the path for\n that solution as well, which would be expensive.\n\n As such, we use the `child` table to keep track of where we\n came from.\n \"\"\"\n if dp[i][visited]:\n return dp[i][visited]\n if visited == (1 << n) - 1:\n dp[i][visited] = d[i][0]\n child[i][visited] = 0\n return d[i][0]\n min_dist = sys.maxint\n chosen_j = None\n for j in xrange(n):\n if not 1 << j & visited:\n dist_with_j = d[i][j] + f(j, 1 << j | visited)\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n chosen_j = j\n dp[i][visited] = min_dist\n child[i][visited] = chosen_j\n return min_dist\n ans = f(0, 1)\n path = [0]\n i, visited = 0, 1\n next_ = child[i][visited]\n while next_ is not None:\n path.append(next_)\n visited |= 1 << next_\n next_ = child[next_][visited]\n return ans, path\n\n\ndef held_karp_bottomup(distance_matrix):\n \"\"\"\n In the bottom up implementation, we compute all possible solutions for the\n values `i` and `visited` as in the implementations above, and then\n simply look up the value for f(0,0).\n\n With this approach, we use the dp table, the original `distance_matrix`\n and knowledge of the optimal cost to work backwards in determing what\n the optimal path was.\n \"\"\"\n d = distance_matrix\n n = len(d)\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n for i in xrange(n):\n dp[i][(1 << n) - 1] = d[i][0]\n for visited in reversed(xrange((1 << n) - 1)):\n for i in xrange(n):\n min_dist = 
sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_j = d[i][j] + dp[j][visited | 1 << j]\n if dist_j < min_dist:\n min_dist = dist_j\n dp[i][visited] = min_dist\n ans = dp[0][1]\n path = [0]\n i, visited = 0, 1\n cost_from_i = dp[i][visited]\n while visited != (1 << n) - 1:\n for j in xrange(n):\n if not visited & 1 << j:\n cost_from_j = dp[j][visited | 1 << j]\n if abs(cost_from_i - cost_from_j - d[i][j]) < 0.001:\n path.append(j)\n i, visited = j, visited | 1 << j\n cost_from_i = cost_from_j\n break\n path.append(0)\n return ans, path\n\n\nclass Vertex:\n \"\"\" Simple implementation of a point in Euclidean space \"\"\"\n\n def __init__(self, x, y):\n self.x = float(x)\n self.y = float(y)\n\n\ndef distance(v1, v2):\n \"\"\" Euclidean distance between two `Vertex` instances \"\"\"\n return ((v1.x - v2.x) ** 2 + (v1.y - v2.y) ** 2) ** 0.5\n\n\ndef adjacency_matrix(graph):\n \"\"\"\n Construct the corresponding adjacency matrix from a list of verticies in a\n graph, assumed to be a complete graph.\n \"\"\"\n m = [[None for v in graph] for v in graph]\n for i in xrange(len(m)):\n for j in xrange(len(m[i])):\n m[i][j] = distance(graph[i], graph[j])\n return m\n\n\ndef main():\n g1 = [Vertex(0, 0), Vertex(4, 4), Vertex(4, 0), Vertex(0, 4)]\n m1 = adjacency_matrix(g1)\n for solver in (held_karp_recursive, held_karp_topdown, held_karp_bottomup):\n cost, path = solver(m1)\n assert cost == 16.0\n assert path == [0, 2, 1, 3, 0]\n g2 = [Vertex(0, 0), Vertex(4, 4), Vertex(0, 3), Vertex(4, 0), Vertex(1, 2)]\n m2 = adjacency_matrix(g2)\n for solver in (held_karp_recursive, held_karp_topdown, held_karp_bottomup):\n cost, path = solver(m2)\n assert abs(cost - 15.7733871) < 0.001\n assert path == [0, 3, 1, 2, 4, 0]\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef held_karp_recursive(distance_matrix):\n \"\"\"\n Solution to TSP using the Bellman-Held-Karp Algorithm\n\n Given the adjacency matrix to a corresponding tsp problem, find the\n minimum cost Hamiltonian cycle through the graph, as well as the\n corresponding path\n \"\"\"\n d = distance_matrix\n n = len(d)\n\n def f(i, visited, path_so_far):\n \"\"\"\n Let f(i, visited, path_so_far) be the path of minimum distance from\n city i to city 0, that passes through all remaining unvisited cities in\n `visited`, where visited is a bitmask such that the bit in the jth\n position being 1 represents city j having been visited, and bit j\n being 0 represents city j having not been visited, and `path_so_far` is\n the current path of minimum distance from city 0 up to city i.\n\n Then the solution we want is f(0, 0, []), and the following recursive\n relation holds:\n\n f(i, visited) = min_{j in unvisited} ( d(i,j) + f(j, visited | (1<<j)) )\n\n NOTE: Must be careful not to mutate\n \"\"\"\n if visited == (1 << n) - 1:\n return d[i][0], path_so_far + [0]\n min_dist = sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_from_j, path_with_j = f(j, visited | 1 << j, \n path_so_far + [j])\n dist_with_j = d[i][j] + dist_from_j\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n min_path = path_with_j\n return min_dist, min_path\n return f(0, 0, [])\n\n\ndef held_karp_topdown(distance_matrix):\n \"\"\"\n Above algorithm, but making use of memoization to avoid recomputing\n overlapping subproblems\n \"\"\"\n d = distance_matrix\n n = len(d)\n \"\"\"\n We need a dp table that will store the minimum distances from city i\n to city 0 that passes through all unvisitied cities in the bit mask.\n There are n cities, and 2^n possible binary strings of length n, so our\n table will have dimensions n x 2^n\n\n With this approach, we use another table called 'child' that keeps track\n of the child city of i for each combination of (i, visited), 
and we can\n use this table to obtain the actual Hamiltonian cycle of minimum distance.\n \"\"\"\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n child = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n\n def f(i, visited):\n \"\"\"\n f is defined as in the purely recursive implementation above.\n The only difference here is that we check if the value we are\n looking for is already in the defined dp table, and we do not\n keep track of the path as we go along, as looking up a solution\n for any given value would require having stored the path for\n that solution as well, which would be expensive.\n\n As such, we use the `child` table to keep track of where we\n came from.\n \"\"\"\n if dp[i][visited]:\n return dp[i][visited]\n if visited == (1 << n) - 1:\n dp[i][visited] = d[i][0]\n child[i][visited] = 0\n return d[i][0]\n min_dist = sys.maxint\n chosen_j = None\n for j in xrange(n):\n if not 1 << j & visited:\n dist_with_j = d[i][j] + f(j, 1 << j | visited)\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n chosen_j = j\n dp[i][visited] = min_dist\n child[i][visited] = chosen_j\n return min_dist\n ans = f(0, 1)\n path = [0]\n i, visited = 0, 1\n next_ = child[i][visited]\n while next_ is not None:\n path.append(next_)\n visited |= 1 << next_\n next_ = child[next_][visited]\n return ans, path\n\n\ndef held_karp_bottomup(distance_matrix):\n \"\"\"\n In the bottom up implementation, we compute all possible solutions for the\n values `i` and `visited` as in the implementations above, and then\n simply look up the value for f(0,0).\n\n With this approach, we use the dp table, the original `distance_matrix`\n and knowledge of the optimal cost to work backwards in determing what\n the optimal path was.\n \"\"\"\n d = distance_matrix\n n = len(d)\n dp = [[None for i in xrange(2 ** n)] for j in xrange(n)]\n for i in xrange(n):\n dp[i][(1 << n) - 1] = d[i][0]\n for visited in reversed(xrange((1 << n) - 1)):\n for i in xrange(n):\n min_dist = 
sys.maxint\n for j in xrange(n):\n if not 1 << j & visited:\n dist_j = d[i][j] + dp[j][visited | 1 << j]\n if dist_j < min_dist:\n min_dist = dist_j\n dp[i][visited] = min_dist\n ans = dp[0][1]\n path = [0]\n i, visited = 0, 1\n cost_from_i = dp[i][visited]\n while visited != (1 << n) - 1:\n for j in xrange(n):\n if not visited & 1 << j:\n cost_from_j = dp[j][visited | 1 << j]\n if abs(cost_from_i - cost_from_j - d[i][j]) < 0.001:\n path.append(j)\n i, visited = j, visited | 1 << j\n cost_from_i = cost_from_j\n break\n path.append(0)\n return ans, path\n\n\nclass Vertex:\n \"\"\" Simple implementation of a point in Euclidean space \"\"\"\n\n def __init__(self, x, y):\n self.x = float(x)\n self.y = float(y)\n\n\ndef distance(v1, v2):\n \"\"\" Euclidean distance between two `Vertex` instances \"\"\"\n return ((v1.x - v2.x) ** 2 + (v1.y - v2.y) ** 2) ** 0.5\n\n\ndef adjacency_matrix(graph):\n \"\"\"\n Construct the corresponding adjacency matrix from a list of verticies in a\n graph, assumed to be a complete graph.\n \"\"\"\n m = [[None for v in graph] for v in graph]\n for i in xrange(len(m)):\n for j in xrange(len(m[i])):\n m[i][j] = distance(graph[i], graph[j])\n return m\n\n\ndef main():\n g1 = [Vertex(0, 0), Vertex(4, 4), Vertex(4, 0), Vertex(0, 4)]\n m1 = adjacency_matrix(g1)\n for solver in (held_karp_recursive, held_karp_topdown, held_karp_bottomup):\n cost, path = solver(m1)\n assert cost == 16.0\n assert path == [0, 2, 1, 3, 0]\n g2 = [Vertex(0, 0), Vertex(4, 4), Vertex(0, 3), Vertex(4, 0), Vertex(1, 2)]\n m2 = adjacency_matrix(g2)\n for solver in (held_karp_recursive, held_karp_topdown, held_karp_bottomup):\n cost, path = solver(m2)\n assert abs(cost - 15.7733871) < 0.001\n assert path == [0, 3, 1, 2, 4, 0]\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "'''\nheld-karp.py\n\nImplementation of the Bellman-Held-Karp Algorithm to exactly solve TSPs,\nrequiring no external dependencies.\n\nIncludes a purely recursive implementation, as well as both top-down and\nbottom-up dynamic programming approaches.\n'''\nimport sys\n\n\ndef held_karp_recursive(distance_matrix):\n '''\n Solution to TSP using the Bellman-Held-Karp Algorithm\n\n Given the adjacency matrix to a corresponding tsp problem, find the\n minimum cost Hamiltonian cycle through the graph, as well as the\n corresponding path\n '''\n d = distance_matrix\n n = len(d)\n\n def f(i, visited, path_so_far):\n '''\n Let f(i, visited, path_so_far) be the path of minimum distance from\n city i to city 0, that passes through all remaining unvisited cities in\n `visited`, where visited is a bitmask such that the bit in the jth\n position being 1 represents city j having been visited, and bit j\n being 0 represents city j having not been visited, and `path_so_far` is\n the current path of minimum distance from city 0 up to city i.\n\n Then the solution we want is f(0, 0, []), and the following recursive\n relation holds:\n\n f(i, visited) = min_{j in unvisited} ( d(i,j) + f(j, visited | (1<<j)) )\n\n NOTE: Must be careful not to mutate\n '''\n # Base case: check if all cities have been visited\n if visited == (1 << n) - 1:\n # we have visited all cities, return to 0\n return d[i][0], path_so_far + [0,]\n\n min_dist = sys.maxint\n # visit all unvisited cities\n for j in xrange(n):\n if not (1 << j) & visited:\n dist_from_j, path_with_j = \\\n f(j, visited | (1 << j), path_so_far + [j,])\n # Distance with j\n dist_with_j = d[i][j] + dist_from_j\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n min_path = path_with_j\n\n return min_dist, min_path\n\n return f(0, 0, [])\n\n\ndef held_karp_topdown(distance_matrix):\n '''\n Above algorithm, but making use of memoization to avoid recomputing\n overlapping subproblems\n '''\n d = distance_matrix\n n = len(d)\n\n 
'''\n We need a dp table that will store the minimum distances from city i\n to city 0 that passes through all unvisitied cities in the bit mask.\n There are n cities, and 2^n possible binary strings of length n, so our\n table will have dimensions n x 2^n\n\n With this approach, we use another table called 'child' that keeps track\n of the child city of i for each combination of (i, visited), and we can\n use this table to obtain the actual Hamiltonian cycle of minimum distance.\n '''\n dp = [[None for i in xrange(2**n)] for j in xrange(n)]\n child = [[None for i in xrange(2**n)] for j in xrange(n)]\n\n def f(i, visited):\n '''\n f is defined as in the purely recursive implementation above.\n The only difference here is that we check if the value we are\n looking for is already in the defined dp table, and we do not\n keep track of the path as we go along, as looking up a solution\n for any given value would require having stored the path for\n that solution as well, which would be expensive.\n\n As such, we use the `child` table to keep track of where we\n came from.\n '''\n # Check the table\n if dp[i][visited]:\n return dp[i][visited]\n # Base case: check if all cities have been visited\n if visited == (1 << n) - 1:\n # we have visited all cities, return to 0\n dp[i][visited] = d[i][0]\n child[i][visited] = 0\n return d[i][0]\n\n min_dist = sys.maxint\n chosen_j = None\n # visit all unvisited cities\n for j in xrange(n):\n if not (1 << j) & visited:\n dist_with_j = d[i][j] + f(j, (1 << j) | visited)\n if dist_with_j < min_dist:\n min_dist = dist_with_j\n chosen_j = j\n\n dp[i][visited] = min_dist\n child[i][visited] = chosen_j\n return min_dist\n\n # The value we are interested in\n ans = f(0,1)\n\n # Can optain the optimal path using the parent matrix\n path = [0]\n i, visited = 0, 1\n next_ = child[i][visited]\n while next_ is not None:\n path.append(next_)\n visited |= (1 << next_)\n next_ = child[next_][visited]\n\n return ans, path\n\n\ndef 
held_karp_bottomup(distance_matrix):\n '''\n In the bottom up implementation, we compute all possible solutions for the\n values `i` and `visited` as in the implementations above, and then\n simply look up the value for f(0,0).\n\n With this approach, we use the dp table, the original `distance_matrix`\n and knowledge of the optimal cost to work backwards in determing what\n the optimal path was.\n '''\n d = distance_matrix\n n = len(d)\n\n dp = [[None for i in xrange(2**n)] for j in xrange(n)]\n\n # Base case:\n # Distance from any city i back to 0 after having visited all cities\n for i in xrange(n):\n dp[i][(1<<n)-1] = d[i][0]\n\n # Fill in all values of the dp table, excluding the values from the\n # base case we've already inserted\n # Note we started with having visited all cities except for 0\n # and work backwards from there\n for visited in reversed(xrange((1<<n)-1)):\n for i in xrange(n):\n min_dist = sys.maxint\n for j in xrange(n):\n if not (1 << j) & visited:\n dist_j = d[i][j] + dp[j][visited | (1 << j)]\n if dist_j < min_dist:\n min_dist = dist_j\n dp[i][visited] = min_dist\n\n ans = dp[0][1]\n\n # We can also optain the optimal path working backwards using\n # the table and the knowledge of the cost of the optimal path\n path = [0]\n i, visited = 0, 1\n cost_from_i = dp[i][visited]\n while visited != (1 << n)-1:\n for j in xrange(n):\n if not visited & (1 << j):\n cost_from_j = dp[j][visited | (1 << j)]\n # require a tolerance for real valued distances\n if abs((cost_from_i - cost_from_j) - d[i][j]) < 0.001:\n # j was the city selected in the opt solution\n path.append(j)\n i, visited = j, visited | (1 << j)\n cost_from_i = cost_from_j\n break\n # We have visited all cities, so return to 0\n path.append(0)\n\n return ans, path\n\n\nclass Vertex:\n ''' Simple implementation of a point in Euclidean space '''\n def __init__(self, x, y):\n self.x = float(x)\n self.y = float(y)\n\n\ndef distance(v1, v2):\n ''' Euclidean distance between two `Vertex` 
instances '''\n return ((v1.x - v2.x)**2 + (v1.y - v2.y)**2)**0.5\n\n\ndef adjacency_matrix(graph):\n '''\n Construct the corresponding adjacency matrix from a list of verticies in a\n graph, assumed to be a complete graph.\n '''\n m = [[None for v in graph] for v in graph]\n for i in xrange(len(m)):\n for j in xrange(len(m[i])):\n m[i][j] = distance(graph[i], graph[j])\n return m\n\n\ndef main():\n\n ## Test cases\n\n # g1: (16.0, [0, 2, 1, 3, 0])\n g1 = [Vertex(0, 0), Vertex(4, 4), Vertex(4, 0), Vertex(0, 4)]\n m1 = adjacency_matrix(g1)\n for solver in held_karp_recursive, held_karp_topdown, held_karp_bottomup:\n cost, path = solver(m1)\n assert cost == 16.0\n assert path == [0, 2, 1, 3, 0]\n\n # g2: (15.773387165490545, [0, 3, 1, 2, 4, 0])\n g2 = [Vertex(0, 0), Vertex(4, 4), Vertex(0, 3), Vertex(4, 0), Vertex(1, 2)]\n m2 = adjacency_matrix(g2)\n for solver in held_karp_recursive, held_karp_topdown, held_karp_bottomup:\n cost, path = solver(m2)\n assert abs(cost - 15.7733871) < 0.001\n assert path == [0, 3, 1, 2, 4, 0]\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
5,
8,
9,
10,
12
]
}
|
[
5,
8,
9,
10,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for coluna in range(0, n_colunas):
if tabela[0][coluna] < menor_valor:
menor_valor = tabela[0][coluna]
menor_posicao = coluna
<|reserved_special_token_0|>
while menor_posicao != -1:
solucao_dividida.clear()
solucao_dividida.append('Vazio')
for linha in range(1, n_linhas):
if tabela[linha][menor_posicao] == 0:
solucao_dividida.append(float('inf'))
else:
solucao_dividida.append(tabela[linha][n_colunas - 1] / tabela[
linha][menor_posicao])
if solucao_dividida[1] > 0:
menor_solucao = solucao_dividida[1]
else:
menor_solucao = float('inf')
menor_solucao_posicao = 1
for i in range(1, n_linhas):
if solucao_dividida[i] > 0 and solucao_dividida[i] < menor_solucao:
menor_solucao = solucao_dividida[i]
menor_solucao_posicao = i
pivo = tabela[menor_solucao_posicao][menor_posicao]
for coluna in range(0, n_colunas):
if pivo == 0:
tabela[menor_solucao_posicao][coluna] = float('inf')
else:
tabela[menor_solucao_posicao][coluna] = tabela[
menor_solucao_posicao][coluna] / pivo
linhas[menor_solucao_posicao] = colunas[menor_posicao]
for linha in range(0, n_linhas):
if linha != menor_solucao_posicao:
if tabela[menor_solucao_posicao][menor_posicao] == 0:
razao = float('inf')
else:
razao = tabela[linha][menor_posicao] / tabela[
menor_solucao_posicao][menor_posicao]
for coluna in range(0, n_colunas):
tabela[linha][coluna] = tabela[linha][coluna] - razao * tabela[
menor_solucao_posicao][coluna]
menor_posicao = -1
menor_valor = 0
for coluna in range(0, n_colunas):
if tabela[0][coluna] < menor_valor:
menor_valor = tabela[0][coluna]
menor_posicao = coluna
print(tabela)
for i in range(0, n_linhas):
print(linhas[i], '=', tabela[i][n_colunas - 1])
<|reserved_special_token_1|>
tabela = [[1, -45, -20, 0, 0, 0, 0], [0, 20, 5, 1, 0, 0, 9500], [0, 0.04,
0.12, 0, 1, 0, 40], [0, 1, 1, 0, 0, 1, 551]]
colunas = ['Z', 'A', 'B', 'S1', 'S2', 'S3', 'Solução']
linhas = ['Z', 'S1', 'S2', 'S3']
n_colunas = 7
n_linhas = 4
menor_posicao = -1
menor_valor = 0
for coluna in range(0, n_colunas):
if tabela[0][coluna] < menor_valor:
menor_valor = tabela[0][coluna]
menor_posicao = coluna
solucao_dividida = []
while menor_posicao != -1:
solucao_dividida.clear()
solucao_dividida.append('Vazio')
for linha in range(1, n_linhas):
if tabela[linha][menor_posicao] == 0:
solucao_dividida.append(float('inf'))
else:
solucao_dividida.append(tabela[linha][n_colunas - 1] / tabela[
linha][menor_posicao])
if solucao_dividida[1] > 0:
menor_solucao = solucao_dividida[1]
else:
menor_solucao = float('inf')
menor_solucao_posicao = 1
for i in range(1, n_linhas):
if solucao_dividida[i] > 0 and solucao_dividida[i] < menor_solucao:
menor_solucao = solucao_dividida[i]
menor_solucao_posicao = i
pivo = tabela[menor_solucao_posicao][menor_posicao]
for coluna in range(0, n_colunas):
if pivo == 0:
tabela[menor_solucao_posicao][coluna] = float('inf')
else:
tabela[menor_solucao_posicao][coluna] = tabela[
menor_solucao_posicao][coluna] / pivo
linhas[menor_solucao_posicao] = colunas[menor_posicao]
for linha in range(0, n_linhas):
if linha != menor_solucao_posicao:
if tabela[menor_solucao_posicao][menor_posicao] == 0:
razao = float('inf')
else:
razao = tabela[linha][menor_posicao] / tabela[
menor_solucao_posicao][menor_posicao]
for coluna in range(0, n_colunas):
tabela[linha][coluna] = tabela[linha][coluna] - razao * tabela[
menor_solucao_posicao][coluna]
menor_posicao = -1
menor_valor = 0
for coluna in range(0, n_colunas):
if tabela[0][coluna] < menor_valor:
menor_valor = tabela[0][coluna]
menor_posicao = coluna
print(tabela)
for i in range(0, n_linhas):
print(linhas[i], '=', tabela[i][n_colunas - 1])
<|reserved_special_token_1|>
# Simplex method on a fixed tableau.  Row 0 is the objective (Z) row and the
# last column ("Solução") holds the right-hand side of each constraint.
tabela = [[1,-45,-20,0,0,0,0],[0,20,5,1,0,0,9500],[0,0.04,0.12,0,1,0,40],[0,1,1,0,0,1,551]]
colunas = ["Z","A","B","S1","S2","S3","Solução"]
linhas = ["Z","S1","S2","S3"]
n_colunas=7
n_linhas=4
#Start of the algorithm
#Find the most negative entry in row 0 (this picks the entering variable's column)
menor_posicao=-1
menor_valor=0
for coluna in range(0,n_colunas):
	if(tabela[0][coluna]<menor_valor):
		menor_valor=tabela[0][coluna]
		menor_posicao=coluna
#The most negative number of row 0 is in column menor_posicao; if there is no negative number the position stays -1
solucao_dividida=[]
while(menor_posicao!=-1): #The loop ends when the Z row has no negative entry left (optimum reached)
	#Ratio test: divide the last column by the pivot-column entry of each constraint row
	solucao_dividida.clear()
	solucao_dividida.append("Vazio")  # placeholder so list indices line up with row numbers (row 0 is Z)
	for linha in range (1,n_linhas):
		if(tabela[linha][menor_posicao]==0):
			solucao_dividida.append(float("inf"))  # avoid division by zero; inf can never win the ratio test
		else:
			solucao_dividida.append(tabela[linha][n_colunas-1]/tabela[linha][menor_posicao])
	#Find the row with the smallest positive ratio (this picks the leaving variable's row)
	if(solucao_dividida[1]>0):
		menor_solucao=solucao_dividida[1]
	else:
		menor_solucao=float("inf")
	menor_solucao_posicao=1
	for i in range (1,n_linhas):
		if(solucao_dividida[i]>0 and solucao_dividida[i]<menor_solucao):
			menor_solucao=solucao_dividida[i]
			menor_solucao_posicao=i
	#Take the pivot element tabela[menor_solucao_posicao][menor_posicao] and divide the pivot row by it
	pivo=tabela[menor_solucao_posicao][menor_posicao]
	for coluna in range(0,n_colunas):
		if(pivo==0):
			tabela[menor_solucao_posicao][coluna]=float("inf")
		else:
			tabela[menor_solucao_posicao][coluna]=tabela[menor_solucao_posicao][coluna]/pivo
	linhas[menor_solucao_posicao]=colunas[menor_posicao] #update the basis label of this row
	#Add multiples of the pivot row to every other row so the pivot column becomes
	#zero everywhere except in the pivot row itself (Gaussian elimination step)
	for linha in range (0,n_linhas):
		if(linha!=menor_solucao_posicao):
			if(tabela[menor_solucao_posicao][menor_posicao]==0):
				razao=float("inf")
			else:
				razao=tabela[linha][menor_posicao]/tabela[menor_solucao_posicao][menor_posicao]
			for coluna in range (0,n_colunas):
				tabela[linha][coluna]=tabela[linha][coluna]-(razao*tabela[menor_solucao_posicao][coluna])
	#Find the most negative entry of row 0 again for the next iteration
	menor_posicao=-1
	menor_valor=0
	for coluna in range(0,n_colunas):
		if(tabela[0][coluna]<menor_valor):
			menor_valor=tabela[0][coluna]
			menor_posicao=coluna
	#If there is no negative number left, menor_posicao stays -1
	#and the while loop terminates
print(tabela)
for i in range (0,n_linhas):
	print(linhas[i],"=",tabela[i][n_colunas-1])
|
flexible
|
{
"blob_id": "785dcaf7de68174d84af3459cde02927bc2e10cc",
"index": 8951,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor coluna in range(0, n_colunas):\n if tabela[0][coluna] < menor_valor:\n menor_valor = tabela[0][coluna]\n menor_posicao = coluna\n<mask token>\nwhile menor_posicao != -1:\n solucao_dividida.clear()\n solucao_dividida.append('Vazio')\n for linha in range(1, n_linhas):\n if tabela[linha][menor_posicao] == 0:\n solucao_dividida.append(float('inf'))\n else:\n solucao_dividida.append(tabela[linha][n_colunas - 1] / tabela[\n linha][menor_posicao])\n if solucao_dividida[1] > 0:\n menor_solucao = solucao_dividida[1]\n else:\n menor_solucao = float('inf')\n menor_solucao_posicao = 1\n for i in range(1, n_linhas):\n if solucao_dividida[i] > 0 and solucao_dividida[i] < menor_solucao:\n menor_solucao = solucao_dividida[i]\n menor_solucao_posicao = i\n pivo = tabela[menor_solucao_posicao][menor_posicao]\n for coluna in range(0, n_colunas):\n if pivo == 0:\n tabela[menor_solucao_posicao][coluna] = float('inf')\n else:\n tabela[menor_solucao_posicao][coluna] = tabela[\n menor_solucao_posicao][coluna] / pivo\n linhas[menor_solucao_posicao] = colunas[menor_posicao]\n for linha in range(0, n_linhas):\n if linha != menor_solucao_posicao:\n if tabela[menor_solucao_posicao][menor_posicao] == 0:\n razao = float('inf')\n else:\n razao = tabela[linha][menor_posicao] / tabela[\n menor_solucao_posicao][menor_posicao]\n for coluna in range(0, n_colunas):\n tabela[linha][coluna] = tabela[linha][coluna] - razao * tabela[\n menor_solucao_posicao][coluna]\n menor_posicao = -1\n menor_valor = 0\n for coluna in range(0, n_colunas):\n if tabela[0][coluna] < menor_valor:\n menor_valor = tabela[0][coluna]\n menor_posicao = coluna\nprint(tabela)\nfor i in range(0, n_linhas):\n print(linhas[i], '=', tabela[i][n_colunas - 1])\n",
"step-3": "tabela = [[1, -45, -20, 0, 0, 0, 0], [0, 20, 5, 1, 0, 0, 9500], [0, 0.04, \n 0.12, 0, 1, 0, 40], [0, 1, 1, 0, 0, 1, 551]]\ncolunas = ['Z', 'A', 'B', 'S1', 'S2', 'S3', 'Solução']\nlinhas = ['Z', 'S1', 'S2', 'S3']\nn_colunas = 7\nn_linhas = 4\nmenor_posicao = -1\nmenor_valor = 0\nfor coluna in range(0, n_colunas):\n if tabela[0][coluna] < menor_valor:\n menor_valor = tabela[0][coluna]\n menor_posicao = coluna\nsolucao_dividida = []\nwhile menor_posicao != -1:\n solucao_dividida.clear()\n solucao_dividida.append('Vazio')\n for linha in range(1, n_linhas):\n if tabela[linha][menor_posicao] == 0:\n solucao_dividida.append(float('inf'))\n else:\n solucao_dividida.append(tabela[linha][n_colunas - 1] / tabela[\n linha][menor_posicao])\n if solucao_dividida[1] > 0:\n menor_solucao = solucao_dividida[1]\n else:\n menor_solucao = float('inf')\n menor_solucao_posicao = 1\n for i in range(1, n_linhas):\n if solucao_dividida[i] > 0 and solucao_dividida[i] < menor_solucao:\n menor_solucao = solucao_dividida[i]\n menor_solucao_posicao = i\n pivo = tabela[menor_solucao_posicao][menor_posicao]\n for coluna in range(0, n_colunas):\n if pivo == 0:\n tabela[menor_solucao_posicao][coluna] = float('inf')\n else:\n tabela[menor_solucao_posicao][coluna] = tabela[\n menor_solucao_posicao][coluna] / pivo\n linhas[menor_solucao_posicao] = colunas[menor_posicao]\n for linha in range(0, n_linhas):\n if linha != menor_solucao_posicao:\n if tabela[menor_solucao_posicao][menor_posicao] == 0:\n razao = float('inf')\n else:\n razao = tabela[linha][menor_posicao] / tabela[\n menor_solucao_posicao][menor_posicao]\n for coluna in range(0, n_colunas):\n tabela[linha][coluna] = tabela[linha][coluna] - razao * tabela[\n menor_solucao_posicao][coluna]\n menor_posicao = -1\n menor_valor = 0\n for coluna in range(0, n_colunas):\n if tabela[0][coluna] < menor_valor:\n menor_valor = tabela[0][coluna]\n menor_posicao = coluna\nprint(tabela)\nfor i in range(0, n_linhas):\n print(linhas[i], '=', 
tabela[i][n_colunas - 1])\n",
"step-4": "tabela = [[1,-45,-20,0,0,0,0],[0,20,5,1,0,0,9500],[0,0.04,0.12,0,1,0,40],[0,1,1,0,0,1,551]]\r\ncolunas = [\"Z\",\"A\",\"B\",\"S1\",\"S2\",\"S3\",\"Solução\"]\r\nlinhas = [\"Z\",\"S1\",\"S2\",\"S3\"]\r\nn_colunas=7\r\nn_linhas=4\r\n\r\n#Inicio do algoritmo\r\n\r\n#Buscar o menor numero negativo na linha 0\r\nmenor_posicao=-1\r\nmenor_valor=0\r\nfor coluna in range(0,n_colunas):\r\n\tif(tabela[0][coluna]<menor_valor):\r\n\t\tmenor_valor=tabela[0][coluna]\r\n\t\tmenor_posicao=coluna\r\n#O menor numero negativo na linha 0 esta na coluna menor_posicao, caso nao haja um numero negativo a posicao é -1\r\nsolucao_dividida=[]\r\nwhile(menor_posicao!=-1): #O loop terminara quando nao houver numero negativo na linha Z\r\n\t#Vamos agora dividir a ultima coluna pelos elementos da coluna i em cada linha\r\n\tsolucao_dividida.clear()\r\n\tsolucao_dividida.append(\"Vazio\")\r\n\tfor linha in range (1,n_linhas):\r\n\t\tif(tabela[linha][menor_posicao]==0):\r\n\t\t\tsolucao_dividida.append(float(\"inf\"))\r\n\t\telse:\r\n\t\t\tsolucao_dividida.append(tabela[linha][n_colunas-1]/tabela[linha][menor_posicao])\r\n\t#Agora iremos procurar a linha com a menor solucao_dividida positiva\r\n\tif(solucao_dividida[1]>0):\r\n\t\tmenor_solucao=solucao_dividida[1]\r\n\telse:\r\n\t\tmenor_solucao=float(\"inf\")\r\n\tmenor_solucao_posicao=1\r\n\tfor i in range (1,n_linhas):\r\n\t\tif(solucao_dividida[i]>0 and solucao_dividida[i]<menor_solucao):\r\n\t\t\tmenor_solucao=solucao_dividida[i]\r\n\t\t\tmenor_solucao_posicao=i\r\n\t#Agora vamos pegar o elemento tabela[menor_solucao_posicao][menor_posicao] e dividir a linha menor_solucao_posicao por ele\r\n\tpivo=tabela[menor_solucao_posicao][menor_posicao]\r\n\tfor coluna in range(0,n_colunas):\r\n\t\tif(pivo==0):\r\n\t\t\ttabela[menor_solucao_posicao][coluna]=float(\"inf\")\r\n\t\telse:\r\n\t\t\ttabela[menor_solucao_posicao][coluna]=tabela[menor_solucao_posicao][coluna]/pivo\r\n\tlinhas[menor_solucao_posicao]=colunas[menor_posicao] #mudando o 
cabecalho da tabela\r\n\t#Agora vamos pegar a linha menor_solucao_posicao e somar nas demais de forma que a coluna menor_posicao seja zerada em todas as linhas\r\n\t#menos na linha menor_solucao_posicao\r\n\tfor linha in range (0,n_linhas):\r\n\t\tif(linha!=menor_solucao_posicao):\r\n\t\t\tif(tabela[menor_solucao_posicao][menor_posicao]==0):\r\n\t\t\t\trazao=float(\"inf\")\r\n\t\t\telse:\r\n\t\t\t\trazao=tabela[linha][menor_posicao]/tabela[menor_solucao_posicao][menor_posicao]\r\n\t\t\tfor coluna in range (0,n_colunas):\r\n\t\t\t\ttabela[linha][coluna]=tabela[linha][coluna]-(razao*tabela[menor_solucao_posicao][coluna])\r\n\t#Buscar o menor numero negativo na linha 0\r\n\tmenor_posicao=-1\r\n\tmenor_valor=0\r\n\tfor coluna in range(0,n_colunas):\r\n\t\tif(tabela[0][coluna]<menor_valor):\r\n\t\t\tmenor_valor=tabela[0][coluna]\r\n\t\t\tmenor_posicao=coluna\r\n\t#O menor numero negativo na linha 0 esta na coluna menor_posicao, caso nao haja um numero negativo a posicao é -1\r\n\t#Caso menor_posicao==-1 o while termina\r\nprint(tabela)\r\nfor i in range (0,n_linhas):\r\n\tprint(linhas[i],\"=\",tabela[i][n_colunas-1])",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def loadConfiguration(fileloc):
"""Loads configuration from file location"""
global config
with open(fileloc, 'r') as file_:
conf = load(file_)
if config is None:
config = conf
else:
config.update(conf)
def get(key):
"""Gets the configuration value for key """
return config[key]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def loadConfiguration(fileloc):
"""Loads configuration from file location"""
global config
with open(fileloc, 'r') as file_:
conf = load(file_)
if config is None:
config = conf
else:
config.update(conf)
def get(key):
"""Gets the configuration value for key """
return config[key]
loadConfiguration(default_file_loc)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
default_file_loc = 'config.json'
config = None
def loadConfiguration(fileloc):
"""Loads configuration from file location"""
global config
with open(fileloc, 'r') as file_:
conf = load(file_)
if config is None:
config = conf
else:
config.update(conf)
def get(key):
"""Gets the configuration value for key """
return config[key]
loadConfiguration(default_file_loc)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from json import load
default_file_loc = 'config.json'
config = None
def loadConfiguration(fileloc):
"""Loads configuration from file location"""
global config
with open(fileloc, 'r') as file_:
conf = load(file_)
if config is None:
config = conf
else:
config.update(conf)
def get(key):
"""Gets the configuration value for key """
return config[key]
loadConfiguration(default_file_loc)
<|reserved_special_token_1|>
'''
Module for handling configurable portions of tools
'''
from json import load
default_file_loc = 'config.json'
config = None
def loadConfiguration(fileloc):
    """Load JSON configuration from *fileloc* into the module-level ``config``.

    On the first call ``config`` is replaced wholesale; on later calls the
    newly loaded keys are merged in, overwriting existing keys.

    Raises:
        OSError: if the file cannot be opened.
        json.JSONDecodeError: if the file is not valid JSON.
    """
    global config
    # Read explicitly as UTF-8 (the JSON convention) so behaviour does not
    # depend on the platform's default locale encoding.
    with open(fileloc, 'r', encoding='utf-8') as file_:
        conf = load(file_)
    if config is None:
        config = conf
    else:
        config.update(conf)
def get(key):
    """Return the configuration value stored under ``key``.

    Raises KeyError if the key is absent (or TypeError if no configuration
    has been loaded yet and ``config`` is still ``None``).
    """
    value = config[key]
    return value
# Eagerly load the default configuration at import time so get() works
# immediately; this raises if config.json is missing from the working directory.
loadConfiguration(default_file_loc)
|
flexible
|
{
"blob_id": "5261ae90a67e2df8dd1c679a8046ee3e0cbc6221",
"index": 3264,
"step-1": "<mask token>\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-3": "<mask token>\ndefault_file_loc = 'config.json'\nconfig = None\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-4": "<mask token>\nfrom json import load\ndefault_file_loc = 'config.json'\nconfig = None\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-5": "'''\nModule for handling configurable portions of tools\n'''\n\nfrom json import load\n\ndefault_file_loc = 'config.json'\nconfig = None\n\ndef loadConfiguration(fileloc):\n '''Loads configuration from file location'''\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\ndef get(key):\n '''Gets the configuration value for key '''\n return config[key]\n\nloadConfiguration(default_file_loc)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from turtle import *
from shapes import *
#1-
#1.triangle
def eTriangle():
    """Draw an equilateral triangle of side 100, turning right 120° at each corner.

    NOTE(review): mainloop() enters the Tk event loop and blocks until the
    window is closed, so nothing after a call to this function runs meanwhile.
    """
    for _ in range(3):
        forward(100)
        right(120)
    mainloop()
#2.square
def square():
    """Draw a square of side 100.

    Only three right turns are made (matching the original), so the final
    heading is rotated 270° from the start.  mainloop() blocks at the end.
    """
    for _ in range(3):
        forward(100)
        right(90)
    forward(100)
    mainloop()
#3.pentagon
def pentagon():
    """Draw a pentagon outline: five sides of 100 with 72° right turns between them."""
    forward(100)
    for _ in range(4):
        right(72)
        forward(100)
    mainloop()
#4.hexagon
def hexagon():
    """Draw a hexagon outline: six sides of 100 with 60° right turns between them."""
    forward(100)
    for _ in range(5):
        right(60)
        forward(100)
    mainloop()
#5.octagon
def octagon():
    """Draw an octagon outline: eight sides of 100 with 45° right turns between them."""
    forward(100)
    for _ in range(7):
        right(45)
        forward(100)
    mainloop()
#6.star
def star():
    """Draw a five-pointed star: five strokes of 100 with 144° right turns between them."""
    forward(100)
    for _ in range(4):
        right(144)
        forward(100)
    mainloop()
#7.circle
def circle():
    """Approximate a circle with 370 short segments (2 units forward, 1° right each).

    NOTE(review): 370 steps overshoots a full 360° revolution by 10°, retracing
    part of the arc -- presumably intentional to make sure the shape closes;
    confirm before changing.
    """
    step = 0
    while step < 370:
        forward(2)
        right(1)
        step += 1
    mainloop()
#2-
# Demo: draw each shape in sequence.
# NOTE(review): every shape function above ends with mainloop(), which blocks
# until the window is closed -- so each later call only runs after the
# previous window interaction.  The star import here re-imports the shapes
# module's names, shadowing the same-named functions defined above.
from shapes import *
eTriangle()
square()
pentagon()
hexagon()
octagon()
star()
circle()
mainloop()
#3-
# Night-sky scene: scatter filled yellow star shapes (shapes.starp helper)
# over a dark background, then draw a grey moon (shapes.circlep) at the end.
bgcolor("MidnightBlue")
starp(20, True, "yellow", "MidnightBlue")
right (20)
forward(100)
starp(20, True, "yellow", "MidnightBlue")
right (30)
forward(150)
starp(20, True, "yellow", "MidnightBlue")
right (40)
forward(200)
starp(20, True, "yellow", "MidnightBlue")
right (50)
forward(250)
starp(20, True, "yellow", "MidnightBlue")
right (60)
forward(300)
starp(20, True, "yellow", "MidnightBlue")
forward(100)
starp(20, True, "yellow", "MidnightBlue")
forward(100)
starp(20, True, "yellow", "MidnightBlue")
left (90)
forward(300)
starp(20, True, "yellow", "MidnightBlue")
right (50)
forward (300)
starp(20, True, "yellow", "MidnightBlue")
right(50)
forward(300)
starp(20, True, "yellow", "MidnightBlue")
right (50)
forward (275)
# Moon: grey filled circle (shapes.circlep helper).
circlep(3, True, "SlateGrey", "MidnightBlue")
right(60)
forward(20)
mainloop()
#4-
# One-off scene script: sky, lawn, house with door and doorknob, sidewalk,
# roof, two windows and a sun.  Relies on helpers (rectangle, circle, square,
# circlep) star-imported from the shapes module earlier in the file.
bgcolor("skyblue")
# Move below/left of centre without drawing, to position the lawn.
right(90)
penup()
forward(100)
right(90)
forward(200)
# Lawn: fill a green polygon (end_fill closes the path automatically).
fillcolor("Green")
begin_fill()
forward (300)
left(90)
forward (300)
left(90)
forward(1250)
left(90)
forward(300)
left(90)
forward(1000)
end_fill()
right (90)
pendown()
# House body: red filled rectangle.
rectangle(200, 450, True, "Red")
left(180)
forward(200)
left(90)
penup()
forward(100)
right(90)
pendown()  # BUG FIX: was bare `pendown` (attribute access, a no-op) -- the pen stayed up from penup() above
rectangle(50, 100, True, "Brown")
# Doorknob: walk onto the door and dot it.
penup()
right(90)
forward(50)
right(90)
forward(50)
right (90)
forward(10)
circle(.1, True, "Black")
# Sidewalk: grey parallelogram leading away from the door.
penup()
forward(40)
left(90)
forward(50)
pendown()
fillcolor("grey")
begin_fill()
left (20)
forward(400)
left (75)
forward(50)
left(105)
forward(400)
left(75)
forward(50)
end_fill()
# Roof: brown filled triangle over the house body.
right(5)
penup()
forward(200)
right(90)
forward(200)
right(90)
left(40)
pendown()
fillcolor("brown")
begin_fill()
forward(293.717)
right(80)
forward(293.717)
right(140)
forward(450)
end_fill()
# First window: blue square with two inner 25-unit panes.
penup()
left(90)
forward(75)
left(90)
forward(75)
pendown()
square(50, True, "blue", "Black")
right(90)
square(25, False, "blue", "black")
right(90)
forward(50)
right(90)
forward(25)
square(25, False, "blue", "black")
# Second window, same construction, 200 units along.
penup()
left(90)
forward(25)
right(90)
forward(200)
pendown()
square(50, True, "blue", "Black")
right(90)
square(25, False, "blue", "black")
right(90)
forward(50)
right(90)
forward(25)
square(25, False, "blue", "black")
# Sun: yellow filled circle in the sky.
penup()
left(90)
forward(250)
left (90)
forward(400)
circlep(3, True, "yellow", "yellow")
mainloop()
#5-
def door():
    """Draw a filled brown door and a small black doorknob.

    NOTE(review): `circle` here needs the 4-argument helper from the `shapes`
    module (re-imported by the star import above), not the zero-argument
    `circle()` defined earlier in this file -- confirm which binding wins.
    """
    rectangle(50, 100, True, "Brown")
    penup()
    right(90)
    forward(50)
    right(90)
    forward(50)
    right (90)
    forward(10)
    circle(.1, True, "Black")
def grass():
    """Fill a large green polygon used as the lawn.

    The path is not explicitly closed; end_fill() closes it automatically.
    """
    fillcolor("Green")
    begin_fill()
    forward (300)
    left(90)
    forward (300)
    left(90)
    forward(1250)
    left(90)
    forward(300)
    left(90)
    forward(1000)
    end_fill()
def house():
    """Draw the house body: a filled red 200x450 rectangle (shapes.rectangle helper)."""
    rectangle(200, 450, True, "Red")
def roof():
    """Fill a brown triangle roof: two 293.717-unit slopes over a 450-unit base."""
    fillcolor("brown")
    begin_fill()
    forward(293.717)
    right(80)
    forward(293.717)
    right(140)
    forward(450)
    end_fill()
def window():
    """Draw a blue 50-unit square window containing two 25-unit inner panes.

    NOTE(review): `square` here must be the 4-argument shapes helper, not the
    zero-argument `square()` defined earlier in this file -- confirm binding.
    """
    square(50, True, "blue", "Black")
    right(90)
    square(25, False, "blue", "black")
    right(90)
    forward(50)
    right(90)
    forward(25)
    square(25, False, "blue", "black")
def sun():
    """Draw the sun: a yellow filled circle (shapes.circlep helper)."""
    circlep(3, True, "yellow", "yellow")
def sidewalk():
    """Fill a grey parallelogram path leading away from the door."""
    fillcolor("grey")
    begin_fill()
    left (20)
    forward(400)
    left (75)
    forward(50)
    left(105)
    forward(400)
    left(75)
    forward(50)
    end_fill()
# Same scene as section #4, rebuilt from the helper functions above.
bgcolor("skyblue")
# Position below/left of centre without drawing.
right(90)
penup()
forward(100)
right(90)
forward(200)
grass()
right (90)
pendown()
house()
# Walk to the door position.
left(180)
forward(200)
left(90)
penup()
forward(100)
right(90)
pendown()  # BUG FIX: was bare `pendown` (attribute access, a no-op) -- the pen stayed up from penup() above
door()
penup()
forward(40)
left(90)
forward(50)
pendown()
sidewalk()
# Walk up to the roof line.
right(5)
penup()
forward(200)
right(90)
forward(200)
right(90)
left(40)
pendown()
roof()
# First window.
penup()
left(90)
forward(75)
left(90)
forward(75)
pendown()
window()
# Second window, 200 units along.
penup()
left(90)
forward(25)
right(90)
forward(200)
pendown()
window()
# Sun in the sky.
penup()
left(90)
forward(250)
left (90)
forward(400)
sun()
mainloop()
#6-
import random
def craystar():
    """Draw a spirograph-like filled star: 36 strokes of 200, turning left 170° each time."""
    color('red', 'yellow')
    begin_fill()
    stroke = 0
    while stroke < 36:
        forward(200)
        left(170)
        stroke += 1
    end_fill()
def craytriangle():
    """Draw a shrinking filled triangular spiral: side lengths 60, 55, ..., 5."""
    color('black', 'blue')
    begin_fill()
    for side in range(60, 0, -5):
        forward(side)
        right(120)
    end_fill()
def craysquare():
    """Draw 12 overlaid squares of side 60, then 12 random-length strokes
    with 90° right turns, all as one green-on-blue fill."""
    color("green", "Blue")
    begin_fill()
    for _ in range(12):
        for _ in range(4):
            forward(60)
            right(90)
    for _ in range(12):
        forward(random.randint(1, 60))
        right(90)
    end_fill()
# Scatter the three "crazy" shapes at random positions and headings.
craysquare()
forward (50)
craysquare()
forward (50)
craysquare()
forward (50)
craystar()
forward(random.randint(1,100))
right(random.randint(1, 90))
craytriangle()
forward(random.randint(1,100))
right(random.randint(1, 90))
craystar()
forward(random.randint(1,100))
right(random.randint(1, 90))
craytriangle()
forward(random.randint(1,100))
right(random.randint(1, 90))
craystar()
forward(random.randint(1,100))
right(random.randint(1, 90))
craytriangle()
mainloop()
|
normal
|
{
"blob_id": "92d689e5caa2d8c65f86af0f8b49b009d162a783",
"index": 7379,
"step-1": "<mask token>\n\n\ndef square():\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n mainloop()\n\n\ndef pentagon():\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef octagon():\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef house():\n rectangle(200, 450, True, 'Red')\n\n\n<mask token>\n\n\ndef sun():\n circlep(3, True, 'yellow', 'yellow')\n\n\ndef sidewalk():\n fillcolor('grey')\n begin_fill()\n left(20)\n forward(400)\n left(75)\n forward(50)\n left(105)\n forward(400)\n left(75)\n forward(50)\n end_fill()\n\n\n<mask token>\n\n\ndef craystar():\n color('red', 'yellow')\n begin_fill()\n for i in range(36):\n forward(200)\n left(170)\n end_fill()\n\n\ndef craytriangle():\n color('black', 'blue')\n begin_fill()\n i = 60\n while i > 0:\n forward(i)\n right(120)\n i -= 5\n end_fill()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef square():\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n mainloop()\n\n\ndef pentagon():\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef octagon():\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n mainloop()\n\n\ndef star():\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef house():\n rectangle(200, 450, True, 'Red')\n\n\n<mask token>\n\n\ndef sun():\n circlep(3, True, 'yellow', 'yellow')\n\n\ndef sidewalk():\n fillcolor('grey')\n begin_fill()\n left(20)\n forward(400)\n left(75)\n forward(50)\n left(105)\n forward(400)\n left(75)\n forward(50)\n end_fill()\n\n\n<mask token>\n\n\ndef craystar():\n color('red', 'yellow')\n begin_fill()\n for i in range(36):\n forward(200)\n left(170)\n end_fill()\n\n\ndef craytriangle():\n color('black', 'blue')\n begin_fill()\n i = 60\n while i > 0:\n forward(i)\n right(120)\n i -= 5\n end_fill()\n\n\ndef craysquare():\n color('green', 'Blue')\n begin_fill()\n for i in range(12):\n for i in range(4):\n forward(60)\n right(90)\n for i in range(12):\n forward(random.randint(1, 60))\n right(90)\n end_fill()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef square():\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n mainloop()\n\n\ndef pentagon():\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef octagon():\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n mainloop()\n\n\ndef star():\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n mainloop()\n\n\ndef circle():\n for i in range(370):\n forward(2)\n right(1)\n mainloop()\n\n\n<mask token>\n\n\ndef house():\n rectangle(200, 450, True, 'Red')\n\n\ndef roof():\n fillcolor('brown')\n begin_fill()\n forward(293.717)\n right(80)\n forward(293.717)\n right(140)\n forward(450)\n end_fill()\n\n\n<mask token>\n\n\ndef sun():\n circlep(3, True, 'yellow', 'yellow')\n\n\ndef sidewalk():\n fillcolor('grey')\n begin_fill()\n left(20)\n forward(400)\n left(75)\n forward(50)\n left(105)\n forward(400)\n left(75)\n forward(50)\n end_fill()\n\n\n<mask token>\n\n\ndef craystar():\n color('red', 'yellow')\n begin_fill()\n for i in range(36):\n forward(200)\n left(170)\n end_fill()\n\n\ndef craytriangle():\n color('black', 'blue')\n begin_fill()\n i = 60\n while i > 0:\n forward(i)\n right(120)\n i -= 5\n end_fill()\n\n\ndef craysquare():\n color('green', 'Blue')\n begin_fill()\n for i in range(12):\n for i in range(4):\n forward(60)\n right(90)\n for i in range(12):\n forward(random.randint(1, 60))\n right(90)\n end_fill()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef eTriangle():\n forward(100)\n right(120)\n forward(100)\n right(120)\n forward(100)\n right(120)\n mainloop()\n\n\ndef square():\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n mainloop()\n\n\ndef pentagon():\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n mainloop()\n\n\n<mask token>\n\n\ndef octagon():\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n mainloop()\n\n\ndef star():\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n mainloop()\n\n\ndef circle():\n for i in range(370):\n forward(2)\n right(1)\n mainloop()\n\n\n<mask token>\n\n\ndef door():\n rectangle(50, 100, True, 'Brown')\n penup()\n right(90)\n forward(50)\n right(90)\n forward(50)\n right(90)\n forward(10)\n circle(0.1, True, 'Black')\n\n\n<mask token>\n\n\ndef house():\n rectangle(200, 450, True, 'Red')\n\n\ndef roof():\n fillcolor('brown')\n begin_fill()\n forward(293.717)\n right(80)\n forward(293.717)\n right(140)\n forward(450)\n end_fill()\n\n\n<mask token>\n\n\ndef sun():\n circlep(3, True, 'yellow', 'yellow')\n\n\ndef sidewalk():\n fillcolor('grey')\n begin_fill()\n left(20)\n forward(400)\n left(75)\n forward(50)\n left(105)\n forward(400)\n left(75)\n forward(50)\n end_fill()\n\n\n<mask token>\n\n\ndef craystar():\n color('red', 'yellow')\n begin_fill()\n for i in range(36):\n forward(200)\n left(170)\n end_fill()\n\n\ndef craytriangle():\n color('black', 'blue')\n begin_fill()\n i = 60\n while i > 0:\n forward(i)\n right(120)\n i -= 5\n end_fill()\n\n\ndef craysquare():\n color('green', 'Blue')\n begin_fill()\n for i in range(12):\n for i in range(4):\n forward(60)\n right(90)\n for i in 
range(12):\n forward(random.randint(1, 60))\n right(90)\n end_fill()\n\n\n<mask token>\n",
"step-5": "from turtle import *\nfrom shapes import *\n#1-\n #1.triangle\ndef eTriangle():\n forward(100)\n right(120)\n forward(100)\n right(120)\n forward(100)\n right(120)\n mainloop()\n #2.square\ndef square():\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n right(90)\n forward(100)\n mainloop()\n #3.pentagon\ndef pentagon():\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n right(72)\n forward(100)\n mainloop()\n #4.hexagon\ndef hexagon():\n forward(100)\n right(60)\n forward(100)\n right(60)\n forward(100)\n right(60)\n forward(100)\n right(60)\n forward(100)\n right(60)\n forward(100)\n mainloop()\n #5.octagon\ndef octagon():\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n right(45)\n forward(100)\n mainloop()\n #6.star\ndef star():\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n right(144)\n forward(100)\n mainloop()\n #7.circle\ndef circle():\n for i in range(370):\n forward(2)\n right(1)\n mainloop()\n\n#2- \nfrom shapes import *\neTriangle()\nsquare()\npentagon()\nhexagon()\noctagon()\nstar()\ncircle()\nmainloop()\n\n#3- \nbgcolor(\"MidnightBlue\")\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (20)\nforward(100)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (30)\nforward(150)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (40)\nforward(200)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (50)\nforward(250)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (60)\nforward(300)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\n\nforward(100)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\n\nforward(100)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\n\nleft (90)\nforward(300)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\nright (50)\nforward (300)\n\nstarp(20, 
True, \"yellow\", \"MidnightBlue\")\nright(50)\nforward(300)\n\nstarp(20, True, \"yellow\", \"MidnightBlue\")\n\nright (50)\n\nforward (275)\n\ncirclep(3, True, \"SlateGrey\", \"MidnightBlue\")\nright(60)\nforward(20)\n\n\n\nmainloop()\n\n\n#4- \nbgcolor(\"skyblue\")\nright(90)\npenup()\nforward(100)\nright(90)\nforward(200)\n\nfillcolor(\"Green\")\nbegin_fill()\nforward (300)\nleft(90)\nforward (300)\nleft(90)\nforward(1250)\nleft(90)\nforward(300)\nleft(90)\nforward(1000)\nend_fill()\n\nright (90)\n\npendown()\nrectangle(200, 450, True, \"Red\")\nleft(180)\nforward(200)\nleft(90)\npenup()\nforward(100)\nright(90)\npendown\nrectangle(50, 100, True, \"Brown\")\n\npenup()\nright(90)\nforward(50)\nright(90)\nforward(50)\nright (90)\nforward(10)\n\ncircle(.1, True, \"Black\")\n\npenup()\nforward(40)\nleft(90)\nforward(50)\n\npendown()\nfillcolor(\"grey\")\nbegin_fill()\nleft (20)\nforward(400)\nleft (75)\nforward(50)\nleft(105)\nforward(400)\nleft(75)\nforward(50)\nend_fill()\n\nright(5)\npenup()\nforward(200)\nright(90)\nforward(200)\nright(90)\nleft(40)\npendown()\nfillcolor(\"brown\")\nbegin_fill()\nforward(293.717)\nright(80)\nforward(293.717)\nright(140)\nforward(450)\nend_fill()\n\npenup()\nleft(90)\nforward(75)\nleft(90)\nforward(75)\npendown()\nsquare(50, True, \"blue\", \"Black\")\nright(90)\nsquare(25, False, \"blue\", \"black\")\nright(90)\nforward(50)\nright(90)\nforward(25)\nsquare(25, False, \"blue\", \"black\")\n\npenup()\nleft(90)\nforward(25)\nright(90)\nforward(200)\npendown()\nsquare(50, True, \"blue\", \"Black\")\nright(90)\nsquare(25, False, \"blue\", \"black\")\nright(90)\nforward(50)\nright(90)\nforward(25)\nsquare(25, False, \"blue\", \"black\")\n\npenup()\nleft(90)\nforward(250)\nleft (90)\nforward(400)\n\ncirclep(3, True, \"yellow\", \"yellow\")\nmainloop()\n\n#5- \n\ndef door():\n rectangle(50, 100, True, \"Brown\")\n penup()\n right(90)\n forward(50)\n right(90)\n forward(50)\n right (90)\n forward(10)\n\n circle(.1, True, 
\"Black\")\n\n\ndef grass():\n fillcolor(\"Green\")\n begin_fill()\n forward (300)\n left(90)\n forward (300)\n left(90)\n forward(1250)\n left(90)\n forward(300)\n left(90)\n forward(1000)\n end_fill()\n\ndef house():\n rectangle(200, 450, True, \"Red\")\n \n\ndef roof():\n fillcolor(\"brown\")\n begin_fill()\n forward(293.717)\n right(80)\n forward(293.717)\n right(140)\n forward(450)\n end_fill()\n\ndef window():\n square(50, True, \"blue\", \"Black\")\n right(90)\n square(25, False, \"blue\", \"black\")\n right(90)\n forward(50)\n right(90)\n forward(25)\n square(25, False, \"blue\", \"black\")\n\ndef sun():\n circlep(3, True, \"yellow\", \"yellow\")\n\ndef sidewalk():\n fillcolor(\"grey\")\n begin_fill()\n left (20)\n forward(400)\n left (75)\n forward(50)\n left(105)\n forward(400)\n left(75)\n forward(50)\n end_fill()\n\n\n\n\n\n\n\n\nbgcolor(\"skyblue\")\nright(90)\npenup()\nforward(100)\nright(90)\nforward(200)\n\ngrass()\n\nright (90)\npendown()\n\nhouse()\n\nleft(180)\nforward(200)\nleft(90)\npenup()\nforward(100)\nright(90)\npendown\n\ndoor()\n\npenup()\nforward(40)\nleft(90)\nforward(50)\npendown()\n\nsidewalk()\n\nright(5)\npenup()\nforward(200)\nright(90)\nforward(200)\nright(90)\nleft(40)\npendown()\n\nroof()\n\npenup()\nleft(90)\nforward(75)\nleft(90)\nforward(75)\n\npendown()\n\nwindow()\n\npenup()\nleft(90)\nforward(25)\nright(90)\nforward(200)\npendown()\n\nwindow()\n\npenup()\nleft(90)\nforward(250)\nleft (90)\nforward(400)\n\nsun()\nmainloop()\n\n#6- \nimport random\n\ndef craystar():\n color('red', 'yellow')\n begin_fill()\n for i in range(36):\n forward(200)\n left(170)\n end_fill() \n\ndef craytriangle():\n color('black', 'blue')\n begin_fill()\n i = 60\n \n while i > 0:\n forward(i)\n right(120)\n i -= 5\n end_fill()\n\ndef craysquare():\n color(\"green\", \"Blue\")\n begin_fill()\n for i in range(12):\n for i in range(4):\n forward(60)\n right(90)\n for i in range(12):\n forward (random.randint(1,60))\n right(90)\n 
end_fill()\n\n\ncraysquare()\nforward (50)\ncraysquare()\nforward (50)\ncraysquare()\nforward (50)\ncraystar()\nforward(random.randint(1,100))\nright(random.randint(1, 90))\ncraytriangle()\nforward(random.randint(1,100))\nright(random.randint(1, 90))\ncraystar()\nforward(random.randint(1,100))\nright(random.randint(1, 90))\ncraytriangle()\nforward(random.randint(1,100))\nright(random.randint(1, 90))\ncraystar()\nforward(random.randint(1,100))\nright(random.randint(1, 90))\ncraytriangle()\nmainloop()",
"step-ids": [
8,
10,
12,
14,
20
]
}
|
[
8,
10,
12,
14,
20
] |
#!/usr/bin/env python
"""Probe a PLOT3D combustor dataset along a line and plot the sampled scalars.

The upper viewport shows the grid outline with the probe line rendered as a
tube coloured by the sampled scalar; the lower viewport shows the same
samples warped into a 1-D "x-y plot" of scalar value along the line.
"""
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
ren2 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
renWin.AddRenderer(ren2)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create pipeline
#
# Read the structured-grid dataset; only block 0 of the multiblock is used.
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/combxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(110)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)
# Line of 500 sample points through the dataset; the probe filter
# interpolates the grid's point data onto these samples.
probeLine = vtk.vtkLineSource()
probeLine.SetPoint1(1,1,29)
probeLine.SetPoint2(16.5,5,31.7693)
probeLine.SetResolution(500)
probe = vtk.vtkProbeFilter()
probe.SetInputConnection(probeLine.GetOutputPort())
probe.SetSourceData(output)
probe.Update()
# Show the probe line in 3-D as a tube coloured by the sampled scalar.
probeTube = vtk.vtkTubeFilter()
probeTube.SetInputData(probe.GetPolyDataOutput())
probeTube.SetNumberOfSides(5)
probeTube.SetRadius(.05)
probeMapper = vtk.vtkPolyDataMapper()
probeMapper.SetInputConnection(probeTube.GetOutputPort())
probeMapper.SetScalarRange(output.GetScalarRange())
probeActor = vtk.vtkActor()
probeActor.SetMapper(probeMapper)
# Build the lower "plot": a unit line along x carrying the probed scalars...
displayLine = vtk.vtkLineSource()
displayLine.SetPoint1(0,0,0)
displayLine.SetPoint2(1,0,0)
displayLine.SetResolution(probeLine.GetResolution())
displayMerge = vtk.vtkMergeFilter()
displayMerge.SetGeometryConnection(displayLine.GetOutputPort())
displayMerge.SetScalarsData(probe.GetPolyDataOutput())
displayMerge.Update()
# ...then warp the line in +y proportionally to the scalar value.
displayWarp = vtk.vtkWarpScalar()
displayWarp.SetInputData(displayMerge.GetPolyDataOutput())
displayWarp.SetNormal(0,1,0)
displayWarp.SetScaleFactor(.000001)
displayWarp.Update()
displayMapper = vtk.vtkPolyDataMapper()
displayMapper.SetInputData(displayWarp.GetPolyDataOutput())
displayMapper.SetScalarRange(output.GetScalarRange())
displayActor = vtk.vtkActor()
displayActor.SetMapper(displayMapper)
# Black wireframe outline of the structured grid for spatial context.
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.GetProperty().SetColor(0,0,0)
# Upper viewport (top 75% of the window): 3-D scene on white.
ren1.AddActor(outlineActor)
ren1.AddActor(probeActor)
ren1.SetBackground(1,1,1)
ren1.SetViewport(0,.25,1,1)
# Lower viewport (bottom 25%): the warped scalar plot on black.
ren2.AddActor(displayActor)
ren2.SetBackground(0,0,0)
ren2.SetViewport(0,0,1,.25)
renWin.SetSize(300,300)
ren1.ResetCamera()
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.95297,50)
cam1.SetFocalPoint(8.88908,0.595038,29.3342)
cam1.SetPosition(9.9,-26,41)
cam1.SetViewUp(0.060772,-0.319905,0.945498)
ren2.ResetCamera()
cam2 = ren2.GetActiveCamera()
# Parallel projection so the 1-D plot is not perspective-distorted.
cam2.ParallelProjectionOn()
cam2.SetParallelScale(.15)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
|
normal
|
{
"blob_id": "7399612f64eb8e500bc676e6d507be5fe375f40f",
"index": 3746,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrenWin.SetMultiSamples(0)\nrenWin.AddRenderer(ren1)\nrenWin.AddRenderer(ren2)\n<mask token>\niren.SetRenderWindow(renWin)\n<mask token>\npl3d.SetXYZFileName('' + str(VTK_DATA_ROOT) + '/Data/combxyz.bin')\npl3d.SetQFileName('' + str(VTK_DATA_ROOT) + '/Data/combq.bin')\npl3d.SetScalarFunctionNumber(110)\npl3d.SetVectorFunctionNumber(202)\npl3d.Update()\n<mask token>\nprobeLine.SetPoint1(1, 1, 29)\nprobeLine.SetPoint2(16.5, 5, 31.7693)\nprobeLine.SetResolution(500)\n<mask token>\nprobe.SetInputConnection(probeLine.GetOutputPort())\nprobe.SetSourceData(output)\nprobe.Update()\n<mask token>\nprobeTube.SetInputData(probe.GetPolyDataOutput())\nprobeTube.SetNumberOfSides(5)\nprobeTube.SetRadius(0.05)\n<mask token>\nprobeMapper.SetInputConnection(probeTube.GetOutputPort())\nprobeMapper.SetScalarRange(output.GetScalarRange())\n<mask token>\nprobeActor.SetMapper(probeMapper)\n<mask token>\ndisplayLine.SetPoint1(0, 0, 0)\ndisplayLine.SetPoint2(1, 0, 0)\ndisplayLine.SetResolution(probeLine.GetResolution())\n<mask token>\ndisplayMerge.SetGeometryConnection(displayLine.GetOutputPort())\ndisplayMerge.SetScalarsData(probe.GetPolyDataOutput())\ndisplayMerge.Update()\n<mask token>\ndisplayWarp.SetInputData(displayMerge.GetPolyDataOutput())\ndisplayWarp.SetNormal(0, 1, 0)\ndisplayWarp.SetScaleFactor(1e-06)\ndisplayWarp.Update()\n<mask token>\ndisplayMapper.SetInputData(displayWarp.GetPolyDataOutput())\ndisplayMapper.SetScalarRange(output.GetScalarRange())\n<mask token>\ndisplayActor.SetMapper(displayMapper)\n<mask token>\noutline.SetInputData(output)\n<mask token>\noutlineMapper.SetInputConnection(outline.GetOutputPort())\n<mask token>\noutlineActor.SetMapper(outlineMapper)\noutlineActor.GetProperty().SetColor(0, 0, 0)\nren1.AddActor(outlineActor)\nren1.AddActor(probeActor)\nren1.SetBackground(1, 1, 1)\nren1.SetViewport(0, 0.25, 1, 1)\nren2.AddActor(displayActor)\nren2.SetBackground(0, 0, 0)\nren2.SetViewport(0, 0, 1, 0.25)\nrenWin.SetSize(300, 
300)\nren1.ResetCamera()\n<mask token>\ncam1.SetClippingRange(3.95297, 50)\ncam1.SetFocalPoint(8.88908, 0.595038, 29.3342)\ncam1.SetPosition(9.9, -26, 41)\ncam1.SetViewUp(0.060772, -0.319905, 0.945498)\nren2.ResetCamera()\n<mask token>\ncam2.ParallelProjectionOn()\ncam2.SetParallelScale(0.15)\niren.Initialize()\n",
"step-3": "<mask token>\nVTK_DATA_ROOT = vtkGetDataRoot()\nren1 = vtk.vtkRenderer()\nren2 = vtk.vtkRenderer()\nrenWin = vtk.vtkRenderWindow()\nrenWin.SetMultiSamples(0)\nrenWin.AddRenderer(ren1)\nrenWin.AddRenderer(ren2)\niren = vtk.vtkRenderWindowInteractor()\niren.SetRenderWindow(renWin)\npl3d = vtk.vtkMultiBlockPLOT3DReader()\npl3d.SetXYZFileName('' + str(VTK_DATA_ROOT) + '/Data/combxyz.bin')\npl3d.SetQFileName('' + str(VTK_DATA_ROOT) + '/Data/combq.bin')\npl3d.SetScalarFunctionNumber(110)\npl3d.SetVectorFunctionNumber(202)\npl3d.Update()\noutput = pl3d.GetOutput().GetBlock(0)\nprobeLine = vtk.vtkLineSource()\nprobeLine.SetPoint1(1, 1, 29)\nprobeLine.SetPoint2(16.5, 5, 31.7693)\nprobeLine.SetResolution(500)\nprobe = vtk.vtkProbeFilter()\nprobe.SetInputConnection(probeLine.GetOutputPort())\nprobe.SetSourceData(output)\nprobe.Update()\nprobeTube = vtk.vtkTubeFilter()\nprobeTube.SetInputData(probe.GetPolyDataOutput())\nprobeTube.SetNumberOfSides(5)\nprobeTube.SetRadius(0.05)\nprobeMapper = vtk.vtkPolyDataMapper()\nprobeMapper.SetInputConnection(probeTube.GetOutputPort())\nprobeMapper.SetScalarRange(output.GetScalarRange())\nprobeActor = vtk.vtkActor()\nprobeActor.SetMapper(probeMapper)\ndisplayLine = vtk.vtkLineSource()\ndisplayLine.SetPoint1(0, 0, 0)\ndisplayLine.SetPoint2(1, 0, 0)\ndisplayLine.SetResolution(probeLine.GetResolution())\ndisplayMerge = vtk.vtkMergeFilter()\ndisplayMerge.SetGeometryConnection(displayLine.GetOutputPort())\ndisplayMerge.SetScalarsData(probe.GetPolyDataOutput())\ndisplayMerge.Update()\ndisplayWarp = vtk.vtkWarpScalar()\ndisplayWarp.SetInputData(displayMerge.GetPolyDataOutput())\ndisplayWarp.SetNormal(0, 1, 0)\ndisplayWarp.SetScaleFactor(1e-06)\ndisplayWarp.Update()\ndisplayMapper = vtk.vtkPolyDataMapper()\ndisplayMapper.SetInputData(displayWarp.GetPolyDataOutput())\ndisplayMapper.SetScalarRange(output.GetScalarRange())\ndisplayActor = vtk.vtkActor()\ndisplayActor.SetMapper(displayMapper)\noutline = 
vtk.vtkStructuredGridOutlineFilter()\noutline.SetInputData(output)\noutlineMapper = vtk.vtkPolyDataMapper()\noutlineMapper.SetInputConnection(outline.GetOutputPort())\noutlineActor = vtk.vtkActor()\noutlineActor.SetMapper(outlineMapper)\noutlineActor.GetProperty().SetColor(0, 0, 0)\nren1.AddActor(outlineActor)\nren1.AddActor(probeActor)\nren1.SetBackground(1, 1, 1)\nren1.SetViewport(0, 0.25, 1, 1)\nren2.AddActor(displayActor)\nren2.SetBackground(0, 0, 0)\nren2.SetViewport(0, 0, 1, 0.25)\nrenWin.SetSize(300, 300)\nren1.ResetCamera()\ncam1 = ren1.GetActiveCamera()\ncam1.SetClippingRange(3.95297, 50)\ncam1.SetFocalPoint(8.88908, 0.595038, 29.3342)\ncam1.SetPosition(9.9, -26, 41)\ncam1.SetViewUp(0.060772, -0.319905, 0.945498)\nren2.ResetCamera()\ncam2 = ren2.GetActiveCamera()\ncam2.ParallelProjectionOn()\ncam2.SetParallelScale(0.15)\niren.Initialize()\n",
"step-4": "import vtk\nfrom vtk.util.misc import vtkGetDataRoot\nVTK_DATA_ROOT = vtkGetDataRoot()\nren1 = vtk.vtkRenderer()\nren2 = vtk.vtkRenderer()\nrenWin = vtk.vtkRenderWindow()\nrenWin.SetMultiSamples(0)\nrenWin.AddRenderer(ren1)\nrenWin.AddRenderer(ren2)\niren = vtk.vtkRenderWindowInteractor()\niren.SetRenderWindow(renWin)\npl3d = vtk.vtkMultiBlockPLOT3DReader()\npl3d.SetXYZFileName('' + str(VTK_DATA_ROOT) + '/Data/combxyz.bin')\npl3d.SetQFileName('' + str(VTK_DATA_ROOT) + '/Data/combq.bin')\npl3d.SetScalarFunctionNumber(110)\npl3d.SetVectorFunctionNumber(202)\npl3d.Update()\noutput = pl3d.GetOutput().GetBlock(0)\nprobeLine = vtk.vtkLineSource()\nprobeLine.SetPoint1(1, 1, 29)\nprobeLine.SetPoint2(16.5, 5, 31.7693)\nprobeLine.SetResolution(500)\nprobe = vtk.vtkProbeFilter()\nprobe.SetInputConnection(probeLine.GetOutputPort())\nprobe.SetSourceData(output)\nprobe.Update()\nprobeTube = vtk.vtkTubeFilter()\nprobeTube.SetInputData(probe.GetPolyDataOutput())\nprobeTube.SetNumberOfSides(5)\nprobeTube.SetRadius(0.05)\nprobeMapper = vtk.vtkPolyDataMapper()\nprobeMapper.SetInputConnection(probeTube.GetOutputPort())\nprobeMapper.SetScalarRange(output.GetScalarRange())\nprobeActor = vtk.vtkActor()\nprobeActor.SetMapper(probeMapper)\ndisplayLine = vtk.vtkLineSource()\ndisplayLine.SetPoint1(0, 0, 0)\ndisplayLine.SetPoint2(1, 0, 0)\ndisplayLine.SetResolution(probeLine.GetResolution())\ndisplayMerge = vtk.vtkMergeFilter()\ndisplayMerge.SetGeometryConnection(displayLine.GetOutputPort())\ndisplayMerge.SetScalarsData(probe.GetPolyDataOutput())\ndisplayMerge.Update()\ndisplayWarp = vtk.vtkWarpScalar()\ndisplayWarp.SetInputData(displayMerge.GetPolyDataOutput())\ndisplayWarp.SetNormal(0, 1, 0)\ndisplayWarp.SetScaleFactor(1e-06)\ndisplayWarp.Update()\ndisplayMapper = vtk.vtkPolyDataMapper()\ndisplayMapper.SetInputData(displayWarp.GetPolyDataOutput())\ndisplayMapper.SetScalarRange(output.GetScalarRange())\ndisplayActor = vtk.vtkActor()\ndisplayActor.SetMapper(displayMapper)\noutline 
= vtk.vtkStructuredGridOutlineFilter()\noutline.SetInputData(output)\noutlineMapper = vtk.vtkPolyDataMapper()\noutlineMapper.SetInputConnection(outline.GetOutputPort())\noutlineActor = vtk.vtkActor()\noutlineActor.SetMapper(outlineMapper)\noutlineActor.GetProperty().SetColor(0, 0, 0)\nren1.AddActor(outlineActor)\nren1.AddActor(probeActor)\nren1.SetBackground(1, 1, 1)\nren1.SetViewport(0, 0.25, 1, 1)\nren2.AddActor(displayActor)\nren2.SetBackground(0, 0, 0)\nren2.SetViewport(0, 0, 1, 0.25)\nrenWin.SetSize(300, 300)\nren1.ResetCamera()\ncam1 = ren1.GetActiveCamera()\ncam1.SetClippingRange(3.95297, 50)\ncam1.SetFocalPoint(8.88908, 0.595038, 29.3342)\ncam1.SetPosition(9.9, -26, 41)\ncam1.SetViewUp(0.060772, -0.319905, 0.945498)\nren2.ResetCamera()\ncam2 = ren2.GetActiveCamera()\ncam2.ParallelProjectionOn()\ncam2.SetParallelScale(0.15)\niren.Initialize()\n",
"step-5": "#!/usr/bin/env python\nimport vtk\nfrom vtk.util.misc import vtkGetDataRoot\nVTK_DATA_ROOT = vtkGetDataRoot()\n\n# Create the RenderWindow, Renderer and both Actors\n#\nren1 = vtk.vtkRenderer()\nren2 = vtk.vtkRenderer()\nrenWin = vtk.vtkRenderWindow()\nrenWin.SetMultiSamples(0)\nrenWin.AddRenderer(ren1)\nrenWin.AddRenderer(ren2)\niren = vtk.vtkRenderWindowInteractor()\niren.SetRenderWindow(renWin)\n# create pipeline\n#\npl3d = vtk.vtkMultiBlockPLOT3DReader()\npl3d.SetXYZFileName(\"\" + str(VTK_DATA_ROOT) + \"/Data/combxyz.bin\")\npl3d.SetQFileName(\"\" + str(VTK_DATA_ROOT) + \"/Data/combq.bin\")\npl3d.SetScalarFunctionNumber(110)\npl3d.SetVectorFunctionNumber(202)\npl3d.Update()\noutput = pl3d.GetOutput().GetBlock(0)\nprobeLine = vtk.vtkLineSource()\nprobeLine.SetPoint1(1,1,29)\nprobeLine.SetPoint2(16.5,5,31.7693)\nprobeLine.SetResolution(500)\nprobe = vtk.vtkProbeFilter()\nprobe.SetInputConnection(probeLine.GetOutputPort())\nprobe.SetSourceData(output)\nprobe.Update()\nprobeTube = vtk.vtkTubeFilter()\nprobeTube.SetInputData(probe.GetPolyDataOutput())\nprobeTube.SetNumberOfSides(5)\nprobeTube.SetRadius(.05)\nprobeMapper = vtk.vtkPolyDataMapper()\nprobeMapper.SetInputConnection(probeTube.GetOutputPort())\nprobeMapper.SetScalarRange(output.GetScalarRange())\nprobeActor = vtk.vtkActor()\nprobeActor.SetMapper(probeMapper)\ndisplayLine = vtk.vtkLineSource()\ndisplayLine.SetPoint1(0,0,0)\ndisplayLine.SetPoint2(1,0,0)\ndisplayLine.SetResolution(probeLine.GetResolution())\ndisplayMerge = vtk.vtkMergeFilter()\ndisplayMerge.SetGeometryConnection(displayLine.GetOutputPort())\ndisplayMerge.SetScalarsData(probe.GetPolyDataOutput())\ndisplayMerge.Update()\ndisplayWarp = vtk.vtkWarpScalar()\ndisplayWarp.SetInputData(displayMerge.GetPolyDataOutput())\ndisplayWarp.SetNormal(0,1,0)\ndisplayWarp.SetScaleFactor(.000001)\ndisplayWarp.Update()\ndisplayMapper = 
vtk.vtkPolyDataMapper()\ndisplayMapper.SetInputData(displayWarp.GetPolyDataOutput())\ndisplayMapper.SetScalarRange(output.GetScalarRange())\ndisplayActor = vtk.vtkActor()\ndisplayActor.SetMapper(displayMapper)\noutline = vtk.vtkStructuredGridOutlineFilter()\noutline.SetInputData(output)\noutlineMapper = vtk.vtkPolyDataMapper()\noutlineMapper.SetInputConnection(outline.GetOutputPort())\noutlineActor = vtk.vtkActor()\noutlineActor.SetMapper(outlineMapper)\noutlineActor.GetProperty().SetColor(0,0,0)\nren1.AddActor(outlineActor)\nren1.AddActor(probeActor)\nren1.SetBackground(1,1,1)\nren1.SetViewport(0,.25,1,1)\nren2.AddActor(displayActor)\nren2.SetBackground(0,0,0)\nren2.SetViewport(0,0,1,.25)\nrenWin.SetSize(300,300)\nren1.ResetCamera()\ncam1 = ren1.GetActiveCamera()\ncam1.SetClippingRange(3.95297,50)\ncam1.SetFocalPoint(8.88908,0.595038,29.3342)\ncam1.SetPosition(9.9,-26,41)\ncam1.SetViewUp(0.060772,-0.319905,0.945498)\nren2.ResetCamera()\ncam2 = ren2.GetActiveCamera()\ncam2.ParallelProjectionOn()\ncam2.SetParallelScale(.15)\niren.Initialize()\n# render the image\n#\n# prevent the tk window from showing up then start the event loop\n# --- end of script --\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Ui_Login(QtWidgets.QDialog):
def __init__(self):
super(Ui_Login, self).__init__()
uic.loadUi('login.ui', self)
self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')
self.icon.setStyleSheet('image: url(sorce/roundicon.png)')
self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')
self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')
self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')
self.daftarButton.clicked.connect(self.forDaftar)
self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')
self.loginButton.clicked.connect(self.testButton)
self.show()
def testButton(self):
user = self.inputUsername.text()
pw = self.inputPassword.text()
con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
host='localhost', port=3306, autocommit=True)
cur = con.cursor()
sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'
data = cur.execute(sql, (user, pw))
if len(cur.fetchall()) > 0:
self.close()
super(Ui_Login, self).__init__()
uic.loadUi('booking.ui', self)
self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')
self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')
self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,
'namapembayar')
self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')
self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')
self.bBooking.clicked.connect(self.bookingFunc)
self.show()
<|reserved_special_token_0|>
def daftarFunc(self):
user = self.dUsername.text()
pw = self.dPassword.text()
con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
host='localhost', port=3306, autocommit=True)
cur = con.cursor()
insert = user, pw
sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)
data = cur.execute(sql)
self.close()
self.__init__()
def bookingFunc(self):
nama = self.bNamaPembayar.text()
nominal = self.bNominalDp.text()
con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
host='localhost', port=3306, autocommit=True)
cur = con.cursor()
insert = nama, nominal
sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(
insert)
data = cur.execute(sql)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_Login(QtWidgets.QDialog):
    """Login dialog for the futsal-booking app.

    The same QDialog instance is reused for every screen: each transition
    reloads the dialog from a different Qt Designer .ui file
    (login.ui -> daftar.ui / booking.ui).
    """

    def __init__(self):
        """Load the login screen and wire up its buttons."""
        super(Ui_Login, self).__init__()
        uic.loadUi('login.ui', self)
        self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')
        self.icon.setStyleSheet('image: url(sorce/roundicon.png)')
        # Credential fields defined in the .ui form.
        self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        # "Daftar" (register) opens the sign-up form; "Login" validates.
        self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')
        self.daftarButton.clicked.connect(self.forDaftar)
        self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')
        self.loginButton.clicked.connect(self.testButton)
        self.show()

    def testButton(self):
        """Validate the entered credentials and open the booking screen."""
        user = self.inputUsername.text()
        pw = self.inputPassword.text()
        # NOTE(review): a new root/empty-password connection is opened on
        # every click and never closed; passwords are stored in plain text.
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        # Parameterised query -- safe against SQL injection.
        sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'
        data = cur.execute(sql, (user, pw))
        if len(cur.fetchall()) > 0:
            # Credentials matched: swap this dialog over to the booking form.
            self.close()
            super(Ui_Login, self).__init__()
            uic.loadUi('booking.ui', self)
            self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')
            self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')
            self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,
                'namapembayar')
            self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')
            self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')
            self.bBooking.clicked.connect(self.bookingFunc)
            self.show()

    def forDaftar(self):
        """Switch the dialog to the registration (daftar) form."""
        self.close()
        super(Ui_Login, self).__init__()
        uic.loadUi('daftar.ui', self)
        self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')
        self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')
        self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')
        self.dDaftarButton.clicked.connect(self.daftarFunc)
        self.show()

    def daftarFunc(self):
        """Insert the new admin account, then return to the login screen."""
        user = self.dUsername.text()
        pw = self.dPassword.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = user, pw
        # NOTE(review): SQL built by concatenating a Python tuple repr --
        # breaks on quotes and is vulnerable to SQL injection; a
        # parameterised INSERT should be used instead.
        sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)
        data = cur.execute(sql)
        self.close()
        self.__init__()

    def bookingFunc(self):
        """Record a booking down-payment (payer name + DP amount)."""
        nama = self.bNamaPembayar.text()
        nominal = self.bNominalDp.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = nama, nominal
        # NOTE(review): same string-concatenated INSERT as daftarFunc --
        # vulnerable to SQL injection.
        sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(
            insert)
        data = cur.execute(sql)
<|reserved_special_token_0|>
app.exec_()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_Login(QtWidgets.QDialog):
    """Login dialog for the futsal-booking app.

    The same QDialog instance is reused for every screen: each transition
    reloads the dialog from a different Qt Designer .ui file
    (login.ui -> daftar.ui / booking.ui).
    """

    def __init__(self):
        """Load the login screen and wire up its buttons."""
        super(Ui_Login, self).__init__()
        uic.loadUi('login.ui', self)
        self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')
        self.icon.setStyleSheet('image: url(sorce/roundicon.png)')
        # Credential fields defined in the .ui form.
        self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        # "Daftar" (register) opens the sign-up form; "Login" validates.
        self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')
        self.daftarButton.clicked.connect(self.forDaftar)
        self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')
        self.loginButton.clicked.connect(self.testButton)
        self.show()

    def testButton(self):
        """Validate the entered credentials and open the booking screen."""
        user = self.inputUsername.text()
        pw = self.inputPassword.text()
        # NOTE(review): a new root/empty-password connection is opened on
        # every click and never closed; passwords are stored in plain text.
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        # Parameterised query -- safe against SQL injection.
        sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'
        data = cur.execute(sql, (user, pw))
        if len(cur.fetchall()) > 0:
            # Credentials matched: swap this dialog over to the booking form.
            self.close()
            super(Ui_Login, self).__init__()
            uic.loadUi('booking.ui', self)
            self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')
            self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')
            self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,
                'namapembayar')
            self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')
            self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')
            self.bBooking.clicked.connect(self.bookingFunc)
            self.show()

    def forDaftar(self):
        """Switch the dialog to the registration (daftar) form."""
        self.close()
        super(Ui_Login, self).__init__()
        uic.loadUi('daftar.ui', self)
        self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')
        self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')
        self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')
        self.dDaftarButton.clicked.connect(self.daftarFunc)
        self.show()

    def daftarFunc(self):
        """Insert the new admin account, then return to the login screen."""
        user = self.dUsername.text()
        pw = self.dPassword.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = user, pw
        # NOTE(review): SQL built by concatenating a Python tuple repr --
        # breaks on quotes and is vulnerable to SQL injection; a
        # parameterised INSERT should be used instead.
        sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)
        data = cur.execute(sql)
        self.close()
        self.__init__()

    def bookingFunc(self):
        """Record a booking down-payment (payer name + DP amount)."""
        nama = self.bNamaPembayar.text()
        nominal = self.bNominalDp.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = nama, nominal
        # NOTE(review): same string-concatenated INSERT as daftarFunc --
        # vulnerable to SQL injection.
        sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(
            insert)
        data = cur.execute(sql)
# Create the Qt application, show the login dialog, and run the event loop.
app = QtWidgets.QApplication(sys.argv)
window = Ui_Login()
app.exec_()
<|reserved_special_token_1|>
from PyQt5 import QtWidgets, uic
import sys
import pymysql
import mysql.connector
class Ui_Login(QtWidgets.QDialog):
    """Login dialog for the futsal-booking app.

    The same QDialog instance is reused for every screen: each transition
    reloads the dialog from a different Qt Designer .ui file
    (login.ui -> daftar.ui / booking.ui).
    """

    def __init__(self):
        """Load the login screen and wire up its buttons."""
        super(Ui_Login, self).__init__()
        uic.loadUi('login.ui', self)
        self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')
        self.icon.setStyleSheet('image: url(sorce/roundicon.png)')
        # Credential fields defined in the .ui form.
        self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        # "Daftar" (register) opens the sign-up form; "Login" validates.
        self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')
        self.daftarButton.clicked.connect(self.forDaftar)
        self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')
        self.loginButton.clicked.connect(self.testButton)
        self.show()

    def testButton(self):
        """Validate the entered credentials and open the booking screen."""
        user = self.inputUsername.text()
        pw = self.inputPassword.text()
        # NOTE(review): a new root/empty-password connection is opened on
        # every click and never closed; passwords are stored in plain text.
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        # Parameterised query -- safe against SQL injection.
        sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'
        data = cur.execute(sql, (user, pw))
        if len(cur.fetchall()) > 0:
            # Credentials matched: swap this dialog over to the booking form.
            self.close()
            super(Ui_Login, self).__init__()
            uic.loadUi('booking.ui', self)
            self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')
            self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')
            self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,
                'namapembayar')
            self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')
            self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')
            self.bBooking.clicked.connect(self.bookingFunc)
            self.show()

    def forDaftar(self):
        """Switch the dialog to the registration (daftar) form."""
        self.close()
        super(Ui_Login, self).__init__()
        uic.loadUi('daftar.ui', self)
        self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')
        self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')
        self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')
        self.dDaftarButton.clicked.connect(self.daftarFunc)
        self.show()

    def daftarFunc(self):
        """Insert the new admin account, then return to the login screen."""
        user = self.dUsername.text()
        pw = self.dPassword.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = user, pw
        # NOTE(review): SQL built by concatenating a Python tuple repr --
        # breaks on quotes and is vulnerable to SQL injection; a
        # parameterised INSERT should be used instead.
        sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)
        data = cur.execute(sql)
        self.close()
        self.__init__()

    def bookingFunc(self):
        """Record a booking down-payment (payer name + DP amount)."""
        nama = self.bNamaPembayar.text()
        nominal = self.bNominalDp.text()
        con = pymysql.connect(db='bookingfutsal', user='root', passwd='',
            host='localhost', port=3306, autocommit=True)
        cur = con.cursor()
        insert = nama, nominal
        # NOTE(review): same string-concatenated INSERT as daftarFunc --
        # vulnerable to SQL injection.
        sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(
            insert)
        data = cur.execute(sql)
# Create the Qt application, show the login dialog, and run the event loop.
app = QtWidgets.QApplication(sys.argv)
window = Ui_Login()
app.exec_()
<|reserved_special_token_1|>
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
from PyQt5 import QtWidgets, uic
import sys
import pymysql
import mysql.connector
class Ui_Login(QtWidgets.QDialog):
    """Login dialog for the futsal-booking app.

    The same QDialog instance is reused for every screen: each transition
    reloads the dialog from a different Qt Designer .ui file
    (login.ui -> daftar.ui / booking.ui).
    """

    def __init__(self):
        """Load the login screen and wire up its buttons."""
        super(Ui_Login, self).__init__()
        uic.loadUi('login.ui', self)
        self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')
        self.icon.setStyleSheet("image: url(sorce/roundicon.png)")
        # Credential fields defined in the .ui form.
        self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        # "Daftar" (register) opens the sign-up form; "Login" validates.
        self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')
        self.daftarButton.clicked.connect(self.forDaftar)
        self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')
        self.loginButton.clicked.connect(self.testButton)
        self.show()

    def _connect(self):
        """Open a connection to the bookingfutsal database.

        NOTE(review): root with an empty password is development-only;
        move the credentials out of the source before deployment.
        """
        return pymysql.connect(db='bookingfutsal',
                               user='root',
                               passwd='',
                               host='localhost',
                               port=3306,
                               autocommit=True)

    def _execute(self, sql, params):
        """Run one parameterised statement and return the fetched rows.

        The connection is always closed (the original code leaked one
        open connection per button click).
        """
        con = self._connect()
        try:
            cur = con.cursor()
            cur.execute(sql, params)
            return cur.fetchall()
        finally:
            con.close()

    def testButton(self):
        """Validate the entered credentials and open the booking screen."""
        user = self.inputUsername.text()
        pw = self.inputPassword.text()
        rows = self._execute(
            "SELECT * FROM admin WHERE username=%s AND password=%s",
            (user, pw))
        if len(rows) > 0:
            # Credentials matched: swap this dialog over to the booking form.
            self.close()
            super(Ui_Login, self).__init__()
            uic.loadUi('booking.ui', self)
            self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')
            self.gambar.setStyleSheet("background-image: url(sorce/lp2.jpg)")
            self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit, 'namapembayar')
            self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')
            self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')
            self.bBooking.clicked.connect(self.bookingFunc)
            self.show()

    def forDaftar(self):
        """Switch the dialog to the registration (daftar) form."""
        self.close()
        super(Ui_Login, self).__init__()
        uic.loadUi('daftar.ui', self)
        self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')
        self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')
        self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')
        self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')
        self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')
        self.dDaftarButton.clicked.connect(self.daftarFunc)
        self.show()

    def daftarFunc(self):
        """Insert the new admin account, then return to the login screen."""
        user = self.dUsername.text()
        pw = self.dPassword.text()
        # Parameterised INSERT: the original concatenated str((user, pw))
        # into the SQL, which broke on quotes and allowed SQL injection.
        self._execute(
            "INSERT INTO admin (username, password) VALUES (%s, %s)",
            (user, pw))
        self.close()
        self.__init__()
        # booking.Ui_Booking().Boking()
        # koneksi.Koneksi()

    def bookingFunc(self):
        """Record a booking down-payment (payer name + DP amount)."""
        nama = self.bNamaPembayar.text()
        nominal = self.bNominalDp.text()
        # Parameterised INSERT (same injection fix as daftarFunc).
        self._execute(
            "INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES (%s, %s)",
            (nama, nominal))
if __name__ == '__main__':
    # Guard so importing this module does not start the GUI.
    app = QtWidgets.QApplication(sys.argv)
    window = Ui_Login()  # keep a reference so the dialog is not GC'd
    sys.exit(app.exec_())  # propagate Qt's exit status to the shell
|
flexible
|
{
"blob_id": "0ff6e22f8704a0c6c0ffff3c53761b9d3a531b6d",
"index": 683,
"step-1": "<mask token>\n\n\nclass Ui_Login(QtWidgets.QDialog):\n\n def __init__(self):\n super(Ui_Login, self).__init__()\n uic.loadUi('login.ui', self)\n self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')\n self.icon.setStyleSheet('image: url(sorce/roundicon.png)')\n self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')\n self.daftarButton.clicked.connect(self.forDaftar)\n self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')\n self.loginButton.clicked.connect(self.testButton)\n self.show()\n\n def testButton(self):\n user = self.inputUsername.text()\n pw = self.inputPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'\n data = cur.execute(sql, (user, pw))\n if len(cur.fetchall()) > 0:\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('booking.ui', self)\n self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')\n self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')\n self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,\n 'namapembayar')\n self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')\n self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')\n self.bBooking.clicked.connect(self.bookingFunc)\n self.show()\n <mask token>\n\n def daftarFunc(self):\n user = self.dUsername.text()\n pw = self.dPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = user, pw\n sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)\n data = cur.execute(sql)\n self.close()\n self.__init__()\n\n def bookingFunc(self):\n nama = self.bNamaPembayar.text()\n nominal = 
self.bNominalDp.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = nama, nominal\n sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(\n insert)\n data = cur.execute(sql)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Ui_Login(QtWidgets.QDialog):\n\n def __init__(self):\n super(Ui_Login, self).__init__()\n uic.loadUi('login.ui', self)\n self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')\n self.icon.setStyleSheet('image: url(sorce/roundicon.png)')\n self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')\n self.daftarButton.clicked.connect(self.forDaftar)\n self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')\n self.loginButton.clicked.connect(self.testButton)\n self.show()\n\n def testButton(self):\n user = self.inputUsername.text()\n pw = self.inputPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'\n data = cur.execute(sql, (user, pw))\n if len(cur.fetchall()) > 0:\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('booking.ui', self)\n self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')\n self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')\n self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,\n 'namapembayar')\n self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')\n self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')\n self.bBooking.clicked.connect(self.bookingFunc)\n self.show()\n\n def forDaftar(self):\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('daftar.ui', self)\n self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')\n self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')\n self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')\n 
self.dDaftarButton.clicked.connect(self.daftarFunc)\n self.show()\n\n def daftarFunc(self):\n user = self.dUsername.text()\n pw = self.dPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = user, pw\n sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)\n data = cur.execute(sql)\n self.close()\n self.__init__()\n\n def bookingFunc(self):\n nama = self.bNamaPembayar.text()\n nominal = self.bNominalDp.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = nama, nominal\n sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(\n insert)\n data = cur.execute(sql)\n\n\n<mask token>\napp.exec_()\n",
"step-3": "<mask token>\n\n\nclass Ui_Login(QtWidgets.QDialog):\n\n def __init__(self):\n super(Ui_Login, self).__init__()\n uic.loadUi('login.ui', self)\n self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')\n self.icon.setStyleSheet('image: url(sorce/roundicon.png)')\n self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')\n self.daftarButton.clicked.connect(self.forDaftar)\n self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')\n self.loginButton.clicked.connect(self.testButton)\n self.show()\n\n def testButton(self):\n user = self.inputUsername.text()\n pw = self.inputPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'\n data = cur.execute(sql, (user, pw))\n if len(cur.fetchall()) > 0:\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('booking.ui', self)\n self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')\n self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')\n self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,\n 'namapembayar')\n self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')\n self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')\n self.bBooking.clicked.connect(self.bookingFunc)\n self.show()\n\n def forDaftar(self):\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('daftar.ui', self)\n self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')\n self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')\n self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')\n 
self.dDaftarButton.clicked.connect(self.daftarFunc)\n self.show()\n\n def daftarFunc(self):\n user = self.dUsername.text()\n pw = self.dPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = user, pw\n sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)\n data = cur.execute(sql)\n self.close()\n self.__init__()\n\n def bookingFunc(self):\n nama = self.bNamaPembayar.text()\n nominal = self.bNominalDp.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = nama, nominal\n sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(\n insert)\n data = cur.execute(sql)\n\n\napp = QtWidgets.QApplication(sys.argv)\nwindow = Ui_Login()\napp.exec_()\n",
"step-4": "from PyQt5 import QtWidgets, uic\nimport sys\nimport pymysql\nimport mysql.connector\n\n\nclass Ui_Login(QtWidgets.QDialog):\n\n def __init__(self):\n super(Ui_Login, self).__init__()\n uic.loadUi('login.ui', self)\n self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')\n self.icon.setStyleSheet('image: url(sorce/roundicon.png)')\n self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')\n self.daftarButton.clicked.connect(self.forDaftar)\n self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')\n self.loginButton.clicked.connect(self.testButton)\n self.show()\n\n def testButton(self):\n user = self.inputUsername.text()\n pw = self.inputPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n sql = 'SELECT * FROM admin WHERE username=%s AND password=%s'\n data = cur.execute(sql, (user, pw))\n if len(cur.fetchall()) > 0:\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('booking.ui', self)\n self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')\n self.gambar.setStyleSheet('background-image: url(sorce/lp2.jpg)')\n self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit,\n 'namapembayar')\n self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')\n self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')\n self.bBooking.clicked.connect(self.bookingFunc)\n self.show()\n\n def forDaftar(self):\n self.close()\n super(Ui_Login, self).__init__()\n uic.loadUi('daftar.ui', self)\n self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')\n self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')\n self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')\n self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')\n self.dDaftarButton = 
self.findChild(QtWidgets.QPushButton, 'daftar')\n self.dDaftarButton.clicked.connect(self.daftarFunc)\n self.show()\n\n def daftarFunc(self):\n user = self.dUsername.text()\n pw = self.dPassword.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = user, pw\n sql = 'INSERT INTO admin (username, password) VALUES' + str(insert)\n data = cur.execute(sql)\n self.close()\n self.__init__()\n\n def bookingFunc(self):\n nama = self.bNamaPembayar.text()\n nominal = self.bNominalDp.text()\n con = pymysql.connect(db='bookingfutsal', user='root', passwd='',\n host='localhost', port=3306, autocommit=True)\n cur = con.cursor()\n insert = nama, nominal\n sql = 'INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES' + str(\n insert)\n data = cur.execute(sql)\n\n\napp = QtWidgets.QApplication(sys.argv)\nwindow = Ui_Login()\napp.exec_()\n",
"step-5": "# This is a sample Python script.\r\n\r\n# Press Shift+F10 to execute it or replace it with your code.\r\n# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.\r\n\r\nfrom PyQt5 import QtWidgets, uic\r\nimport sys\r\nimport pymysql\r\n\r\nimport mysql.connector\r\n\r\nclass Ui_Login(QtWidgets.QDialog):\r\n def __init__(self):\r\n super(Ui_Login, self).__init__()\r\n uic.loadUi('login.ui', self)\r\n\r\n self.icon = self.findChild(QtWidgets.QLabel, 'ilogin')\r\n self.icon.setStyleSheet(\"image: url(sorce/roundicon.png)\")\r\n\r\n self.inputUsername = self.findChild(QtWidgets.QLineEdit, 'username')\r\n self.inputPassword = self.findChild(QtWidgets.QLineEdit, 'password')\r\n\r\n self.daftarButton = self.findChild(QtWidgets.QPushButton, 'daftarBtn')\r\n self.daftarButton.clicked.connect(self.forDaftar)\r\n\r\n self.loginButton = self.findChild(QtWidgets.QPushButton, 'login_2')\r\n self.loginButton.clicked.connect(self.testButton)\r\n\r\n self.show()\r\n\r\n def testButton(self):\r\n user = self.inputUsername.text()\r\n pw = self.inputPassword.text()\r\n con = pymysql.connect(db='bookingfutsal',\r\n user='root',\r\n passwd='',\r\n host='localhost',\r\n port=3306,\r\n autocommit=True)\r\n cur = con.cursor()\r\n sql = \"SELECT * FROM admin WHERE username=%s AND password=%s\"\r\n data = cur.execute(sql, (user, pw))\r\n if(len(cur.fetchall()) > 0):\r\n self.close()\r\n\r\n super(Ui_Login, self).__init__()\r\n uic.loadUi('booking.ui', self)\r\n\r\n self.gambar = self.findChild(QtWidgets.QLabel, 'piclap')\r\n self.gambar.setStyleSheet(\"background-image: url(sorce/lp2.jpg)\")\r\n\r\n self.bNamaPembayar = self.findChild(QtWidgets.QLineEdit, 'namapembayar')\r\n self.bNominalDp = self.findChild(QtWidgets.QLineEdit, 'nominaldp')\r\n\r\n self.bBooking = self.findChild(QtWidgets.QPushButton, 'booking')\r\n self.bBooking.clicked.connect(self.bookingFunc)\r\n\r\n self.show()\r\n\r\n\r\n def forDaftar(self):\r\n 
self.close()\r\n\r\n super(Ui_Login, self).__init__()\r\n uic.loadUi('daftar.ui', self)\r\n\r\n self.dUsername = self.findChild(QtWidgets.QLineEdit, 'username')\r\n self.dPassword = self.findChild(QtWidgets.QLineEdit, 'password')\r\n self.dAlamat = self.findChild(QtWidgets.QLineEdit, 'alamat')\r\n self.dNoTelpU = self.findChild(QtWidgets.QLineEdit, 'notelepon')\r\n\r\n self.dDaftarButton = self.findChild(QtWidgets.QPushButton, 'daftar')\r\n self.dDaftarButton.clicked.connect(self.daftarFunc)\r\n\r\n self.show()\r\n\r\n def daftarFunc(self):\r\n user = self.dUsername.text()\r\n pw = self.dPassword.text()\r\n con = pymysql.connect(db='bookingfutsal',\r\n user='root',\r\n passwd='',\r\n host='localhost',\r\n port=3306,\r\n autocommit=True)\r\n cur = con.cursor()\r\n insert = (user, pw)\r\n sql = \"INSERT INTO admin (username, password) VALUES\" + str(insert)\r\n data = cur.execute(sql)\r\n\r\n self.close()\r\n\r\n self.__init__();\r\n\r\n# booking.Ui_Booking().Boking()\r\n# koneksi.Koneksi()\r\n\r\n def bookingFunc(self):\r\n nama = self.bNamaPembayar.text()\r\n nominal = self.bNominalDp.text()\r\n\r\n con = pymysql.connect(db='bookingfutsal',\r\n user='root',\r\n passwd='',\r\n host='localhost',\r\n port=3306,\r\n autocommit=True)\r\n cur = con.cursor()\r\n insert = (nama, nominal)\r\n sql = \"INSERT INTO pembayaran (atasNama, namaPembayaran) VALUES\" + str(insert)\r\n data = cur.execute(sql)\r\n\r\napp = QtWidgets.QApplication(sys.argv)\r\nwindow = Ui_Login()\r\napp.exec_()",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
# Copyright 2016-2021 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Meta-class for creating regression tests.
#
import reframe.core.namespaces as namespaces
import reframe.core.parameters as parameters
import reframe.core.variables as variables
from reframe.core.exceptions import ReframeSyntaxError
from reframe.core.hooks import HookRegistry
class RegressionTestMeta(type):
    '''Meta-class for creating regression tests.

    It installs a custom class-body namespace (:class:`MetaNamespace`) that
    routes ``variable`` and ``parameter`` declarations into dedicated local
    sub-namespaces, and after the class body has executed it builds the
    test's variable and parameter spaces, registers the pipeline hooks and
    enforces that final methods are not overridden.
    '''

    class MetaNamespace(namespaces.LocalNamespace):
        '''Custom namespace to control the cls attribute assignment.

        Regular Python class attributes can be overridden by either
        parameters or variables respecting the order of execution.
        A variable or a parameter may not be declared more than once in the
        same class body. Overriding a variable with a parameter or the other
        way around has an undefined behaviour. A variable's value may be
        updated multiple times within the same class body. A parameter's
        value may not be updated more than once within the same class body.
        '''

        def __setitem__(self, key, value):
            '''Route assignments to the proper sub-namespace.

            :class:`~reframe.core.variables.TestVar` values go into
            ``_rfm_local_var_space``, :class:`~reframe.core.parameters.TestParam`
            values into ``_rfm_local_param_space``; everything else is a
            regular class attribute.
            '''
            if isinstance(value, variables.TestVar):
                # Insert the attribute in the variable namespace
                self['_rfm_local_var_space'][key] = value
                value.__set_name__(self, key)

                # Override the regular class attribute (if present)
                self._namespace.pop(key, None)
            elif isinstance(value, parameters.TestParam):
                # Insert the attribute in the parameter namespace
                self['_rfm_local_param_space'][key] = value

                # Override the regular class attribute (if present)
                self._namespace.pop(key, None)
            elif key in self['_rfm_local_param_space']:
                # A parameter's value may be set at most once per class body.
                raise ValueError(
                    f'cannot override parameter {key!r}'
                )
            else:
                # Insert the items manually to override the namespace clash
                # check from the base namespace.
                self._namespace[key] = value

        def __getitem__(self, key):
            '''Expose and control access to the local namespaces.

            Variables may only be retrieved if their value has been previously
            set. Accessing a parameter in the class body is disallowed (the
            actual test parameter is set during the class instantiation).
            '''
            try:
                return super().__getitem__(key)
            except KeyError as err:
                try:
                    # Handle variable access
                    return self['_rfm_local_var_space'][key]
                except KeyError:
                    # Handle parameter access
                    if key in self['_rfm_local_param_space']:
                        raise ValueError(
                            'accessing a test parameter from the class '
                            'body is disallowed'
                        ) from None
                    else:
                        # As a last resort, look if key is a variable in
                        # any of the base classes. If so, make its value
                        # available in the current class' namespace.
                        for b in self['_rfm_bases']:
                            if key in b._rfm_var_space:
                                # Store a deep-copy of the variable's
                                # value and return.
                                v = b._rfm_var_space[key].default_value
                                self._namespace[key] = v
                                return self._namespace[key]

                        # If 'key' is neither a variable nor a parameter,
                        # raise the exception from the base __getitem__.
                        raise err from None

    @classmethod
    def __prepare__(metacls, name, bases, **kwargs):
        '''Prepare the custom class-body namespace.

        Seeds it with the directive names (``parameter``, ``variable``,
        ``required``) and the bookkeeping sub-namespaces used by
        :class:`MetaNamespace`.
        '''
        namespace = super().__prepare__(name, bases, **kwargs)

        # Keep reference to the bases inside the namespace
        namespace['_rfm_bases'] = [
            b for b in bases if hasattr(b, '_rfm_var_space')
        ]

        # Regression test parameter space defined at the class level
        local_param_space = namespaces.LocalNamespace()
        namespace['_rfm_local_param_space'] = local_param_space

        # Directive to insert a regression test parameter directly in the
        # class body as: `P0 = parameter([0,1,2,3])`.
        namespace['parameter'] = parameters.TestParam

        # Regression test var space defined at the class level
        local_var_space = namespaces.LocalNamespace()
        namespace['_rfm_local_var_space'] = local_var_space

        # Directives to add/modify a regression test variable
        namespace['variable'] = variables.TestVar
        namespace['required'] = variables.Undefined
        return metacls.MetaNamespace(namespace)

    def __new__(metacls, name, bases, namespace, **kwargs):
        '''Create the class from a plain dict view of the custom namespace.'''
        return super().__new__(metacls, name, bases, dict(namespace), **kwargs)

    def __init__(cls, name, bases, namespace, **kwargs):
        '''Finalize the new test class.

        Builds the variable and parameter spaces, records the attribute
        names in use, collects the pipeline hooks from this class and its
        bases, and rejects overrides of final methods (unless the class is
        flagged with ``_rfm_special_test``).
        '''
        super().__init__(name, bases, namespace, **kwargs)

        # Create a set with the attribute names already in use.
        cls._rfm_dir = set()
        for base in bases:
            if hasattr(base, '_rfm_dir'):
                cls._rfm_dir.update(base._rfm_dir)

        used_attribute_names = set(cls._rfm_dir)

        # Build the var space and extend the target namespace
        variables.VarSpace(cls, used_attribute_names)
        used_attribute_names.update(cls._rfm_var_space.vars)

        # Build the parameter space
        parameters.ParamSpace(cls, used_attribute_names)

        # Update used names set with the local __dict__
        cls._rfm_dir.update(cls.__dict__)

        # Set up the hooks for the pipeline stages based on the _rfm_attach
        # attribute; all dependencies will be resolved first in the post-setup
        # phase if not assigned elsewhere
        hooks = HookRegistry.create(namespace)
        for b in bases:
            if hasattr(b, '_rfm_pipeline_hooks'):
                hooks.update(getattr(b, '_rfm_pipeline_hooks'))

        cls._rfm_pipeline_hooks = hooks  # HookRegistry(local_hooks)
        cls._final_methods = {v.__name__ for v in namespace.values()
                              if hasattr(v, '_rfm_final')}

        # Add the final functions from its parents
        cls._final_methods.update(*(b._final_methods for b in bases
                                    if hasattr(b, '_final_methods')))

        # Special (framework-internal) tests are allowed to override finals.
        if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:
            return

        for v in namespace.values():
            for b in bases:
                if not hasattr(b, '_final_methods'):
                    continue

                if callable(v) and v.__name__ in b._final_methods:
                    msg = (f"'{cls.__qualname__}.{v.__name__}' attempts to "
                           f"override final method "
                           f"'{b.__qualname__}.{v.__name__}'; "
                           f"you should use the pipeline hooks instead")
                    raise ReframeSyntaxError(msg)

    def __call__(cls, *args, **kwargs):
        '''Intercept reframe-specific constructor arguments.

        When registering a regression test using any supported decorator,
        this decorator may pass additional arguments to the class constructor
        to perform specific reframe-internal actions. This gives extra control
        over the class instantiation process, allowing reframe to instantiate
        the regression test class differently if this class was registered or
        not (e.g. when deep-copying a regression test object). These internal
        arguments must be intercepted before the object initialization, since
        these would otherwise affect the __init__ method's signature, and these
        internal mechanisms must be fully transparent to the user.
        '''
        obj = cls.__new__(cls, *args, **kwargs)

        # Intercept constructor arguments
        kwargs.pop('_rfm_use_params', None)
        obj.__init__(*args, **kwargs)
        return obj

    def __getattr__(cls, name):
        '''Attribute lookup method for the MetaNamespace.

        This metaclass implements a custom namespace, where built-in `variable`
        and `parameter` types are stored in their own sub-namespaces (see
        :class:`reframe.core.meta.RegressionTestMeta.MetaNamespace`).
        This method will perform an attribute lookup on these sub-namespaces if
        a call to the default `__getattribute__` method fails to retrieve the
        requested class attribute.
        '''
        try:
            return cls._rfm_var_space.vars[name]
        except KeyError:
            try:
                return cls._rfm_param_space.params[name]
            except KeyError:
                raise AttributeError(
                    f'class {cls.__qualname__!r} has no attribute {name!r}'
                ) from None

    @property
    def param_space(cls):
        '''The test's parameter space (read-only view).'''
        # Make the parameter space available as read-only
        return cls._rfm_param_space

    def is_abstract(cls):
        '''Check if the class is an abstract test.

        This is the case when some parameters are undefined, which results in
        the length of the parameter space being 0.

        :return: bool indicating whether the test has undefined parameters.

        :meta private:
        '''
        return len(cls.param_space) == 0
|
normal
|
{
"blob_id": "e754a24fc9c965c50f7fa12036c884a1a54cc29d",
"index": 6853,
"step-1": "<mask token>\n\n\nclass RegressionTestMeta(type):\n\n\n class MetaNamespace(namespaces.LocalNamespace):\n \"\"\"Custom namespace to control the cls attribute assignment.\n\n Regular Python class attributes can be overriden by either\n parameters or variables respecting the order of execution.\n A variable or a parameter may not be declared more than once in the\n same class body. Overriding a variable with a parameter or the other\n way around has an undefined behaviour. A variable's value may be\n updated multiple times within the same class body. A parameter's\n value may not be updated more than once within the same class body.\n \"\"\"\n\n def __setitem__(self, key, value):\n if isinstance(value, variables.TestVar):\n self['_rfm_local_var_space'][key] = value\n value.__set_name__(self, key)\n self._namespace.pop(key, None)\n elif isinstance(value, parameters.TestParam):\n self['_rfm_local_param_space'][key] = value\n self._namespace.pop(key, None)\n elif key in self['_rfm_local_param_space']:\n raise ValueError(f'cannot override parameter {key!r}')\n else:\n self._namespace[key] = value\n\n def __getitem__(self, key):\n \"\"\"Expose and control access to the local namespaces.\n\n Variables may only be retrieved if their value has been previously\n set. 
Accessing a parameter in the class body is disallowed (the\n actual test parameter is set during the class instantiation).\n \"\"\"\n try:\n return super().__getitem__(key)\n except KeyError as err:\n try:\n return self['_rfm_local_var_space'][key]\n except KeyError:\n if key in self['_rfm_local_param_space']:\n raise ValueError(\n 'accessing a test parameter from the class body is disallowed'\n ) from None\n else:\n for b in self['_rfm_bases']:\n if key in b._rfm_var_space:\n v = b._rfm_var_space[key].default_value\n self._namespace[key] = v\n return self._namespace[key]\n raise err from None\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwargs):\n namespace = super().__prepare__(name, bases, **kwargs)\n namespace['_rfm_bases'] = [b for b in bases if hasattr(b,\n '_rfm_var_space')]\n local_param_space = namespaces.LocalNamespace()\n namespace['_rfm_local_param_space'] = local_param_space\n namespace['parameter'] = parameters.TestParam\n local_var_space = namespaces.LocalNamespace()\n namespace['_rfm_local_var_space'] = local_var_space\n namespace['variable'] = variables.TestVar\n namespace['required'] = variables.Undefined\n return metacls.MetaNamespace(namespace)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RegressionTestMeta(type):\n\n\n class MetaNamespace(namespaces.LocalNamespace):\n \"\"\"Custom namespace to control the cls attribute assignment.\n\n Regular Python class attributes can be overriden by either\n parameters or variables respecting the order of execution.\n A variable or a parameter may not be declared more than once in the\n same class body. Overriding a variable with a parameter or the other\n way around has an undefined behaviour. A variable's value may be\n updated multiple times within the same class body. A parameter's\n value may not be updated more than once within the same class body.\n \"\"\"\n\n def __setitem__(self, key, value):\n if isinstance(value, variables.TestVar):\n self['_rfm_local_var_space'][key] = value\n value.__set_name__(self, key)\n self._namespace.pop(key, None)\n elif isinstance(value, parameters.TestParam):\n self['_rfm_local_param_space'][key] = value\n self._namespace.pop(key, None)\n elif key in self['_rfm_local_param_space']:\n raise ValueError(f'cannot override parameter {key!r}')\n else:\n self._namespace[key] = value\n\n def __getitem__(self, key):\n \"\"\"Expose and control access to the local namespaces.\n\n Variables may only be retrieved if their value has been previously\n set. 
Accessing a parameter in the class body is disallowed (the\n actual test parameter is set during the class instantiation).\n \"\"\"\n try:\n return super().__getitem__(key)\n except KeyError as err:\n try:\n return self['_rfm_local_var_space'][key]\n except KeyError:\n if key in self['_rfm_local_param_space']:\n raise ValueError(\n 'accessing a test parameter from the class body is disallowed'\n ) from None\n else:\n for b in self['_rfm_bases']:\n if key in b._rfm_var_space:\n v = b._rfm_var_space[key].default_value\n self._namespace[key] = v\n return self._namespace[key]\n raise err from None\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwargs):\n namespace = super().__prepare__(name, bases, **kwargs)\n namespace['_rfm_bases'] = [b for b in bases if hasattr(b,\n '_rfm_var_space')]\n local_param_space = namespaces.LocalNamespace()\n namespace['_rfm_local_param_space'] = local_param_space\n namespace['parameter'] = parameters.TestParam\n local_var_space = namespaces.LocalNamespace()\n namespace['_rfm_local_var_space'] = local_var_space\n namespace['variable'] = variables.TestVar\n namespace['required'] = variables.Undefined\n return metacls.MetaNamespace(namespace)\n <mask token>\n\n def __init__(cls, name, bases, namespace, **kwargs):\n super().__init__(name, bases, namespace, **kwargs)\n cls._rfm_dir = set()\n for base in bases:\n if hasattr(base, '_rfm_dir'):\n cls._rfm_dir.update(base._rfm_dir)\n used_attribute_names = set(cls._rfm_dir)\n variables.VarSpace(cls, used_attribute_names)\n used_attribute_names.update(cls._rfm_var_space.vars)\n parameters.ParamSpace(cls, used_attribute_names)\n cls._rfm_dir.update(cls.__dict__)\n hooks = HookRegistry.create(namespace)\n for b in bases:\n if hasattr(b, '_rfm_pipeline_hooks'):\n hooks.update(getattr(b, '_rfm_pipeline_hooks'))\n cls._rfm_pipeline_hooks = hooks\n cls._final_methods = {v.__name__ for v in namespace.values() if\n hasattr(v, '_rfm_final')}\n cls._final_methods.update(*(b._final_methods for b 
in bases if\n hasattr(b, '_final_methods')))\n if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:\n return\n for v in namespace.values():\n for b in bases:\n if not hasattr(b, '_final_methods'):\n continue\n if callable(v) and v.__name__ in b._final_methods:\n msg = (\n f\"'{cls.__qualname__}.{v.__name__}' attempts to override final method '{b.__qualname__}.{v.__name__}'; you should use the pipeline hooks instead\"\n )\n raise ReframeSyntaxError(msg)\n\n def __call__(cls, *args, **kwargs):\n \"\"\"Intercept reframe-specific constructor arguments.\n\n When registering a regression test using any supported decorator,\n this decorator may pass additional arguments to the class constructor\n to perform specific reframe-internal actions. This gives extra control\n over the class instantiation process, allowing reframe to instantiate\n the regression test class differently if this class was registered or\n not (e.g. when deep-copying a regression test object). These interal\n arguments must be intercepted before the object initialization, since\n these would otherwise affect the __init__ method's signature, and these\n internal mechanisms must be fully transparent to the user.\n \"\"\"\n obj = cls.__new__(cls, *args, **kwargs)\n kwargs.pop('_rfm_use_params', None)\n obj.__init__(*args, **kwargs)\n return obj\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass RegressionTestMeta(type):\n\n\n class MetaNamespace(namespaces.LocalNamespace):\n \"\"\"Custom namespace to control the cls attribute assignment.\n\n Regular Python class attributes can be overriden by either\n parameters or variables respecting the order of execution.\n A variable or a parameter may not be declared more than once in the\n same class body. Overriding a variable with a parameter or the other\n way around has an undefined behaviour. A variable's value may be\n updated multiple times within the same class body. A parameter's\n value may not be updated more than once within the same class body.\n \"\"\"\n\n def __setitem__(self, key, value):\n if isinstance(value, variables.TestVar):\n self['_rfm_local_var_space'][key] = value\n value.__set_name__(self, key)\n self._namespace.pop(key, None)\n elif isinstance(value, parameters.TestParam):\n self['_rfm_local_param_space'][key] = value\n self._namespace.pop(key, None)\n elif key in self['_rfm_local_param_space']:\n raise ValueError(f'cannot override parameter {key!r}')\n else:\n self._namespace[key] = value\n\n def __getitem__(self, key):\n \"\"\"Expose and control access to the local namespaces.\n\n Variables may only be retrieved if their value has been previously\n set. 
Accessing a parameter in the class body is disallowed (the\n actual test parameter is set during the class instantiation).\n \"\"\"\n try:\n return super().__getitem__(key)\n except KeyError as err:\n try:\n return self['_rfm_local_var_space'][key]\n except KeyError:\n if key in self['_rfm_local_param_space']:\n raise ValueError(\n 'accessing a test parameter from the class body is disallowed'\n ) from None\n else:\n for b in self['_rfm_bases']:\n if key in b._rfm_var_space:\n v = b._rfm_var_space[key].default_value\n self._namespace[key] = v\n return self._namespace[key]\n raise err from None\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwargs):\n namespace = super().__prepare__(name, bases, **kwargs)\n namespace['_rfm_bases'] = [b for b in bases if hasattr(b,\n '_rfm_var_space')]\n local_param_space = namespaces.LocalNamespace()\n namespace['_rfm_local_param_space'] = local_param_space\n namespace['parameter'] = parameters.TestParam\n local_var_space = namespaces.LocalNamespace()\n namespace['_rfm_local_var_space'] = local_var_space\n namespace['variable'] = variables.TestVar\n namespace['required'] = variables.Undefined\n return metacls.MetaNamespace(namespace)\n\n def __new__(metacls, name, bases, namespace, **kwargs):\n return super().__new__(metacls, name, bases, dict(namespace), **kwargs)\n\n def __init__(cls, name, bases, namespace, **kwargs):\n super().__init__(name, bases, namespace, **kwargs)\n cls._rfm_dir = set()\n for base in bases:\n if hasattr(base, '_rfm_dir'):\n cls._rfm_dir.update(base._rfm_dir)\n used_attribute_names = set(cls._rfm_dir)\n variables.VarSpace(cls, used_attribute_names)\n used_attribute_names.update(cls._rfm_var_space.vars)\n parameters.ParamSpace(cls, used_attribute_names)\n cls._rfm_dir.update(cls.__dict__)\n hooks = HookRegistry.create(namespace)\n for b in bases:\n if hasattr(b, '_rfm_pipeline_hooks'):\n hooks.update(getattr(b, '_rfm_pipeline_hooks'))\n cls._rfm_pipeline_hooks = hooks\n cls._final_methods = 
{v.__name__ for v in namespace.values() if\n hasattr(v, '_rfm_final')}\n cls._final_methods.update(*(b._final_methods for b in bases if\n hasattr(b, '_final_methods')))\n if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:\n return\n for v in namespace.values():\n for b in bases:\n if not hasattr(b, '_final_methods'):\n continue\n if callable(v) and v.__name__ in b._final_methods:\n msg = (\n f\"'{cls.__qualname__}.{v.__name__}' attempts to override final method '{b.__qualname__}.{v.__name__}'; you should use the pipeline hooks instead\"\n )\n raise ReframeSyntaxError(msg)\n\n def __call__(cls, *args, **kwargs):\n \"\"\"Intercept reframe-specific constructor arguments.\n\n When registering a regression test using any supported decorator,\n this decorator may pass additional arguments to the class constructor\n to perform specific reframe-internal actions. This gives extra control\n over the class instantiation process, allowing reframe to instantiate\n the regression test class differently if this class was registered or\n not (e.g. when deep-copying a regression test object). These interal\n arguments must be intercepted before the object initialization, since\n these would otherwise affect the __init__ method's signature, and these\n internal mechanisms must be fully transparent to the user.\n \"\"\"\n obj = cls.__new__(cls, *args, **kwargs)\n kwargs.pop('_rfm_use_params', None)\n obj.__init__(*args, **kwargs)\n return obj\n <mask token>\n\n @property\n def param_space(cls):\n return cls._rfm_param_space\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass RegressionTestMeta(type):\n\n\n class MetaNamespace(namespaces.LocalNamespace):\n \"\"\"Custom namespace to control the cls attribute assignment.\n\n Regular Python class attributes can be overriden by either\n parameters or variables respecting the order of execution.\n A variable or a parameter may not be declared more than once in the\n same class body. Overriding a variable with a parameter or the other\n way around has an undefined behaviour. A variable's value may be\n updated multiple times within the same class body. A parameter's\n value may not be updated more than once within the same class body.\n \"\"\"\n\n def __setitem__(self, key, value):\n if isinstance(value, variables.TestVar):\n self['_rfm_local_var_space'][key] = value\n value.__set_name__(self, key)\n self._namespace.pop(key, None)\n elif isinstance(value, parameters.TestParam):\n self['_rfm_local_param_space'][key] = value\n self._namespace.pop(key, None)\n elif key in self['_rfm_local_param_space']:\n raise ValueError(f'cannot override parameter {key!r}')\n else:\n self._namespace[key] = value\n\n def __getitem__(self, key):\n \"\"\"Expose and control access to the local namespaces.\n\n Variables may only be retrieved if their value has been previously\n set. 
Accessing a parameter in the class body is disallowed (the\n actual test parameter is set during the class instantiation).\n \"\"\"\n try:\n return super().__getitem__(key)\n except KeyError as err:\n try:\n return self['_rfm_local_var_space'][key]\n except KeyError:\n if key in self['_rfm_local_param_space']:\n raise ValueError(\n 'accessing a test parameter from the class body is disallowed'\n ) from None\n else:\n for b in self['_rfm_bases']:\n if key in b._rfm_var_space:\n v = b._rfm_var_space[key].default_value\n self._namespace[key] = v\n return self._namespace[key]\n raise err from None\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwargs):\n namespace = super().__prepare__(name, bases, **kwargs)\n namespace['_rfm_bases'] = [b for b in bases if hasattr(b,\n '_rfm_var_space')]\n local_param_space = namespaces.LocalNamespace()\n namespace['_rfm_local_param_space'] = local_param_space\n namespace['parameter'] = parameters.TestParam\n local_var_space = namespaces.LocalNamespace()\n namespace['_rfm_local_var_space'] = local_var_space\n namespace['variable'] = variables.TestVar\n namespace['required'] = variables.Undefined\n return metacls.MetaNamespace(namespace)\n\n def __new__(metacls, name, bases, namespace, **kwargs):\n return super().__new__(metacls, name, bases, dict(namespace), **kwargs)\n\n def __init__(cls, name, bases, namespace, **kwargs):\n super().__init__(name, bases, namespace, **kwargs)\n cls._rfm_dir = set()\n for base in bases:\n if hasattr(base, '_rfm_dir'):\n cls._rfm_dir.update(base._rfm_dir)\n used_attribute_names = set(cls._rfm_dir)\n variables.VarSpace(cls, used_attribute_names)\n used_attribute_names.update(cls._rfm_var_space.vars)\n parameters.ParamSpace(cls, used_attribute_names)\n cls._rfm_dir.update(cls.__dict__)\n hooks = HookRegistry.create(namespace)\n for b in bases:\n if hasattr(b, '_rfm_pipeline_hooks'):\n hooks.update(getattr(b, '_rfm_pipeline_hooks'))\n cls._rfm_pipeline_hooks = hooks\n cls._final_methods = 
{v.__name__ for v in namespace.values() if\n hasattr(v, '_rfm_final')}\n cls._final_methods.update(*(b._final_methods for b in bases if\n hasattr(b, '_final_methods')))\n if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:\n return\n for v in namespace.values():\n for b in bases:\n if not hasattr(b, '_final_methods'):\n continue\n if callable(v) and v.__name__ in b._final_methods:\n msg = (\n f\"'{cls.__qualname__}.{v.__name__}' attempts to override final method '{b.__qualname__}.{v.__name__}'; you should use the pipeline hooks instead\"\n )\n raise ReframeSyntaxError(msg)\n\n def __call__(cls, *args, **kwargs):\n \"\"\"Intercept reframe-specific constructor arguments.\n\n When registering a regression test using any supported decorator,\n this decorator may pass additional arguments to the class constructor\n to perform specific reframe-internal actions. This gives extra control\n over the class instantiation process, allowing reframe to instantiate\n the regression test class differently if this class was registered or\n not (e.g. when deep-copying a regression test object). 
These interal\n arguments must be intercepted before the object initialization, since\n these would otherwise affect the __init__ method's signature, and these\n internal mechanisms must be fully transparent to the user.\n \"\"\"\n obj = cls.__new__(cls, *args, **kwargs)\n kwargs.pop('_rfm_use_params', None)\n obj.__init__(*args, **kwargs)\n return obj\n\n def __getattr__(cls, name):\n \"\"\" Attribute lookup method for the MetaNamespace.\n\n This metaclass implements a custom namespace, where built-in `variable`\n and `parameter` types are stored in their own sub-namespaces (see\n :class:`reframe.core.meta.RegressionTestMeta.MetaNamespace`).\n This method will perform an attribute lookup on these sub-namespaces if\n a call to the default `__getattribute__` method fails to retrieve the\n requested class attribute.\n \"\"\"\n try:\n return cls._rfm_var_space.vars[name]\n except KeyError:\n try:\n return cls._rfm_param_space.params[name]\n except KeyError:\n raise AttributeError(\n f'class {cls.__qualname__!r} has no attribute {name!r}'\n ) from None\n\n @property\n def param_space(cls):\n return cls._rfm_param_space\n\n def is_abstract(cls):\n \"\"\"Check if the class is an abstract test.\n\n This is the case when some parameters are undefined, which results in\n the length of the parameter space being 0.\n\n :return: bool indicating wheteher the test has undefined parameters.\n\n :meta private:\n \"\"\"\n return len(cls.param_space) == 0\n",
"step-5": "# Copyright 2016-2021 Swiss National Supercomputing Centre (CSCS/ETH Zurich)\n# ReFrame Project Developers. See the top-level LICENSE file for details.\n#\n# SPDX-License-Identifier: BSD-3-Clause\n\n#\n# Meta-class for creating regression tests.\n#\n\n\nimport reframe.core.namespaces as namespaces\nimport reframe.core.parameters as parameters\nimport reframe.core.variables as variables\n\nfrom reframe.core.exceptions import ReframeSyntaxError\nfrom reframe.core.hooks import HookRegistry\n\n\nclass RegressionTestMeta(type):\n\n class MetaNamespace(namespaces.LocalNamespace):\n '''Custom namespace to control the cls attribute assignment.\n\n Regular Python class attributes can be overriden by either\n parameters or variables respecting the order of execution.\n A variable or a parameter may not be declared more than once in the\n same class body. Overriding a variable with a parameter or the other\n way around has an undefined behaviour. A variable's value may be\n updated multiple times within the same class body. 
A parameter's\n value may not be updated more than once within the same class body.\n '''\n\n def __setitem__(self, key, value):\n if isinstance(value, variables.TestVar):\n # Insert the attribute in the variable namespace\n self['_rfm_local_var_space'][key] = value\n value.__set_name__(self, key)\n\n # Override the regular class attribute (if present)\n self._namespace.pop(key, None)\n\n elif isinstance(value, parameters.TestParam):\n # Insert the attribute in the parameter namespace\n self['_rfm_local_param_space'][key] = value\n\n # Override the regular class attribute (if present)\n self._namespace.pop(key, None)\n\n elif key in self['_rfm_local_param_space']:\n raise ValueError(\n f'cannot override parameter {key!r}'\n )\n else:\n # Insert the items manually to overide the namespace clash\n # check from the base namespace.\n self._namespace[key] = value\n\n def __getitem__(self, key):\n '''Expose and control access to the local namespaces.\n\n Variables may only be retrieved if their value has been previously\n set. Accessing a parameter in the class body is disallowed (the\n actual test parameter is set during the class instantiation).\n '''\n try:\n return super().__getitem__(key)\n except KeyError as err:\n try:\n # Handle variable access\n return self['_rfm_local_var_space'][key]\n\n except KeyError:\n # Handle parameter access\n if key in self['_rfm_local_param_space']:\n raise ValueError(\n 'accessing a test parameter from the class '\n 'body is disallowed'\n ) from None\n else:\n # As the last resource, look if key is a variable in\n # any of the base classes. 
If so, make its value\n # available in the current class' namespace.\n for b in self['_rfm_bases']:\n if key in b._rfm_var_space:\n # Store a deep-copy of the variable's\n # value and return.\n v = b._rfm_var_space[key].default_value\n self._namespace[key] = v\n return self._namespace[key]\n\n # If 'key' is neither a variable nor a parameter,\n # raise the exception from the base __getitem__.\n raise err from None\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwargs):\n namespace = super().__prepare__(name, bases, **kwargs)\n\n # Keep reference to the bases inside the namespace\n namespace['_rfm_bases'] = [\n b for b in bases if hasattr(b, '_rfm_var_space')\n ]\n\n # Regression test parameter space defined at the class level\n local_param_space = namespaces.LocalNamespace()\n namespace['_rfm_local_param_space'] = local_param_space\n\n # Directive to insert a regression test parameter directly in the\n # class body as: `P0 = parameter([0,1,2,3])`.\n namespace['parameter'] = parameters.TestParam\n\n # Regression test var space defined at the class level\n local_var_space = namespaces.LocalNamespace()\n namespace['_rfm_local_var_space'] = local_var_space\n\n # Directives to add/modify a regression test variable\n namespace['variable'] = variables.TestVar\n namespace['required'] = variables.Undefined\n return metacls.MetaNamespace(namespace)\n\n def __new__(metacls, name, bases, namespace, **kwargs):\n return super().__new__(metacls, name, bases, dict(namespace), **kwargs)\n\n def __init__(cls, name, bases, namespace, **kwargs):\n super().__init__(name, bases, namespace, **kwargs)\n\n # Create a set with the attribute names already in use.\n cls._rfm_dir = set()\n for base in bases:\n if hasattr(base, '_rfm_dir'):\n cls._rfm_dir.update(base._rfm_dir)\n\n used_attribute_names = set(cls._rfm_dir)\n\n # Build the var space and extend the target namespace\n variables.VarSpace(cls, used_attribute_names)\n used_attribute_names.update(cls._rfm_var_space.vars)\n\n 
# Build the parameter space\n parameters.ParamSpace(cls, used_attribute_names)\n\n # Update used names set with the local __dict__\n cls._rfm_dir.update(cls.__dict__)\n\n # Set up the hooks for the pipeline stages based on the _rfm_attach\n # attribute; all dependencies will be resolved first in the post-setup\n # phase if not assigned elsewhere\n hooks = HookRegistry.create(namespace)\n for b in bases:\n if hasattr(b, '_rfm_pipeline_hooks'):\n hooks.update(getattr(b, '_rfm_pipeline_hooks'))\n\n cls._rfm_pipeline_hooks = hooks # HookRegistry(local_hooks)\n cls._final_methods = {v.__name__ for v in namespace.values()\n if hasattr(v, '_rfm_final')}\n\n # Add the final functions from its parents\n cls._final_methods.update(*(b._final_methods for b in bases\n if hasattr(b, '_final_methods')))\n\n if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:\n return\n\n for v in namespace.values():\n for b in bases:\n if not hasattr(b, '_final_methods'):\n continue\n\n if callable(v) and v.__name__ in b._final_methods:\n msg = (f\"'{cls.__qualname__}.{v.__name__}' attempts to \"\n f\"override final method \"\n f\"'{b.__qualname__}.{v.__name__}'; \"\n f\"you should use the pipeline hooks instead\")\n raise ReframeSyntaxError(msg)\n\n def __call__(cls, *args, **kwargs):\n '''Intercept reframe-specific constructor arguments.\n\n When registering a regression test using any supported decorator,\n this decorator may pass additional arguments to the class constructor\n to perform specific reframe-internal actions. This gives extra control\n over the class instantiation process, allowing reframe to instantiate\n the regression test class differently if this class was registered or\n not (e.g. when deep-copying a regression test object). 
These interal\n arguments must be intercepted before the object initialization, since\n these would otherwise affect the __init__ method's signature, and these\n internal mechanisms must be fully transparent to the user.\n '''\n obj = cls.__new__(cls, *args, **kwargs)\n\n # Intercept constructor arguments\n kwargs.pop('_rfm_use_params', None)\n\n obj.__init__(*args, **kwargs)\n return obj\n\n def __getattr__(cls, name):\n ''' Attribute lookup method for the MetaNamespace.\n\n This metaclass implements a custom namespace, where built-in `variable`\n and `parameter` types are stored in their own sub-namespaces (see\n :class:`reframe.core.meta.RegressionTestMeta.MetaNamespace`).\n This method will perform an attribute lookup on these sub-namespaces if\n a call to the default `__getattribute__` method fails to retrieve the\n requested class attribute.\n '''\n try:\n return cls._rfm_var_space.vars[name]\n except KeyError:\n try:\n return cls._rfm_param_space.params[name]\n except KeyError:\n raise AttributeError(\n f'class {cls.__qualname__!r} has no attribute {name!r}'\n ) from None\n\n @property\n def param_space(cls):\n # Make the parameter space available as read-only\n return cls._rfm_param_space\n\n def is_abstract(cls):\n '''Check if the class is an abstract test.\n\n This is the case when some parameters are undefined, which results in\n the length of the parameter space being 0.\n\n :return: bool indicating wheteher the test has undefined parameters.\n\n :meta private:\n '''\n return len(cls.param_space) == 0\n",
"step-ids": [
2,
4,
6,
8,
10
]
}
|
[
2,
4,
6,
8,
10
] |
from marshmallow import ValidationError
from werkzeug.exceptions import HTTPException
from flask_jwt_extended.exceptions import JWTExtendedException
from memedata.util import mk_errors
from memedata import config
def jwt_error_handler(error):
    '''Translate a flask-jwt-extended exception into a 401 error response.

    The exception's positional ``args`` (if any) become the response's
    error messages.
    '''
    messages = [msg for msg in getattr(error, 'args', ())]
    return mk_errors(401, messages)
def http_error_handler(error):
    '''Build an error response from a werkzeug/Flask ``HTTPException``.

    If the exception carries no response object, its own ``code`` and
    ``description`` are used.  Otherwise the messages are pulled from the
    response's JSON ``errors`` list when present, falling back to the
    response's status line.
    '''
    resp = error.response
    if resp is None:
        return mk_errors(error.code, [error.description])

    status_code = getattr(resp, 'status_code', 500)
    payload = resp.get_json()
    error_items = payload['errors'] if 'errors' in payload else None
    if error_items:
        messages = [item['message'] for item in error_items
                    if 'message' in item]
    else:
        messages = [str(resp.status)]
    return mk_errors(status_code, messages)
def validation_error_handler(error):
    '''Build an error response from a marshmallow ``ValidationError``.

    Uses the error's ``status_code`` (defaulting to 500) and its
    ``messages`` list (defaulting to empty).
    '''
    return mk_errors(
        getattr(error, 'status_code', 500),
        getattr(error, 'messages', []),
    )
def generic_error_handler(error):
    '''Build an error response for any otherwise-unhandled exception.

    In debug mode the exception text is exposed to the client; in
    production a generic message is returned instead.
    '''
    status = getattr(error, 'status_code', 500)
    details = [str(error)] if config.debug else ['something went wrong!']
    return mk_errors(status, details)
def error_handler(error):
    '''Dispatch *error* to the appropriate specialized handler.

    JWT, HTTP and marshmallow validation errors get dedicated handlers;
    anything else falls through to :func:`generic_error_handler`.  If a
    handler itself fails, a generic 500 response is returned so the API
    never leaks an unhandled traceback to the client.
    '''
    try:
        if isinstance(error, JWTExtendedException):
            return jwt_error_handler(error)
        elif isinstance(error, HTTPException):
            return http_error_handler(error)
        elif isinstance(error, ValidationError):
            return validation_error_handler(error)
        else:
            return generic_error_handler(error)
    except Exception:
        # Catch Exception rather than using a bare `except:` so that
        # SystemExit/KeyboardInterrupt still propagate instead of being
        # converted into a 500 response.
        return mk_errors(500, 'something went wrong!')
def register_handlers(app):
    """Route every Flask/werkzeug exception through ``error_handler``."""
    for exc_type in (Exception, HTTPException):
        app.errorhandler(exc_type)(error_handler)
    app.handle_user_exception = error_handler
|
normal
|
{
"blob_id": "e1da3255668999c3b77aa8c9332b197a9203478e",
"index": 8992,
"step-1": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\n<mask token>\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\n<mask token>\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-3": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\ndef http_error_handler(error):\n resp = error.response\n if resp is None:\n code = error.code\n messages = [error.description]\n else:\n code = getattr(resp, 'status_code', 500)\n json = resp.get_json()\n if 'errors' in json and json['errors']:\n messages = [e['message'] for e in json['errors'] if 'message' in e]\n else:\n messages = [str(resp.status)]\n return mk_errors(code, messages)\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-4": "from marshmallow import ValidationError\nfrom werkzeug.exceptions import HTTPException\nfrom flask_jwt_extended.exceptions import JWTExtendedException\nfrom memedata.util import mk_errors\nfrom memedata import config\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\ndef http_error_handler(error):\n resp = error.response\n if resp is None:\n code = error.code\n messages = [error.description]\n else:\n code = getattr(resp, 'status_code', 500)\n json = resp.get_json()\n if 'errors' in json and json['errors']:\n messages = [e['message'] for e in json['errors'] if 'message' in e]\n else:\n messages = [str(resp.status)]\n return mk_errors(code, messages)\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-5": null,
"step-ids": [
4,
5,
6,
7
]
}
|
[
4,
5,
6,
7
] |
from math import sqrt, ceil
def encode_s(s):
    """Encode *s* by reading a square-ish grid column by column.

    Spaces are stripped, the remaining text is laid out row-wise in rows
    of ``ceil(sqrt(len(text)))`` characters, and each column is emitted
    as one space-separated group.

    The original built the result with quadratic ``+=`` string
    concatenation and a manual index loop; ``text[j::step]`` yields the
    same column, and ``' '.join`` assembles the output in linear time.
    """
    text = s.replace(' ', '')
    step = ceil(sqrt(len(text)))
    # text[j::step] collects column j of the row-major grid.
    return ' '.join(text[j::step] for j in range(step))
def decode_s(s):
    """Invert :func:`encode_s`: interleave the space-separated groups.

    Reads character ``j`` of every group in order.  The first group is
    always the longest (column 0 of the grid), so ``len(groups[0])``
    bounds the row count; shorter groups are simply skipped once
    exhausted.  Uses ``''.join`` instead of the original quadratic
    ``+=`` concatenation.
    """
    groups = s.split(' ')
    return ''.join(
        group[j]
        for j in range(len(groups[0]))
        for group in groups
        if j < len(group)
    )
def TheRabbitsFoot(s, encode):
    """Encode *s* when ``encode`` is truthy, otherwise decode it."""
    return encode_s(s) if encode else decode_s(s)
|
normal
|
{
"blob_id": "a3ed47c285b26dca452fa192eb354a21a78b8424",
"index": 4632,
"step-1": "<mask token>\n\n\ndef TheRabbitsFoot(s, encode):\n if encode:\n return encode_s(s)\n return decode_s(s)\n",
"step-2": "<mask token>\n\n\ndef decode_s(s):\n arr = s.split(' ')\n decoded_s = ''\n for j in range(0, len(arr[0])):\n for word in arr:\n if len(word) > j:\n decoded_s = decoded_s + word[j]\n return decoded_s\n\n\ndef TheRabbitsFoot(s, encode):\n if encode:\n return encode_s(s)\n return decode_s(s)\n",
"step-3": "<mask token>\n\n\ndef encode_s(s):\n encoded_s = ''\n s_with_no_spaces = s.replace(' ', '')\n step = ceil(sqrt(len(s_with_no_spaces)))\n for j in range(0, step):\n i = j\n while i < len(s_with_no_spaces):\n encoded_s = encoded_s + s_with_no_spaces[i]\n i += step\n if j != step - 1:\n encoded_s = encoded_s + ' '\n return encoded_s\n\n\ndef decode_s(s):\n arr = s.split(' ')\n decoded_s = ''\n for j in range(0, len(arr[0])):\n for word in arr:\n if len(word) > j:\n decoded_s = decoded_s + word[j]\n return decoded_s\n\n\ndef TheRabbitsFoot(s, encode):\n if encode:\n return encode_s(s)\n return decode_s(s)\n",
"step-4": "from math import sqrt, ceil\n\n\ndef encode_s(s):\n encoded_s = ''\n s_with_no_spaces = s.replace(' ', '')\n step = ceil(sqrt(len(s_with_no_spaces)))\n for j in range(0, step):\n i = j\n while i < len(s_with_no_spaces):\n encoded_s = encoded_s + s_with_no_spaces[i]\n i += step\n if j != step - 1:\n encoded_s = encoded_s + ' '\n return encoded_s\n\n\ndef decode_s(s):\n arr = s.split(' ')\n decoded_s = ''\n for j in range(0, len(arr[0])):\n for word in arr:\n if len(word) > j:\n decoded_s = decoded_s + word[j]\n return decoded_s\n\n\ndef TheRabbitsFoot(s, encode):\n if encode:\n return encode_s(s)\n return decode_s(s)\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_classify_source_files() ->None:
scalatest_files = {'foo/bar/BazSpec.scala'}
junit_files = {'foo/bar/BazTest.scala'}
lib_files = {'foo/bar/Baz.scala'}
assert {ScalatestTestsGeneratorTarget: scalatest_files,
ScalaJunitTestsGeneratorTarget: junit_files,
ScalaSourcesGeneratorTarget: lib_files} == classify_source_files(
junit_files | lib_files | scalatest_files)
<|reserved_special_token_1|>
from pants.backend.scala.goals.tailor import classify_source_files
from pants.backend.scala.target_types import ScalaJunitTestsGeneratorTarget, ScalaSourcesGeneratorTarget, ScalatestTestsGeneratorTarget
def test_classify_source_files() ->None:
scalatest_files = {'foo/bar/BazSpec.scala'}
junit_files = {'foo/bar/BazTest.scala'}
lib_files = {'foo/bar/Baz.scala'}
assert {ScalatestTestsGeneratorTarget: scalatest_files,
ScalaJunitTestsGeneratorTarget: junit_files,
ScalaSourcesGeneratorTarget: lib_files} == classify_source_files(
junit_files | lib_files | scalatest_files)
<|reserved_special_token_1|>
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.backend.scala.goals.tailor import classify_source_files
from pants.backend.scala.target_types import (
ScalaJunitTestsGeneratorTarget,
ScalaSourcesGeneratorTarget,
ScalatestTestsGeneratorTarget,
)
def test_classify_source_files() -> None:
    """Each Scala source style maps to its dedicated generator target."""
    spec_files = {"foo/bar/BazSpec.scala"}
    junit_files = {"foo/bar/BazTest.scala"}
    plain_files = {"foo/bar/Baz.scala"}

    expected = {
        ScalatestTestsGeneratorTarget: spec_files,
        ScalaJunitTestsGeneratorTarget: junit_files,
        ScalaSourcesGeneratorTarget: plain_files,
    }
    assert classify_source_files(junit_files | plain_files | spec_files) == expected
|
flexible
|
{
"blob_id": "42d2d8717ec2c25a99302e8de3090d600f8e80ff",
"index": 674,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_classify_source_files() ->None:\n scalatest_files = {'foo/bar/BazSpec.scala'}\n junit_files = {'foo/bar/BazTest.scala'}\n lib_files = {'foo/bar/Baz.scala'}\n assert {ScalatestTestsGeneratorTarget: scalatest_files,\n ScalaJunitTestsGeneratorTarget: junit_files,\n ScalaSourcesGeneratorTarget: lib_files} == classify_source_files(\n junit_files | lib_files | scalatest_files)\n",
"step-3": "from pants.backend.scala.goals.tailor import classify_source_files\nfrom pants.backend.scala.target_types import ScalaJunitTestsGeneratorTarget, ScalaSourcesGeneratorTarget, ScalatestTestsGeneratorTarget\n\n\ndef test_classify_source_files() ->None:\n scalatest_files = {'foo/bar/BazSpec.scala'}\n junit_files = {'foo/bar/BazTest.scala'}\n lib_files = {'foo/bar/Baz.scala'}\n assert {ScalatestTestsGeneratorTarget: scalatest_files,\n ScalaJunitTestsGeneratorTarget: junit_files,\n ScalaSourcesGeneratorTarget: lib_files} == classify_source_files(\n junit_files | lib_files | scalatest_files)\n",
"step-4": "# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n\nfrom pants.backend.scala.goals.tailor import classify_source_files\nfrom pants.backend.scala.target_types import (\n ScalaJunitTestsGeneratorTarget,\n ScalaSourcesGeneratorTarget,\n ScalatestTestsGeneratorTarget,\n)\n\n\ndef test_classify_source_files() -> None:\n scalatest_files = {\n \"foo/bar/BazSpec.scala\",\n }\n junit_files = {\n \"foo/bar/BazTest.scala\",\n }\n lib_files = {\"foo/bar/Baz.scala\"}\n\n assert {\n ScalatestTestsGeneratorTarget: scalatest_files,\n ScalaJunitTestsGeneratorTarget: junit_files,\n ScalaSourcesGeneratorTarget: lib_files,\n } == classify_source_files(junit_files | lib_files | scalatest_files)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
"""
Load API client for a Tool Registry Service (TRS) endpoint based
either on the GA4GH specification or an existing client library.
"""
import logging
from bravado.requests_client import RequestsClient
from ga4ghtest.core.config import trs_config
from .client import TRSClient
logger = logging.getLogger(__name__)
def _get_trs_opts(service_id):
    """Return the stored configuration entry for *service_id*.

    Raises ``KeyError`` when the service is not configured.
    """
    return trs_config()[service_id]
def _init_http_client(service_id=None, opts=None):
    """Build a bravado ``RequestsClient`` configured for one TRS service.

    When *service_id* is given, its stored options replace *opts*;
    the options must provide ``'host'`` and ``'auth'`` entries.
    """
    if service_id:
        opts = _get_trs_opts(service_id)

    client = RequestsClient()
    client.set_api_key(host=opts['host'],
                       api_key=opts['auth'],
                       param_in='header')
    return client
class TRSInterface:
    """Abstract surface of the GA4GH Tool Registry Service (TRS) API.

    Every method raises :class:`NotImplementedError`; concrete adapters
    (see :class:`TRSAdapter`) must override the operations they support.
    """

    def toolsGet(self):
        """List all tools in the registry."""
        raise NotImplementedError

    def metadataGet(self):
        """Describe the tool types supported by the registry."""
        raise NotImplementedError

    def toolsIdGet(self, tool_id):
        """Fetch a single tool by its identifier."""
        raise NotImplementedError

    def toolsIdVersionGet(self, tool_id, tool_version):
        """Fetch one specific version of a tool."""
        raise NotImplementedError

    def toolsIdVersionsGet(self, tool_id):
        """List all versions of a tool."""
        raise NotImplementedError

    def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id, tool_version, descriptor_type):
        """Fetch the descriptor (e.g. CWL/WDL) for a tool version."""
        raise NotImplementedError

    def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id, tool_version, descriptor_type, rel_path):
        """Fetch a descriptor file located at a relative path."""
        raise NotImplementedError

    def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version, descriptor_type, rel_path):
        """Fetch the test files for a tool version."""
        raise NotImplementedError

    def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version, descriptor_type):
        """List the files associated with a tool version."""
        raise NotImplementedError

    def toolsIdVersionsContainerGet(self, tool_id, tool_version):
        """Fetch the container specifications for a tool version."""
        raise NotImplementedError
class TRSAdapter(TRSInterface):
    """
    Adapter class for TRS client functionality.

    Delegates every :class:`TRSInterface` operation to an underlying
    client object implementing the TRS HTTP calls.

    Args:
        trs_client: client object exposing ``get_tools``,
            ``get_tool_types``, ``get_tool`` etc.
    """
    def __init__(self, trs_client):
        self.trs_client = trs_client

    def toolsGet(self):
        """List all tools in the registry."""
        return self.trs_client.get_tools()

    def metadataGet(self):
        """Describe the tool types supported by the registry."""
        # Bug fix: this previously ``raise``d the client's return value
        # instead of returning it, which fails on every call (raising a
        # non-exception object is a TypeError).
        return self.trs_client.get_tool_types()

    def toolsIdGet(self, tool_id):
        """Fetch a single tool by its identifier."""
        return self.trs_client.get_tool(tool_id)

    def toolsIdVersionGet(self, tool_id, tool_version):
        """Fetch one specific version of a tool."""
        return self.trs_client.get_tool_version(tool_id, tool_version)

    def toolsIdVersionsGet(self, tool_id):
        """List all versions of a tool."""
        return self.trs_client.get_tool_versions(tool_id)

    def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id, tool_version, descriptor_type):
        """Fetch the descriptor for a tool version."""
        return self.trs_client.get_tool_descriptor(tool_id, tool_version, descriptor_type)

    def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id, tool_version, descriptor_type, rel_path):
        """Fetch a descriptor file located at a relative path."""
        return self.trs_client.get_relative_tool_descriptor(tool_id, tool_version, descriptor_type, rel_path)

    def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version, descriptor_type, rel_path):
        """Fetch the test files for a tool version."""
        return self.trs_client.get_tool_tests(tool_id, tool_version, descriptor_type, rel_path)

    def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version, descriptor_type):
        """List the files associated with a tool version."""
        return self.trs_client.get_tools_with_relative_path(tool_id, tool_version, descriptor_type)

    def toolsIdVersionsContainerGet(self, tool_id, tool_version):
        """Fetch the container specifications for a tool version."""
        return self.trs_client.get_tool_container_specs(tool_id, tool_version)
def load_trs_client(service_id, http_client=None):
    """Return an API client for the selected workflow execution service.

    NOTE(review): ``http_client`` is accepted but currently unused —
    confirm whether it should be forwarded to :class:`TRSClient`.
    """
    return TRSAdapter(TRSClient(service=_get_trs_opts(service_id)))
|
normal
|
{
"blob_id": "d122267e1da2d9cf68d245148bb496dfba3e7d19",
"index": 4467,
"step-1": "<mask token>\n\n\nclass TRSInterface:\n\n def toolsGet(self):\n raise NotImplementedError\n\n def metadataGet(self):\n raise NotImplementedError\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n raise NotImplementedError\n <mask token>\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n\nclass TRSAdapter(TRSInterface):\n \"\"\"\n Adapter class for TRS client functionality.\n\n Args:\n trs_client: ...\n \"\"\"\n\n def __init__(self, trs_client):\n self.trs_client = trs_client\n\n def toolsGet(self):\n return self.trs_client.get_tools()\n\n def metadataGet(self):\n raise self.trs_client.get_tool_types()\n\n def toolsIdGet(self, tool_id):\n return self.trs_client.get_tool(tool_id)\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_version(tool_id, tool_version)\n\n def toolsIdVersionsGet(self, tool_id):\n return self.trs_client.get_tool_versions(tool_id)\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n return self.trs_client.get_tool_descriptor(tool_id, tool_version,\n descriptor_type)\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n return self.trs_client.get_relative_tool_descriptor(tool_id,\n tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n return self.trs_client.get_tool_tests(tool_id, tool_version,\n descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n return self.trs_client.get_tools_with_relative_path(tool_id,\n tool_version, descriptor_type)\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n return 
self.trs_client.get_tool_container_specs(tool_id, tool_version)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TRSInterface:\n\n def toolsGet(self):\n raise NotImplementedError\n\n def metadataGet(self):\n raise NotImplementedError\n <mask token>\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n def toolsIdVersionsGet(self, tool_id):\n raise NotImplementedError\n <mask token>\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n raise NotImplementedError\n <mask token>\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n\nclass TRSAdapter(TRSInterface):\n \"\"\"\n Adapter class for TRS client functionality.\n\n Args:\n trs_client: ...\n \"\"\"\n\n def __init__(self, trs_client):\n self.trs_client = trs_client\n\n def toolsGet(self):\n return self.trs_client.get_tools()\n\n def metadataGet(self):\n raise self.trs_client.get_tool_types()\n\n def toolsIdGet(self, tool_id):\n return self.trs_client.get_tool(tool_id)\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_version(tool_id, tool_version)\n\n def toolsIdVersionsGet(self, tool_id):\n return self.trs_client.get_tool_versions(tool_id)\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n return self.trs_client.get_tool_descriptor(tool_id, tool_version,\n descriptor_type)\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n return self.trs_client.get_relative_tool_descriptor(tool_id,\n tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n return self.trs_client.get_tool_tests(tool_id, tool_version,\n descriptor_type, rel_path)\n\n def 
toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n return self.trs_client.get_tools_with_relative_path(tool_id,\n tool_version, descriptor_type)\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_container_specs(tool_id, tool_version)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TRSInterface:\n\n def toolsGet(self):\n raise NotImplementedError\n\n def metadataGet(self):\n raise NotImplementedError\n\n def toolsIdGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n def toolsIdVersionsGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n\nclass TRSAdapter(TRSInterface):\n \"\"\"\n Adapter class for TRS client functionality.\n\n Args:\n trs_client: ...\n \"\"\"\n\n def __init__(self, trs_client):\n self.trs_client = trs_client\n\n def toolsGet(self):\n return self.trs_client.get_tools()\n\n def metadataGet(self):\n raise self.trs_client.get_tool_types()\n\n def toolsIdGet(self, tool_id):\n return self.trs_client.get_tool(tool_id)\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_version(tool_id, tool_version)\n\n def toolsIdVersionsGet(self, tool_id):\n return self.trs_client.get_tool_versions(tool_id)\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n return self.trs_client.get_tool_descriptor(tool_id, tool_version,\n descriptor_type)\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n return self.trs_client.get_relative_tool_descriptor(tool_id,\n 
tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n return self.trs_client.get_tool_tests(tool_id, tool_version,\n descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n return self.trs_client.get_tools_with_relative_path(tool_id,\n tool_version, descriptor_type)\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_container_specs(tool_id, tool_version)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef _get_trs_opts(service_id):\n \"\"\"\n Look up stored parameters for tool registry services.\n \"\"\"\n return trs_config()[service_id]\n\n\ndef _init_http_client(service_id=None, opts=None):\n \"\"\"\n Initialize and configure HTTP requests client for selected service.\n \"\"\"\n if service_id:\n opts = _get_trs_opts(service_id)\n http_client = RequestsClient()\n http_client.set_api_key(host=opts['host'], api_key=opts['auth'],\n param_in='header')\n return http_client\n\n\nclass TRSInterface:\n\n def toolsGet(self):\n raise NotImplementedError\n\n def metadataGet(self):\n raise NotImplementedError\n\n def toolsIdGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n def toolsIdVersionsGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n\nclass TRSAdapter(TRSInterface):\n \"\"\"\n Adapter class for TRS client functionality.\n\n Args:\n trs_client: ...\n \"\"\"\n\n def __init__(self, trs_client):\n self.trs_client = trs_client\n\n def toolsGet(self):\n return self.trs_client.get_tools()\n\n def metadataGet(self):\n raise self.trs_client.get_tool_types()\n\n def toolsIdGet(self, tool_id):\n return self.trs_client.get_tool(tool_id)\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_version(tool_id, 
tool_version)\n\n def toolsIdVersionsGet(self, tool_id):\n return self.trs_client.get_tool_versions(tool_id)\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id,\n tool_version, descriptor_type):\n return self.trs_client.get_tool_descriptor(tool_id, tool_version,\n descriptor_type)\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id,\n tool_version, descriptor_type, rel_path):\n return self.trs_client.get_relative_tool_descriptor(tool_id,\n tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version,\n descriptor_type, rel_path):\n return self.trs_client.get_tool_tests(tool_id, tool_version,\n descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version,\n descriptor_type):\n return self.trs_client.get_tools_with_relative_path(tool_id,\n tool_version, descriptor_type)\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_container_specs(tool_id, tool_version)\n\n\ndef load_trs_client(service_id, http_client=None):\n \"\"\"Return an API client for the selected workflow execution service.\"\"\"\n trs_client = TRSClient(service=_get_trs_opts(service_id))\n return TRSAdapter(trs_client)\n",
"step-5": "#!/usr/bin/env python\n\"\"\"\nLoad API client for a Tool Registry Service (TRS) endpoint based\neither on the GA4GH specification or an existing client library.\n\"\"\"\nimport logging\n\nfrom bravado.requests_client import RequestsClient\n\nfrom ga4ghtest.core.config import trs_config\nfrom .client import TRSClient\n\nlogger = logging.getLogger(__name__)\n\n\ndef _get_trs_opts(service_id):\n \"\"\"\n Look up stored parameters for tool registry services.\n \"\"\"\n return trs_config()[service_id]\n\n\ndef _init_http_client(service_id=None, opts=None):\n \"\"\"\n Initialize and configure HTTP requests client for selected service.\n \"\"\"\n if service_id:\n opts = _get_trs_opts(service_id)\n\n http_client = RequestsClient()\n\n http_client.set_api_key(host=opts['host'],\n api_key=opts['auth'],\n param_in='header')\n return http_client\n\n\nclass TRSInterface:\n def toolsGet(self):\n raise NotImplementedError\n\n def metadataGet(self):\n raise NotImplementedError\n\n def toolsIdGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n def toolsIdVersionsGet(self, tool_id):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id, tool_version, descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id, tool_version, descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version, descriptor_type, rel_path):\n raise NotImplementedError\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version, descriptor_type):\n raise NotImplementedError\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n raise NotImplementedError\n\n\nclass TRSAdapter(TRSInterface):\n \"\"\"\n Adapter class for TRS client functionality.\n\n Args:\n trs_client: ...\n \"\"\"\n def __init__(self, trs_client):\n 
self.trs_client = trs_client\n\n def toolsGet(self):\n return self.trs_client.get_tools()\n\n def metadataGet(self):\n raise self.trs_client.get_tool_types()\n\n def toolsIdGet(self, tool_id):\n return self.trs_client.get_tool(tool_id)\n\n def toolsIdVersionGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_version(tool_id, tool_version)\n\n def toolsIdVersionsGet(self, tool_id):\n return self.trs_client.get_tool_versions(tool_id)\n\n def toolsIdVersionsVersionIdTypeDescriptorGet(self, tool_id, tool_version, descriptor_type):\n return self.trs_client.get_tool_descriptor(tool_id, tool_version, descriptor_type)\n\n def toolsIdVersionsVersionIdTypeDescriptorRelativePathGet(self, tool_id, tool_version, descriptor_type, rel_path):\n return self.trs_client.get_relative_tool_descriptor(tool_id, tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeTestsGet(self, tool_id, tool_version, descriptor_type, rel_path):\n return self.trs_client.get_tool_tests(tool_id, tool_version, descriptor_type, rel_path)\n\n def toolsIdVersionsVersionIdTypeFilesGet(self, tool_id, tool_version, descriptor_type):\n return self.trs_client.get_tools_with_relative_path(tool_id, tool_version, descriptor_type)\n\n def toolsIdVersionsContainerGet(self, tool_id, tool_version):\n return self.trs_client.get_tool_container_specs(tool_id, tool_version)\n\n\ndef load_trs_client(service_id, http_client=None):\n \"\"\"Return an API client for the selected workflow execution service.\"\"\"\n trs_client = TRSClient(service=_get_trs_opts(service_id))\n return TRSAdapter(trs_client)\n",
"step-ids": [
18,
21,
24,
27,
30
]
}
|
[
18,
21,
24,
27,
30
] |
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""ipyhi
ipyhi is a Jupyter notebook notification system.
It is based on the jupyter-notify package.
"""
import os
from setuptools import find_packages, setup
# Version components; bump these when cutting a release.
MAJOR = 0
MINOR = 1
MICRO = 0
ISRELEASED = False  # False appends a ".dev..." suffix to FULLVERSION below
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
FULLVERSION = VERSION
# First docstring line is the short description; everything after the
# blank line becomes the long description.
DOCLINES = __doc__.split('\n')
DESCRIPTION = DOCLINES[0]
LONG_DESCRIPTION = "\n".join(DOCLINES[2:])
def git_short_hash():
    """Return ``"+<short-hash>"`` for the current git HEAD, or '' if unavailable."""
    try:
        git_str = "+" + os.popen('git log -1 --format="%h"').read().strip()
    except Exception:  # narrowed from a bare except: don't mask SystemExit etc.
        git_str = ""
    else:
        if git_str == '+':  # fixes setuptools PEP issues with versioning
            git_str = ''
    return git_str
# Append a ".dev" suffix (plus the git hash when available) to non-release builds.
if not ISRELEASED:
    FULLVERSION += '.dev'+str(MICRO)+git_short_hash()
def write_version_py(filename='ipyhi/version.py'):
    """Write the generated version module to *filename*.

    The emitted file exposes ``short_version``, ``version`` and
    ``release`` so the installed package can report how it was built.
    """
    cnt = """\
# THIS FILE IS GENERATED FROM IPYHI SETUP.PY
# pylint: disable=missing-module-docstring
short_version = '%(version)s'
version = '%(fullversion)s'
release = %(isrelease)s
"""
    # ``with`` replaces the manual open/try/finally/close pattern and
    # guarantees the handle is closed even if the write fails.
    with open(filename, 'w') as fh:
        fh.write(cnt % {'version': VERSION,
                        'fullversion': FULLVERSION,
                        'isrelease': str(ISRELEASED)})
# Register the package with setuptools; the notification JavaScript under
# ipyhi/js ships as package data alongside the Python sources.
setup(
    name='ipyhi',
    version=VERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    author='Paul Nation',
    author_email='nonhermitian@gmail.com',
    url='https://github.com/nonhermitian/ipyhi',
    license='Apache-2',
    packages=find_packages(exclude=('tests', 'docs')),
    package_data={'ipyhi': ['js/*.js']},
    install_requires=[
        'ipython',
        'jupyter',
        'ipywidgets'
    ],
    classifiers=[
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9'
    ]
)
|
normal
|
{
"blob_id": "2e2de50a7d366ca1a98d29b33ed157a1e8445ada",
"index": 3523,
"step-1": "<mask token>\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\n<mask token>\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n 'nonhermitian@gmail.com', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-3": "<mask token>\nMAJOR = 0\nMINOR = 1\nMICRO = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = '\\n'.join(DOCLINES[2:])\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n 'nonhermitian@gmail.com', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-4": "<mask token>\nimport os\nfrom setuptools import find_packages, setup\nMAJOR = 0\nMINOR = 1\nMICRO = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = '\\n'.join(DOCLINES[2:])\n\n\ndef git_short_hash():\n try:\n git_str = '+' + os.popen('git log -1 --format=\"%h\"').read().strip()\n except:\n git_str = ''\n else:\n if git_str == '+':\n git_str = ''\n return git_str\n\n\nif not ISRELEASED:\n FULLVERSION += '.dev' + str(MICRO) + git_short_hash()\n\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion': FULLVERSION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\n\nsetup(name='ipyhi', version=VERSION, description=DESCRIPTION,\n long_description=LONG_DESCRIPTION, author='Paul Nation', author_email=\n 'nonhermitian@gmail.com', url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2', packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']}, install_requires=['ipython',\n 'jupyter', 'ipywidgets'], classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'])\n",
"step-5": "# (C) Copyright IBM 2020.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\"\"\"ipyhi\n\nipyhi is a Jupyter notebook notification system.\nIt is based on the jupyter-notify package.\n\"\"\"\nimport os\nfrom setuptools import find_packages, setup\n\nMAJOR = 0\nMINOR = 1\nMICRO = 0\n\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)\nFULLVERSION = VERSION\n\nDOCLINES = __doc__.split('\\n')\nDESCRIPTION = DOCLINES[0]\nLONG_DESCRIPTION = \"\\n\".join(DOCLINES[2:])\n\ndef git_short_hash():\n try:\n git_str = \"+\" + os.popen('git log -1 --format=\"%h\"').read().strip()\n except: # pylint: disable=bare-except\n git_str = \"\"\n else:\n if git_str == '+': #fixes setuptools PEP issues with versioning\n git_str = ''\n return git_str\n\nif not ISRELEASED:\n FULLVERSION += '.dev'+str(MICRO)+git_short_hash()\n\ndef write_version_py(filename='ipyhi/version.py'):\n cnt = \"\"\"\\\n# THIS FILE IS GENERATED FROM IPYHI SETUP.PY\n# pylint: disable=missing-module-docstring\nshort_version = '%(version)s'\nversion = '%(fullversion)s'\nrelease = %(isrelease)s\n\"\"\"\n a = open(filename, 'w')\n try:\n a.write(cnt % {'version': VERSION, 'fullversion':\n FULLVERSION, 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\nsetup(\n name='ipyhi',\n version=VERSION,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n author='Paul Nation',\n author_email='nonhermitian@gmail.com',\n url='https://github.com/nonhermitian/ipyhi',\n license='Apache-2',\n packages=find_packages(exclude=('tests', 'docs')),\n package_data={'ipyhi': ['js/*.js']},\n install_requires=[\n 'ipython',\n 
'jupyter',\n 'ipywidgets'\n ],\n classifiers=[\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9'\n ]\n)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.task
def add(x, y):
return x + y
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Celery('tasks', broker='redis://localhost')
@app.task
def add(x, y):
return x + y
<|reserved_special_token_1|>
from celery import Celery
app = Celery('tasks', broker='redis://localhost')
@app.task
def add(x, y):
return x + y
<|reserved_special_token_1|>
#from __future__ import absolute_import
#import os
from celery import Celery
#from django.conf import settings
#os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'learning.settings')
app = Celery('tasks', broker="redis://localhost")
#app.config_from_object('django.conf:settings')
#app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task
def add(x, y):
return x+y
#print('Request:{0!r}'.format(self.request))
|
flexible
|
{
"blob_id": "3ef114dd35ef3995ae73bf85bbe38db4fb7045d8",
"index": 7315,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@app.task\ndef add(x, y):\n return x + y\n",
"step-3": "<mask token>\napp = Celery('tasks', broker='redis://localhost')\n\n\n@app.task\ndef add(x, y):\n return x + y\n",
"step-4": "from celery import Celery\napp = Celery('tasks', broker='redis://localhost')\n\n\n@app.task\ndef add(x, y):\n return x + y\n",
"step-5": "\n#from __future__ import absolute_import\n#import os\nfrom celery import Celery\n#from django.conf import settings\n\n#os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'learning.settings')\napp = Celery('tasks', broker=\"redis://localhost\")\n\n\n#app.config_from_object('django.conf:settings')\n#app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)\n\n\n@app.task\ndef add(x, y):\n return x+y\n #print('Request:{0!r}'.format(self.request))\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#%%
# -*- coding: utf-8 -*-
import numpy as np
import plotly
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import pandas as pd
import os
# Locations of the 2020-06-10 RSSI measurement data and figure output.
output_directory = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'
antennas = ['original_whip']
folder = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'
# Orientation sweep files: every 45-degree base angle contributes a pair of
# reflector/initiator readings, plus a final wrap-around sample at 360/360.
ri_filenames = []
for base_angle in range(0, 360, 45):
    ri_filenames.extend(['r%di%d.csv' % (base_angle, base_angle),
                         'r%di%d.csv' % (base_angle + 45, base_angle)])
ri_filenames.append('r360i360.csv')
# Position sweep (0..360 deg) and separation sweep (0.75..3.00 m) files.
angle_filenames = ['%d.csv' % (angle) for angle in range(0, 405, 45)]
distance_filenames = ['%1.2f.csv' % (sep) for sep in np.arange(.75, 3.25, .25)]
# Styling shared by the white "ideal value" reference lines.
ref_line = dict(color='white', width=1)
# Plot yaw data (orientation_exp1): for each antenna, build 2-D histograms of
# initiator RSSI (top) and calculated distance (bottom) versus yaw angle.
for antenna in antennas:
    fig = make_subplots(rows=2, cols=1,
                        subplot_titles=['Initiator RSSI vs. yaw',
                                        'Calculated distance vs. yaw'],
                        shared_xaxes=True)
    rssi_hist2d = []  # one normalized RSSI histogram column per angle file
    dist_hist2d = []  # one normalized distance histogram column per angle file
    experiment = 'orientation_exp1'
    dist_lim = [100, 0]  # [lower, upper] distance-axis bounds (m), widened below
    db_lim = [-100, 0]   # [upper, lower] RSSI-axis bounds (dBm); stored inverted
    angle_axis = np.arange(0, 765, 45)  # one tick per file in ri_filenames (17)
    for filename in ri_filenames:
        data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
        Dist = np.around(data['distance'], 1)  # bin calculated distance to 0.1 m
        # Widen the axis limits so every sample fits with a small margin.
        for rssi in data['i_rssi']:
            if rssi-5 < db_lim[1]:
                db_lim[1] = rssi-5
            if rssi+5 > db_lim[0]:
                db_lim[0] = rssi+5
        for dist in Dist:
            if dist-.5 < dist_lim[0]:
                dist_lim[0] = dist-.5
            if dist+.5 > dist_lim[1]:
                dist_lim[1] = dist+.5
        dist_lim[0] = np.max([0, dist_lim[0]])  # distances cannot be negative
        # RSSI histogram column: row index is -RSSI (dBm), value is frequency.
        column = np.zeros(200)
        hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
        for row in hist:
            row_idx = -int(row[0])
            column[row_idx] = row[1]/len(data['i_rssi'])
        rssi_hist2d.append(column)
        # Distance histogram column: row index is distance / 0.1 m.
        column = np.zeros(100)
        hist = np.array(np.unique(Dist, return_counts=True)).T
        for row in hist:
            row_idx = int(np.around(row[0]/.1))
            column[row_idx] = row[1]/len(Dist)
        dist_hist2d.append(column)
    rssi_hist2d = np.array(rssi_hist2d).T  # rows = value bins, cols = angles
    dist_hist2d = np.array(dist_hist2d).T
    maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])  # shared color scale
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=1)
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=1)
    # Reference line at the true 1 m separation.  Size the y-vector to the
    # x-axis (the original paired 16 y-values with 17 x-values, truncating it).
    fig.add_trace(go.Scatter(x=angle_axis, y=np.ones(len(angle_axis)),
                             mode='lines', line=ref_line), row=2, col=1)
    fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
                             'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
    fig.update_xaxes(title='Angle (°)', row=2, col=1)
    fig.update_layout(showlegend=False)
    fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
    fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
    fig.write_image(os.path.join(output_directory, 'orientation_exp1_%s.png'%(antenna)))
# Plot pitch data (orientation_exp2): same layout as the yaw figure, with the
# boards swept through pitch instead of yaw.
for antenna in antennas:
    fig = make_subplots(rows=2, cols=1,
                        subplot_titles=['Initiator RSSI vs. pitch',
                                        'Calculated distance vs. pitch'],
                        shared_xaxes=True)
    rssi_hist2d = []  # one normalized RSSI histogram column per angle file
    dist_hist2d = []  # one normalized distance histogram column per angle file
    experiment = 'orientation_exp2'
    dist_lim = [100, 0]  # [lower, upper] distance-axis bounds (m), widened below
    db_lim = [-100, 0]   # [upper, lower] RSSI-axis bounds (dBm); stored inverted
    angle_axis = np.arange(0, 765, 45)  # one tick per file in ri_filenames (17)
    for filename in ri_filenames:
        data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
        Dist = np.around(data['distance'], 1)  # bin calculated distance to 0.1 m
        # Widen the axis limits so every sample fits with a small margin.
        for rssi in data['i_rssi']:
            if rssi-5 < db_lim[1]:
                db_lim[1] = rssi-5
            if rssi+5 > db_lim[0]:
                db_lim[0] = rssi+5
        for dist in Dist:
            if dist-.5 < dist_lim[0]:
                dist_lim[0] = dist-.5
            if dist+.5 > dist_lim[1]:
                dist_lim[1] = dist+.5
        dist_lim[0] = np.max([0, dist_lim[0]])  # distances cannot be negative
        # RSSI histogram column: row index is -RSSI (dBm), value is frequency.
        column = np.zeros(200)
        hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
        for row in hist:
            row_idx = -int(row[0])
            column[row_idx] = row[1]/len(data['i_rssi'])
        rssi_hist2d.append(column)
        # Distance histogram column: row index is distance / 0.1 m.
        column = np.zeros(100)
        hist = np.array(np.unique(Dist, return_counts=True)).T
        for row in hist:
            row_idx = int(np.around(row[0]/.1))
            column[row_idx] = row[1]/len(Dist)
        dist_hist2d.append(column)
    rssi_hist2d = np.array(rssi_hist2d).T  # rows = value bins, cols = angles
    dist_hist2d = np.array(dist_hist2d).T
    maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])  # shared color scale
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=1)
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=1)
    # Reference line at the true 1 m separation.  Size the y-vector to the
    # x-axis (the original paired 16 y-values with 17 x-values, truncating it).
    fig.add_trace(go.Scatter(x=angle_axis, y=np.ones(len(angle_axis)),
                             mode='lines', line=ref_line), row=2, col=1)
    fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
                             'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
    fig.update_xaxes(title='Angle (°)', row=2, col=1)
    fig.update_layout(showlegend=False)
    fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
    fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
    fig.write_image(os.path.join(output_directory, 'orientation_exp2_%s.png'%(antenna)))
# Plot roll data (orientation_exp3): same layout as the yaw figure, with the
# boards swept through roll.
for antenna in antennas:
    fig = make_subplots(rows=2, cols=1,
                        subplot_titles=['Initiator RSSI vs. roll',
                                        'Calculated distance vs. roll'],
                        shared_xaxes=True)
    rssi_hist2d = []  # one normalized RSSI histogram column per angle file
    dist_hist2d = []  # one normalized distance histogram column per angle file
    experiment = 'orientation_exp3'
    dist_lim = [100, 0]  # [lower, upper] distance-axis bounds (m), widened below
    db_lim = [-100, 0]   # [upper, lower] RSSI-axis bounds (dBm); stored inverted
    angle_axis = np.arange(0, 765, 45)  # one tick per file in ri_filenames (17)
    for filename in ri_filenames:
        data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
        Dist = np.around(data['distance'], 1)  # bin calculated distance to 0.1 m
        # Widen the axis limits so every sample fits with a small margin.
        for rssi in data['i_rssi']:
            if rssi-5 < db_lim[1]:
                db_lim[1] = rssi-5
            if rssi+5 > db_lim[0]:
                db_lim[0] = rssi+5
        for dist in Dist:
            if dist-.5 < dist_lim[0]:
                dist_lim[0] = dist-.5
            if dist+.5 > dist_lim[1]:
                dist_lim[1] = dist+.5
        dist_lim[0] = np.max([0, dist_lim[0]])  # distances cannot be negative
        # RSSI histogram column: row index is -RSSI (dBm), value is frequency.
        column = np.zeros(200)
        hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
        for row in hist:
            row_idx = -int(row[0])
            column[row_idx] = row[1]/len(data['i_rssi'])
        rssi_hist2d.append(column)
        # Distance histogram column: row index is distance / 0.1 m.
        column = np.zeros(100)
        hist = np.array(np.unique(Dist, return_counts=True)).T
        for row in hist:
            row_idx = int(np.around(row[0]/.1))
            column[row_idx] = row[1]/len(Dist)
        dist_hist2d.append(column)
    rssi_hist2d = np.array(rssi_hist2d).T  # rows = value bins, cols = angles
    dist_hist2d = np.array(dist_hist2d).T
    maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])  # shared color scale
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=1)
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=1)
    # Reference line at the true 1 m separation.  Size the y-vector to the
    # x-axis (the original paired 16 y-values with 17 x-values, truncating it).
    fig.add_trace(go.Scatter(x=angle_axis, y=np.ones(len(angle_axis)),
                             mode='lines', line=ref_line), row=2, col=1)
    fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
                             'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
    fig.update_xaxes(title='Angle (°)', row=2, col=1)
    fig.update_layout(showlegend=False)
    fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
    fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
    fig.write_image(os.path.join(output_directory, 'orientation_exp3_%s.png'%(antenna)))
# Plot position data (orientation_exp4): histograms of initiator RSSI and
# calculated distance versus the angular position of the initiator board.
for antenna in antennas:
    fig = make_subplots(rows=2, cols=1,
                        subplot_titles=['Initiator RSSI vs. position',
                                        'Calculated distance vs. position'],
                        shared_xaxes=True)
    rssi_hist2d = []  # one normalized RSSI histogram column per angle file
    dist_hist2d = []  # one normalized distance histogram column per angle file
    experiment = 'orientation_exp4'
    dist_lim = [100, 0]  # [lower, upper] distance-axis bounds (m), widened below
    db_lim = [-100, 0]   # [upper, lower] RSSI-axis bounds (dBm); stored inverted
    # One tick per file: angle_filenames covers 0..360 deg in 45-deg steps
    # (9 files).  The original axis stopped at 315 deg and dropped a column.
    angle_axis = np.arange(0, 405, 45)
    for filename in angle_filenames:
        data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
        Dist = np.around(data['distance'], 1)  # bin calculated distance to 0.1 m
        # Widen the axis limits so every sample fits with a small margin.
        for rssi in data['i_rssi']:
            if rssi-5 < db_lim[1]:
                db_lim[1] = rssi-5
            if rssi+5 > db_lim[0]:
                db_lim[0] = rssi+5
        for dist in Dist:
            if dist-.5 < dist_lim[0]:
                dist_lim[0] = dist-.5
            if dist+.5 > dist_lim[1]:
                dist_lim[1] = dist+.5
        dist_lim[0] = np.max([0, dist_lim[0]])  # distances cannot be negative
        # RSSI histogram column: row index is -RSSI (dBm), value is frequency.
        column = np.zeros(200)
        hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
        for row in hist:
            row_idx = -int(row[0])
            column[row_idx] = row[1]/len(data['i_rssi'])
        rssi_hist2d.append(column)
        # Distance histogram column: row index is distance / 0.1 m.
        column = np.zeros(100)
        hist = np.array(np.unique(Dist, return_counts=True)).T
        for row in hist:
            row_idx = int(np.around(row[0]/.1))
            column[row_idx] = row[1]/len(Dist)
        dist_hist2d.append(column)
    rssi_hist2d = np.array(rssi_hist2d).T  # rows = value bins, cols = angles
    dist_hist2d = np.array(dist_hist2d).T
    maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])  # shared color scale
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=1)
    fig.add_trace(go.Heatmap(
        x=angle_axis,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=1)
    # Reference line at the true 1 m separation, sized to match the x-axis.
    fig.add_trace(go.Scatter(x=angle_axis, y=np.ones(len(angle_axis)),
                             mode='lines', line=ref_line), row=2, col=1)
    fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
                             'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
    fig.update_xaxes(title='Angle (°)', row=2, col=1)
    fig.update_layout(showlegend=False)
    fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
    fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
    fig.write_image(os.path.join(output_directory, 'orientation_exp4_%s.png'%(antenna)))
# Plot separation data: compare line-of-sight and blocked measurements at a
# range of board separations, with both experiments sharing one color scale
# and identical axis limits.

def _distance_histograms(antenna, experiment, db_lim, dist_lim):
    """Read every separation CSV for *experiment* under *antenna* and return
    (rssi_hist2d, dist_hist2d), each with rows = value bins and one column per
    separation in ``distance_filenames``.

    *db_lim* ([upper, lower] RSSI bounds, dBm) and *dist_lim* ([lower, upper]
    distance bounds, m) are widened in place so that the line-of-sight and
    blocked figures end up with identical axis ranges.
    """
    rssi_cols = []
    dist_cols = []
    for filename in distance_filenames:
        data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))
        Dist = np.around(data['distance'], 1)  # bin calculated distance to 0.1 m
        # Widen the axis limits so every sample fits with a small margin.
        for rssi in data['i_rssi']:
            if rssi-5 < db_lim[1]:
                db_lim[1] = rssi-5
            if rssi+5 > db_lim[0]:
                db_lim[0] = rssi+5
        for dist in Dist:
            if dist-.5 < dist_lim[0]:
                dist_lim[0] = dist-.5
            if dist+.5 > dist_lim[1]:
                dist_lim[1] = dist+.5
        dist_lim[0] = np.max([0, dist_lim[0]])  # distances cannot be negative
        # RSSI histogram column: row index is -RSSI (dBm), value is frequency.
        column = np.zeros(200)
        hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T
        for row in hist:
            column[-int(row[0])] = row[1]/len(data['i_rssi'])
        rssi_cols.append(column)
        # Distance histogram column: row index is distance / 0.1 m.  Use 1000
        # bins for both experiments (the original mixed 100 and 1000, and the
        # 100-bin variant would overflow past 10 m).
        column = np.zeros(1000)
        hist = np.array(np.unique(Dist, return_counts=True)).T
        for row in hist:
            column[int(np.around(row[0]/.1))] = row[1]/len(Dist)
        dist_cols.append(column)
    return np.array(rssi_cols).T, np.array(dist_cols).T

for antenna in antennas:
    fig = make_subplots(rows=2, cols=2,
                        subplot_titles=['Line of sight', 'Blocked'],
                        shared_xaxes=True)
    separations = np.arange(.75, 3.25, .25)  # true separations (m), one per file
    dist_lim = [100, 0]
    db_lim = [-100, 0]
    rssi_los_hist2d, dist_los_hist2d = _distance_histograms(
        antenna, 'distance_los', db_lim, dist_lim)
    rssi_blocked_hist2d, dist_blocked_hist2d = _distance_histograms(
        antenna, 'distance_blocked', db_lim, dist_lim)
    # Shared color scale from THIS figure's data.  The original accidentally
    # reused rssi_hist2d/dist_hist2d left over from the orientation plots.
    maxz = max(rssi_los_hist2d.max(), dist_los_hist2d.max(),
               rssi_blocked_hist2d.max(), dist_blocked_hist2d.max())
    fig.add_trace(go.Heatmap(
        x=separations,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_los_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=1)
    fig.add_trace(go.Heatmap(
        x=separations,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_los_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=1)
    fig.add_trace(go.Heatmap(
        x=separations,
        y=np.arange(db_lim[0], db_lim[1], -1),
        z=rssi_blocked_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],
        zmin=0, zmax=maxz), row=1, col=2)
    fig.add_trace(go.Heatmap(
        x=separations,
        y=np.arange(dist_lim[0], dist_lim[1], .1),
        z=dist_blocked_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],
        zmin=0, zmax=maxz), row=2, col=2)
    # Ideal calculated-distance == true-separation reference lines.
    fig.add_trace(go.Scatter(x=separations, y=separations,
                             mode='lines', line=ref_line), row=2, col=1)
    fig.add_trace(go.Scatter(x=separations, y=separations,
                             mode='lines', line=ref_line), row=2, col=2)
    fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),
                             'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})
    fig.update_xaxes(title='Separation (m)', row=2, col=1)
    fig.update_xaxes(title='Separation (m)', row=2, col=2)
    fig.update_layout(showlegend=False)
    fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)
    fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)
    fig.write_image(os.path.join(output_directory, 'distance_%s.png'%(antenna)))
|
normal
|
{
"blob_id": "3d3b9956a98f11a170d66280abe7f193cef9ccfb",
"index": 808,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\n<mask token>\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', 
line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n 
fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = 
np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. 
position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n 
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = 
np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n 
fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"step-3": "<mask token>\noutput_directory = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = [('%d.csv' % n) for n in np.arange(0, 405, 45)]\ndistance_filenames = [('%1.2f.csv' % n) for n in np.arange(0.75, 3.25, 0.25)]\nref_line = dict(color='white', width=1)\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), 
z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n 
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 
Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], 
dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n 
experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 
3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"step-4": "import numpy as np\nimport plotly\nfrom plotly.subplots import make_subplots\nimport plotly.graph_objects as go\nimport pandas as pd\nimport os\noutput_directory = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = 'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv' % (i_angle, i_angle))\n ri_filenames.append('r%di%d.csv' % (i_angle + 45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = [('%d.csv' % n) for n in np.arange(0, 405, 45)]\ndistance_filenames = [('%1.2f.csv' % n) for n in np.arange(0.75, 3.25, 0.25)]\nref_line = dict(color='white', width=1)\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. yaw', 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = 
np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp1_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. pitch', 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n 
fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp2_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. roll', 'Calculated distance vs. roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 765, 45), y=np.arange(dist_lim[\n 0], dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 
Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp3_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1, subplot_titles=[\n 'Initiator RSSI vs. position', 'Calculated distance vs. position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(db_lim[0],\n db_lim[1], -1), z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0, 360, 45), y=np.arange(dist_lim[\n 0], 
dist_lim[1], 0.1), z=dist_hist2d[int(dist_lim[0] / 0.1):int(\n dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1] * 16),\n mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, \n 'orientation_exp4_%s.png' % antenna))\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2, subplot_titles=['Line of sight',\n 'Blocked'], shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n 
experiment = 'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi - 5 < db_lim[1]:\n db_lim[1] = rssi - 5\n if rssi + 5 > db_lim[0]:\n db_lim[0] = rssi + 5\n for dist in Dist:\n if dist - 0.5 < dist_lim[0]:\n dist_lim[0] = dist - 0.5\n if dist + 0.5 > dist_lim[1]:\n dist_lim[1] = dist + 0.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1] / len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0] / 0.1))\n column[row_idx] = row[1] / len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_los_hist2d[int(-db_lim[0]):int(-\n db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_los_hist2d[int(dist_lim[0] /\n 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n db_lim[0], db_lim[1], -1), z=rssi_blocked_hist2d[int(-db_lim[0]):\n int(-db_lim[1]), :], zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n dist_lim[0], dist_lim[1], 0.1), z=dist_blocked_hist2d[int(dist_lim[\n 0] / 0.1):int(dist_lim[1] / 0.1), :], zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 
3.25, 0.25), mode='lines', line=ref_line), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0.75, 3.25, 0.25), y=np.arange(\n 0.75, 3.25, 0.25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna' %\n antenna, 'xanchor': 'center', 'yanchor': 'top', 'y': 0.95, 'x': 0.5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png' % antenna)\n )\n",
"step-5": "#%%\n# -*- coding: utf-8 -*-\n\nimport numpy as np\nimport plotly\nfrom plotly.subplots import make_subplots\nimport plotly.graph_objects as go\nimport pandas as pd\nimport os\n\noutput_directory = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/figures'\nantennas = ['original_whip']\nfolder = r'C:/Users/jgamm/Desktop/rssi_measurement/2020-06-10/data'\nri_filenames = []\nfor i_angle in np.arange(0, 360, 45):\n ri_filenames.append('r%di%d.csv'%(i_angle, i_angle))\n ri_filenames.append('r%di%d.csv'%(i_angle+45, i_angle))\nri_filenames.append('r360i360.csv')\nangle_filenames = ['%d.csv'%(n) for n in np.arange(0, 405, 45)]\ndistance_filenames = ['%1.2f.csv'%(n) for n in np.arange(.75, 3.25, .25)]\n\nref_line = dict(color='white', width=1)\n\n# Plot yaw data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. yaw',\n 'Calculated distance vs. yaw'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp1'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = 
np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp1_%s.png'%(antenna)))\n\n# Plot pitch data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. pitch',\n 'Calculated distance vs. 
pitch'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp2'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', 
row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp2_%s.png'%(antenna)))\n\n# Plot roll data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. roll',\n 'Calculated distance vs. roll'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp3'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in ri_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 765, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 765, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 
'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp3_%s.png'%(antenna)))\n\n# Plot position data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=1,\n subplot_titles=['Initiator RSSI vs. position',\n 'Calculated distance vs. position'],\n shared_xaxes=True)\n rssi_hist2d = []\n dist_hist2d = []\n experiment = 'orientation_exp4'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in angle_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_hist2d.append(column)\n rssi_hist2d = np.array(rssi_hist2d).T\n dist_hist2d = np.array(dist_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 360, 45),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(0, 360, 45),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n 
z=dist_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Scatter(x=np.arange(0, 360, 45), y=np.array([1]*16), mode='lines', line=ref_line), row=2, col=1)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Angle (°)', row=2, col=1)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'orientation_exp4_%s.png'%(antenna)))\n\n# Plot separation data\nfor antenna in antennas:\n fig = make_subplots(rows=2, cols=2,\n subplot_titles=['Line of sight', 'Blocked'],\n shared_xaxes=True)\n rssi_los_hist2d = []\n dist_los_hist2d = []\n experiment = 'distance_los'\n dist_lim = [100, 0]\n db_lim = [-100, 0]\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_los_hist2d.append(column)\n column = np.zeros(100)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_los_hist2d.append(column)\n rssi_los_hist2d = np.array(rssi_los_hist2d).T\n dist_los_hist2d = np.array(dist_los_hist2d).T\n \n rssi_blocked_hist2d = []\n dist_blocked_hist2d = []\n experiment = 
'distance_blocked'\n for filename in distance_filenames:\n data = pd.read_csv(os.path.join(folder, antenna, experiment, filename))\n Dist = np.around(data['distance'], 1)\n for rssi in data['i_rssi']:\n if rssi-5 < db_lim[1]:\n db_lim[1] = rssi-5\n if rssi+5 > db_lim[0]:\n db_lim[0] = rssi+5\n for dist in Dist:\n if dist-.5 < dist_lim[0]:\n dist_lim[0] = dist-.5\n if dist+.5 > dist_lim[1]:\n dist_lim[1] = dist+.5\n dist_lim[0] = np.max([0, dist_lim[0]])\n column = np.zeros(200)\n hist = np.array(np.unique(data['i_rssi'], return_counts=True)).T\n for row in hist:\n row_idx = -int(row[0])\n column[row_idx] = row[1]/len(data['i_rssi'])\n rssi_blocked_hist2d.append(column)\n column = np.zeros(1000)\n hist = np.array(np.unique(Dist, return_counts=True)).T\n for row in hist:\n row_idx = int(np.around(row[0]/.1))\n column[row_idx] = row[1]/len(Dist)\n dist_blocked_hist2d.append(column)\n rssi_blocked_hist2d = np.array(rssi_blocked_hist2d).T\n dist_blocked_hist2d = np.array(dist_blocked_hist2d).T\n \n maxz = np.max([np.max(rssi_hist2d), np.max(dist_hist2d)])\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_los_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_los_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=1)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(db_lim[0], db_lim[1], -1),\n z=rssi_blocked_hist2d[int(-db_lim[0]):int(-db_lim[1]), :],\n zmin=0, zmax=maxz), row=1, col=2)\n fig.add_trace(go.Heatmap(\n x=np.arange(.75, 3.25, .25),\n y=np.arange(dist_lim[0], dist_lim[1], .1),\n z=dist_blocked_hist2d[int(dist_lim[0]/.1):int(dist_lim[1]/.1), :],\n zmin=0, zmax=maxz), row=2, col=2)\n fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, 
col=1)\n fig.add_trace(go.Scatter(x=np.arange(.75, 3.25, .25), y=np.arange(.75, 3.25, .25), mode='lines', line=ref_line), row=2, col=2)\n fig.update_layout(title={'text': 'DA14695 Evaluation Board, %s antenna'%(antenna),\n 'xanchor': 'center', 'yanchor': 'top', 'y': .95, 'x': .5})\n fig.update_xaxes(title='Separation (m)', row=2, col=1)\n fig.update_xaxes(title='Separation (m)', row=2, col=2)\n fig.update_layout(showlegend=False)\n fig.update_yaxes(title_text='Initiator RSSI (dBm)', row=1, col=1)\n fig.update_yaxes(title_text='Calculated distance (m)', row=2, col=1)\n fig.write_image(os.path.join(output_directory, 'distance_%s.png'%(antenna)))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Acoount(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Acoount(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
<|reserved_special_token_1|>
import datetime
from django.db import models
from django.utils import timezone
class Acoount(models.Model):
first_name = models.CharField("Ім'я", max_length=50)
last_name = models.CharField('Прізвище', max_length=50)
username = models.CharField('Псевдонім', max_length=50)
email = models.CharField('Електронна почта', max_length=16)
password = models.CharField('Пароль', max_length=16)
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
<|reserved_special_token_1|>
import datetime
from django.db import models
from django.utils import timezone
class Acoount(models.Model):
first_name = models.CharField("Ім\'я", max_length=50)
last_name = models.CharField('Прізвище', max_length=50)
username = models.CharField('Псевдонім', max_length=50)
email = models.CharField('Електронна почта', max_length=16)
password = models.CharField('Пароль', max_length=16)
def __str__(self):
return self.first_name + ' ' + self.last_name
class Meta:
verbose_name = 'Акаунт'
verbose_name_plural = 'Акаунти'
|
flexible
|
{
"blob_id": "18c2fe40b51ad1489d55aa2be068a1c4f381a2a5",
"index": 553,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Acoount(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = 'Акаунт'\n verbose_name_plural = 'Акаунти'\n",
"step-3": "<mask token>\n\n\nclass Acoount(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.first_name + ' ' + self.last_name\n\n\n class Meta:\n verbose_name = 'Акаунт'\n verbose_name_plural = 'Акаунти'\n",
"step-4": "import datetime\nfrom django.db import models\nfrom django.utils import timezone\n\n\nclass Acoount(models.Model):\n first_name = models.CharField(\"Ім'я\", max_length=50)\n last_name = models.CharField('Прізвище', max_length=50)\n username = models.CharField('Псевдонім', max_length=50)\n email = models.CharField('Електронна почта', max_length=16)\n password = models.CharField('Пароль', max_length=16)\n\n def __str__(self):\n return self.first_name + ' ' + self.last_name\n\n\n class Meta:\n verbose_name = 'Акаунт'\n verbose_name_plural = 'Акаунти'\n",
"step-5": "import datetime\nfrom django.db import models\n\nfrom django.utils import timezone\n\n\nclass Acoount(models.Model):\n first_name = models.CharField(\"Ім\\'я\", max_length=50)\n last_name = models.CharField('Прізвище', max_length=50)\n username = models.CharField('Псевдонім', max_length=50)\n email = models.CharField('Електронна почта', max_length=16)\n password = models.CharField('Пароль', max_length=16)\n \n\n\n def __str__(self):\n return self.first_name + ' ' + self.last_name\n\n class Meta:\n verbose_name = 'Акаунт'\n verbose_name_plural = 'Акаунти'",
"step-ids": [
0,
1,
2,
4,
5
]
}
|
[
0,
1,
2,
4,
5
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.