Dataset columns: code (string, lengths 13–6.09M) | order_type (string, 2 classes) | original_example (dict) | step_ids (list, lengths 1–5)
|---|---|---|---|
<|reserved_special_token_0|>
class StudentMotionPlanner(GreedyBestFirstSearch):
<|reserved_special_token_0|>
def __init__(self, scenario, planningProblem, automata, plot_config=
DefaultPlotConfig):
super().__init__(scenario=scenario, planningProblem=planningProblem,
automaton=automata, plot_config=plot_config)
def evaluation_function(self, node_current: PriorityNode) ->float:
"""
Evaluation function of GBFS is f(n) = h(n)
"""
node_current.priority = self.heuristic_function(node_current=
node_current)
return node_current.priority
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    def cost_for_modeC_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-C planning problems.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: non-negative cost estimate; np.inf prunes the node
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # A (near-)standstill cannot make progress towards the goal; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging here.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoint of the desired orientation interval.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                # Width measure of the goal orientation interval (currently unused below).
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                arry = node_current.list_paths[-1][-1].position
                a = np.array([arry[0], arry[1]])
                if self.routeplannerresult is not None:
                    # Lateral offset from the route planner's reference path (logging only).
                    distance_to_refrence = self.calc_distance_to_nearest_point(self.routeplannerresult.reference_path, a)
                else:
                    distance_to_refrence = 0
                if output_logs:
                    print('distance to reference path: ', distance_to_refrence)
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    # Practically at the goal position but too early: wait out the time window.
                    return self.time_desired.start - node_current.list_paths[-1][-1].time_step
                if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
                    # Inside the goal region: favour matching orientation and low speed.
                    cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
                    cost = cost + diff_desiredOrient + velocity * 0.01
                    return cost
                # Default: time-to-goal estimate plus orientation and speed penalties.
                cost = (distance / velocity + 2 * diff_desiredOrient + velocity * 0.01)
                return cost
def cost_for_modeD_problem(self, node_current, output_logs):
totaltogoal = self.calc_distance_to_goal_from_point(node_current.
list_paths[-1][-1])
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
if self.planningProblem.goal.is_reached_only_pos(node_current.
list_paths[-1][-1]):
return (self.time_desired.start - node_current.list_paths[-
1][-1].time_step) * 0.01
velocity = node_current.list_paths[-1][-1].velocity
if np.isclose(velocity, 0):
return np.inf
cost = totaltogoal / node_current.list_paths[-1][-1].velocity
return cost
<|reserved_special_token_0|>
def calc_distance_to_ref_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
smallest_points = nsmallest(2, distances)
index1 = distances.index(smallest_points[0])
index2 = distances.index(smallest_points[1])
p1 = self.refPathParsedPnts[index1]
p2 = self.refPathParsedPnts[index2]
distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /
np.linalg.norm(p2 - p1))
return distance_to_refrence
def calc_distance_to_goal_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
index_smallest_dist = distances.index(min(distances))
totaltogoal = 0
for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):
totaltogoal = totaltogoal + self.euclidean_distance(self.
refPathParsedPnts[p], self.refPathParsedPnts[p + 1])
return totaltogoal
def get_index_nearest_obst_infront(self, node_current):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
disttoobst = [np.inf] * len(self.list_obstacles)
for i in range(len(self.list_obstacles)):
obst = self.list_obstacles[i]
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
dist = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if dist <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
disttoobst[i] = dist
else:
disttoobst[i] = np.inf
else:
disttoobst[i] = np.inf
index_smallest_dist = disttoobst.index(min(disttoobst))
if disttoobst[index_smallest_dist] == np.inf:
index_smallest_dist = -1
return index_smallest_dist
<|reserved_special_token_0|>
    def cost_for_modeA_problem_old(self, node_current, output_logs):
        """Superseded heuristic for mode-A problems (kept for reference).

        Branches on distance-to-goal bands (>=10, 5..10, <5) and on whether
        the heading deviates more than 45 deg from the direction to the goal.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: cost estimate; np.inf prunes badly oriented nearby states
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi — introduces a small error
                # in the 45-degree threshold used below.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoints of the desired orientation and velocity intervals.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # Width measure of the goal orientation interval; gates the
                # orientation penalty in the bands below.
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                if distance <= 1:
                    # Practically at the goal: only the velocity deviation (and,
                    # for narrow goal intervals, orientation) matters.
                    desired_vel_weight = 1
                    desired_orient_weight = 1
                    cost = desired_vel_weight * diff_deiredVelocity
                    if angle_intervall < 1 and angle_intervall != 0:
                        cost = (cost + desired_orient_weight * diff_desiredOrient)
                    return cost
                # NOTE(review): unreachable — distance <= 0.1 implies
                # distance <= 1, which already returned above.
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.001
                if orientationToGoalDiffdegree > 45:
                    # Facing far away from the goal: tolerated only at distance.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity
                        return cost
                    if distance < 10 and distance >= 5:
                        return np.inf
                    if distance < 5:
                        return np.inf
                else:
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity * velocity_weight
                        return cost
                    if distance < 10 and distance >= 5:
                        # Mid band: time-to-goal plus velocity (and possibly
                        # orientation) deviation penalties.
                        velocity_weight = 0.5
                        desired_vel_weight = 1
                        desired_orient_weight = 1
                        cost = distance / velocity
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight * diff_desiredOrient)
                        return cost
                    if distance < 5:
                        # Near band: same shape but heavier deviation weights.
                        cost = distance / velocity
                        desired_vel_weight = 3
                        desired_orient_weight = 3
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight * diff_desiredOrient)
                        return cost
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StudentMotionPlanner(GreedyBestFirstSearch):
<|reserved_special_token_0|>
def __init__(self, scenario, planningProblem, automata, plot_config=
DefaultPlotConfig):
super().__init__(scenario=scenario, planningProblem=planningProblem,
automaton=automata, plot_config=plot_config)
def evaluation_function(self, node_current: PriorityNode) ->float:
"""
Evaluation function of GBFS is f(n) = h(n)
"""
node_current.priority = self.heuristic_function(node_current=
node_current)
return node_current.priority
<|reserved_special_token_0|>
    def cost_for_modeA_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-A planning problems.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: cost estimate; np.inf prunes states near the goal that
            violate the goal's orientation or velocity interval
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoints of the desired orientation and velocity intervals.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_desiredOrient = abs(self.calc_orientation_diff(desired_orient, path_last[-1].orientation))
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # Width measure of the goal orientation interval (currently unused below).
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                current_orient = path_last[-1].orientation
                if distance <= 10:
                    # Close to the goal: reject states outside the goal's
                    # orientation or velocity interval outright.
                    if (current_orient < self.orientation_desired.start or
                        current_orient > self.orientation_desired.end):
                        return np.inf
                    if (velocity < self.velocity_desired.start or velocity >
                        self.velocity_desired.end):
                        return np.inf
                weight = 10
                # Time-to-goal estimate plus heavily weighted deviation from the
                # desired velocity and orientation midpoints.
                cost = (distance / velocity + weight * diff_deiredVelocity +
                    weight * diff_desiredOrient)
                return cost
    def cost_for_modeB_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-B planning problems.

        Uses the remaining arc length along the parsed reference path rather
        than the straight-line distance to the goal.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: non-negative cost estimate; np.inf prunes the node
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoint of the desired velocity interval.
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # NOTE(review): return value ignored — presumably called for a
                # side effect; confirm what test_if_in_goal_lanelet does.
                self.test_if_in_goal_lanelet(node_current)
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
                    # Inside the goal region: favour matching velocity and low speed.
                    cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
                    cost = cost + diff_deiredVelocity + velocity * 0.01
                    return cost
                # Default: time-to-goal estimate plus velocity penalties.
                cost = (distance / velocity + 2 * diff_deiredVelocity + velocity * 0.01)
                return cost
    def cost_for_modeC_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-C planning problems.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: non-negative cost estimate; np.inf prunes the node
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # A (near-)standstill cannot make progress towards the goal; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging here.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoint of the desired orientation interval.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                # Width measure of the goal orientation interval (currently unused below).
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                arry = node_current.list_paths[-1][-1].position
                a = np.array([arry[0], arry[1]])
                if self.routeplannerresult is not None:
                    # Lateral offset from the route planner's reference path (logging only).
                    distance_to_refrence = self.calc_distance_to_nearest_point(self.routeplannerresult.reference_path, a)
                else:
                    distance_to_refrence = 0
                if output_logs:
                    print('distance to reference path: ', distance_to_refrence)
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    # Practically at the goal position but too early: wait out the time window.
                    return self.time_desired.start - node_current.list_paths[-1][-1].time_step
                if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
                    # Inside the goal region: favour matching orientation and low speed.
                    cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
                    cost = cost + diff_desiredOrient + velocity * 0.01
                    return cost
                # Default: time-to-goal estimate plus orientation and speed penalties.
                cost = (distance / velocity + 2 * diff_desiredOrient + velocity * 0.01)
                return cost
def cost_for_modeD_problem(self, node_current, output_logs):
totaltogoal = self.calc_distance_to_goal_from_point(node_current.
list_paths[-1][-1])
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
if self.planningProblem.goal.is_reached_only_pos(node_current.
list_paths[-1][-1]):
return (self.time_desired.start - node_current.list_paths[-
1][-1].time_step) * 0.01
velocity = node_current.list_paths[-1][-1].velocity
if np.isclose(velocity, 0):
return np.inf
cost = totaltogoal / node_current.list_paths[-1][-1].velocity
return cost
<|reserved_special_token_0|>
def calc_distance_to_ref_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
smallest_points = nsmallest(2, distances)
index1 = distances.index(smallest_points[0])
index2 = distances.index(smallest_points[1])
p1 = self.refPathParsedPnts[index1]
p2 = self.refPathParsedPnts[index2]
distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /
np.linalg.norm(p2 - p1))
return distance_to_refrence
def calc_distance_to_goal_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
index_smallest_dist = distances.index(min(distances))
totaltogoal = 0
for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):
totaltogoal = totaltogoal + self.euclidean_distance(self.
refPathParsedPnts[p], self.refPathParsedPnts[p + 1])
return totaltogoal
def get_index_nearest_obst_infront(self, node_current):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
disttoobst = [np.inf] * len(self.list_obstacles)
for i in range(len(self.list_obstacles)):
obst = self.list_obstacles[i]
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
dist = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if dist <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
disttoobst[i] = dist
else:
disttoobst[i] = np.inf
else:
disttoobst[i] = np.inf
index_smallest_dist = disttoobst.index(min(disttoobst))
if disttoobst[index_smallest_dist] == np.inf:
index_smallest_dist = -1
return index_smallest_dist
<|reserved_special_token_0|>
    def cost_for_modeA_problem_old(self, node_current, output_logs):
        """Superseded heuristic for mode-A problems (kept for reference).

        Branches on distance-to-goal bands (>=10, 5..10, <5) and on whether
        the heading deviates more than 45 deg from the direction to the goal.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: cost estimate; np.inf prunes badly oriented nearby states
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi — introduces a small error
                # in the 45-degree threshold used below.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoints of the desired orientation and velocity intervals.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # Width measure of the goal orientation interval; gates the
                # orientation penalty in the bands below.
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                if distance <= 1:
                    # Practically at the goal: only the velocity deviation (and,
                    # for narrow goal intervals, orientation) matters.
                    desired_vel_weight = 1
                    desired_orient_weight = 1
                    cost = desired_vel_weight * diff_deiredVelocity
                    if angle_intervall < 1 and angle_intervall != 0:
                        cost = (cost + desired_orient_weight * diff_desiredOrient)
                    return cost
                # NOTE(review): unreachable — distance <= 0.1 implies
                # distance <= 1, which already returned above.
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.001
                if orientationToGoalDiffdegree > 45:
                    # Facing far away from the goal: tolerated only at distance.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity
                        return cost
                    if distance < 10 and distance >= 5:
                        return np.inf
                    if distance < 5:
                        return np.inf
                else:
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity * velocity_weight
                        return cost
                    if distance < 10 and distance >= 5:
                        # Mid band: time-to-goal plus velocity (and possibly
                        # orientation) deviation penalties.
                        velocity_weight = 0.5
                        desired_vel_weight = 1
                        desired_orient_weight = 1
                        cost = distance / velocity
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight * diff_desiredOrient)
                        return cost
                    if distance < 5:
                        # Near band: same shape but heavier deviation weights.
                        cost = distance / velocity
                        desired_vel_weight = 3
                        desired_orient_weight = 3
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight * diff_desiredOrient)
                        return cost
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StudentMotionPlanner(GreedyBestFirstSearch):
<|reserved_special_token_0|>
def __init__(self, scenario, planningProblem, automata, plot_config=
DefaultPlotConfig):
super().__init__(scenario=scenario, planningProblem=planningProblem,
automaton=automata, plot_config=plot_config)
def evaluation_function(self, node_current: PriorityNode) ->float:
"""
Evaluation function of GBFS is f(n) = h(n)
"""
node_current.priority = self.heuristic_function(node_current=
node_current)
return node_current.priority
<|reserved_special_token_0|>
    def cost_for_modeA_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-A planning problems.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: cost estimate; np.inf prunes states near the goal that
            violate the goal's orientation or velocity interval
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoints of the desired orientation and velocity intervals.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_desiredOrient = abs(self.calc_orientation_diff(desired_orient, path_last[-1].orientation))
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # Width measure of the goal orientation interval (currently unused below).
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                current_orient = path_last[-1].orientation
                if distance <= 10:
                    # Close to the goal: reject states outside the goal's
                    # orientation or velocity interval outright.
                    if (current_orient < self.orientation_desired.start or
                        current_orient > self.orientation_desired.end):
                        return np.inf
                    if (velocity < self.velocity_desired.start or velocity >
                        self.velocity_desired.end):
                        return np.inf
                weight = 10
                # Time-to-goal estimate plus heavily weighted deviation from the
                # desired velocity and orientation midpoints.
                cost = (distance / velocity + weight * diff_deiredVelocity +
                    weight * diff_desiredOrient)
                return cost
    def cost_for_modeB_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-B planning problems.

        Uses the remaining arc length along the parsed reference path rather
        than the straight-line distance to the goal.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: non-negative cost estimate; np.inf prunes the node
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # Standstill cannot make progress; prune.
                return np.inf
            else:
                distance = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoint of the desired velocity interval.
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
                diff_deiredVelocity = abs(velocity - desired_velocity)
                # NOTE(review): return value ignored — presumably called for a
                # side effect; confirm what test_if_in_goal_lanelet does.
                self.test_if_in_goal_lanelet(node_current)
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired velocity of current node is(deg): ', diff_deiredVelocity)
                if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
                    # Inside the goal region: favour matching velocity and low speed.
                    cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
                    cost = cost + diff_deiredVelocity + velocity * 0.01
                    return cost
                # Default: time-to-goal estimate plus velocity penalties.
                cost = (distance / velocity + 2 * diff_deiredVelocity + velocity * 0.01)
                return cost
    def cost_for_modeC_problem(self, node_current, output_logs):
        """Heuristic cost used for mode-C planning problems.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print diagnostic information
        :return: non-negative cost estimate; np.inf prunes the node
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No positional goal: only the remaining time to the goal window matters.
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # A (near-)standstill cannot make progress towards the goal; prune.
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; this value is only used for logging here.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff) * 180 / 3.14
                # Midpoint of the desired orientation interval.
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                # Width measure of the goal orientation interval (currently unused below).
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                arry = node_current.list_paths[-1][-1].position
                a = np.array([arry[0], arry[1]])
                if self.routeplannerresult is not None:
                    # Lateral offset from the route planner's reference path (logging only).
                    distance_to_refrence = self.calc_distance_to_nearest_point(self.routeplannerresult.reference_path, a)
                else:
                    distance_to_refrence = 0
                if output_logs:
                    print('distance to reference path: ', distance_to_refrence)
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current.list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ', orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ', diff_desiredOrient)
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    # Practically at the goal position but too early: wait out the time window.
                    return self.time_desired.start - node_current.list_paths[-1][-1].time_step
                if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
                    # Inside the goal region: favour matching orientation and low speed.
                    cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
                    cost = cost + diff_desiredOrient + velocity * 0.01
                    return cost
                # Default: time-to-goal estimate plus orientation and speed penalties.
                cost = (distance / velocity + 2 * diff_desiredOrient + velocity * 0.01)
                return cost
def cost_for_modeD_problem(self, node_current, output_logs):
totaltogoal = self.calc_distance_to_goal_from_point(node_current.
list_paths[-1][-1])
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
if self.planningProblem.goal.is_reached_only_pos(node_current.
list_paths[-1][-1]):
return (self.time_desired.start - node_current.list_paths[-
1][-1].time_step) * 0.01
velocity = node_current.list_paths[-1][-1].velocity
if np.isclose(velocity, 0):
return np.inf
cost = totaltogoal / node_current.list_paths[-1][-1].velocity
return cost
    def cost_for_Survival_problem(self, node_current, output_logs):
        """Heuristic cost used for survival planning problems.

        Prunes states that close in on a slower moving obstacle directly in
        front; otherwise returns the time remaining to the desired time.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: unused here; kept for a uniform cost signature
        :return: remaining time steps, or np.inf for pruned states
        """
        currentorient = node_current.list_paths[-1][-1].orientation
        currentpos = node_current.list_paths[-1][-1].position
        currenttimestep = node_current.list_paths[-1][-1].time_step
        currentVel = node_current.list_paths[-1][-1].velocity
        for obst in self.list_obstacles:
            obstPos = obst.state_at_time(currenttimestep)
            if currentorient is not None and obstPos is not None:
                disttoobst = self.euclidean_distance(currentpos, obstPos.position)
                # Speed-dependent look-ahead distance.
                lookaheadVar = 1.375 * currentVel + 2.5
                if disttoobst <= lookaheadVar:
                    vectorToObst = np.array([currentpos, obstPos.position])
                    vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)
                    orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)
                    # Obstacle within ~0.2618 rad (15 deg) of the heading.
                    if abs(orientdiff) <= 0.261799:
                        if not 'velocity' in obstPos.attributes:
                            # Obstacle state carries no velocity attribute; skip it.
                            continue
                        if (node_current.list_paths[-1][-1].velocity >
                            obstPos.velocity and obstPos.velocity != 0):
                            # Faster than a moving obstacle directly ahead: prune.
                            return np.inf
        return self.time_desired.start - node_current.list_paths[-1][-1].time_step
def calc_distance_to_ref_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
smallest_points = nsmallest(2, distances)
index1 = distances.index(smallest_points[0])
index2 = distances.index(smallest_points[1])
p1 = self.refPathParsedPnts[index1]
p2 = self.refPathParsedPnts[index2]
distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /
np.linalg.norm(p2 - p1))
return distance_to_refrence
def calc_distance_to_goal_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
index_smallest_dist = distances.index(min(distances))
totaltogoal = 0
for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):
totaltogoal = totaltogoal + self.euclidean_distance(self.
refPathParsedPnts[p], self.refPathParsedPnts[p + 1])
return totaltogoal
def get_index_nearest_obst_infront(self, node_current):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
disttoobst = [np.inf] * len(self.list_obstacles)
for i in range(len(self.list_obstacles)):
obst = self.list_obstacles[i]
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
dist = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if dist <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
disttoobst[i] = dist
else:
disttoobst[i] = np.inf
else:
disttoobst[i] = np.inf
index_smallest_dist = disttoobst.index(min(disttoobst))
if disttoobst[index_smallest_dist] == np.inf:
index_smallest_dist = -1
return index_smallest_dist
<|reserved_special_token_0|>
    def cost_for_modeA_problem_old(self, node_current, output_logs):
        """Legacy ModeA cost (desired time, position, speed and orientation).

        Superseded by cost_for_modeA_problem but kept for reference. Blends
        a travel-time estimate (distance / velocity) with weighted penalties
        on the deviation from the desired velocity/orientation, using
        different weights per distance band and heading error.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print debug information
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No goal position: only the remaining time budget matters.
            return self.time_desired.start - node_current.list_paths[-1][-1
                ].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            # A standing vehicle cannot make progress towards the goal.
            if np.isclose(velocity, 0):
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=
                    node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,
                    path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; math.pi would be exact.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff
                    ) * 180 / 3.14
                # Mid-points of the desired orientation/velocity intervals.
                desired_orient = (self.orientation_desired.end + self.
                    orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.
                    velocity_desired.end) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient,
                    path_last[-1].orientation)
                diff_deiredVelocity = abs(velocity - desired_velocity)
                angle_intervall = abs(abs(self.orientation_desired.start) -
                    abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current
                        .list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ',
                        orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ',
                        diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ',
                        diff_deiredVelocity)
                # Very close to the goal: only match velocity/orientation.
                if distance <= 1:
                    desired_vel_weight = 1
                    desired_orient_weight = 1
                    cost = desired_vel_weight * diff_deiredVelocity
                    if angle_intervall < 1 and angle_intervall != 0:
                        cost = (cost + desired_orient_weight *
                            diff_desiredOrient)
                    return cost
                # NOTE(review): unreachable -- distance <= 0.1 implies
                # distance <= 1, which already returned above.
                if distance <= 0.1 and node_current.list_paths[-1][-1
                    ].time_step < self.time_desired.start:
                    return (self.time_desired.start - node_current.
                        list_paths[-1][-1].time_step) * 0.001
                if orientationToGoalDiffdegree > 45:
                    # Goal lies far off the current heading: only allow an
                    # approach from far away, otherwise prune the node.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity
                        return cost
                    if distance < 10 and distance >= 5:
                        return np.inf
                    if distance < 5:
                        return np.inf
                else:
                    # Roughly heading towards the goal: blend travel time with
                    # desired velocity/orientation penalties, per distance band.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity * velocity_weight
                        return cost
                    if distance < 10 and distance >= 5:
                        velocity_weight = 0.5
                        desired_vel_weight = 1
                        desired_orient_weight = 1
                        cost = distance / velocity
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight *
                                diff_desiredOrient)
                        return cost
                    if distance < 5:
                        cost = distance / velocity
                        desired_vel_weight = 3
                        desired_orient_weight = 3
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight *
                                diff_desiredOrient)
                        return cost
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StudentMotionPlanner(GreedyBestFirstSearch):
<|reserved_special_token_0|>
def __init__(self, scenario, planningProblem, automata, plot_config=
DefaultPlotConfig):
super().__init__(scenario=scenario, planningProblem=planningProblem,
automaton=automata, plot_config=plot_config)
def evaluation_function(self, node_current: PriorityNode) ->float:
"""
Evaluation function of GBFS is f(n) = h(n)
"""
node_current.priority = self.heuristic_function(node_current=
node_current)
return node_current.priority
def heuristic_function(self, node_current: PriorityNode) ->float:
"""
Function that evaluates the heuristic cost h(n) in student class.
Created by Mohamed A. Abdellaoui 10.01.2021
"""
output_logs = False
if output_logs:
print('##################')
print('current time step: ', node_current.list_paths[-1][-1].
time_step)
print('current problem mode', self.planningProblemType)
print('depth tree: ', node_current.depth_tree)
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
if self.reached_goal(node_current.list_paths[-1]):
return 0.0
if self.routeplannerresult is None:
return np.inf
if not self.disableObstAvoidance:
for obst in self.list_obstacles:
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
disttoobst = self.euclidean_distance(currentpos,
obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if disttoobst <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
if not 'velocity' in obstPos.attributes:
continue
if (node_current.list_paths[-1][-1].velocity >
obstPos.velocity and obstPos.velocity != 0):
return np.inf
index_smallest_dist = self.get_index_nearest_obst_infront(
node_current)
if index_smallest_dist != -1:
obst = self.list_obstacles[index_smallest_dist]
obstPos = obst.state_at_time(currenttimestep)
if obstPos is not None and 'velocity' in obstPos.attributes:
if obstPos.velocity == 0:
cost = node_current.list_paths[-1][-1].velocity
return cost
if node_current.list_paths[-1][-1
].velocity > obstPos.velocity:
return np.inf
cost = abs(node_current.list_paths[-1][-1].velocity -
obstPos.velocity)
return cost
if self.planningProblemType == 'ModeA':
cost = self.cost_for_modeA_problem(node_current, output_logs)
if output_logs:
print('Cost from modeA cost func: ', cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeB':
cost = self.cost_for_modeB_problem(node_current, output_logs)
if output_logs:
print('Cost from modeB cost func: ', cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeC':
cost = self.cost_for_modeC_problem(node_current, output_logs)
if output_logs:
print('Cost from modeB cost func: ', cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeD':
cost = self.cost_for_modeD_problem(node_current, output_logs)
if output_logs:
print('Cost from modeB cost func: ', cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'Survival':
cost = self.cost_for_Survival_problem(node_current, output_logs)
if output_logs:
print('Cost from modeB cost func: ', cost)
if cost < 0:
return 0
return cost
def cost_for_modeA_problem(self, node_current, output_logs):
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
distance = self.calc_euclidean_distance(current_node=
node_current)
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,
path_last[-1].orientation)
orientationToGoalDiffdegree = abs(orientationToGoalDiff
) * 180 / 3.14
desired_orient = (self.orientation_desired.end + self.
orientation_desired.start) / 2
desired_velocity = (self.velocity_desired.start + self.
velocity_desired.end) / 2
diff_desiredOrient = abs(self.calc_orientation_diff(
desired_orient, path_last[-1].orientation))
diff_deiredVelocity = abs(velocity - desired_velocity)
angle_intervall = abs(abs(self.orientation_desired.start) -
abs(self.orientation_desired.end))
if output_logs:
print('Distance to goal of current node is: ', distance)
print('Velocity of current node is: ', velocity)
print('Orientation of current position: ', node_current
.list_paths[-1][-1].orientation)
print('Angle to goal of current node is: ', angleToGoal)
print('orientation diff to goal of current node is(deg): ',
orientationToGoalDiffdegree)
print('diff desired orient of current node is(deg): ',
diff_desiredOrient)
print('diff desired velocity of current node is(deg): ',
diff_deiredVelocity)
current_orient = path_last[-1].orientation
if distance <= 10:
if (current_orient < self.orientation_desired.start or
current_orient > self.orientation_desired.end):
return np.inf
if (velocity < self.velocity_desired.start or velocity >
self.velocity_desired.end):
return np.inf
weight = 10
cost = (distance / velocity + weight * diff_deiredVelocity +
weight * diff_desiredOrient)
return cost
def cost_for_modeB_problem(self, node_current, output_logs):
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
distance = self.calc_distance_to_goal_from_point(node_current
.list_paths[-1][-1])
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,
path_last[-1].orientation)
orientationToGoalDiffdegree = abs(orientationToGoalDiff
) * 180 / 3.14
desired_velocity = (self.velocity_desired.start + self.
velocity_desired.end) / 2
diff_deiredVelocity = abs(velocity - desired_velocity)
self.test_if_in_goal_lanelet(node_current)
if output_logs:
print('Distance to goal of current node is: ', distance)
print('Velocity of current node is: ', velocity)
print('Orientation of current position: ', node_current
.list_paths[-1][-1].orientation)
print('Angle to goal of current node is: ', angleToGoal)
print('orientation diff to goal of current node is(deg): ',
orientationToGoalDiffdegree)
print('diff desired velocity of current node is(deg): ',
diff_deiredVelocity)
if self.planningProblem.goal.is_reached_only_pos(node_current
.list_paths[-1][-1]):
cost = (self.time_desired.start - node_current.
list_paths[-1][-1].time_step) * 0.01
cost = cost + diff_deiredVelocity + velocity * 0.01
return cost
cost = (distance / velocity + 2 * diff_deiredVelocity +
velocity * 0.01)
return cost
def cost_for_modeC_problem(self, node_current, output_logs):
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
distance = self.calc_euclidean_distance(current_node=
node_current)
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,
path_last[-1].orientation)
orientationToGoalDiffdegree = abs(orientationToGoalDiff
) * 180 / 3.14
desired_orient = (self.orientation_desired.end + self.
orientation_desired.start) / 2
diff_desiredOrient = self.calc_orientation_diff(desired_orient,
path_last[-1].orientation)
angle_intervall = abs(abs(self.orientation_desired.start) -
abs(self.orientation_desired.end))
arry = node_current.list_paths[-1][-1].position
a = np.array([arry[0], arry[1]])
if self.routeplannerresult is not None:
distance_to_refrence = self.calc_distance_to_nearest_point(
self.routeplannerresult.reference_path, a)
else:
distance_to_refrence = 0
if output_logs:
print('distance to reference path: ', distance_to_refrence)
print('Distance to goal of current node is: ', distance)
print('Velocity of current node is: ', velocity)
print('Orientation of current position: ', node_current
.list_paths[-1][-1].orientation)
print('Angle to goal of current node is: ', angleToGoal)
print('orientation diff to goal of current node is(deg): ',
orientationToGoalDiffdegree)
print('diff desired orient of current node is(deg): ',
diff_desiredOrient)
if distance <= 0.1 and node_current.list_paths[-1][-1
].time_step < self.time_desired.start:
return self.time_desired.start - node_current.list_paths[-1
][-1].time_step
if self.planningProblem.goal.is_reached_only_pos(node_current
.list_paths[-1][-1]):
cost = (self.time_desired.start - node_current.
list_paths[-1][-1].time_step) * 0.01
cost = cost + diff_desiredOrient + velocity * 0.01
return cost
cost = (distance / velocity + 2 * diff_desiredOrient +
velocity * 0.01)
return cost
def cost_for_modeD_problem(self, node_current, output_logs):
totaltogoal = self.calc_distance_to_goal_from_point(node_current.
list_paths[-1][-1])
if self.position_desired is None:
if output_logs:
print('exit Cost function because position desired is None!')
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
else:
if self.planningProblem.goal.is_reached_only_pos(node_current.
list_paths[-1][-1]):
return (self.time_desired.start - node_current.list_paths[-
1][-1].time_step) * 0.01
velocity = node_current.list_paths[-1][-1].velocity
if np.isclose(velocity, 0):
return np.inf
cost = totaltogoal / node_current.list_paths[-1][-1].velocity
return cost
def cost_for_Survival_problem(self, node_current, output_logs):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
for obst in self.list_obstacles:
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
disttoobst = self.euclidean_distance(currentpos, obstPos.
position)
lookaheadVar = 1.375 * currentVel + 2.5
if disttoobst <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
if not 'velocity' in obstPos.attributes:
continue
if (node_current.list_paths[-1][-1].velocity >
obstPos.velocity and obstPos.velocity != 0):
return np.inf
return self.time_desired.start - node_current.list_paths[-1][-1
].time_step
def calc_distance_to_ref_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
smallest_points = nsmallest(2, distances)
index1 = distances.index(smallest_points[0])
index2 = distances.index(smallest_points[1])
p1 = self.refPathParsedPnts[index1]
p2 = self.refPathParsedPnts[index2]
distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /
np.linalg.norm(p2 - p1))
return distance_to_refrence
def calc_distance_to_goal_from_point(self, state):
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
index_smallest_dist = distances.index(min(distances))
totaltogoal = 0
for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):
totaltogoal = totaltogoal + self.euclidean_distance(self.
refPathParsedPnts[p], self.refPathParsedPnts[p + 1])
return totaltogoal
def get_index_nearest_obst_infront(self, node_current):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
disttoobst = [np.inf] * len(self.list_obstacles)
for i in range(len(self.list_obstacles)):
obst = self.list_obstacles[i]
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
dist = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if dist <= lookaheadVar:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(
vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient,
vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
disttoobst[i] = dist
else:
disttoobst[i] = np.inf
else:
disttoobst[i] = np.inf
index_smallest_dist = disttoobst.index(min(disttoobst))
if disttoobst[index_smallest_dist] == np.inf:
index_smallest_dist = -1
return index_smallest_dist
<|reserved_special_token_0|>
    def cost_for_modeA_problem_old(self, node_current, output_logs):
        """Legacy ModeA cost (desired time, position, speed and orientation).

        Superseded by cost_for_modeA_problem but kept for reference. Blends
        a travel-time estimate (distance / velocity) with weighted penalties
        on the deviation from the desired velocity/orientation, using
        different weights per distance band and heading error.

        :param node_current: PriorityNode whose last state is evaluated
        :param output_logs: when True, print debug information
        """
        if self.position_desired is None:
            if output_logs:
                print('exit Cost function because position desired is None!')
            # No goal position: only the remaining time budget matters.
            return self.time_desired.start - node_current.list_paths[-1][-1
                ].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            # A standing vehicle cannot make progress towards the goal.
            if np.isclose(velocity, 0):
                return np.inf
            else:
                distance = self.calc_euclidean_distance(current_node=
                    node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,
                    path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; math.pi would be exact.
                orientationToGoalDiffdegree = abs(orientationToGoalDiff
                    ) * 180 / 3.14
                # Mid-points of the desired orientation/velocity intervals.
                desired_orient = (self.orientation_desired.end + self.
                    orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.
                    velocity_desired.end) / 2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient,
                    path_last[-1].orientation)
                diff_deiredVelocity = abs(velocity - desired_velocity)
                angle_intervall = abs(abs(self.orientation_desired.start) -
                    abs(self.orientation_desired.end))
                if output_logs:
                    print('Distance to goal of current node is: ', distance)
                    print('Velocity of current node is: ', velocity)
                    print('Orientation of current position: ', node_current
                        .list_paths[-1][-1].orientation)
                    print('Angle to goal of current node is: ', angleToGoal)
                    print('orientation diff to goal of current node is(deg): ',
                        orientationToGoalDiffdegree)
                    print('diff desired orient of current node is(deg): ',
                        diff_desiredOrient)
                    print('diff desired velocity of current node is(deg): ',
                        diff_deiredVelocity)
                # Very close to the goal: only match velocity/orientation.
                if distance <= 1:
                    desired_vel_weight = 1
                    desired_orient_weight = 1
                    cost = desired_vel_weight * diff_deiredVelocity
                    if angle_intervall < 1 and angle_intervall != 0:
                        cost = (cost + desired_orient_weight *
                            diff_desiredOrient)
                    return cost
                # NOTE(review): unreachable -- distance <= 0.1 implies
                # distance <= 1, which already returned above.
                if distance <= 0.1 and node_current.list_paths[-1][-1
                    ].time_step < self.time_desired.start:
                    return (self.time_desired.start - node_current.
                        list_paths[-1][-1].time_step) * 0.001
                if orientationToGoalDiffdegree > 45:
                    # Goal lies far off the current heading: only allow an
                    # approach from far away, otherwise prune the node.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity
                        return cost
                    if distance < 10 and distance >= 5:
                        return np.inf
                    if distance < 5:
                        return np.inf
                else:
                    # Roughly heading towards the goal: blend travel time with
                    # desired velocity/orientation penalties, per distance band.
                    if distance >= 10:
                        velocity_weight = 1
                        cost = distance / velocity * velocity_weight
                        return cost
                    if distance < 10 and distance >= 5:
                        velocity_weight = 0.5
                        desired_vel_weight = 1
                        desired_orient_weight = 1
                        cost = distance / velocity
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight *
                                diff_desiredOrient)
                        return cost
                    if distance < 5:
                        cost = distance / velocity
                        desired_vel_weight = 3
                        desired_orient_weight = 3
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = (cost + desired_orient_weight *
                                diff_desiredOrient)
                        return cost
<|reserved_special_token_1|>
from SMP.motion_planner.node import PriorityNode
import numpy as np
from heapq import nsmallest
import sys
from SMP.motion_planner.plot_config import DefaultPlotConfig
from SMP.motion_planner.search_algorithms.best_first_search import GreedyBestFirstSearch
# imports for route planner:
class StudentMotionPlanner(GreedyBestFirstSearch):
"""
Motion planner implementation by students.
Note that you may inherit from any given motion planner as you wish, or come up with your own planner.
Here as an example, the planner is inherited from the GreedyBestFirstSearch planner.
"""
def __init__(self, scenario, planningProblem, automata, plot_config=DefaultPlotConfig):
super().__init__(scenario=scenario, planningProblem=planningProblem, automaton=automata,
plot_config=plot_config)
def evaluation_function(self, node_current: PriorityNode) -> float:
########################################################################
# todo: Implement your own evaluation function here. #
########################################################################
# Copied from greedy best first search:
"""
Evaluation function of GBFS is f(n) = h(n)
"""
node_current.priority = self.heuristic_function(node_current=node_current)
return node_current.priority
def heuristic_function(self, node_current: PriorityNode) -> float:
########################################################################
# todo: Implement your own heuristic cost calculation here. #
# Hint: #
# Use the State of the current node and the information from the #
# planning problem, as well as from the scenario. #
# Some helper functions for your convenience can be found in #
# ./search_algorithms/base_class.py #
########################################################################
"""
Function that evaluates the heuristic cost h(n) in student class.
Created by Mohamed A. Abdellaoui 10.01.2021
"""
output_logs = False
if output_logs:
print("##################")
print("current time step: ", node_current.list_paths[-1][-1].time_step)
print("current problem mode", self.planningProblemType)
print("depth tree: ", node_current.depth_tree)
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
# Test if reached goal:
if self.reached_goal(node_current.list_paths[-1]):
return 0.0
# Test if route planner failed to find a path:
if self.routeplannerresult is None:
return np.inf
############ Detect cars in front:
# calc cost based on distance to gool following the refrence path:
# loop through all obstacles at time step x and find if any is close of current pos:
if not self.disableObstAvoidance:
for obst in self.list_obstacles:
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
disttoobst = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if disttoobst <= lookaheadVar:
# calc orientation diff between car and obstacle:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
if not 'velocity' in obstPos.attributes:
continue
if node_current.list_paths[-1][-1].velocity > obstPos.velocity and obstPos.velocity != 0:
return np.inf
# get index of closest object to the ego vehicle:
index_smallest_dist = self.get_index_nearest_obst_infront(node_current)
# use the index to locate vehicle to calc cost:
if index_smallest_dist != -1:
# found the index of vehicle with smallest distance to ego car:
obst = self.list_obstacles[index_smallest_dist]
obstPos = obst.state_at_time(currenttimestep)
if obstPos is not None and 'velocity' in obstPos.attributes:
if obstPos.velocity == 0:
cost = node_current.list_paths[-1][-1].velocity
return cost
if node_current.list_paths[-1][-1].velocity > obstPos.velocity:
return np.inf
cost = abs(node_current.list_paths[-1][-1].velocity - obstPos.velocity)
return cost
#########################################################
# Decide based on planning problem type how to calculate cost
if self.planningProblemType == 'ModeA':
# Call function for planning problem with desired time, position, speed and orientation
cost = self.cost_for_modeA_problem(node_current, output_logs)
if output_logs:
print("Cost from modeA cost func: ", cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeB':
# Call function for planning problem with desired time, position and velocity:
cost = self.cost_for_modeB_problem(node_current, output_logs)
if output_logs:
print("Cost from modeB cost func: ", cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeC':
# Call function for planning problem with desired time, position and orientation:
cost = self.cost_for_modeC_problem(node_current, output_logs)
if output_logs:
print("Cost from modeB cost func: ", cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'ModeD':
# Call function for planning problem with desired time and position:
cost = self.cost_for_modeD_problem(node_current, output_logs)
if output_logs:
print("Cost from modeB cost func: ", cost)
if cost < 0:
return 0
return cost
elif self.planningProblemType == 'Survival':
# Call function for planning problem with desired time:
cost = self.cost_for_Survival_problem(node_current, output_logs)
if output_logs:
print("Cost from modeB cost func: ", cost)
if cost < 0:
return 0
return cost
def cost_for_modeA_problem(self, node_current, output_logs):
# Function for planning problem with desired time, position, speed and orientation
if self.position_desired is None:
if output_logs:
print("exit Cost function because position desired is None!")
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
# Calc Variables:
distance = self.calc_euclidean_distance(current_node=node_current)
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
orientationToGoalDiffdegree = (abs(orientationToGoalDiff) * 180) / 3.14
desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2
diff_desiredOrient = abs(self.calc_orientation_diff(desired_orient, path_last[-1].orientation))
diff_deiredVelocity = abs(velocity - desired_velocity)
angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
# Output data for debugging:
if output_logs:
print("Distance to goal of current node is: ", distance)
print("Velocity of current node is: ", velocity)
print("Orientation of current position: ", node_current.list_paths[-1][-1].orientation)
print("Angle to goal of current node is: ", angleToGoal)
print("orientation diff to goal of current node is(deg): ", orientationToGoalDiffdegree)
print("diff desired orient of current node is(deg): ", diff_desiredOrient)
print("diff desired velocity of current node is(deg): ", diff_deiredVelocity)
# test 16.01:
current_orient = path_last[-1].orientation
if distance <= 10:
if current_orient < self.orientation_desired.start or current_orient > self.orientation_desired.end:
return np.inf
if velocity < self.velocity_desired.start or velocity > self.velocity_desired.end:
return np.inf
weight = 10
# if very colse to goal, minimize the diff velocity and diff orient
cost = (distance / velocity) + weight* diff_deiredVelocity + weight* diff_desiredOrient
#cost = distance + diff_desiredOrient + diff_deiredVelocity
return cost
def cost_for_modeB_problem(self, node_current, output_logs):
# Function for planning problem with desired time, position, speed
if self.position_desired is None:
if output_logs:
print("exit Cost function because position desired is None!")
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
# Calc Variables:
distance = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14
desired_velocity = (self.velocity_desired.start + self.velocity_desired.end)/2
diff_deiredVelocity = abs(velocity - desired_velocity)
self.test_if_in_goal_lanelet(node_current)
# Output data for debugging:
if output_logs:
print("Distance to goal of current node is: ", distance)
print("Velocity of current node is: ", velocity)
print("Orientation of current position: ",node_current.list_paths[-1][-1].orientation)
print("Angle to goal of current node is: ", angleToGoal)
print("orientation diff to goal of current node is(deg): ", orientationToGoalDiffdegree)
print("diff desired velocity of current node is(deg): ", diff_deiredVelocity)
# If very close to target but time is still not reached:
#if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
# return self.time_desired.start - node_current.list_paths[-1][-1].time_step * 0.01
if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
cost = cost + diff_deiredVelocity + velocity *0.01
return cost
cost = ( distance / velocity ) + 2 * diff_deiredVelocity + velocity*0.01
return cost
def cost_for_modeC_problem(self, node_current, output_logs):
# Function for planning problem with desired time, position, speed and orientation
if self.position_desired is None:
if output_logs:
print("exit Cost function because position desired is None!")
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
else:
velocity = node_current.list_paths[-1][-1].velocity
path_last = node_current.list_paths[-1]
if np.isclose(velocity, 0):
return np.inf
else:
# Calc Variables:
distance = self.calc_euclidean_distance(current_node=node_current)
angleToGoal = self.calc_angle_to_goal(path_last[-1])
orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14
desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
# Calcualte distance between currrent position and reference path:
arry = node_current.list_paths[-1][-1].position
a = np.array([arry[0], arry[1]])
if self.routeplannerresult is not None:
distance_to_refrence = self.calc_distance_to_nearest_point(self.routeplannerresult.reference_path,
a)
else:
distance_to_refrence = 0
# Output data for debugging:
if output_logs:
print("distance to reference path: ", distance_to_refrence)
print("Distance to goal of current node is: ", distance)
print("Velocity of current node is: ", velocity)
print("Orientation of current position: ",node_current.list_paths[-1][-1].orientation)
print("Angle to goal of current node is: ", angleToGoal)
print("orientation diff to goal of current node is(deg): ", orientationToGoalDiffdegree)
print("diff desired orient of current node is(deg): ", diff_desiredOrient)
# If very close to target but time is still not reached:
if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01
cost = cost + diff_desiredOrient + velocity *0.01
return cost
cost = ( distance / velocity ) + 2 * diff_desiredOrient + velocity*0.01
return cost
def cost_for_modeD_problem(self, node_current, output_logs):
totaltogoal = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])
if self.position_desired is None:
if output_logs:
print("exit Cost function because position desired is None!")
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
else:
if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):
return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) *0.01
velocity = node_current.list_paths[-1][-1].velocity
if np.isclose(velocity, 0):
return np.inf
cost = totaltogoal / node_current.list_paths[-1][-1].velocity
return cost
def cost_for_Survival_problem(self, node_current, output_logs):
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
for obst in self.list_obstacles:
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
disttoobst = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if disttoobst <= lookaheadVar:
# calc orientation diff between car and obstacle:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
if not 'velocity' in obstPos.attributes:
continue
if node_current.list_paths[-1][-1].velocity > obstPos.velocity and obstPos.velocity != 0:
return np.inf
return self.time_desired.start - node_current.list_paths[-1][-1].time_step
def calc_distance_to_ref_from_point(self, state):
#calc distance of points to each point of refrence path:
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
smallest_points = nsmallest(2, distances)
index1 = distances.index(smallest_points[0])
index2 = distances.index(smallest_points[1])
p1 = self.refPathParsedPnts[index1]
p2 = self.refPathParsedPnts[index2]
distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) / np.linalg.norm(p2 - p1))
return distance_to_refrence
def calc_distance_to_goal_from_point(self, state):
#calc distance of points to each point of refrence path:
currentpos = state.position
distances = []
for p in self.refPathParsedPnts:
distances.append(self.euclidean_distance(currentpos, p))
index_smallest_dist = distances.index(min(distances))
totaltogoal = 0
for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):
totaltogoal = totaltogoal + self.euclidean_distance(self.refPathParsedPnts[p],self.refPathParsedPnts[p+1])
return totaltogoal
def get_index_nearest_obst_infront(self,node_current):
# loop through all obstacles at time step x and find if any is close of current pos:
currentorient = node_current.list_paths[-1][-1].orientation
currentpos = node_current.list_paths[-1][-1].position
currenttimestep = node_current.list_paths[-1][-1].time_step
currentVel = node_current.list_paths[-1][-1].velocity
disttoobst = [np.inf] * len(self.list_obstacles)
for i in range(len(self.list_obstacles)):
obst = self.list_obstacles[i]
obstPos = obst.state_at_time(currenttimestep)
if currentorient is not None and obstPos is not None:
dist = self.euclidean_distance(currentpos, obstPos.position)
lookaheadVar = 1.375 * currentVel + 2.5
if dist <= lookaheadVar:
# calc orientation diff between car and obstacle:
vectorToObst = np.array([currentpos, obstPos.position])
vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)
orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)
if abs(orientdiff) <= 0.261799:
disttoobst[i]= dist
else:
disttoobst[i]= np.inf
else:
disttoobst[i]= np.inf
index_smallest_dist = disttoobst.index(min(disttoobst))
if disttoobst[index_smallest_dist] == np.inf:
index_smallest_dist = -1
return index_smallest_dist
def test_if_in_goal_lanelet(self, node_current):
pos = [node_current.list_paths[-1][-1].position]
currentlanelet = self.scenario.lanelet_network.find_lanelet_by_position(pos)
currentlanelet = currentlanelet[0][0]
#result = self.is_goal_in_lane(currentlanelet)
result = False
if self.planningProblem.goal.lanelets_of_goal_position is not None:
if currentlanelet in self.planningProblem.goal.lanelets_of_goal_position.get(0):
result = True
return result
    def cost_for_modeA_problem_old(self, node_current, output_logs):
        """Legacy cost function for mode-A planning problems, i.e. problems
        with a desired time, position, speed AND orientation.

        Superseded by cost_for_modeA_problem; kept for reference. Combines
        distance/velocity travel time with weighted deviations from the
        desired velocity and orientation, switching the weighting depending
        on the distance to the goal and whether the goal lies inside a
        45 deg field of view.
        """
        if self.position_desired is None:
            # No positional goal: only the remaining time budget matters.
            if output_logs:
                print("exit Cost function because position desired is None!")
            return self.time_desired.start - node_current.list_paths[-1][-1].time_step
        else:
            velocity = node_current.list_paths[-1][-1].velocity
            path_last = node_current.list_paths[-1]
            if np.isclose(velocity, 0):
                # A standing vehicle makes no progress -> prune.
                return np.inf
            else:
                # Helper quantities for the cost terms below.
                distance = self.calc_euclidean_distance(current_node=node_current)
                angleToGoal = self.calc_angle_to_goal(path_last[-1])
                orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)
                # NOTE(review): 3.14 approximates pi; math.pi would be exact.
                orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14
                desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2
                desired_velocity = (self.velocity_desired.start + self.velocity_desired.end)/2
                diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)
                diff_deiredVelocity = abs(velocity - desired_velocity)
                angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))
                # Output data for debugging:
                if output_logs:
                    print("Distance to goal of current node is: ", distance)
                    print("Velocity of current node is: ", velocity)
                    print("Orientation of current position: ",node_current.list_paths[-1][-1].orientation)
                    print("Angle to goal of current node is: ", angleToGoal)
                    print("orientation diff to goal of current node is(deg): ", orientationToGoalDiffdegree)
                    print("diff desired orient of current node is(deg): ", diff_desiredOrient)
                    print("diff desired velocity of current node is(deg): ", diff_deiredVelocity)
                # If very close to the goal, minimize only the velocity and
                # orientation deviation (orientation only for narrow goal intervals).
                if distance <= 1:
                    desired_vel_weight = 1
                    desired_orient_weight = 1
                    cost = desired_vel_weight * diff_deiredVelocity
                    if angle_intervall < 1 and angle_intervall != 0:
                        cost = cost + desired_orient_weight * diff_desiredOrient
                    return cost
                # If very close to target but time is still not reached:
                # NOTE(review): unreachable — distance <= 0.1 implies
                # distance <= 1, which already returned above.
                if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:
                    return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) *0.001
                # Check whether the goal lies inside the 45 deg field of view:
                if orientationToGoalDiffdegree > 45:
                    # Goal NOT in field of view: follow the reference path by
                    # pure travel time while far away, and prune (inf) once the
                    # goal gets close while the ego is still misaligned.
                    if distance >= 10: # too far away from target, just follow the least distance and target lanelet.
                        # NOTE(review): velocity_weight is assigned but unused here.
                        velocity_weight = 1
                        cost = distance / velocity
                        return cost
                    if distance < 10 and distance >= 5: # almost close, reduce speed.
                        return np.inf
                    if distance < 5: # very close, adjust orientation angle.
                        return np.inf
                else:
                    # Goal IS in field of view: blend travel time with the
                    # desired-velocity/orientation deviations, increasing the
                    # weights as the goal gets closer.
                    if distance >= 10: # too far away from target, just follow the least distance and target lanelet.
                        velocity_weight = 1
                        cost = distance / velocity * velocity_weight
                        return cost
                    if distance < 10 and distance >= 5: # almost close, reduce speed.
                        # NOTE(review): velocity_weight is assigned but unused here.
                        velocity_weight = 0.5
                        desired_vel_weight = 1
                        desired_orient_weight = 1
                        cost = distance / velocity
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = cost + desired_orient_weight * diff_desiredOrient
                        return cost
                    if distance < 5: # very close, adjust orientation angle.
                        cost = distance / velocity
                        desired_vel_weight = 3
                        desired_orient_weight = 3
                        cost = cost + desired_vel_weight * diff_deiredVelocity
                        if angle_intervall < 1 and angle_intervall != 0:
                            cost = cost + desired_orient_weight * diff_desiredOrient
                        return cost
|
flexible
|
{
"blob_id": "6ecbe119c8a14776373d165dc05e81f91084893c",
"index": 4229,
"step-1": "<mask token>\n\n\nclass StudentMotionPlanner(GreedyBestFirstSearch):\n <mask token>\n\n def __init__(self, scenario, planningProblem, automata, plot_config=\n DefaultPlotConfig):\n super().__init__(scenario=scenario, planningProblem=planningProblem,\n automaton=automata, plot_config=plot_config)\n\n def evaluation_function(self, node_current: PriorityNode) ->float:\n \"\"\"\n Evaluation function of GBFS is f(n) = h(n)\n \"\"\"\n node_current.priority = self.heuristic_function(node_current=\n node_current)\n return node_current.priority\n <mask token>\n <mask token>\n <mask token>\n\n def cost_for_modeC_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n arry = node_current.list_paths[-1][-1].position\n a = np.array([arry[0], arry[1]])\n if self.routeplannerresult is not None:\n distance_to_refrence = self.calc_distance_to_nearest_point(\n self.routeplannerresult.reference_path, a)\n else:\n distance_to_refrence = 0\n if output_logs:\n print('distance to reference path: ', distance_to_refrence)\n print('Distance to goal of current node is: ', 
distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return self.time_desired.start - node_current.list_paths[-1\n ][-1].time_step\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_desiredOrient + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_desiredOrient + \n velocity * 0.01)\n return cost\n\n def cost_for_modeD_problem(self, node_current, output_logs):\n totaltogoal = self.calc_distance_to_goal_from_point(node_current.\n list_paths[-1][-1])\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n if self.planningProblem.goal.is_reached_only_pos(node_current.\n list_paths[-1][-1]):\n return (self.time_desired.start - node_current.list_paths[-\n 1][-1].time_step) * 0.01\n velocity = node_current.list_paths[-1][-1].velocity\n if np.isclose(velocity, 0):\n return np.inf\n cost = totaltogoal / node_current.list_paths[-1][-1].velocity\n return cost\n <mask token>\n\n def calc_distance_to_ref_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n smallest_points = nsmallest(2, distances)\n index1 = distances.index(smallest_points[0])\n index2 = distances.index(smallest_points[1])\n p1 = 
self.refPathParsedPnts[index1]\n p2 = self.refPathParsedPnts[index2]\n distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /\n np.linalg.norm(p2 - p1))\n return distance_to_refrence\n\n def calc_distance_to_goal_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n index_smallest_dist = distances.index(min(distances))\n totaltogoal = 0\n for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):\n totaltogoal = totaltogoal + self.euclidean_distance(self.\n refPathParsedPnts[p], self.refPathParsedPnts[p + 1])\n return totaltogoal\n\n def get_index_nearest_obst_infront(self, node_current):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n disttoobst = [np.inf] * len(self.list_obstacles)\n for i in range(len(self.list_obstacles)):\n obst = self.list_obstacles[i]\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n dist = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if dist <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n disttoobst[i] = dist\n else:\n disttoobst[i] = np.inf\n else:\n disttoobst[i] = np.inf\n index_smallest_dist = disttoobst.index(min(disttoobst))\n if disttoobst[index_smallest_dist] == np.inf:\n index_smallest_dist = -1\n return index_smallest_dist\n <mask token>\n\n def cost_for_modeA_problem_old(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function 
because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n if distance <= 1:\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.001\n if 
orientationToGoalDiffdegree > 45:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity\n return cost\n if distance < 10 and distance >= 5:\n return np.inf\n if distance < 5:\n return np.inf\n else:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity * velocity_weight\n return cost\n if distance < 10 and distance >= 5:\n velocity_weight = 0.5\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = distance / velocity\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance < 5:\n cost = distance / velocity\n desired_vel_weight = 3\n desired_orient_weight = 3\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n",
"step-2": "<mask token>\n\n\nclass StudentMotionPlanner(GreedyBestFirstSearch):\n <mask token>\n\n def __init__(self, scenario, planningProblem, automata, plot_config=\n DefaultPlotConfig):\n super().__init__(scenario=scenario, planningProblem=planningProblem,\n automaton=automata, plot_config=plot_config)\n\n def evaluation_function(self, node_current: PriorityNode) ->float:\n \"\"\"\n Evaluation function of GBFS is f(n) = h(n)\n \"\"\"\n node_current.priority = self.heuristic_function(node_current=\n node_current)\n return node_current.priority\n <mask token>\n\n def cost_for_modeA_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = abs(self.calc_orientation_diff(\n desired_orient, path_last[-1].orientation))\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', 
angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n current_orient = path_last[-1].orientation\n if distance <= 10:\n if (current_orient < self.orientation_desired.start or \n current_orient > self.orientation_desired.end):\n return np.inf\n if (velocity < self.velocity_desired.start or velocity >\n self.velocity_desired.end):\n return np.inf\n weight = 10\n cost = (distance / velocity + weight * diff_deiredVelocity +\n weight * diff_desiredOrient)\n return cost\n\n def cost_for_modeB_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_distance_to_goal_from_point(node_current\n .list_paths[-1][-1])\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_deiredVelocity = abs(velocity - desired_velocity)\n self.test_if_in_goal_lanelet(node_current)\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired 
velocity of current node is(deg): ',\n diff_deiredVelocity)\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_deiredVelocity + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_deiredVelocity + \n velocity * 0.01)\n return cost\n\n def cost_for_modeC_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n arry = node_current.list_paths[-1][-1].position\n a = np.array([arry[0], arry[1]])\n if self.routeplannerresult is not None:\n distance_to_refrence = self.calc_distance_to_nearest_point(\n self.routeplannerresult.reference_path, a)\n else:\n distance_to_refrence = 0\n if output_logs:\n print('distance to reference path: ', distance_to_refrence)\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', 
angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return self.time_desired.start - node_current.list_paths[-1\n ][-1].time_step\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_desiredOrient + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_desiredOrient + \n velocity * 0.01)\n return cost\n\n def cost_for_modeD_problem(self, node_current, output_logs):\n totaltogoal = self.calc_distance_to_goal_from_point(node_current.\n list_paths[-1][-1])\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n if self.planningProblem.goal.is_reached_only_pos(node_current.\n list_paths[-1][-1]):\n return (self.time_desired.start - node_current.list_paths[-\n 1][-1].time_step) * 0.01\n velocity = node_current.list_paths[-1][-1].velocity\n if np.isclose(velocity, 0):\n return np.inf\n cost = totaltogoal / node_current.list_paths[-1][-1].velocity\n return cost\n <mask token>\n\n def calc_distance_to_ref_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n smallest_points = nsmallest(2, distances)\n index1 = distances.index(smallest_points[0])\n index2 = distances.index(smallest_points[1])\n p1 = self.refPathParsedPnts[index1]\n p2 = self.refPathParsedPnts[index2]\n distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /\n np.linalg.norm(p2 - p1))\n return distance_to_refrence\n\n def 
calc_distance_to_goal_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n index_smallest_dist = distances.index(min(distances))\n totaltogoal = 0\n for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):\n totaltogoal = totaltogoal + self.euclidean_distance(self.\n refPathParsedPnts[p], self.refPathParsedPnts[p + 1])\n return totaltogoal\n\n def get_index_nearest_obst_infront(self, node_current):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n disttoobst = [np.inf] * len(self.list_obstacles)\n for i in range(len(self.list_obstacles)):\n obst = self.list_obstacles[i]\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n dist = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if dist <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n disttoobst[i] = dist\n else:\n disttoobst[i] = np.inf\n else:\n disttoobst[i] = np.inf\n index_smallest_dist = disttoobst.index(min(disttoobst))\n if disttoobst[index_smallest_dist] == np.inf:\n index_smallest_dist = -1\n return index_smallest_dist\n <mask token>\n\n def cost_for_modeA_problem_old(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = 
node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n if distance <= 1:\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.001\n if orientationToGoalDiffdegree > 45:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity\n return cost\n if distance < 10 and distance >= 5:\n return np.inf\n if distance < 5:\n return np.inf\n 
else:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity * velocity_weight\n return cost\n if distance < 10 and distance >= 5:\n velocity_weight = 0.5\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = distance / velocity\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance < 5:\n cost = distance / velocity\n desired_vel_weight = 3\n desired_orient_weight = 3\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n",
"step-3": "<mask token>\n\n\nclass StudentMotionPlanner(GreedyBestFirstSearch):\n <mask token>\n\n def __init__(self, scenario, planningProblem, automata, plot_config=\n DefaultPlotConfig):\n super().__init__(scenario=scenario, planningProblem=planningProblem,\n automaton=automata, plot_config=plot_config)\n\n def evaluation_function(self, node_current: PriorityNode) ->float:\n \"\"\"\n Evaluation function of GBFS is f(n) = h(n)\n \"\"\"\n node_current.priority = self.heuristic_function(node_current=\n node_current)\n return node_current.priority\n <mask token>\n\n def cost_for_modeA_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = abs(self.calc_orientation_diff(\n desired_orient, path_last[-1].orientation))\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', 
angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n current_orient = path_last[-1].orientation\n if distance <= 10:\n if (current_orient < self.orientation_desired.start or \n current_orient > self.orientation_desired.end):\n return np.inf\n if (velocity < self.velocity_desired.start or velocity >\n self.velocity_desired.end):\n return np.inf\n weight = 10\n cost = (distance / velocity + weight * diff_deiredVelocity +\n weight * diff_desiredOrient)\n return cost\n\n def cost_for_modeB_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_distance_to_goal_from_point(node_current\n .list_paths[-1][-1])\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_deiredVelocity = abs(velocity - desired_velocity)\n self.test_if_in_goal_lanelet(node_current)\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired 
velocity of current node is(deg): ',\n diff_deiredVelocity)\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_deiredVelocity + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_deiredVelocity + \n velocity * 0.01)\n return cost\n\n def cost_for_modeC_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n arry = node_current.list_paths[-1][-1].position\n a = np.array([arry[0], arry[1]])\n if self.routeplannerresult is not None:\n distance_to_refrence = self.calc_distance_to_nearest_point(\n self.routeplannerresult.reference_path, a)\n else:\n distance_to_refrence = 0\n if output_logs:\n print('distance to reference path: ', distance_to_refrence)\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', 
angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return self.time_desired.start - node_current.list_paths[-1\n ][-1].time_step\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_desiredOrient + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_desiredOrient + \n velocity * 0.01)\n return cost\n\n def cost_for_modeD_problem(self, node_current, output_logs):\n totaltogoal = self.calc_distance_to_goal_from_point(node_current.\n list_paths[-1][-1])\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n if self.planningProblem.goal.is_reached_only_pos(node_current.\n list_paths[-1][-1]):\n return (self.time_desired.start - node_current.list_paths[-\n 1][-1].time_step) * 0.01\n velocity = node_current.list_paths[-1][-1].velocity\n if np.isclose(velocity, 0):\n return np.inf\n cost = totaltogoal / node_current.list_paths[-1][-1].velocity\n return cost\n\n def cost_for_Survival_problem(self, node_current, output_logs):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n for obst in self.list_obstacles:\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n disttoobst = self.euclidean_distance(currentpos, obstPos.\n position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if disttoobst <= 
lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n if not 'velocity' in obstPos.attributes:\n continue\n if (node_current.list_paths[-1][-1].velocity >\n obstPos.velocity and obstPos.velocity != 0):\n return np.inf\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n\n def calc_distance_to_ref_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n smallest_points = nsmallest(2, distances)\n index1 = distances.index(smallest_points[0])\n index2 = distances.index(smallest_points[1])\n p1 = self.refPathParsedPnts[index1]\n p2 = self.refPathParsedPnts[index2]\n distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /\n np.linalg.norm(p2 - p1))\n return distance_to_refrence\n\n def calc_distance_to_goal_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n index_smallest_dist = distances.index(min(distances))\n totaltogoal = 0\n for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):\n totaltogoal = totaltogoal + self.euclidean_distance(self.\n refPathParsedPnts[p], self.refPathParsedPnts[p + 1])\n return totaltogoal\n\n def get_index_nearest_obst_infront(self, node_current):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n disttoobst = [np.inf] * len(self.list_obstacles)\n for i in range(len(self.list_obstacles)):\n obst = self.list_obstacles[i]\n obstPos = obst.state_at_time(currenttimestep)\n if 
currentorient is not None and obstPos is not None:\n dist = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if dist <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n disttoobst[i] = dist\n else:\n disttoobst[i] = np.inf\n else:\n disttoobst[i] = np.inf\n index_smallest_dist = disttoobst.index(min(disttoobst))\n if disttoobst[index_smallest_dist] == np.inf:\n index_smallest_dist = -1\n return index_smallest_dist\n <mask token>\n\n def cost_for_modeA_problem_old(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation 
of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n if distance <= 1:\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.001\n if orientationToGoalDiffdegree > 45:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity\n return cost\n if distance < 10 and distance >= 5:\n return np.inf\n if distance < 5:\n return np.inf\n else:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity * velocity_weight\n return cost\n if distance < 10 and distance >= 5:\n velocity_weight = 0.5\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = distance / velocity\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance < 5:\n cost = distance / velocity\n desired_vel_weight = 3\n desired_orient_weight = 3\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n",
"step-4": "<mask token>\n\n\nclass StudentMotionPlanner(GreedyBestFirstSearch):\n <mask token>\n\n def __init__(self, scenario, planningProblem, automata, plot_config=\n DefaultPlotConfig):\n super().__init__(scenario=scenario, planningProblem=planningProblem,\n automaton=automata, plot_config=plot_config)\n\n def evaluation_function(self, node_current: PriorityNode) ->float:\n \"\"\"\n Evaluation function of GBFS is f(n) = h(n)\n \"\"\"\n node_current.priority = self.heuristic_function(node_current=\n node_current)\n return node_current.priority\n\n def heuristic_function(self, node_current: PriorityNode) ->float:\n \"\"\"\n Function that evaluates the heuristic cost h(n) in student class.\n Created by Mohamed A. Abdellaoui 10.01.2021\n \n \"\"\"\n output_logs = False\n if output_logs:\n print('##################')\n print('current time step: ', node_current.list_paths[-1][-1].\n time_step)\n print('current problem mode', self.planningProblemType)\n print('depth tree: ', node_current.depth_tree)\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n if self.reached_goal(node_current.list_paths[-1]):\n return 0.0\n if self.routeplannerresult is None:\n return np.inf\n if not self.disableObstAvoidance:\n for obst in self.list_obstacles:\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n disttoobst = self.euclidean_distance(currentpos,\n obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if disttoobst <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n if not 'velocity' in obstPos.attributes:\n continue\n if 
(node_current.list_paths[-1][-1].velocity >\n obstPos.velocity and obstPos.velocity != 0):\n return np.inf\n index_smallest_dist = self.get_index_nearest_obst_infront(\n node_current)\n if index_smallest_dist != -1:\n obst = self.list_obstacles[index_smallest_dist]\n obstPos = obst.state_at_time(currenttimestep)\n if obstPos is not None and 'velocity' in obstPos.attributes:\n if obstPos.velocity == 0:\n cost = node_current.list_paths[-1][-1].velocity\n return cost\n if node_current.list_paths[-1][-1\n ].velocity > obstPos.velocity:\n return np.inf\n cost = abs(node_current.list_paths[-1][-1].velocity -\n obstPos.velocity)\n return cost\n if self.planningProblemType == 'ModeA':\n cost = self.cost_for_modeA_problem(node_current, output_logs)\n if output_logs:\n print('Cost from modeA cost func: ', cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeB':\n cost = self.cost_for_modeB_problem(node_current, output_logs)\n if output_logs:\n print('Cost from modeB cost func: ', cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeC':\n cost = self.cost_for_modeC_problem(node_current, output_logs)\n if output_logs:\n print('Cost from modeB cost func: ', cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeD':\n cost = self.cost_for_modeD_problem(node_current, output_logs)\n if output_logs:\n print('Cost from modeB cost func: ', cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'Survival':\n cost = self.cost_for_Survival_problem(node_current, output_logs)\n if output_logs:\n print('Cost from modeB cost func: ', cost)\n if cost < 0:\n return 0\n return cost\n\n def cost_for_modeA_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = 
node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = abs(self.calc_orientation_diff(\n desired_orient, path_last[-1].orientation))\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n current_orient = path_last[-1].orientation\n if distance <= 10:\n if (current_orient < self.orientation_desired.start or \n current_orient > self.orientation_desired.end):\n return np.inf\n if (velocity < self.velocity_desired.start or velocity >\n self.velocity_desired.end):\n return np.inf\n weight = 10\n cost = (distance / velocity + weight * diff_deiredVelocity +\n weight * diff_desiredOrient)\n return cost\n\n def cost_for_modeB_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is 
None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_distance_to_goal_from_point(node_current\n .list_paths[-1][-1])\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_deiredVelocity = abs(velocity - desired_velocity)\n self.test_if_in_goal_lanelet(node_current)\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_deiredVelocity + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_deiredVelocity + \n velocity * 0.01)\n return cost\n\n def cost_for_modeC_problem(self, node_current, output_logs):\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = 
self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n arry = node_current.list_paths[-1][-1].position\n a = np.array([arry[0], arry[1]])\n if self.routeplannerresult is not None:\n distance_to_refrence = self.calc_distance_to_nearest_point(\n self.routeplannerresult.reference_path, a)\n else:\n distance_to_refrence = 0\n if output_logs:\n print('distance to reference path: ', distance_to_refrence)\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return self.time_desired.start - node_current.list_paths[-1\n ][-1].time_step\n if self.planningProblem.goal.is_reached_only_pos(node_current\n .list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.\n list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_desiredOrient + velocity * 0.01\n return cost\n cost = (distance / velocity + 2 * diff_desiredOrient + \n velocity * 0.01)\n return cost\n\n def cost_for_modeD_problem(self, node_current, output_logs):\n totaltogoal = self.calc_distance_to_goal_from_point(node_current.\n 
list_paths[-1][-1])\n if self.position_desired is None:\n if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n if self.planningProblem.goal.is_reached_only_pos(node_current.\n list_paths[-1][-1]):\n return (self.time_desired.start - node_current.list_paths[-\n 1][-1].time_step) * 0.01\n velocity = node_current.list_paths[-1][-1].velocity\n if np.isclose(velocity, 0):\n return np.inf\n cost = totaltogoal / node_current.list_paths[-1][-1].velocity\n return cost\n\n def cost_for_Survival_problem(self, node_current, output_logs):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n for obst in self.list_obstacles:\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n disttoobst = self.euclidean_distance(currentpos, obstPos.\n position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if disttoobst <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n if not 'velocity' in obstPos.attributes:\n continue\n if (node_current.list_paths[-1][-1].velocity >\n obstPos.velocity and obstPos.velocity != 0):\n return np.inf\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n\n def calc_distance_to_ref_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n smallest_points = nsmallest(2, distances)\n index1 = distances.index(smallest_points[0])\n index2 = 
distances.index(smallest_points[1])\n p1 = self.refPathParsedPnts[index1]\n p2 = self.refPathParsedPnts[index2]\n distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) /\n np.linalg.norm(p2 - p1))\n return distance_to_refrence\n\n def calc_distance_to_goal_from_point(self, state):\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n index_smallest_dist = distances.index(min(distances))\n totaltogoal = 0\n for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):\n totaltogoal = totaltogoal + self.euclidean_distance(self.\n refPathParsedPnts[p], self.refPathParsedPnts[p + 1])\n return totaltogoal\n\n def get_index_nearest_obst_infront(self, node_current):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n disttoobst = [np.inf] * len(self.list_obstacles)\n for i in range(len(self.list_obstacles)):\n obst = self.list_obstacles[i]\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n dist = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if dist <= lookaheadVar:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(\n vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient,\n vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n disttoobst[i] = dist\n else:\n disttoobst[i] = np.inf\n else:\n disttoobst[i] = np.inf\n index_smallest_dist = disttoobst.index(min(disttoobst))\n if disttoobst[index_smallest_dist] == np.inf:\n index_smallest_dist = -1\n return index_smallest_dist\n <mask token>\n\n def cost_for_modeA_problem_old(self, node_current, output_logs):\n if self.position_desired is None:\n 
if output_logs:\n print('exit Cost function because position desired is None!')\n return self.time_desired.start - node_current.list_paths[-1][-1\n ].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n distance = self.calc_euclidean_distance(current_node=\n node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal,\n path_last[-1].orientation)\n orientationToGoalDiffdegree = abs(orientationToGoalDiff\n ) * 180 / 3.14\n desired_orient = (self.orientation_desired.end + self.\n orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.\n velocity_desired.end) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient,\n path_last[-1].orientation)\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) -\n abs(self.orientation_desired.end))\n if output_logs:\n print('Distance to goal of current node is: ', distance)\n print('Velocity of current node is: ', velocity)\n print('Orientation of current position: ', node_current\n .list_paths[-1][-1].orientation)\n print('Angle to goal of current node is: ', angleToGoal)\n print('orientation diff to goal of current node is(deg): ',\n orientationToGoalDiffdegree)\n print('diff desired orient of current node is(deg): ',\n diff_desiredOrient)\n print('diff desired velocity of current node is(deg): ',\n diff_deiredVelocity)\n if distance <= 1:\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance <= 0.1 and node_current.list_paths[-1][-1\n ].time_step < self.time_desired.start:\n return (self.time_desired.start - node_current.\n 
list_paths[-1][-1].time_step) * 0.001\n if orientationToGoalDiffdegree > 45:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity\n return cost\n if distance < 10 and distance >= 5:\n return np.inf\n if distance < 5:\n return np.inf\n else:\n if distance >= 10:\n velocity_weight = 1\n cost = distance / velocity * velocity_weight\n return cost\n if distance < 10 and distance >= 5:\n velocity_weight = 0.5\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = distance / velocity\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n if distance < 5:\n cost = distance / velocity\n desired_vel_weight = 3\n desired_orient_weight = 3\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = (cost + desired_orient_weight *\n diff_desiredOrient)\n return cost\n",
"step-5": "from SMP.motion_planner.node import PriorityNode\nimport numpy as np\nfrom heapq import nsmallest\nimport sys\nfrom SMP.motion_planner.plot_config import DefaultPlotConfig\nfrom SMP.motion_planner.search_algorithms.best_first_search import GreedyBestFirstSearch\n# imports for route planner:\n\nclass StudentMotionPlanner(GreedyBestFirstSearch):\n \"\"\"\n Motion planner implementation by students.\n Note that you may inherit from any given motion planner as you wish, or come up with your own planner.\n Here as an example, the planner is inherited from the GreedyBestFirstSearch planner.\n \"\"\"\n\n def __init__(self, scenario, planningProblem, automata, plot_config=DefaultPlotConfig):\n super().__init__(scenario=scenario, planningProblem=planningProblem, automaton=automata,\n plot_config=plot_config)\n\n def evaluation_function(self, node_current: PriorityNode) -> float:\n ########################################################################\n # todo: Implement your own evaluation function here. #\n ########################################################################\n # Copied from greedy best first search:\n \"\"\"\n Evaluation function of GBFS is f(n) = h(n)\n \"\"\"\n\n node_current.priority = self.heuristic_function(node_current=node_current)\n return node_current.priority\n\n\n def heuristic_function(self, node_current: PriorityNode) -> float:\n ########################################################################\n # todo: Implement your own heuristic cost calculation here. #\n # Hint: #\n # Use the State of the current node and the information from the #\n # planning problem, as well as from the scenario. #\n # Some helper functions for your convenience can be found in #\n # ./search_algorithms/base_class.py #\n ########################################################################\n \"\"\"\n Function that evaluates the heuristic cost h(n) in student class.\n Created by Mohamed A. 
Abdellaoui 10.01.2021\n \n \"\"\"\n output_logs = False\n if output_logs:\n print(\"##################\")\n print(\"current time step: \", node_current.list_paths[-1][-1].time_step)\n print(\"current problem mode\", self.planningProblemType)\n print(\"depth tree: \", node_current.depth_tree)\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n\n # Test if reached goal:\n if self.reached_goal(node_current.list_paths[-1]):\n return 0.0\n # Test if route planner failed to find a path: \n if self.routeplannerresult is None:\n return np.inf\n\n ############ Detect cars in front:\n # calc cost based on distance to gool following the refrence path:\n # loop through all obstacles at time step x and find if any is close of current pos:\n if not self.disableObstAvoidance:\n for obst in self.list_obstacles:\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n disttoobst = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if disttoobst <= lookaheadVar:\n # calc orientation diff between car and obstacle:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n if not 'velocity' in obstPos.attributes:\n continue\n if node_current.list_paths[-1][-1].velocity > obstPos.velocity and obstPos.velocity != 0:\n return np.inf\n \n # get index of closest object to the ego vehicle:\n index_smallest_dist = self.get_index_nearest_obst_infront(node_current)\n \n # use the index to locate vehicle to calc cost: \n if index_smallest_dist != -1:\n # found the index of vehicle with smallest distance to ego car:\n obst = 
self.list_obstacles[index_smallest_dist]\n obstPos = obst.state_at_time(currenttimestep)\n if obstPos is not None and 'velocity' in obstPos.attributes:\n if obstPos.velocity == 0:\n cost = node_current.list_paths[-1][-1].velocity\n return cost\n if node_current.list_paths[-1][-1].velocity > obstPos.velocity:\n return np.inf\n cost = abs(node_current.list_paths[-1][-1].velocity - obstPos.velocity)\n return cost\n #########################################################\n\n # Decide based on planning problem type how to calculate cost\n if self.planningProblemType == 'ModeA':\n # Call function for planning problem with desired time, position, speed and orientation\n cost = self.cost_for_modeA_problem(node_current, output_logs)\n if output_logs:\n print(\"Cost from modeA cost func: \", cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeB':\n # Call function for planning problem with desired time, position and velocity:\n cost = self.cost_for_modeB_problem(node_current, output_logs)\n if output_logs:\n print(\"Cost from modeB cost func: \", cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeC':\n # Call function for planning problem with desired time, position and orientation:\n cost = self.cost_for_modeC_problem(node_current, output_logs)\n if output_logs:\n print(\"Cost from modeB cost func: \", cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'ModeD':\n # Call function for planning problem with desired time and position:\n cost = self.cost_for_modeD_problem(node_current, output_logs)\n if output_logs:\n print(\"Cost from modeB cost func: \", cost)\n if cost < 0:\n return 0\n return cost\n elif self.planningProblemType == 'Survival':\n # Call function for planning problem with desired time:\n cost = self.cost_for_Survival_problem(node_current, output_logs)\n if output_logs:\n print(\"Cost from modeB cost func: \", cost)\n if cost < 0:\n return 0\n return cost\n\n 
def cost_for_modeA_problem(self, node_current, output_logs):\n # Function for planning problem with desired time, position, speed and orientation\n if self.position_desired is None:\n if output_logs:\n print(\"exit Cost function because position desired is None!\")\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n # Calc Variables:\n distance = self.calc_euclidean_distance(current_node=node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)\n orientationToGoalDiffdegree = (abs(orientationToGoalDiff) * 180) / 3.14\n desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.velocity_desired.end) / 2\n diff_desiredOrient = abs(self.calc_orientation_diff(desired_orient, path_last[-1].orientation))\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))\n\n # Output data for debugging:\n if output_logs:\n print(\"Distance to goal of current node is: \", distance)\n print(\"Velocity of current node is: \", velocity)\n print(\"Orientation of current position: \", node_current.list_paths[-1][-1].orientation)\n print(\"Angle to goal of current node is: \", angleToGoal)\n print(\"orientation diff to goal of current node is(deg): \", orientationToGoalDiffdegree)\n print(\"diff desired orient of current node is(deg): \", diff_desiredOrient)\n print(\"diff desired velocity of current node is(deg): \", diff_deiredVelocity)\n # test 16.01:\n current_orient = path_last[-1].orientation\n if distance <= 10:\n if current_orient < self.orientation_desired.start or current_orient > self.orientation_desired.end:\n return 
np.inf\n if velocity < self.velocity_desired.start or velocity > self.velocity_desired.end:\n return np.inf\n\n weight = 10\n # if very colse to goal, minimize the diff velocity and diff orient\n cost = (distance / velocity) + weight* diff_deiredVelocity + weight* diff_desiredOrient\n #cost = distance + diff_desiredOrient + diff_deiredVelocity\n return cost\n\n def cost_for_modeB_problem(self, node_current, output_logs):\n # Function for planning problem with desired time, position, speed\n if self.position_desired is None:\n if output_logs:\n print(\"exit Cost function because position desired is None!\")\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n else:\n\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n # Calc Variables:\n distance = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)\n orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14\n desired_velocity = (self.velocity_desired.start + self.velocity_desired.end)/2\n diff_deiredVelocity = abs(velocity - desired_velocity)\n self.test_if_in_goal_lanelet(node_current)\n # Output data for debugging:\n if output_logs:\n print(\"Distance to goal of current node is: \", distance)\n print(\"Velocity of current node is: \", velocity)\n print(\"Orientation of current position: \",node_current.list_paths[-1][-1].orientation)\n print(\"Angle to goal of current node is: \", angleToGoal)\n print(\"orientation diff to goal of current node is(deg): \", orientationToGoalDiffdegree)\n print(\"diff desired velocity of current node is(deg): \", diff_deiredVelocity)\n\n # If very close to target but time is still not reached:\n #if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < 
self.time_desired.start:\n # return self.time_desired.start - node_current.list_paths[-1][-1].time_step * 0.01\n if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_deiredVelocity + velocity *0.01\n return cost\n\n cost = ( distance / velocity ) + 2 * diff_deiredVelocity + velocity*0.01\n return cost\n\n def cost_for_modeC_problem(self, node_current, output_logs):\n # Function for planning problem with desired time, position, speed and orientation\n if self.position_desired is None:\n if output_logs:\n print(\"exit Cost function because position desired is None!\")\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n # Calc Variables:\n distance = self.calc_euclidean_distance(current_node=node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)\n orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14\n desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)\n angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))\n\n # Calcualte distance between currrent position and reference path:\n arry = node_current.list_paths[-1][-1].position\n a = np.array([arry[0], arry[1]])\n if self.routeplannerresult is not None:\n distance_to_refrence = self.calc_distance_to_nearest_point(self.routeplannerresult.reference_path,\n a)\n else:\n distance_to_refrence = 0\n\n # Output data for debugging:\n if output_logs:\n print(\"distance to reference path: \", distance_to_refrence)\n 
print(\"Distance to goal of current node is: \", distance)\n print(\"Velocity of current node is: \", velocity)\n print(\"Orientation of current position: \",node_current.list_paths[-1][-1].orientation)\n print(\"Angle to goal of current node is: \", angleToGoal)\n print(\"orientation diff to goal of current node is(deg): \", orientationToGoalDiffdegree)\n print(\"diff desired orient of current node is(deg): \", diff_desiredOrient)\n\n # If very close to target but time is still not reached:\n if distance <= 0.1 and node_current.list_paths[-1][-1].time_step < self.time_desired.start:\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n\n if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):\n cost = (self.time_desired.start - node_current.list_paths[-1][-1].time_step) * 0.01\n cost = cost + diff_desiredOrient + velocity *0.01\n return cost\n\n cost = ( distance / velocity ) + 2 * diff_desiredOrient + velocity*0.01\n return cost\n\n def cost_for_modeD_problem(self, node_current, output_logs):\n\n totaltogoal = self.calc_distance_to_goal_from_point(node_current.list_paths[-1][-1])\n if self.position_desired is None:\n if output_logs:\n print(\"exit Cost function because position desired is None!\")\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n else:\n if self.planningProblem.goal.is_reached_only_pos(node_current.list_paths[-1][-1]):\n return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) *0.01\n velocity = node_current.list_paths[-1][-1].velocity\n if np.isclose(velocity, 0):\n return np.inf\n cost = totaltogoal / node_current.list_paths[-1][-1].velocity\n return cost\n\n def cost_for_Survival_problem(self, node_current, output_logs):\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = 
node_current.list_paths[-1][-1].velocity\n for obst in self.list_obstacles:\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n disttoobst = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if disttoobst <= lookaheadVar:\n # calc orientation diff between car and obstacle:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n if not 'velocity' in obstPos.attributes:\n continue\n if node_current.list_paths[-1][-1].velocity > obstPos.velocity and obstPos.velocity != 0:\n return np.inf\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n\n def calc_distance_to_ref_from_point(self, state):\n #calc distance of points to each point of refrence path:\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n smallest_points = nsmallest(2, distances)\n index1 = distances.index(smallest_points[0])\n index2 = distances.index(smallest_points[1])\n p1 = self.refPathParsedPnts[index1]\n p2 = self.refPathParsedPnts[index2]\n distance_to_refrence = np.abs(np.cross(p2 - p1, currentpos - p1) / np.linalg.norm(p2 - p1))\n return distance_to_refrence\n\n def calc_distance_to_goal_from_point(self, state):\n #calc distance of points to each point of refrence path:\n currentpos = state.position\n distances = []\n for p in self.refPathParsedPnts:\n distances.append(self.euclidean_distance(currentpos, p))\n index_smallest_dist = distances.index(min(distances))\n totaltogoal = 0\n for p in range(index_smallest_dist, len(self.refPathParsedPnts) - 1):\n totaltogoal = totaltogoal + self.euclidean_distance(self.refPathParsedPnts[p],self.refPathParsedPnts[p+1])\n\n return totaltogoal\n\n def 
get_index_nearest_obst_infront(self,node_current):\n # loop through all obstacles at time step x and find if any is close of current pos:\n currentorient = node_current.list_paths[-1][-1].orientation\n currentpos = node_current.list_paths[-1][-1].position\n currenttimestep = node_current.list_paths[-1][-1].time_step\n currentVel = node_current.list_paths[-1][-1].velocity\n disttoobst = [np.inf] * len(self.list_obstacles)\n for i in range(len(self.list_obstacles)):\n obst = self.list_obstacles[i]\n obstPos = obst.state_at_time(currenttimestep)\n if currentorient is not None and obstPos is not None:\n dist = self.euclidean_distance(currentpos, obstPos.position)\n lookaheadVar = 1.375 * currentVel + 2.5\n if dist <= lookaheadVar:\n # calc orientation diff between car and obstacle:\n vectorToObst = np.array([currentpos, obstPos.position])\n vectorToObstOrient = self.calc_angle_of_position(vectorToObst, currentpos)\n orientdiff = self.calc_orientation_diff(currentorient, vectorToObstOrient)\n if abs(orientdiff) <= 0.261799:\n disttoobst[i]= dist\n else:\n disttoobst[i]= np.inf\n else:\n disttoobst[i]= np.inf\n index_smallest_dist = disttoobst.index(min(disttoobst))\n if disttoobst[index_smallest_dist] == np.inf:\n index_smallest_dist = -1\n return index_smallest_dist\n\n\n def test_if_in_goal_lanelet(self, node_current):\n pos = [node_current.list_paths[-1][-1].position]\n currentlanelet = self.scenario.lanelet_network.find_lanelet_by_position(pos)\n currentlanelet = currentlanelet[0][0]\n #result = self.is_goal_in_lane(currentlanelet)\n result = False\n if self.planningProblem.goal.lanelets_of_goal_position is not None:\n if currentlanelet in self.planningProblem.goal.lanelets_of_goal_position.get(0):\n result = True\n return result\n\n def cost_for_modeA_problem_old(self, node_current, output_logs):\n # Function for planning problem with desired time, position, speed and orientation\n if self.position_desired is None:\n if output_logs:\n print(\"exit Cost function 
because position desired is None!\")\n return self.time_desired.start - node_current.list_paths[-1][-1].time_step\n else:\n velocity = node_current.list_paths[-1][-1].velocity\n path_last = node_current.list_paths[-1]\n if np.isclose(velocity, 0):\n return np.inf\n else:\n # Calc Variables:\n distance = self.calc_euclidean_distance(current_node=node_current)\n angleToGoal = self.calc_angle_to_goal(path_last[-1])\n orientationToGoalDiff = self.calc_orientation_diff(angleToGoal, path_last[-1].orientation)\n orientationToGoalDiffdegree = (abs(orientationToGoalDiff)*180)/3.14\n desired_orient = (self.orientation_desired.end + self.orientation_desired.start) / 2\n desired_velocity = (self.velocity_desired.start + self.velocity_desired.end)/2\n diff_desiredOrient = self.calc_orientation_diff(desired_orient, path_last[-1].orientation)\n diff_deiredVelocity = abs(velocity - desired_velocity)\n angle_intervall = abs(abs(self.orientation_desired.start) - abs(self.orientation_desired.end))\n\n # Output data for debugging:\n if output_logs:\n print(\"Distance to goal of current node is: \", distance)\n print(\"Velocity of current node is: \", velocity)\n print(\"Orientation of current position: \",node_current.list_paths[-1][-1].orientation)\n print(\"Angle to goal of current node is: \", angleToGoal)\n print(\"orientation diff to goal of current node is(deg): \", orientationToGoalDiffdegree)\n print(\"diff desired orient of current node is(deg): \", diff_desiredOrient)\n print(\"diff desired velocity of current node is(deg): \", diff_deiredVelocity)\n\n # if very colse to goal, minimize the diff velocity and diff orient\n if distance <= 1:\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = cost + desired_orient_weight * diff_desiredOrient\n return cost\n\n\n # If very close to target but time is still not reached:\n if distance <= 0.1 and 
node_current.list_paths[-1][-1].time_step < self.time_desired.start:\n return (self.time_desired.start - node_current.list_paths[-1][-1].time_step) *0.001\n\n # check if goal in in field of view:\n if orientationToGoalDiffdegree > 45:\n # goal is not in field of view:\n # give more weight to speed and follow reference path blindly:\n # block to differentiate between large distance to goal and small distance:\n if distance >= 10: # too far away from target, just follow the least distance and target lanelet.\n velocity_weight = 1\n cost = distance / velocity\n\n return cost\n\n if distance < 10 and distance >= 5: # almost close, reduce speed.\n return np.inf\n\n if distance < 5: # very close andjust orientation angle..\n return np.inf\n else:\n # goal is in field of view:\n # give more weight to distance and speed and orientation goals:\n # goal is not in field of view:\n # give more weight to speed and follow reference path blindly:\n # block to differentiate between large distance to goal and small distance:\n if distance >= 10: # too far away from target, just follow the least distance and target lanelet.\n velocity_weight = 1\n cost = distance / velocity * velocity_weight\n return cost\n\n if distance < 10 and distance >= 5: # almost close, reduce speed.\n velocity_weight = 0.5\n desired_vel_weight = 1\n desired_orient_weight = 1\n cost = distance / velocity\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = cost + desired_orient_weight * diff_desiredOrient\n return cost\n\n if distance < 5: # very close andjust orientation angle..\n cost = distance / velocity\n desired_vel_weight = 3\n desired_orient_weight = 3\n cost = cost + desired_vel_weight * diff_deiredVelocity\n if angle_intervall < 1 and angle_intervall != 0:\n cost = cost + desired_orient_weight * diff_desiredOrient\n return cost\n",
"step-ids": [
9,
11,
12,
13,
17
]
}
|
[
9,
11,
12,
13,
17
] |
<|reserved_special_token_0|>
class Sliders(timelapse.TimeLapse):
def __init__(self, server_list, nick='Sliders', channel='#sliders',
realname='Sliders', sliding_window=60, **params):
super().__init__(server_list, nick=nick, channel=channel, **params)
self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))
self.sliders_transform = random.choice(transform.all_transforms)
def on_lapsed_message(self, msg):
if isinstance(msg, transform.Transform):
self.sliders_transform = msg
self.connection.privmsg(self.lapsed_channel,
"\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux "
+ msg.name + '\x01')
else:
super().on_lapsed_message(self.sliders_transform(msg))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sliding_stream(delay_secs=20):
ts = datetime.datetime.now()
delay = datetime.timedelta(0, delay_secs)
while True:
yield ts, random.choice(transform.all_transforms)
ts = ts + delay
class Sliders(timelapse.TimeLapse):
def __init__(self, server_list, nick='Sliders', channel='#sliders',
realname='Sliders', sliding_window=60, **params):
super().__init__(server_list, nick=nick, channel=channel, **params)
self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))
self.sliders_transform = random.choice(transform.all_transforms)
def on_lapsed_message(self, msg):
if isinstance(msg, transform.Transform):
self.sliders_transform = msg
self.connection.privmsg(self.lapsed_channel,
"\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux "
+ msg.name + '\x01')
else:
super().on_lapsed_message(self.sliders_transform(msg))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def merge(s1, s2):
try:
x1 = next(s1)
except StopIteration:
yield from s2
return
try:
x2 = next(s2)
except StopIteration:
yield from s1
return
while True:
if x2 > x1:
yield x1
try:
x1 = next(s1)
except StopIteration:
yield x2
yield from s2
return
else:
yield x2
try:
x2 = next(s2)
except StopIteration:
yield x1
yield from s1
return
def sliding_stream(delay_secs=20):
ts = datetime.datetime.now()
delay = datetime.timedelta(0, delay_secs)
while True:
yield ts, random.choice(transform.all_transforms)
ts = ts + delay
class Sliders(timelapse.TimeLapse):
def __init__(self, server_list, nick='Sliders', channel='#sliders',
realname='Sliders', sliding_window=60, **params):
super().__init__(server_list, nick=nick, channel=channel, **params)
self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))
self.sliders_transform = random.choice(transform.all_transforms)
def on_lapsed_message(self, msg):
if isinstance(msg, transform.Transform):
self.sliders_transform = msg
self.connection.privmsg(self.lapsed_channel,
"\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux "
+ msg.name + '\x01')
else:
super().on_lapsed_message(self.sliders_transform(msg))
<|reserved_special_token_1|>
import datetime
import logging
import random
import transform
import timelapse
def merge(s1, s2):
try:
x1 = next(s1)
except StopIteration:
yield from s2
return
try:
x2 = next(s2)
except StopIteration:
yield from s1
return
while True:
if x2 > x1:
yield x1
try:
x1 = next(s1)
except StopIteration:
yield x2
yield from s2
return
else:
yield x2
try:
x2 = next(s2)
except StopIteration:
yield x1
yield from s1
return
def sliding_stream(delay_secs=20):
ts = datetime.datetime.now()
delay = datetime.timedelta(0, delay_secs)
while True:
yield ts, random.choice(transform.all_transforms)
ts = ts + delay
class Sliders(timelapse.TimeLapse):
def __init__(self, server_list, nick='Sliders', channel='#sliders',
realname='Sliders', sliding_window=60, **params):
super().__init__(server_list, nick=nick, channel=channel, **params)
self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))
self.sliders_transform = random.choice(transform.all_transforms)
def on_lapsed_message(self, msg):
if isinstance(msg, transform.Transform):
self.sliders_transform = msg
self.connection.privmsg(self.lapsed_channel,
"\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux "
+ msg.name + '\x01')
else:
super().on_lapsed_message(self.sliders_transform(msg))
<|reserved_special_token_1|>
import datetime
import logging
import random
import transform
import timelapse
# merge two iterators producing sorted values
def merge(s1, s2):
try:
x1 = next(s1)
except StopIteration:
yield from s2
return
try:
x2 = next(s2)
except StopIteration:
yield from s1
return
while True:
if x2 > x1:
yield x1
try:
x1 = next(s1)
except StopIteration:
yield x2
yield from s2
return
else:
yield x2
try:
x2 = next(s2)
except StopIteration:
yield x1
yield from s1
return
def sliding_stream(delay_secs=20):
ts = datetime.datetime.now()
delay = datetime.timedelta(0,delay_secs)
while True:
yield(ts, random.choice(transform.all_transforms))
ts = ts + delay
class Sliders(timelapse.TimeLapse):
def __init__(self, server_list, nick="Sliders", channel="#sliders", realname="Sliders",
sliding_window = 60, **params):
super().__init__(server_list, nick=nick, channel=channel, **params)
self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))
self.sliders_transform = random.choice(transform.all_transforms)
def on_lapsed_message(self, msg):
if isinstance(msg, transform.Transform):
self.sliders_transform = msg
self.connection.privmsg(self.lapsed_channel,
"\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux "
+ msg.name + "\x01")
else:
super().on_lapsed_message(self.sliders_transform(msg))
|
flexible
|
{
"blob_id": "c651d49c98a4cf457c8252c94c6785dea8e9af60",
"index": 3909,
"step-1": "<mask token>\n\n\nclass Sliders(timelapse.TimeLapse):\n\n def __init__(self, server_list, nick='Sliders', channel='#sliders',\n realname='Sliders', sliding_window=60, **params):\n super().__init__(server_list, nick=nick, channel=channel, **params)\n self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))\n self.sliders_transform = random.choice(transform.all_transforms)\n\n def on_lapsed_message(self, msg):\n if isinstance(msg, transform.Transform):\n self.sliders_transform = msg\n self.connection.privmsg(self.lapsed_channel, \n \"\\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux \"\n + msg.name + '\\x01')\n else:\n super().on_lapsed_message(self.sliders_transform(msg))\n",
"step-2": "<mask token>\n\n\ndef sliding_stream(delay_secs=20):\n ts = datetime.datetime.now()\n delay = datetime.timedelta(0, delay_secs)\n while True:\n yield ts, random.choice(transform.all_transforms)\n ts = ts + delay\n\n\nclass Sliders(timelapse.TimeLapse):\n\n def __init__(self, server_list, nick='Sliders', channel='#sliders',\n realname='Sliders', sliding_window=60, **params):\n super().__init__(server_list, nick=nick, channel=channel, **params)\n self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))\n self.sliders_transform = random.choice(transform.all_transforms)\n\n def on_lapsed_message(self, msg):\n if isinstance(msg, transform.Transform):\n self.sliders_transform = msg\n self.connection.privmsg(self.lapsed_channel, \n \"\\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux \"\n + msg.name + '\\x01')\n else:\n super().on_lapsed_message(self.sliders_transform(msg))\n",
"step-3": "<mask token>\n\n\ndef merge(s1, s2):\n try:\n x1 = next(s1)\n except StopIteration:\n yield from s2\n return\n try:\n x2 = next(s2)\n except StopIteration:\n yield from s1\n return\n while True:\n if x2 > x1:\n yield x1\n try:\n x1 = next(s1)\n except StopIteration:\n yield x2\n yield from s2\n return\n else:\n yield x2\n try:\n x2 = next(s2)\n except StopIteration:\n yield x1\n yield from s1\n return\n\n\ndef sliding_stream(delay_secs=20):\n ts = datetime.datetime.now()\n delay = datetime.timedelta(0, delay_secs)\n while True:\n yield ts, random.choice(transform.all_transforms)\n ts = ts + delay\n\n\nclass Sliders(timelapse.TimeLapse):\n\n def __init__(self, server_list, nick='Sliders', channel='#sliders',\n realname='Sliders', sliding_window=60, **params):\n super().__init__(server_list, nick=nick, channel=channel, **params)\n self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))\n self.sliders_transform = random.choice(transform.all_transforms)\n\n def on_lapsed_message(self, msg):\n if isinstance(msg, transform.Transform):\n self.sliders_transform = msg\n self.connection.privmsg(self.lapsed_channel, \n \"\\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux \"\n + msg.name + '\\x01')\n else:\n super().on_lapsed_message(self.sliders_transform(msg))\n",
"step-4": "import datetime\nimport logging\nimport random\nimport transform\nimport timelapse\n\n\ndef merge(s1, s2):\n try:\n x1 = next(s1)\n except StopIteration:\n yield from s2\n return\n try:\n x2 = next(s2)\n except StopIteration:\n yield from s1\n return\n while True:\n if x2 > x1:\n yield x1\n try:\n x1 = next(s1)\n except StopIteration:\n yield x2\n yield from s2\n return\n else:\n yield x2\n try:\n x2 = next(s2)\n except StopIteration:\n yield x1\n yield from s1\n return\n\n\ndef sliding_stream(delay_secs=20):\n ts = datetime.datetime.now()\n delay = datetime.timedelta(0, delay_secs)\n while True:\n yield ts, random.choice(transform.all_transforms)\n ts = ts + delay\n\n\nclass Sliders(timelapse.TimeLapse):\n\n def __init__(self, server_list, nick='Sliders', channel='#sliders',\n realname='Sliders', sliding_window=60, **params):\n super().__init__(server_list, nick=nick, channel=channel, **params)\n self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))\n self.sliders_transform = random.choice(transform.all_transforms)\n\n def on_lapsed_message(self, msg):\n if isinstance(msg, transform.Transform):\n self.sliders_transform = msg\n self.connection.privmsg(self.lapsed_channel, \n \"\\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux \"\n + msg.name + '\\x01')\n else:\n super().on_lapsed_message(self.sliders_transform(msg))\n",
"step-5": "import datetime\nimport logging\nimport random\nimport transform\nimport timelapse\n\n# merge two iterators producing sorted values\ndef merge(s1, s2):\n try:\n x1 = next(s1)\n except StopIteration:\n yield from s2\n return\n\n try:\n x2 = next(s2)\n except StopIteration:\n yield from s1\n return\n\n while True:\n if x2 > x1:\n yield x1\n try:\n x1 = next(s1)\n except StopIteration:\n yield x2\n yield from s2\n return\n else:\n yield x2\n try:\n x2 = next(s2)\n except StopIteration:\n yield x1\n yield from s1\n return\n \n\ndef sliding_stream(delay_secs=20):\n ts = datetime.datetime.now()\n delay = datetime.timedelta(0,delay_secs)\n while True:\n yield(ts, random.choice(transform.all_transforms))\n ts = ts + delay\n\nclass Sliders(timelapse.TimeLapse):\n def __init__(self, server_list, nick=\"Sliders\", channel=\"#sliders\", realname=\"Sliders\",\n sliding_window = 60, **params):\n super().__init__(server_list, nick=nick, channel=channel, **params)\n self.lapsed = merge(self.lapsed, sliding_stream(sliding_window))\n self.sliders_transform = random.choice(transform.all_transforms)\n\n def on_lapsed_message(self, msg):\n\n if isinstance(msg, transform.Transform):\n self.sliders_transform = msg\n self.connection.privmsg(self.lapsed_channel,\n \"\\x01ACTION s'ouvre vers un monde parallèle peuplé de jumeaux \"\n + msg.name + \"\\x01\")\n else:\n super().on_lapsed_message(self.sliders_transform(msg))\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import pygame
class BackGround:
def __init__(self, x, y):
self.y = y
self.x = x
def set_image(self, src):
self.image = pygame.image.load(src)
self.rect = self.image.get_rect()
self.rect.y = self.y
self.rect.x = self.x
def draw(self, screen):
screen.blit(self.image, self.rect)
|
normal
|
{
"blob_id": "071e3cf6b4337e0079bbb2c7694fff2468142070",
"index": 6505,
"step-1": "<mask token>\n\n\nclass BackGround:\n\n def __init__(self, x, y):\n self.y = y\n self.x = x\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass BackGround:\n\n def __init__(self, x, y):\n self.y = y\n self.x = x\n <mask token>\n\n def draw(self, screen):\n screen.blit(self.image, self.rect)\n",
"step-3": "<mask token>\n\n\nclass BackGround:\n\n def __init__(self, x, y):\n self.y = y\n self.x = x\n\n def set_image(self, src):\n self.image = pygame.image.load(src)\n self.rect = self.image.get_rect()\n self.rect.y = self.y\n self.rect.x = self.x\n\n def draw(self, screen):\n screen.blit(self.image, self.rect)\n",
"step-4": "import pygame\n\n\nclass BackGround:\n\n def __init__(self, x, y):\n self.y = y\n self.x = x\n\n def set_image(self, src):\n self.image = pygame.image.load(src)\n self.rect = self.image.get_rect()\n self.rect.y = self.y\n self.rect.x = self.x\n\n def draw(self, screen):\n screen.blit(self.image, self.rect)\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
#!/C:\Program Files (x86)\Python35-32
#importar librarias necesarias
from urllib.request import urlopen
from bs4 import BeautifulSoup
|
normal
|
{
"blob_id": "7a59c8c883a9aaa723175783e01aa62e23503fde",
"index": 376,
"step-1": "<mask token>\n",
"step-2": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup\n",
"step-3": "#!/C:\\Program Files (x86)\\Python35-32\n\n#importar librarias necesarias\nfrom urllib.request import urlopen\nfrom bs4 import BeautifulSoup\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from abc import ABC, abstractmethod
class DatasetFileManager(ABC):
@abstractmethod
def read_dataset(self):
pass
|
normal
|
{
"blob_id": "5ef65ace397be17be62625ed27b5753d15565d61",
"index": 555,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass DatasetFileManager(ABC):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass DatasetFileManager(ABC):\n\n @abstractmethod\n def read_dataset(self):\n pass\n",
"step-4": "from abc import ABC, abstractmethod\n\n\nclass DatasetFileManager(ABC):\n\n @abstractmethod\n def read_dataset(self):\n pass\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class MyGame(arcade.Window):
def __init__(self, width, height, title):
super().__init__(width, height, title)
self.drawer = 0
self.wardrobe = 0
self.bookshelves = 0
self.door = 0
self.bed = 0
self.book_1 = 0
self.book_2 = 0
self.book_3 = 0
self.endscreen = 0
self.movement_tutorial = 0
self.code = 0
self.exit_key = 0
arcade.set_background_color(arcade.color.BROWN)
self.ball = Ball(400, 300, 0, 0, 15)
def on_draw(self):
arcade.start_render()
self.ball.draw()
arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)
arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)
arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)
arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color
.BLACK)
arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)
arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)
arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)
arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)
arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)
arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)
arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)
arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)
arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)
if self.ball.position_x < 115 and self.ball.position_y > 470:
arcade.draw_text('Hold D to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 635 and self.ball.position_y < 210:
arcade.draw_text('Hold E to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,
font_size=18)
if (self.ball.position_x > 255 and self.ball.position_x < 535 and
self.ball.position_y > 435):
arcade.draw_text('Hold O to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bookshelves', 235, 314, arcade.color.
WHITE, font_size=18)
if self.ball.position_x < 105 and self.ball.position_y < 105:
arcade.draw_text('Hold R to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 660 and self.ball.position_y > 440:
arcade.draw_text('Hold W to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,
font_size=18)
if self.movement_tutorial == 0:
arcade.draw_text('Use arrow keys to move', 235, 368, arcade.
color.WHITE, font_size=18)
if self.drawer == 1:
if self.code == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color
.WHITE, font_size=18)
arcade.draw_text('You got a key', 435, 314, arcade.color.
WHITE, font_size=18)
self.exit_key = 1
else:
arcade.draw_text('It seems I need', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('a code to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.bed == 1:
arcade.draw_text("It's just a bed", 435, 338, arcade.color.
WHITE, font_size=18)
if self.wardrobe == 1:
arcade.draw_text('There are many outfits here', 435, 338,
arcade.color.WHITE, font_size=18)
if self.bookshelves == 1:
arcade.draw_text('There are many books in here', 435, 338,
arcade.color.WHITE, font_size=18)
arcade.draw_text('which one should I read? A, B, C', 435, 314,
arcade.color.WHITE, font_size=18)
if self.book_1 == 1:
arcade.draw_text('There is a key in the', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,
font_size=18)
if self.book_2 == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,
font_size=18)
self.code = 1
if self.book_3 == 1:
arcade.draw_text("It's the Bible", 435, 338, arcade.color.WHITE,
font_size=18)
if self.door == 1:
if self.exit_key == 1:
self.endscreen = 1
else:
arcade.draw_text('It seems that I need', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('a key to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.endscreen == 1:
arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK
)
arcade.draw_text('Congratulations! you beat the game', 235, 468,
arcade.color.WHITE, font_size=18)
arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.
WHITE_SMOKE)
arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)
arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade
.color.GRAY)
arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade
.color.WHITE)
arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)
arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)
arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,
arcade.color.GOLD)
arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.
PURPLE, 30)
arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.
PURPLE, 330)
arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)
arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)
def on_update(self, delta_time):
self.ball.update()
def on_key_press(self, key, modifiers):
if key == arcade.key.LEFT:
self.ball.change_x = -MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.RIGHT:
self.ball.change_x = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.UP:
self.ball.change_y = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.DOWN:
self.ball.change_y = -MOVEMENT_SPEED
self.movement_tutorial = 1
if key == arcade.key.R:
self.drawer = 1
if key == arcade.key.W:
self.wardrobe = 1
if key == arcade.key.D:
self.door = 1
if key == arcade.key.O:
self.bookshelves = 1
if key == arcade.key.E:
self.bed = 1
if key == arcade.key.A:
self.book_1 = 1
if key == arcade.key.B:
self.book_2 = 1
if key == arcade.key.C:
self.book_3 = 1
def on_key_release(self, key, modifiers):
if key == arcade.key.LEFT or key == arcade.key.RIGHT:
self.ball.change_x = 0
elif key == arcade.key.UP or key == arcade.key.DOWN:
self.ball.change_y = 0
if key == arcade.key.R:
self.drawer = 0
if key == arcade.key.W:
self.wardrobe = 0
if key == arcade.key.D:
self.door = 0
if key == arcade.key.O:
self.bookshelves = 0
if key == arcade.key.E:
self.bed = 0
if key == arcade.key.A:
self.book_1 = 0
if key == arcade.key.B:
self.book_2 = 0
if key == arcade.key.C:
self.book_3 = 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ball:
<|reserved_special_token_0|>
def draw(self):
""" Draw the balls with the instance variables we have. """
arcade.draw_circle_filled(self.position_x, self.position_y, self.
radius, self.player_color)
<|reserved_special_token_0|>
class MyGame(arcade.Window):
def __init__(self, width, height, title):
super().__init__(width, height, title)
self.drawer = 0
self.wardrobe = 0
self.bookshelves = 0
self.door = 0
self.bed = 0
self.book_1 = 0
self.book_2 = 0
self.book_3 = 0
self.endscreen = 0
self.movement_tutorial = 0
self.code = 0
self.exit_key = 0
arcade.set_background_color(arcade.color.BROWN)
self.ball = Ball(400, 300, 0, 0, 15)
def on_draw(self):
arcade.start_render()
self.ball.draw()
arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)
arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)
arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)
arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color
.BLACK)
arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)
arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)
arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)
arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)
arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)
arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)
arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)
arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)
arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)
if self.ball.position_x < 115 and self.ball.position_y > 470:
arcade.draw_text('Hold D to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 635 and self.ball.position_y < 210:
arcade.draw_text('Hold E to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,
font_size=18)
if (self.ball.position_x > 255 and self.ball.position_x < 535 and
self.ball.position_y > 435):
arcade.draw_text('Hold O to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bookshelves', 235, 314, arcade.color.
WHITE, font_size=18)
if self.ball.position_x < 105 and self.ball.position_y < 105:
arcade.draw_text('Hold R to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 660 and self.ball.position_y > 440:
arcade.draw_text('Hold W to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,
font_size=18)
if self.movement_tutorial == 0:
arcade.draw_text('Use arrow keys to move', 235, 368, arcade.
color.WHITE, font_size=18)
if self.drawer == 1:
if self.code == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color
.WHITE, font_size=18)
arcade.draw_text('You got a key', 435, 314, arcade.color.
WHITE, font_size=18)
self.exit_key = 1
else:
arcade.draw_text('It seems I need', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('a code to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.bed == 1:
arcade.draw_text("It's just a bed", 435, 338, arcade.color.
WHITE, font_size=18)
if self.wardrobe == 1:
arcade.draw_text('There are many outfits here', 435, 338,
arcade.color.WHITE, font_size=18)
if self.bookshelves == 1:
arcade.draw_text('There are many books in here', 435, 338,
arcade.color.WHITE, font_size=18)
arcade.draw_text('which one should I read? A, B, C', 435, 314,
arcade.color.WHITE, font_size=18)
if self.book_1 == 1:
arcade.draw_text('There is a key in the', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,
font_size=18)
if self.book_2 == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,
font_size=18)
self.code = 1
if self.book_3 == 1:
arcade.draw_text("It's the Bible", 435, 338, arcade.color.WHITE,
font_size=18)
if self.door == 1:
if self.exit_key == 1:
self.endscreen = 1
else:
arcade.draw_text('It seems that I need', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('a key to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.endscreen == 1:
arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK
)
arcade.draw_text('Congratulations! you beat the game', 235, 468,
arcade.color.WHITE, font_size=18)
arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.
WHITE_SMOKE)
arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)
arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade
.color.GRAY)
arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade
.color.WHITE)
arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)
arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)
arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,
arcade.color.GOLD)
arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.
PURPLE, 30)
arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.
PURPLE, 330)
arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)
arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)
def on_update(self, delta_time):
self.ball.update()
def on_key_press(self, key, modifiers):
if key == arcade.key.LEFT:
self.ball.change_x = -MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.RIGHT:
self.ball.change_x = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.UP:
self.ball.change_y = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.DOWN:
self.ball.change_y = -MOVEMENT_SPEED
self.movement_tutorial = 1
if key == arcade.key.R:
self.drawer = 1
if key == arcade.key.W:
self.wardrobe = 1
if key == arcade.key.D:
self.door = 1
if key == arcade.key.O:
self.bookshelves = 1
if key == arcade.key.E:
self.bed = 1
if key == arcade.key.A:
self.book_1 = 1
if key == arcade.key.B:
self.book_2 = 1
if key == arcade.key.C:
self.book_3 = 1
def on_key_release(self, key, modifiers):
if key == arcade.key.LEFT or key == arcade.key.RIGHT:
self.ball.change_x = 0
elif key == arcade.key.UP or key == arcade.key.DOWN:
self.ball.change_y = 0
if key == arcade.key.R:
self.drawer = 0
if key == arcade.key.W:
self.wardrobe = 0
if key == arcade.key.D:
self.door = 0
if key == arcade.key.O:
self.bookshelves = 0
if key == arcade.key.E:
self.bed = 0
if key == arcade.key.A:
self.book_1 = 0
if key == arcade.key.B:
self.book_2 = 0
if key == arcade.key.C:
self.book_3 = 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ball:
def __init__(self, position_x, position_y, change_x, change_y, radius):
self.position_x = position_x
self.position_y = position_y
self.change_x = change_x
self.change_y = change_y
self.radius = radius
self.player_color = arcade.color.AMETHYST
def draw(self):
""" Draw the balls with the instance variables we have. """
arcade.draw_circle_filled(self.position_x, self.position_y, self.
radius, self.player_color)
def update(self):
self.position_y += self.change_y
self.position_x += self.change_x
if self.position_x < self.radius:
self.position_x = self.radius
if self.position_x > SCREEN_WIDTH - self.radius:
self.position_x = SCREEN_WIDTH - self.radius
if self.position_y < self.radius:
self.position_y = self.radius
if self.position_y > SCREEN_HEIGHT - self.radius:
self.position_y = SCREEN_HEIGHT - self.radius
class MyGame(arcade.Window):
def __init__(self, width, height, title):
super().__init__(width, height, title)
self.drawer = 0
self.wardrobe = 0
self.bookshelves = 0
self.door = 0
self.bed = 0
self.book_1 = 0
self.book_2 = 0
self.book_3 = 0
self.endscreen = 0
self.movement_tutorial = 0
self.code = 0
self.exit_key = 0
arcade.set_background_color(arcade.color.BROWN)
self.ball = Ball(400, 300, 0, 0, 15)
def on_draw(self):
arcade.start_render()
self.ball.draw()
arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)
arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)
arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)
arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)
arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color
.BLACK)
arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)
arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)
arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)
arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)
arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)
arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)
arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)
arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)
arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)
arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)
arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)
arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)
arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)
arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)
if self.ball.position_x < 115 and self.ball.position_y > 470:
arcade.draw_text('Hold D to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 635 and self.ball.position_y < 210:
arcade.draw_text('Hold E to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,
font_size=18)
if (self.ball.position_x > 255 and self.ball.position_x < 535 and
self.ball.position_y > 435):
arcade.draw_text('Hold O to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Bookshelves', 235, 314, arcade.color.
WHITE, font_size=18)
if self.ball.position_x < 105 and self.ball.position_y < 105:
arcade.draw_text('Hold R to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,
font_size=18)
if self.ball.position_x > 660 and self.ball.position_y > 440:
arcade.draw_text('Hold W to interact', 235, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,
font_size=18)
if self.movement_tutorial == 0:
arcade.draw_text('Use arrow keys to move', 235, 368, arcade.
color.WHITE, font_size=18)
if self.drawer == 1:
if self.code == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color
.WHITE, font_size=18)
arcade.draw_text('You got a key', 435, 314, arcade.color.
WHITE, font_size=18)
self.exit_key = 1
else:
arcade.draw_text('It seems I need', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('a code to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.bed == 1:
arcade.draw_text("It's just a bed", 435, 338, arcade.color.
WHITE, font_size=18)
if self.wardrobe == 1:
arcade.draw_text('There are many outfits here', 435, 338,
arcade.color.WHITE, font_size=18)
if self.bookshelves == 1:
arcade.draw_text('There are many books in here', 435, 338,
arcade.color.WHITE, font_size=18)
arcade.draw_text('which one should I read? A, B, C', 435, 314,
arcade.color.WHITE, font_size=18)
if self.book_1 == 1:
arcade.draw_text('There is a key in the', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,
font_size=18)
if self.book_2 == 1:
arcade.draw_text('Congratulations!', 435, 338, arcade.color.
WHITE, font_size=18)
arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,
font_size=18)
self.code = 1
if self.book_3 == 1:
arcade.draw_text("It's the Bible", 435, 338, arcade.color.WHITE,
font_size=18)
if self.door == 1:
if self.exit_key == 1:
self.endscreen = 1
else:
arcade.draw_text('It seems that I need', 435, 338, arcade.
color.WHITE, font_size=18)
arcade.draw_text('a key to open this', 435, 314, arcade.
color.WHITE, font_size=18)
if self.endscreen == 1:
arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK
)
arcade.draw_text('Congratulations! you beat the game', 235, 468,
arcade.color.WHITE, font_size=18)
arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.
WHITE_SMOKE)
arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)
arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade
.color.GRAY)
arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade
.color.WHITE)
arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)
arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)
arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,
arcade.color.GOLD)
arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.
PURPLE, 30)
arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.
PURPLE, 330)
arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.
BLACK, 30)
arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.
BLACK, 330)
arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)
arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,
arcade.color.PURPLE)
arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,
arcade.color.PURPLE)
arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)
def on_update(self, delta_time):
self.ball.update()
def on_key_press(self, key, modifiers):
if key == arcade.key.LEFT:
self.ball.change_x = -MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.RIGHT:
self.ball.change_x = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.UP:
self.ball.change_y = MOVEMENT_SPEED
self.movement_tutorial = 1
elif key == arcade.key.DOWN:
self.ball.change_y = -MOVEMENT_SPEED
self.movement_tutorial = 1
if key == arcade.key.R:
self.drawer = 1
if key == arcade.key.W:
self.wardrobe = 1
if key == arcade.key.D:
self.door = 1
if key == arcade.key.O:
self.bookshelves = 1
if key == arcade.key.E:
self.bed = 1
if key == arcade.key.A:
self.book_1 = 1
if key == arcade.key.B:
self.book_2 = 1
if key == arcade.key.C:
self.book_3 = 1
def on_key_release(self, key, modifiers):
if key == arcade.key.LEFT or key == arcade.key.RIGHT:
self.ball.change_x = 0
elif key == arcade.key.UP or key == arcade.key.DOWN:
self.ball.change_y = 0
if key == arcade.key.R:
self.drawer = 0
if key == arcade.key.W:
self.wardrobe = 0
if key == arcade.key.D:
self.door = 0
if key == arcade.key.O:
self.bookshelves = 0
if key == arcade.key.E:
self.bed = 0
if key == arcade.key.A:
self.book_1 = 0
if key == arcade.key.B:
self.book_2 = 0
if key == arcade.key.C:
self.book_3 = 0
def main():
""" Main method """
game = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
arcade.run()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
SPRITE_SCALING = 0.5
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = 'Raymond Game'
MOVEMENT_SPEED = 50
class Ball:
def __init__(self, position_x, position_y, change_x, change_y, radius):
self.position_x = position_x
self.position_y = position_y
self.change_x = change_x
self.change_y = change_y
self.radius = radius
self.player_color = arcade.color.AMETHYST
def draw(self):
""" Draw the balls with the instance variables we have. """
arcade.draw_circle_filled(self.position_x, self.position_y, self.
radius, self.player_color)
def update(self):
self.position_y += self.change_y
self.position_x += self.change_x
if self.position_x < self.radius:
self.position_x = self.radius
if self.position_x > SCREEN_WIDTH - self.radius:
self.position_x = SCREEN_WIDTH - self.radius
if self.position_y < self.radius:
self.position_y = self.radius
if self.position_y > SCREEN_HEIGHT - self.radius:
self.position_y = SCREEN_HEIGHT - self.radius
class MyGame(arcade.Window):
    """Single-room escape game.

    The player moves a ball with the arrow keys and holds letter keys to
    interact with furniture: book B grants the code, the drawer (with the
    code) grants the key, and the door (with the key) ends the game.
    """

    # Letter keys mapped to the interaction-flag attribute they control.
    # A flag is 1 while its key is held and 0 once the key is released.
    _INTERACTION_FLAGS = {
        arcade.key.R: 'drawer',
        arcade.key.W: 'wardrobe',
        arcade.key.D: 'door',
        arcade.key.O: 'bookshelves',
        arcade.key.E: 'bed',
        arcade.key.A: 'book_1',
        arcade.key.B: 'book_2',
        arcade.key.C: 'book_3',
    }

    def __init__(self, width, height, title):
        super().__init__(width, height, title)
        # All interaction flags start lowered.
        for flag_name in self._INTERACTION_FLAGS.values():
            setattr(self, flag_name, 0)
        self.endscreen = 0           # 1 once the door is opened with the key
        self.movement_tutorial = 0   # 1 after the first arrow-key press
        self.code = 0                # 1 after reading book B
        self.exit_key = 0            # 1 after opening the drawer with the code
        arcade.set_background_color(arcade.color.BROWN)
        self.ball = Ball(400, 300, 0, 0, 15)

    @staticmethod
    def _say(text, x, y):
        """Draw one line of white, size-18 UI text."""
        arcade.draw_text(text, x, y, arcade.color.WHITE, font_size=18)

    def on_draw(self):
        """Render one frame: room, player ball, prompts/messages, and —
        once the game is won — the end screen."""
        arcade.start_render()
        self.ball.draw()
        self._draw_furniture()
        self._draw_prompts()
        self._draw_messages()
        if self.endscreen == 1:
            self._draw_endscreen()

    def _draw_furniture(self):
        """Draw the static room furniture."""
        # Door (top-left): slab with vertical planks, handle plate and knob.
        arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)
        for plank_x in range(7, 68, 10):
            arcade.draw_rectangle_filled(plank_x, 560, 4, 80,
                                         arcade.color.GRAY)
        arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)
        arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)
        arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559,
                                    arcade.color.BLACK)
        # Bed (bottom-right).
        arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)
        arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)
        arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)
        # Two bookshelves (top-middle), each with two rows of six spines.
        spine_colors = (arcade.color.RED, arcade.color.ORANGE,
                        arcade.color.BLUE, arcade.color.RED,
                        arcade.color.ORANGE, arcade.color.BLUE)
        for shelf_x in (365, 435):
            arcade.draw_rectangle_filled(shelf_x, 550, 60, 90,
                                         arcade.color.GRAY)
            arcade.draw_rectangle_filled(shelf_x, 570, 50, 30,
                                         arcade.color.BLACK)
            arcade.draw_rectangle_filled(shelf_x, 530, 50, 30,
                                         arcade.color.BLACK)
            for row_y in (567, 527):
                for i, spine_color in enumerate(spine_colors):
                    arcade.draw_rectangle_filled(shelf_x - 20 + 8 * i, row_y,
                                                 6, 24, spine_color)
        # Drawer (bottom-left).
        arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)
        arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)
        # Wardrobe (top-right): two doors with yellow knobs.
        arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)
        arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)
        arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)
        arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)

    def _draw_prompts(self):
        """Show "Hold <key>" hints near objects and the one-time tutorial."""
        x, y = self.ball.position_x, self.ball.position_y
        if x < 115 and y > 470:
            self._say('Hold D to interact', 235, 338)
            self._say('with Door', 235, 314)
        if x > 635 and y < 210:
            self._say('Hold E to interact', 235, 338)
            self._say('with Bed', 235, 314)
        if 255 < x < 535 and y > 435:
            self._say('Hold O to interact', 235, 338)
            self._say('with Bookshelves', 235, 314)
        if x < 105 and y < 105:
            self._say('Hold R to interact', 235, 338)
            self._say('with Drawer', 235, 314)
        if x > 660 and y > 440:
            self._say('Hold W to interact', 235, 338)
            self._say('with Wardrobe', 235, 314)
        if self.movement_tutorial == 0:
            self._say('Use arrow keys to move', 235, 368)

    def _draw_messages(self):
        """Show responses for held interaction keys and advance progress
        flags (code -> exit key -> end screen)."""
        if self.drawer == 1:
            if self.code == 1:
                self._say('Congratulations!', 435, 338)
                self._say('You got a key', 435, 314)
                self.exit_key = 1
            else:
                self._say('It seems I need', 435, 338)
                self._say('a code to open this', 435, 314)
        if self.bed == 1:
            self._say("It's just a bed", 435, 338)
        if self.wardrobe == 1:
            self._say('There are many outfits here', 435, 338)
        if self.bookshelves == 1:
            self._say('There are many books in here', 435, 338)
            self._say('which one should I read? A, B, C', 435, 314)
        if self.book_1 == 1:
            self._say('There is a key in the', 435, 338)
            self._say('drawer... huh', 435, 314)
        if self.book_2 == 1:
            self._say('Congratulations!', 435, 338)
            self._say('You got a code', 435, 314)
            self.code = 1
        if self.book_3 == 1:
            self._say("It's the Bible", 435, 338)
        if self.door == 1:
            if self.exit_key == 1:
                self.endscreen = 1
            else:
                self._say('It seems that I need', 435, 338)
                self._say('a key to open this', 435, 314)

    def _draw_endscreen(self):
        """Black out the screen, draw the victory text, a sword, and what
        appears to be the player character."""
        arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK)
        self._say('Congratulations! you beat the game', 235, 468)
        # Sword: two blade halves, tip triangles, and a center line.
        arcade.draw_rectangle_filled(290, 190, 20, 180,
                                     arcade.color.WHITE_SMOKE)
        arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)
        arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70,
                                    arcade.color.GRAY)
        arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70,
                                    arcade.color.WHITE)
        arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)
        # Figure: torso, collar, angled arms with stripe details, legs, head.
        arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)
        arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,
                                    arcade.color.GOLD)
        arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.PURPLE, 30)
        arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.PURPLE,
                                     330)
        arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.BLACK, 30)
        arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.BLACK, 30)
        arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.BLACK, 330)
        arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.BLACK, 330)
        arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)
        arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,
                                    arcade.color.PURPLE)
        arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)

    def on_update(self, delta_time):
        """Advance game state once per frame."""
        self.ball.update()

    def on_key_press(self, key, modifiers):
        """Start ball movement (arrow keys) or raise an interaction flag."""
        arrow_motion = {
            arcade.key.LEFT: ('change_x', -MOVEMENT_SPEED),
            arcade.key.RIGHT: ('change_x', MOVEMENT_SPEED),
            arcade.key.UP: ('change_y', MOVEMENT_SPEED),
            arcade.key.DOWN: ('change_y', -MOVEMENT_SPEED),
        }
        if key in arrow_motion:
            axis, speed = arrow_motion[key]
            setattr(self.ball, axis, speed)
            self.movement_tutorial = 1  # hide tutorial after the first move
        flag_name = self._INTERACTION_FLAGS.get(key)
        if flag_name is not None:
            setattr(self, flag_name, 1)

    def on_key_release(self, key, modifiers):
        """Stop ball movement or lower the matching interaction flag."""
        if key in (arcade.key.LEFT, arcade.key.RIGHT):
            self.ball.change_x = 0
        elif key in (arcade.key.UP, arcade.key.DOWN):
            self.ball.change_y = 0
        flag_name = self._INTERACTION_FLAGS.get(key)
        if flag_name is not None:
            setattr(self, flag_name, 0)
def main():
    """Create the game window and start the arcade event loop."""
    # arcade.run() drives the window created above; the previously unused
    # local binding (`game = ...`) was removed (lint: unused variable).
    MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
    arcade.run()


if __name__ == '__main__':
    main()
<|reserved_special_token_1|>
import arcade
import os
# Default sprite scale factor; not referenced anywhere in the code shown here.
SPRITE_SCALING = 0.5
# Window size in pixels.
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
# Caption shown in the window's title bar.
SCREEN_TITLE = "Raymond Game"
# Pixels the ball moves per update tick while an arrow key is held.
MOVEMENT_SPEED = 50
class Ball:
    """A filled circle the player steers; kept fully inside the window."""

    def __init__(self, position_x, position_y, change_x, change_y, radius):
        # Position and velocity are public attributes: the game writes
        # change_x/change_y to steer the ball and reads position_x/position_y
        # for its proximity checks.
        self.position_x = position_x
        self.position_y = position_y
        self.change_x = change_x
        self.change_y = change_y
        self.radius = radius
        self.player_color = arcade.color.AMETHYST

    def draw(self):
        """Render the ball at its current position."""
        arcade.draw_circle_filled(self.position_x, self.position_y,
                                  self.radius, self.player_color)

    def update(self):
        """Apply one step of velocity, clamping the ball to the screen."""
        new_x = self.position_x + self.change_x
        new_y = self.position_y + self.change_y
        self.position_x = max(self.radius,
                              min(new_x, SCREEN_WIDTH - self.radius))
        self.position_y = max(self.radius,
                              min(new_y, SCREEN_HEIGHT - self.radius))
class MyGame(arcade.Window):
    """Single-room escape game.

    The player moves a ball with the arrow keys and holds letter keys to
    interact with furniture: book B grants the code, the drawer (with the
    code) grants the key, and the door (with the key) ends the game.
    """

    # Letter keys mapped to the interaction-flag attribute they control.
    # A flag is 1 while its key is held and 0 once the key is released.
    _INTERACTION_FLAGS = {
        arcade.key.R: 'drawer',
        arcade.key.W: 'wardrobe',
        arcade.key.D: 'door',
        arcade.key.O: 'bookshelves',
        arcade.key.E: 'bed',
        arcade.key.A: 'book_1',
        arcade.key.B: 'book_2',
        arcade.key.C: 'book_3',
    }

    def __init__(self, width, height, title):
        super().__init__(width, height, title)
        # All interaction flags start lowered.
        for flag_name in self._INTERACTION_FLAGS.values():
            setattr(self, flag_name, 0)
        self.endscreen = 0           # 1 once the door is opened with the key
        self.movement_tutorial = 0   # 1 after the first arrow-key press
        self.code = 0                # 1 after reading book B
        self.exit_key = 0            # 1 after opening the drawer with the code
        arcade.set_background_color(arcade.color.BROWN)
        self.ball = Ball(400, 300, 0, 0, 15)

    @staticmethod
    def _say(text, x, y):
        """Draw one line of white, size-18 UI text."""
        arcade.draw_text(text, x, y, arcade.color.WHITE, font_size=18)

    def on_draw(self):
        """Render one frame: room, player ball, prompts/messages, and —
        once the game is won — the end screen."""
        arcade.start_render()
        self.ball.draw()
        self._draw_furniture()
        self._draw_prompts()
        self._draw_messages()
        if self.endscreen == 1:
            self._draw_endscreen()

    def _draw_furniture(self):
        """Draw the static room furniture."""
        # Door (top-left): slab with vertical planks, handle plate and knob.
        arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)
        for plank_x in range(7, 68, 10):
            arcade.draw_rectangle_filled(plank_x, 560, 4, 80,
                                         arcade.color.GRAY)
        arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)
        arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)
        arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559,
                                    arcade.color.BLACK)
        # Bed (bottom-right).
        arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)
        arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)
        arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)
        # Two bookshelves (top-middle), each with two rows of six spines.
        spine_colors = (arcade.color.RED, arcade.color.ORANGE,
                        arcade.color.BLUE, arcade.color.RED,
                        arcade.color.ORANGE, arcade.color.BLUE)
        for shelf_x in (365, 435):
            arcade.draw_rectangle_filled(shelf_x, 550, 60, 90,
                                         arcade.color.GRAY)
            arcade.draw_rectangle_filled(shelf_x, 570, 50, 30,
                                         arcade.color.BLACK)
            arcade.draw_rectangle_filled(shelf_x, 530, 50, 30,
                                         arcade.color.BLACK)
            for row_y in (567, 527):
                for i, spine_color in enumerate(spine_colors):
                    arcade.draw_rectangle_filled(shelf_x - 20 + 8 * i, row_y,
                                                 6, 24, spine_color)
        # Drawer (bottom-left).
        arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)
        arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)
        # Wardrobe (top-right): two doors with yellow knobs.
        arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)
        arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)
        arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)
        arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)

    def _draw_prompts(self):
        """Show "Hold <key>" hints near objects and the one-time tutorial."""
        x, y = self.ball.position_x, self.ball.position_y
        if x < 115 and y > 470:
            self._say('Hold D to interact', 235, 338)
            self._say('with Door', 235, 314)
        if x > 635 and y < 210:
            self._say('Hold E to interact', 235, 338)
            self._say('with Bed', 235, 314)
        if 255 < x < 535 and y > 435:
            self._say('Hold O to interact', 235, 338)
            self._say('with Bookshelves', 235, 314)
        if x < 105 and y < 105:
            self._say('Hold R to interact', 235, 338)
            self._say('with Drawer', 235, 314)
        if x > 660 and y > 440:
            self._say('Hold W to interact', 235, 338)
            self._say('with Wardrobe', 235, 314)
        if self.movement_tutorial == 0:
            self._say('Use arrow keys to move', 235, 368)

    def _draw_messages(self):
        """Show responses for held interaction keys and advance progress
        flags (code -> exit key -> end screen)."""
        if self.drawer == 1:
            if self.code == 1:
                self._say('Congratulations!', 435, 338)
                self._say('You got a key', 435, 314)
                self.exit_key = 1
            else:
                self._say('It seems I need', 435, 338)
                self._say('a code to open this', 435, 314)
        if self.bed == 1:
            self._say("It's just a bed", 435, 338)
        if self.wardrobe == 1:
            self._say('There are many outfits here', 435, 338)
        if self.bookshelves == 1:
            self._say('There are many books in here', 435, 338)
            self._say('which one should I read? A, B, C', 435, 314)
        if self.book_1 == 1:
            self._say('There is a key in the', 435, 338)
            self._say('drawer... huh', 435, 314)
        if self.book_2 == 1:
            self._say('Congratulations!', 435, 338)
            self._say('You got a code', 435, 314)
            self.code = 1
        if self.book_3 == 1:
            self._say("It's the Bible", 435, 338)
        if self.door == 1:
            if self.exit_key == 1:
                self.endscreen = 1
            else:
                self._say('It seems that I need', 435, 338)
                self._say('a key to open this', 435, 314)

    def _draw_endscreen(self):
        """Black out the screen, draw the victory text, a sword, and what
        appears to be the player character."""
        arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK)
        self._say('Congratulations! you beat the game', 235, 468)
        # Sword: two blade halves, tip triangles, and a center line.
        arcade.draw_rectangle_filled(290, 190, 20, 180,
                                     arcade.color.WHITE_SMOKE)
        arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)
        arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70,
                                    arcade.color.GRAY)
        arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70,
                                    arcade.color.WHITE)
        arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)
        # Figure: torso, collar, angled arms with stripe details, legs, head.
        arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)
        arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,
                                    arcade.color.GOLD)
        arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.PURPLE, 30)
        arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.PURPLE,
                                     330)
        arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.BLACK, 30)
        arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.BLACK, 30)
        arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.BLACK, 330)
        arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.BLACK, 330)
        arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)
        arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,
                                    arcade.color.PURPLE)
        arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,
                                    arcade.color.PURPLE)
        arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)

    def on_update(self, delta_time):
        """Advance game state once per frame."""
        self.ball.update()

    def on_key_press(self, key, modifiers):
        """Start ball movement (arrow keys) or raise an interaction flag."""
        arrow_motion = {
            arcade.key.LEFT: ('change_x', -MOVEMENT_SPEED),
            arcade.key.RIGHT: ('change_x', MOVEMENT_SPEED),
            arcade.key.UP: ('change_y', MOVEMENT_SPEED),
            arcade.key.DOWN: ('change_y', -MOVEMENT_SPEED),
        }
        if key in arrow_motion:
            axis, speed = arrow_motion[key]
            setattr(self.ball, axis, speed)
            self.movement_tutorial = 1  # hide tutorial after the first move
        flag_name = self._INTERACTION_FLAGS.get(key)
        if flag_name is not None:
            setattr(self, flag_name, 1)

    def on_key_release(self, key, modifiers):
        """Stop ball movement or lower the matching interaction flag."""
        if key in (arcade.key.LEFT, arcade.key.RIGHT):
            self.ball.change_x = 0
        elif key in (arcade.key.UP, arcade.key.DOWN):
            self.ball.change_y = 0
        flag_name = self._INTERACTION_FLAGS.get(key)
        if flag_name is not None:
            setattr(self, flag_name, 0)
def main():
    """Entry point: create the game window and start the arcade event loop."""
    window = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
    arcade.run()
# Start the game only when this file is run as a script (not on import).
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "37d079ca6a22036e2660507f37442617d4842c4e",
"index": 4060,
"step-1": "<mask token>\n\n\nclass MyGame(arcade.Window):\n\n def __init__(self, width, height, title):\n super().__init__(width, height, title)\n self.drawer = 0\n self.wardrobe = 0\n self.bookshelves = 0\n self.door = 0\n self.bed = 0\n self.book_1 = 0\n self.book_2 = 0\n self.book_3 = 0\n self.endscreen = 0\n self.movement_tutorial = 0\n self.code = 0\n self.exit_key = 0\n arcade.set_background_color(arcade.color.BROWN)\n self.ball = Ball(400, 300, 0, 0, 15)\n\n def on_draw(self):\n arcade.start_render()\n self.ball.draw()\n arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)\n arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)\n arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)\n arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color\n .BLACK)\n arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)\n arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)\n arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)\n arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 567, 6, 24, 
arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)\n arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)\n arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)\n arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)\n arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)\n arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)\n if self.ball.position_x < 115 and self.ball.position_y > 470:\n arcade.draw_text('Hold D to 
interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 635 and self.ball.position_y < 210:\n arcade.draw_text('Hold E to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,\n font_size=18)\n if (self.ball.position_x > 255 and self.ball.position_x < 535 and \n self.ball.position_y > 435):\n arcade.draw_text('Hold O to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bookshelves', 235, 314, arcade.color.\n WHITE, font_size=18)\n if self.ball.position_x < 105 and self.ball.position_y < 105:\n arcade.draw_text('Hold R to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 660 and self.ball.position_y > 440:\n arcade.draw_text('Hold W to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.movement_tutorial == 0:\n arcade.draw_text('Use arrow keys to move', 235, 368, arcade.\n color.WHITE, font_size=18)\n if self.drawer == 1:\n if self.code == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color\n .WHITE, font_size=18)\n arcade.draw_text('You got a key', 435, 314, arcade.color.\n WHITE, font_size=18)\n self.exit_key = 1\n else:\n arcade.draw_text('It seems I need', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('a code to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.bed == 1:\n arcade.draw_text(\"It's just a bed\", 435, 338, arcade.color.\n WHITE, font_size=18)\n if self.wardrobe == 1:\n arcade.draw_text('There are many outfits here', 435, 338,\n arcade.color.WHITE, font_size=18)\n if self.bookshelves == 1:\n arcade.draw_text('There are many books in here', 435, 338,\n arcade.color.WHITE, 
font_size=18)\n arcade.draw_text('which one should I read? A, B, C', 435, 314,\n arcade.color.WHITE, font_size=18)\n if self.book_1 == 1:\n arcade.draw_text('There is a key in the', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,\n font_size=18)\n if self.book_2 == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,\n font_size=18)\n self.code = 1\n if self.book_3 == 1:\n arcade.draw_text(\"It's the Bible\", 435, 338, arcade.color.WHITE,\n font_size=18)\n if self.door == 1:\n if self.exit_key == 1:\n self.endscreen = 1\n else:\n arcade.draw_text('It seems that I need', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('a key to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.endscreen == 1:\n arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK\n )\n arcade.draw_text('Congratulations! 
you beat the game', 235, 468,\n arcade.color.WHITE, font_size=18)\n arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.\n WHITE_SMOKE)\n arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)\n arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade\n .color.GRAY)\n arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade\n .color.WHITE)\n arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)\n arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)\n arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,\n arcade.color.GOLD)\n arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.\n PURPLE, 30)\n arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.\n PURPLE, 330)\n arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)\n arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)\n\n def on_update(self, delta_time):\n self.ball.update()\n\n def on_key_press(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.ball.change_x = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.RIGHT:\n self.ball.change_x = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.UP:\n self.ball.change_y = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.DOWN:\n self.ball.change_y = -MOVEMENT_SPEED\n 
self.movement_tutorial = 1\n if key == arcade.key.R:\n self.drawer = 1\n if key == arcade.key.W:\n self.wardrobe = 1\n if key == arcade.key.D:\n self.door = 1\n if key == arcade.key.O:\n self.bookshelves = 1\n if key == arcade.key.E:\n self.bed = 1\n if key == arcade.key.A:\n self.book_1 = 1\n if key == arcade.key.B:\n self.book_2 = 1\n if key == arcade.key.C:\n self.book_3 = 1\n\n def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.ball.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.ball.change_y = 0\n if key == arcade.key.R:\n self.drawer = 0\n if key == arcade.key.W:\n self.wardrobe = 0\n if key == arcade.key.D:\n self.door = 0\n if key == arcade.key.O:\n self.bookshelves = 0\n if key == arcade.key.E:\n self.bed = 0\n if key == arcade.key.A:\n self.book_1 = 0\n if key == arcade.key.B:\n self.book_2 = 0\n if key == arcade.key.C:\n self.book_3 = 0\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Ball:\n <mask token>\n\n def draw(self):\n \"\"\" Draw the balls with the instance variables we have. \"\"\"\n arcade.draw_circle_filled(self.position_x, self.position_y, self.\n radius, self.player_color)\n <mask token>\n\n\nclass MyGame(arcade.Window):\n\n def __init__(self, width, height, title):\n super().__init__(width, height, title)\n self.drawer = 0\n self.wardrobe = 0\n self.bookshelves = 0\n self.door = 0\n self.bed = 0\n self.book_1 = 0\n self.book_2 = 0\n self.book_3 = 0\n self.endscreen = 0\n self.movement_tutorial = 0\n self.code = 0\n self.exit_key = 0\n arcade.set_background_color(arcade.color.BROWN)\n self.ball = Ball(400, 300, 0, 0, 15)\n\n def on_draw(self):\n arcade.start_render()\n self.ball.draw()\n arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)\n arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)\n arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)\n arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color\n .BLACK)\n arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)\n arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)\n arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)\n arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)\n 
arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)\n arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)\n arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)\n arcade.draw_rectangle_filled(750, 540, 4, 
100, arcade.color.BLACK)\n arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)\n arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)\n if self.ball.position_x < 115 and self.ball.position_y > 470:\n arcade.draw_text('Hold D to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 635 and self.ball.position_y < 210:\n arcade.draw_text('Hold E to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,\n font_size=18)\n if (self.ball.position_x > 255 and self.ball.position_x < 535 and \n self.ball.position_y > 435):\n arcade.draw_text('Hold O to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bookshelves', 235, 314, arcade.color.\n WHITE, font_size=18)\n if self.ball.position_x < 105 and self.ball.position_y < 105:\n arcade.draw_text('Hold R to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 660 and self.ball.position_y > 440:\n arcade.draw_text('Hold W to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.movement_tutorial == 0:\n arcade.draw_text('Use arrow keys to move', 235, 368, arcade.\n color.WHITE, font_size=18)\n if self.drawer == 1:\n if self.code == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color\n .WHITE, font_size=18)\n arcade.draw_text('You got a key', 435, 314, arcade.color.\n WHITE, font_size=18)\n self.exit_key = 1\n else:\n arcade.draw_text('It seems I need', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('a code to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.bed == 1:\n arcade.draw_text(\"It's just a bed\", 435, 338, arcade.color.\n WHITE, font_size=18)\n 
if self.wardrobe == 1:\n arcade.draw_text('There are many outfits here', 435, 338,\n arcade.color.WHITE, font_size=18)\n if self.bookshelves == 1:\n arcade.draw_text('There are many books in here', 435, 338,\n arcade.color.WHITE, font_size=18)\n arcade.draw_text('which one should I read? A, B, C', 435, 314,\n arcade.color.WHITE, font_size=18)\n if self.book_1 == 1:\n arcade.draw_text('There is a key in the', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,\n font_size=18)\n if self.book_2 == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,\n font_size=18)\n self.code = 1\n if self.book_3 == 1:\n arcade.draw_text(\"It's the Bible\", 435, 338, arcade.color.WHITE,\n font_size=18)\n if self.door == 1:\n if self.exit_key == 1:\n self.endscreen = 1\n else:\n arcade.draw_text('It seems that I need', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('a key to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.endscreen == 1:\n arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK\n )\n arcade.draw_text('Congratulations! 
you beat the game', 235, 468,\n arcade.color.WHITE, font_size=18)\n arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.\n WHITE_SMOKE)\n arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)\n arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade\n .color.GRAY)\n arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade\n .color.WHITE)\n arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)\n arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)\n arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,\n arcade.color.GOLD)\n arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.\n PURPLE, 30)\n arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.\n PURPLE, 330)\n arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)\n arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)\n\n def on_update(self, delta_time):\n self.ball.update()\n\n def on_key_press(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.ball.change_x = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.RIGHT:\n self.ball.change_x = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.UP:\n self.ball.change_y = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.DOWN:\n self.ball.change_y = -MOVEMENT_SPEED\n 
self.movement_tutorial = 1\n if key == arcade.key.R:\n self.drawer = 1\n if key == arcade.key.W:\n self.wardrobe = 1\n if key == arcade.key.D:\n self.door = 1\n if key == arcade.key.O:\n self.bookshelves = 1\n if key == arcade.key.E:\n self.bed = 1\n if key == arcade.key.A:\n self.book_1 = 1\n if key == arcade.key.B:\n self.book_2 = 1\n if key == arcade.key.C:\n self.book_3 = 1\n\n def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.ball.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.ball.change_y = 0\n if key == arcade.key.R:\n self.drawer = 0\n if key == arcade.key.W:\n self.wardrobe = 0\n if key == arcade.key.D:\n self.door = 0\n if key == arcade.key.O:\n self.bookshelves = 0\n if key == arcade.key.E:\n self.bed = 0\n if key == arcade.key.A:\n self.book_1 = 0\n if key == arcade.key.B:\n self.book_2 = 0\n if key == arcade.key.C:\n self.book_3 = 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Ball:\n\n def __init__(self, position_x, position_y, change_x, change_y, radius):\n self.position_x = position_x\n self.position_y = position_y\n self.change_x = change_x\n self.change_y = change_y\n self.radius = radius\n self.player_color = arcade.color.AMETHYST\n\n def draw(self):\n \"\"\" Draw the balls with the instance variables we have. \"\"\"\n arcade.draw_circle_filled(self.position_x, self.position_y, self.\n radius, self.player_color)\n\n def update(self):\n self.position_y += self.change_y\n self.position_x += self.change_x\n if self.position_x < self.radius:\n self.position_x = self.radius\n if self.position_x > SCREEN_WIDTH - self.radius:\n self.position_x = SCREEN_WIDTH - self.radius\n if self.position_y < self.radius:\n self.position_y = self.radius\n if self.position_y > SCREEN_HEIGHT - self.radius:\n self.position_y = SCREEN_HEIGHT - self.radius\n\n\nclass MyGame(arcade.Window):\n\n def __init__(self, width, height, title):\n super().__init__(width, height, title)\n self.drawer = 0\n self.wardrobe = 0\n self.bookshelves = 0\n self.door = 0\n self.bed = 0\n self.book_1 = 0\n self.book_2 = 0\n self.book_3 = 0\n self.endscreen = 0\n self.movement_tutorial = 0\n self.code = 0\n self.exit_key = 0\n arcade.set_background_color(arcade.color.BROWN)\n self.ball = Ball(400, 300, 0, 0, 15)\n\n def on_draw(self):\n arcade.start_render()\n self.ball.draw()\n arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)\n arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 20, 15, 
arcade.color.GRAY)\n arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)\n arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color\n .BLACK)\n arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)\n arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)\n arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)\n arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)\n 
arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)\n arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)\n arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)\n arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)\n arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)\n arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)\n if self.ball.position_x < 115 and self.ball.position_y > 470:\n arcade.draw_text('Hold D to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 635 and self.ball.position_y < 210:\n arcade.draw_text('Hold E to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,\n font_size=18)\n if (self.ball.position_x > 255 and self.ball.position_x < 535 and \n self.ball.position_y > 435):\n arcade.draw_text('Hold O to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bookshelves', 235, 314, arcade.color.\n WHITE, font_size=18)\n if self.ball.position_x < 105 and self.ball.position_y < 105:\n arcade.draw_text('Hold R to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 660 and self.ball.position_y > 440:\n arcade.draw_text('Hold W to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,\n font_size=18)\n 
if self.movement_tutorial == 0:\n arcade.draw_text('Use arrow keys to move', 235, 368, arcade.\n color.WHITE, font_size=18)\n if self.drawer == 1:\n if self.code == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color\n .WHITE, font_size=18)\n arcade.draw_text('You got a key', 435, 314, arcade.color.\n WHITE, font_size=18)\n self.exit_key = 1\n else:\n arcade.draw_text('It seems I need', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('a code to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.bed == 1:\n arcade.draw_text(\"It's just a bed\", 435, 338, arcade.color.\n WHITE, font_size=18)\n if self.wardrobe == 1:\n arcade.draw_text('There are many outfits here', 435, 338,\n arcade.color.WHITE, font_size=18)\n if self.bookshelves == 1:\n arcade.draw_text('There are many books in here', 435, 338,\n arcade.color.WHITE, font_size=18)\n arcade.draw_text('which one should I read? A, B, C', 435, 314,\n arcade.color.WHITE, font_size=18)\n if self.book_1 == 1:\n arcade.draw_text('There is a key in the', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,\n font_size=18)\n if self.book_2 == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,\n font_size=18)\n self.code = 1\n if self.book_3 == 1:\n arcade.draw_text(\"It's the Bible\", 435, 338, arcade.color.WHITE,\n font_size=18)\n if self.door == 1:\n if self.exit_key == 1:\n self.endscreen = 1\n else:\n arcade.draw_text('It seems that I need', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('a key to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.endscreen == 1:\n arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK\n )\n arcade.draw_text('Congratulations! 
you beat the game', 235, 468,\n arcade.color.WHITE, font_size=18)\n arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.\n WHITE_SMOKE)\n arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)\n arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade\n .color.GRAY)\n arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade\n .color.WHITE)\n arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)\n arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)\n arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,\n arcade.color.GOLD)\n arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.\n PURPLE, 30)\n arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.\n PURPLE, 330)\n arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)\n arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)\n\n def on_update(self, delta_time):\n self.ball.update()\n\n def on_key_press(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.ball.change_x = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.RIGHT:\n self.ball.change_x = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.UP:\n self.ball.change_y = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.DOWN:\n self.ball.change_y = -MOVEMENT_SPEED\n 
self.movement_tutorial = 1\n if key == arcade.key.R:\n self.drawer = 1\n if key == arcade.key.W:\n self.wardrobe = 1\n if key == arcade.key.D:\n self.door = 1\n if key == arcade.key.O:\n self.bookshelves = 1\n if key == arcade.key.E:\n self.bed = 1\n if key == arcade.key.A:\n self.book_1 = 1\n if key == arcade.key.B:\n self.book_2 = 1\n if key == arcade.key.C:\n self.book_3 = 1\n\n def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.ball.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.ball.change_y = 0\n if key == arcade.key.R:\n self.drawer = 0\n if key == arcade.key.W:\n self.wardrobe = 0\n if key == arcade.key.D:\n self.door = 0\n if key == arcade.key.O:\n self.bookshelves = 0\n if key == arcade.key.E:\n self.bed = 0\n if key == arcade.key.A:\n self.book_1 = 0\n if key == arcade.key.B:\n self.book_2 = 0\n if key == arcade.key.C:\n self.book_3 = 0\n\n\ndef main():\n \"\"\" Main method \"\"\"\n game = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nSPRITE_SCALING = 0.5\nSCREEN_WIDTH = 800\nSCREEN_HEIGHT = 600\nSCREEN_TITLE = 'Raymond Game'\nMOVEMENT_SPEED = 50\n\n\nclass Ball:\n\n def __init__(self, position_x, position_y, change_x, change_y, radius):\n self.position_x = position_x\n self.position_y = position_y\n self.change_x = change_x\n self.change_y = change_y\n self.radius = radius\n self.player_color = arcade.color.AMETHYST\n\n def draw(self):\n \"\"\" Draw the balls with the instance variables we have. \"\"\"\n arcade.draw_circle_filled(self.position_x, self.position_y, self.\n radius, self.player_color)\n\n def update(self):\n self.position_y += self.change_y\n self.position_x += self.change_x\n if self.position_x < self.radius:\n self.position_x = self.radius\n if self.position_x > SCREEN_WIDTH - self.radius:\n self.position_x = SCREEN_WIDTH - self.radius\n if self.position_y < self.radius:\n self.position_y = self.radius\n if self.position_y > SCREEN_HEIGHT - self.radius:\n self.position_y = SCREEN_HEIGHT - self.radius\n\n\nclass MyGame(arcade.Window):\n\n def __init__(self, width, height, title):\n super().__init__(width, height, title)\n self.drawer = 0\n self.wardrobe = 0\n self.bookshelves = 0\n self.door = 0\n self.bed = 0\n self.book_1 = 0\n self.book_2 = 0\n self.book_3 = 0\n self.endscreen = 0\n self.movement_tutorial = 0\n self.code = 0\n self.exit_key = 0\n arcade.set_background_color(arcade.color.BROWN)\n self.ball = Ball(400, 300, 0, 0, 15)\n\n def on_draw(self):\n arcade.start_render()\n self.ball.draw()\n arcade.draw_rectangle_filled(35, 560, 60, 80, arcade.color.AMAZON)\n arcade.draw_rectangle_filled(7, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(17, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(27, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(37, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(47, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 4, 80, arcade.color.GRAY)\n 
arcade.draw_rectangle_filled(67, 560, 4, 80, arcade.color.GRAY)\n arcade.draw_rectangle_filled(57, 560, 20, 15, arcade.color.GRAY)\n arcade.draw_circle_filled(62, 563, 2, arcade.color.BLACK)\n arcade.draw_triangle_filled(62, 562, 60, 559, 64, 559, arcade.color\n .BLACK)\n arcade.draw_rectangle_filled(740, 80, 70, 120, arcade.color.GRAY)\n arcade.draw_rectangle_filled(740, 120, 60, 30, arcade.color.WHITE)\n arcade.draw_rectangle_filled(740, 60, 70, 80, arcade.color.WHITE)\n arcade.draw_rectangle_filled(365, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(365, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(365, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(345, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(345, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(353, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(361, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(369, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(377, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(385, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(435, 550, 60, 90, arcade.color.GRAY)\n arcade.draw_rectangle_filled(435, 570, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(435, 530, 50, 30, arcade.color.BLACK)\n arcade.draw_rectangle_filled(415, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 567, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 
567, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 567, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(415, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(423, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(431, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(439, 527, 6, 24, arcade.color.RED)\n arcade.draw_rectangle_filled(447, 527, 6, 24, arcade.color.ORANGE)\n arcade.draw_rectangle_filled(455, 527, 6, 24, arcade.color.BLUE)\n arcade.draw_rectangle_filled(30, 30, 50, 50, arcade.color.GRAY)\n arcade.draw_rectangle_filled(30, 30, 42, 42, arcade.color.WHITE)\n arcade.draw_rectangle_filled(750, 540, 80, 100, arcade.color.GRAY)\n arcade.draw_rectangle_filled(750, 540, 4, 100, arcade.color.BLACK)\n arcade.draw_circle_filled(740, 540, 3, arcade.color.YELLOW)\n arcade.draw_circle_filled(760, 540, 3, arcade.color.YELLOW)\n if self.ball.position_x < 115 and self.ball.position_y > 470:\n arcade.draw_text('Hold D to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Door', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 635 and self.ball.position_y < 210:\n arcade.draw_text('Hold E to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bed', 235, 314, arcade.color.WHITE,\n font_size=18)\n if (self.ball.position_x > 255 and self.ball.position_x < 535 and \n self.ball.position_y > 435):\n arcade.draw_text('Hold O to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Bookshelves', 235, 314, arcade.color.\n WHITE, font_size=18)\n if self.ball.position_x < 105 and self.ball.position_y < 105:\n arcade.draw_text('Hold R to interact', 235, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('with Drawer', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.ball.position_x > 660 and self.ball.position_y > 440:\n arcade.draw_text('Hold W to interact', 235, 338, arcade.color.\n 
WHITE, font_size=18)\n arcade.draw_text('with Wardrobe', 235, 314, arcade.color.WHITE,\n font_size=18)\n if self.movement_tutorial == 0:\n arcade.draw_text('Use arrow keys to move', 235, 368, arcade.\n color.WHITE, font_size=18)\n if self.drawer == 1:\n if self.code == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color\n .WHITE, font_size=18)\n arcade.draw_text('You got a key', 435, 314, arcade.color.\n WHITE, font_size=18)\n self.exit_key = 1\n else:\n arcade.draw_text('It seems I need', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('a code to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.bed == 1:\n arcade.draw_text(\"It's just a bed\", 435, 338, arcade.color.\n WHITE, font_size=18)\n if self.wardrobe == 1:\n arcade.draw_text('There are many outfits here', 435, 338,\n arcade.color.WHITE, font_size=18)\n if self.bookshelves == 1:\n arcade.draw_text('There are many books in here', 435, 338,\n arcade.color.WHITE, font_size=18)\n arcade.draw_text('which one should I read? A, B, C', 435, 314,\n arcade.color.WHITE, font_size=18)\n if self.book_1 == 1:\n arcade.draw_text('There is a key in the', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('drawer... huh', 435, 314, arcade.color.WHITE,\n font_size=18)\n if self.book_2 == 1:\n arcade.draw_text('Congratulations!', 435, 338, arcade.color.\n WHITE, font_size=18)\n arcade.draw_text('You got a code', 435, 314, arcade.color.WHITE,\n font_size=18)\n self.code = 1\n if self.book_3 == 1:\n arcade.draw_text(\"It's the Bible\", 435, 338, arcade.color.WHITE,\n font_size=18)\n if self.door == 1:\n if self.exit_key == 1:\n self.endscreen = 1\n else:\n arcade.draw_text('It seems that I need', 435, 338, arcade.\n color.WHITE, font_size=18)\n arcade.draw_text('a key to open this', 435, 314, arcade.\n color.WHITE, font_size=18)\n if self.endscreen == 1:\n arcade.draw_rectangle_filled(400, 300, 800, 600, arcade.color.BLACK\n )\n arcade.draw_text('Congratulations! 
you beat the game', 235, 468,\n arcade.color.WHITE, font_size=18)\n arcade.draw_rectangle_filled(290, 190, 20, 180, arcade.color.\n WHITE_SMOKE)\n arcade.draw_rectangle_filled(270, 190, 20, 180, arcade.color.GRAY)\n arcade.draw_triangle_filled(260, 100, 280, 100, 280, 70, arcade\n .color.GRAY)\n arcade.draw_triangle_filled(300, 100, 280, 100, 280, 70, arcade\n .color.WHITE)\n arcade.draw_rectangle_filled(280, 184, 4, 196, arcade.color.BLACK)\n arcade.draw_rectangle_filled(280, 300, 40, 40, arcade.color.PURPLE)\n arcade.draw_triangle_filled(280, 265, 270, 280, 290, 280,\n arcade.color.GOLD)\n arcade.draw_rectangle_filled(240, 290, 50, 20, arcade.color.\n PURPLE, 30)\n arcade.draw_rectangle_filled(320, 290, 50, 20, arcade.color.\n PURPLE, 330)\n arcade.draw_rectangle_filled(220, 283, 50, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(220, 275, 59, 2, arcade.color.\n BLACK, 30)\n arcade.draw_rectangle_filled(340, 283, 50, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(340, 275, 59, 2, arcade.color.\n BLACK, 330)\n arcade.draw_rectangle_filled(280, 340, 15, 50, arcade.color.PURPLE)\n arcade.draw_triangle_filled(260, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(265, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(300, 320, 280, 320, 280, 340,\n arcade.color.PURPLE)\n arcade.draw_triangle_filled(295, 320, 280, 320, 280, 365,\n arcade.color.PURPLE)\n arcade.draw_circle_filled(280, 375, 15, arcade.color.LIGHT_BROWN)\n\n def on_update(self, delta_time):\n self.ball.update()\n\n def on_key_press(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.ball.change_x = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.RIGHT:\n self.ball.change_x = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.UP:\n self.ball.change_y = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.DOWN:\n self.ball.change_y = -MOVEMENT_SPEED\n 
self.movement_tutorial = 1\n if key == arcade.key.R:\n self.drawer = 1\n if key == arcade.key.W:\n self.wardrobe = 1\n if key == arcade.key.D:\n self.door = 1\n if key == arcade.key.O:\n self.bookshelves = 1\n if key == arcade.key.E:\n self.bed = 1\n if key == arcade.key.A:\n self.book_1 = 1\n if key == arcade.key.B:\n self.book_2 = 1\n if key == arcade.key.C:\n self.book_3 = 1\n\n def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.ball.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.ball.change_y = 0\n if key == arcade.key.R:\n self.drawer = 0\n if key == arcade.key.W:\n self.wardrobe = 0\n if key == arcade.key.D:\n self.door = 0\n if key == arcade.key.O:\n self.bookshelves = 0\n if key == arcade.key.E:\n self.bed = 0\n if key == arcade.key.A:\n self.book_1 = 0\n if key == arcade.key.B:\n self.book_2 = 0\n if key == arcade.key.C:\n self.book_3 = 0\n\n\ndef main():\n \"\"\" Main method \"\"\"\n game = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n arcade.run()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import arcade\nimport os\n\n \nSPRITE_SCALING = 0.5\n \nSCREEN_WIDTH = 800\nSCREEN_HEIGHT = 600\nSCREEN_TITLE = \"Raymond Game\"\nMOVEMENT_SPEED = 50\n\nclass Ball:\n\n def __init__(self, position_x, position_y, change_x, change_y, radius):\n\n # Take the parameters of the init function above, and create instance variables out of them.\n self.position_x = position_x\n self.position_y = position_y\n self.change_x = change_x\n self.change_y = change_y\n self.radius = radius\n self.player_color = arcade.color.AMETHYST\n\n def draw(self):\n \"\"\" Draw the balls with the instance variables we have. \"\"\"\n arcade.draw_circle_filled(self.position_x, self.position_y, self.radius,self.player_color)\n\n def update(self):\n # Move the ball\n self.position_y += self.change_y\n self.position_x += self.change_x\n\n # See if the ball hit the edge of the screen. If so, change direction\n if self.position_x < self.radius:\n self.position_x = self.radius\n\n if self.position_x > SCREEN_WIDTH - self.radius:\n self.position_x = SCREEN_WIDTH - self.radius\n\n if self.position_y < self.radius:\n self.position_y = self.radius\n\n if self.position_y > SCREEN_HEIGHT - self.radius:\n self.position_y = SCREEN_HEIGHT - self.radius\n\nclass MyGame(arcade.Window):\n def __init__(self, width, height, title):\n super().__init__(width, height, title)\n self.drawer = 0\n self.wardrobe = 0\n self.bookshelves = 0\n self.door = 0\n self.bed = 0\n self.book_1 = 0\n self.book_2 = 0\n self.book_3 = 0\n self.endscreen = 0\n self.movement_tutorial = 0\n\n self.code = 0\n self.exit_key = 0\n \n arcade.set_background_color(arcade.color.BROWN)\n self.ball = Ball(400,300, 0, 0, 15)\n \n def on_draw(self):\n arcade.start_render()\n self.ball.draw()\n\n #door\n arcade.draw_rectangle_filled(35,560,60,80,arcade.color.AMAZON)\n arcade.draw_rectangle_filled(7,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(17,560,4,80,arcade.color.GRAY)\n 
arcade.draw_rectangle_filled(27,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(37,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(47,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(57,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(67,560,4,80,arcade.color.GRAY)\n arcade.draw_rectangle_filled(57,560,20,15,arcade.color.GRAY)\n arcade.draw_circle_filled(62,563,2,arcade.color.BLACK)\n arcade.draw_triangle_filled(62,562,60,559,64,559,arcade.color.BLACK)\n #bed\n arcade.draw_rectangle_filled (740,80,70,120,arcade.color.GRAY) \n arcade.draw_rectangle_filled (740,120,60,30,arcade.color.WHITE) \n arcade.draw_rectangle_filled (740,60,70,80,arcade.color.WHITE)\n #bookshelves\n arcade.draw_rectangle_filled (365,550,60,90,arcade.color.GRAY) \n arcade.draw_rectangle_filled (365,570,50,30,arcade.color.BLACK)\n arcade.draw_rectangle_filled (365,530,50,30,arcade.color.BLACK)\n arcade.draw_rectangle_filled (345,567,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (353,567,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (361,567,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (369,567,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (377,567,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (385,567,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (345,527,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (353,527,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (361,527,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (369,527,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (377,527,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (385,527,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (435,550,60,90,arcade.color.GRAY)\n arcade.draw_rectangle_filled (435,570,50,30,arcade.color.BLACK)\n arcade.draw_rectangle_filled (435,530,50,30,arcade.color.BLACK)\n arcade.draw_rectangle_filled (415,567,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled 
(423,567,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (431,567,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (439,567,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (447,567,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (455,567,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (415,527,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (423,527,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (431,527,6,24,arcade.color.BLUE)\n arcade.draw_rectangle_filled (439,527,6,24,arcade.color.RED)\n arcade.draw_rectangle_filled (447,527,6,24,arcade.color.ORANGE)\n arcade.draw_rectangle_filled (455,527,6,24,arcade.color.BLUE)\n #drawer\n arcade.draw_rectangle_filled (30,30,50,50,arcade.color.GRAY)\n arcade.draw_rectangle_filled (30,30,42,42,arcade.color.WHITE)\n #wardrobe\n arcade.draw_rectangle_filled (750,540,80,100,arcade.color.GRAY)\n arcade.draw_rectangle_filled (750,540,4,100,arcade.color.BLACK)\n arcade.draw_circle_filled (740,540,3,arcade.color.YELLOW) \n arcade.draw_circle_filled (760,540,3,arcade.color.YELLOW)\n\n if self.ball.position_x < 115 and self.ball.position_y > 470:\n arcade.draw_text(\"Hold D to interact\", 235, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"with Door\", 235, 314, arcade.color.WHITE, font_size=18)\n\n if self.ball.position_x > 635 and self.ball.position_y < 210:\n arcade.draw_text(\"Hold E to interact\", 235, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"with Bed\", 235, 314, arcade.color.WHITE, font_size=18)\n \n if self.ball.position_x > 255 and self.ball.position_x < 535 and self.ball.position_y > 435:\n arcade.draw_text(\"Hold O to interact\", 235, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"with Bookshelves\", 235, 314, arcade.color.WHITE, font_size=18)\n\n if self.ball.position_x < 105 and self.ball.position_y < 105:\n arcade.draw_text(\"Hold R to interact\", 235, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"with Drawer\", 
235, 314, arcade.color.WHITE, font_size=18)\n \n if self.ball.position_x > 660 and self.ball.position_y > 440:\n arcade.draw_text(\"Hold W to interact\", 235, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"with Wardrobe\", 235, 314, arcade.color.WHITE, font_size=18)\n \n if self.movement_tutorial == 0:\n arcade.draw_text(\"Use arrow keys to move\", 235, 368, arcade.color.WHITE, font_size=18)\n \n if self.drawer == 1:\n if self.code == 1:\n arcade.draw_text(\"Congratulations!\", 435, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"You got a key\", 435, 314, arcade.color.WHITE, font_size=18)\n self.exit_key = 1\n else:\n arcade.draw_text(\"It seems I need\", 435, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"a code to open this\", 435, 314, arcade.color.WHITE, font_size=18)\n\n if self.bed == 1:\n arcade.draw_text(\"It's just a bed\", 435, 338, arcade.color.WHITE, font_size=18)\n\n if self.wardrobe == 1:\n arcade.draw_text(\"There are many outfits here\", 435, 338, arcade.color.WHITE, font_size=18)\n\n if self.bookshelves == 1:\n arcade.draw_text(\"There are many books in here\", 435, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"which one should I read? A, B, C\", 435, 314, arcade.color.WHITE, font_size=18)\n \n if self.book_1 == 1:\n arcade.draw_text(\"There is a key in the\", 435, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"drawer... 
huh\", 435, 314, arcade.color.WHITE, font_size=18) \n \n if self.book_2 == 1:\n arcade.draw_text(\"Congratulations!\", 435, 338, arcade.color.WHITE, font_size=18)\n arcade.draw_text(\"You got a code\", 435, 314, arcade.color.WHITE, font_size=18)\n self.code = 1\n \n if self.book_3 == 1:\n arcade.draw_text(\"It's the Bible\", 435, 338, arcade.color.WHITE, font_size=18)\n \n if self.door == 1:\n if self.exit_key == 1:\n self.endscreen = 1\n else:\n arcade.draw_text(\"It seems that I need\", 435, 338, arcade.color.WHITE, font_size=18) \n arcade.draw_text(\"a key to open this\", 435, 314, arcade.color.WHITE, font_size=18)\n\n if self.endscreen == 1:\n arcade.draw_rectangle_filled(400,300,800,600,arcade.color.BLACK)\n arcade.draw_text(\"Congratulations! you beat the game\", 235, 468, arcade.color.WHITE, font_size=18)\n #sword\n arcade.draw_rectangle_filled (290,190,20,180,arcade.color.WHITE_SMOKE)\n arcade.draw_rectangle_filled (270,190,20,180,arcade.color.GRAY)\n arcade.draw_triangle_filled (260,100,280,100,280,70,arcade.color.GRAY)\n arcade.draw_triangle_filled (300,100,280,100,280,70, arcade.color.WHITE)\n arcade.draw_rectangle_filled (280,184,4,196,arcade.color.BLACK)\n arcade.draw_rectangle_filled (280,300,40,40,arcade.color.PURPLE)\n arcade.draw_triangle_filled (280,265,270,280,290,280,arcade.color.GOLD)\n arcade.draw_rectangle_filled (240,290,50,20,arcade.color.PURPLE,30)\n arcade.draw_rectangle_filled (320,290,50,20,arcade.color.PURPLE,330)\n arcade.draw_rectangle_filled (220,283,50,2,arcade.color.BLACK,30)\n arcade.draw_rectangle_filled (220,275,59,2,arcade.color.BLACK,30)\n arcade.draw_rectangle_filled (340,283,50,2,arcade.color.BLACK,330)\n arcade.draw_rectangle_filled (340,275,59,2,arcade.color.BLACK,330)\n arcade.draw_rectangle_filled (280,340,15,50,arcade.color.PURPLE)\n arcade.draw_triangle_filled (260,320,280,320,280,340,arcade.color.PURPLE)\n arcade.draw_triangle_filled (265,320,280,320,280,365,arcade.color.PURPLE)\n arcade.draw_triangle_filled 
(300,320,280,320,280,340,arcade.color.PURPLE)\n arcade.draw_triangle_filled (295,320,280,320,280,365,arcade.color.PURPLE)\n arcade.draw_circle_filled (280,375,15,arcade.color.LIGHT_BROWN)\n\n def on_update(self, delta_time):\n\n self.ball.update()\n \n def on_key_press(self, key, modifiers):\n if key == arcade.key.LEFT:\n self.ball.change_x = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.RIGHT:\n self.ball.change_x = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.UP:\n self.ball.change_y = MOVEMENT_SPEED\n self.movement_tutorial = 1\n elif key == arcade.key.DOWN:\n self.ball.change_y = -MOVEMENT_SPEED\n self.movement_tutorial = 1\n if key == arcade.key.R:\n self.drawer = 1\n if key == arcade.key.W:\n self.wardrobe = 1\n if key == arcade.key.D:\n self.door = 1\n if key == arcade.key.O:\n self.bookshelves = 1\n if key == arcade.key.E:\n self.bed = 1\n if key == arcade.key.A:\n self.book_1 = 1\n if key == arcade.key.B:\n self.book_2 = 1\n if key == arcade.key.C:\n self.book_3 = 1\n\n def on_key_release(self, key, modifiers):\n if key == arcade.key.LEFT or key == arcade.key.RIGHT:\n self.ball.change_x = 0\n elif key == arcade.key.UP or key == arcade.key.DOWN:\n self.ball.change_y = 0\n if key == arcade.key.R:\n self.drawer = 0\n if key == arcade.key.W:\n self.wardrobe = 0\n if key == arcade.key.D:\n self.door = 0\n if key == arcade.key.O:\n self.bookshelves = 0\n if key == arcade.key.E:\n self.bed = 0\n if key == arcade.key.A:\n self.book_1 = 0\n if key == arcade.key.B:\n self.book_2 = 0\n if key == arcade.key.C:\n self.book_3 = 0\n\ndef main():\n \"\"\" Main method \"\"\"\n game = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n arcade.run()\n \n \nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
6,
8,
12,
13,
15
]
}
|
[
6,
8,
12,
13,
15
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 25 19:21:32 2019
@author: Nikos
"""
import torch
import torch.optim as optim
from utilities import *
from model import *
from torch.autograd import Variable
import numpy as np
import random
class A2C_agent(object):
    """Advantage Actor-Critic (A2C) agent.

    Holds a policy network (actor) and a state-value network (critic),
    an experience-replay buffer, and the optimizers for both networks.
    Unlike the "vanilla" actor-critic, the A2C critic models the state
    value V(s) (scalar output) rather than Q(s, a).
    """

    def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,
                 critic_gamma, mem_size, critic_hidden_size, critic_lr,
                 critic_batch_size):
        """Build the actor and critic networks and their optimizers.

        Args:
            env: Gym-style environment; used for observation/action sizes.
            actor_hidden_size: hidden-layer width of the policy network.
            actor_lr: learning rate for the actor's Adam optimizer.
            actor_batch_size: batch size used when training the actor.
            critic_gamma: discount factor used in the TD target.
            mem_size: capacity of the experience-replay buffer.
            critic_hidden_size: hidden-layer width of the value network.
            critic_lr: learning rate for the critic's Adam optimizer.
            critic_batch_size: batch size used when training the critic.
        """
        self.env = env
        self.actor_hidden_size = actor_hidden_size
        self.actor_lr = actor_lr
        self.actor_batch_size = actor_batch_size
        self.critic_hidden_size = critic_hidden_size
        self.critic_lr = critic_lr
        self.critic_batch_size = critic_batch_size
        self.critic_gamma = critic_gamma
        self.mem_size = mem_size

        self.num_of_states = env.observation_space.shape[0]
        self.num_of_actions = env.action_space.n

        self.experience_replay_buffer = ReplayBuffer(self.mem_size)

        # Actor network (policy): maps a state to an action distribution.
        self.actor_network = ActorNet(self.num_of_states,
                                      self.actor_hidden_size,
                                      self.num_of_actions)
        self.actor_optimizer = optim.Adam(self.actor_network.parameters(),
                                          lr=self.actor_lr)

        # Critic network (value function): output size 1 because the A2C
        # critic predicts V(s), a scalar per state, not per-action Q-values.
        self.critic_network = CriticNet(self.num_of_states,
                                        self.critic_hidden_size, 1)
        self.critic_optimizer = optim.Adam(self.critic_network.parameters(),
                                           lr=self.critic_lr)

    def act(self, state):
        """Sample an action from the policy's distribution for `state`.

        Returns:
            int: index of the sampled action in [0, num_of_actions).
        """
        # Forward pass through the policy net gives the action probabilities.
        action_distribution = self.actor_network.forward(state)
        # Sample stochastically according to that distribution.
        action = np.random.choice(self.num_of_actions,
                                  p=action_distribution.detach().numpy())
        return action

    def memorize(self, state, action, new_state, reward, done):
        """Store one transition (s, a, s', r, done) in the replay buffer."""
        self.experience_replay_buffer.push(state, action, new_state, reward,
                                           done)

    def learn(self, rewards_batch, states_batch, actions_batch,
              new_states_batch, new_actions_batch):
        """Run one gradient step on both the actor and the critic.

        Args:
            rewards_batch: rewards r for each transition.
            states_batch: states s for each transition.
            actions_batch: actions a taken in each state.
            new_states_batch: successor states s'.
            new_actions_batch: actions taken in s' (unused by the V-critic,
                kept for interface compatibility).
        """
        states_batch = np.asarray(states_batch)
        actions_batch = torch.tensor(actions_batch, dtype=torch.long)
        rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)
        # BUG FIX: the original assigned np.asarray(states_batch) /
        # torch.tensor(actions_batch) here, so the critic's TD target used
        # the *current* states instead of the successor states.
        new_states_batch = np.asarray(new_states_batch)
        new_actions_batch = torch.tensor(new_actions_batch, dtype=torch.long)

        # Evaluate V(s) and V(s') for every transition in the batch.
        V_batch = []
        V_prime_batch = []
        for state, new_state in zip(states_batch, new_states_batch):
            state = torch.Tensor(state)
            V_batch.append(self.critic_network.forward(state))
            new_state = torch.Tensor(new_state)
            V_prime_batch.append(self.critic_network.forward(new_state))

        # ---- actor update ----
        # Log-probabilities of every action under the current policy.
        log_probs = torch.log(self.actor_network(states_batch))
        # Keep only the log-probs of the actions that were actually taken,
        # weighted by the observed rewards.
        # NOTE(review): this is a REINFORCE-style objective — the computed
        # advantage below is not used here; confirm whether weighting by
        # the advantage instead was intended.
        selected_log_probs = rewards_batch * log_probs[
            np.arange(len(actions_batch)), actions_batch]
        # Monte-Carlo estimate: minimize the negative mean log-prob.
        actor_loss = -selected_log_probs.mean()
        self.actor_optimizer.zero_grad()
        actor_loss.backward()
        # If smoother updates are needed, clip gradients to [-1, 1] here.
        self.actor_optimizer.step()

        # ---- critic update ----
        V_prime_batch = torch.stack(V_prime_batch)
        V_batch = torch.stack(V_batch)
        # TD error against the one-step target r + gamma * V(s').
        critic_loss = (V_batch - (rewards_batch + self.critic_gamma *
                                  V_prime_batch)).pow(2).mean()
        self.critic_optimizer.zero_grad()
        critic_loss.backward()
        self.critic_optimizer.step()
|
normal
|
{
"blob_id": "72b086e833ab3ee4ec3102869d74513ef3657675",
"index": 1926,
"step-1": "<mask token>\n\n\nclass A2C_agent(object):\n <mask token>\n\n def act(self, state):\n action_distribution = self.actor_network.forward(state)\n action = np.random.choice(self.num_of_actions, p=\n action_distribution.detach().numpy())\n return action\n\n def memorize(self, state, action, new_state, reward, done):\n self.experience_replay_buffer.push(state, action, new_state, reward,\n done)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass A2C_agent(object):\n <mask token>\n\n def act(self, state):\n action_distribution = self.actor_network.forward(state)\n action = np.random.choice(self.num_of_actions, p=\n action_distribution.detach().numpy())\n return action\n\n def memorize(self, state, action, new_state, reward, done):\n self.experience_replay_buffer.push(state, action, new_state, reward,\n done)\n\n def learn(self, rewards_batch, states_batch, actions_batch,\n new_states_batch, new_actions_batch):\n states_batch = np.asarray(states_batch)\n actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)\n new_states_batch = np.asarray(states_batch)\n new_actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n V_batch = []\n V_prime_batch = []\n for state, new_state, new_action in zip(states_batch,\n new_states_batch, new_actions_batch):\n state = torch.Tensor(state)\n v_value = self.critic_network.forward(state)\n V_batch.append(v_value)\n new_state = torch.Tensor(new_state)\n v_prime_value = self.critic_network.forward(new_state)\n V_prime_batch.append(v_prime_value)\n log_probs = torch.log(self.actor_network(states_batch))\n selected_log_probs = rewards_batch * log_probs[np.arange(len(\n actions_batch)), actions_batch]\n actor_loss = -selected_log_probs.mean()\n self.actor_optimizer.zero_grad()\n actor_loss.backward()\n self.actor_optimizer.step()\n V_prime_batch = torch.stack(V_prime_batch)\n V_batch = torch.stack(V_batch)\n advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch\n critic_loss = (V_batch - (rewards_batch + self.critic_gamma *\n V_prime_batch)).pow(2).mean()\n self.critic_optimizer.zero_grad()\n critic_loss.backward()\n self.critic_optimizer.step()\n",
"step-3": "<mask token>\n\n\nclass A2C_agent(object):\n\n def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,\n critic_gamma, mem_size, critic_hidden_size, critic_lr,\n critic_batch_size):\n self.env = env\n self.actor_hidden_size = actor_hidden_size\n self.actor_lr = actor_lr\n self.actor_batch_size = actor_batch_size\n self.critic_hidden_size = critic_hidden_size\n self.critic_lr = critic_lr\n self.critic_batch_size = critic_batch_size\n self.critic_gamma = critic_gamma\n self.mem_size = mem_size\n self.num_of_states = env.observation_space.shape[0]\n self.num_of_actions = env.action_space.n\n self.experience_replay_buffer = ReplayBuffer(self.mem_size)\n self.actor_network = ActorNet(self.num_of_states, self.\n actor_hidden_size, self.num_of_actions)\n self.actor_optimizer = optim.Adam(self.actor_network.parameters(),\n lr=self.actor_lr)\n self.critic_network = CriticNet(self.num_of_states, self.\n critic_hidden_size, 1)\n self.critic_optimizer = optim.Adam(self.critic_network.parameters(),\n lr=self.critic_lr)\n\n def act(self, state):\n action_distribution = self.actor_network.forward(state)\n action = np.random.choice(self.num_of_actions, p=\n action_distribution.detach().numpy())\n return action\n\n def memorize(self, state, action, new_state, reward, done):\n self.experience_replay_buffer.push(state, action, new_state, reward,\n done)\n\n def learn(self, rewards_batch, states_batch, actions_batch,\n new_states_batch, new_actions_batch):\n states_batch = np.asarray(states_batch)\n actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)\n new_states_batch = np.asarray(states_batch)\n new_actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n V_batch = []\n V_prime_batch = []\n for state, new_state, new_action in zip(states_batch,\n new_states_batch, new_actions_batch):\n state = torch.Tensor(state)\n v_value = self.critic_network.forward(state)\n 
V_batch.append(v_value)\n new_state = torch.Tensor(new_state)\n v_prime_value = self.critic_network.forward(new_state)\n V_prime_batch.append(v_prime_value)\n log_probs = torch.log(self.actor_network(states_batch))\n selected_log_probs = rewards_batch * log_probs[np.arange(len(\n actions_batch)), actions_batch]\n actor_loss = -selected_log_probs.mean()\n self.actor_optimizer.zero_grad()\n actor_loss.backward()\n self.actor_optimizer.step()\n V_prime_batch = torch.stack(V_prime_batch)\n V_batch = torch.stack(V_batch)\n advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch\n critic_loss = (V_batch - (rewards_batch + self.critic_gamma *\n V_prime_batch)).pow(2).mean()\n self.critic_optimizer.zero_grad()\n critic_loss.backward()\n self.critic_optimizer.step()\n",
"step-4": "<mask token>\nimport torch\nimport torch.optim as optim\nfrom utilities import *\nfrom model import *\nfrom torch.autograd import Variable\nimport numpy as np\nimport random\n\n\nclass A2C_agent(object):\n\n def __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size,\n critic_gamma, mem_size, critic_hidden_size, critic_lr,\n critic_batch_size):\n self.env = env\n self.actor_hidden_size = actor_hidden_size\n self.actor_lr = actor_lr\n self.actor_batch_size = actor_batch_size\n self.critic_hidden_size = critic_hidden_size\n self.critic_lr = critic_lr\n self.critic_batch_size = critic_batch_size\n self.critic_gamma = critic_gamma\n self.mem_size = mem_size\n self.num_of_states = env.observation_space.shape[0]\n self.num_of_actions = env.action_space.n\n self.experience_replay_buffer = ReplayBuffer(self.mem_size)\n self.actor_network = ActorNet(self.num_of_states, self.\n actor_hidden_size, self.num_of_actions)\n self.actor_optimizer = optim.Adam(self.actor_network.parameters(),\n lr=self.actor_lr)\n self.critic_network = CriticNet(self.num_of_states, self.\n critic_hidden_size, 1)\n self.critic_optimizer = optim.Adam(self.critic_network.parameters(),\n lr=self.critic_lr)\n\n def act(self, state):\n action_distribution = self.actor_network.forward(state)\n action = np.random.choice(self.num_of_actions, p=\n action_distribution.detach().numpy())\n return action\n\n def memorize(self, state, action, new_state, reward, done):\n self.experience_replay_buffer.push(state, action, new_state, reward,\n done)\n\n def learn(self, rewards_batch, states_batch, actions_batch,\n new_states_batch, new_actions_batch):\n states_batch = np.asarray(states_batch)\n actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n rewards_batch = torch.tensor(rewards_batch, dtype=torch.float)\n new_states_batch = np.asarray(states_batch)\n new_actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n V_batch = []\n V_prime_batch = []\n for state, new_state, 
new_action in zip(states_batch,\n new_states_batch, new_actions_batch):\n state = torch.Tensor(state)\n v_value = self.critic_network.forward(state)\n V_batch.append(v_value)\n new_state = torch.Tensor(new_state)\n v_prime_value = self.critic_network.forward(new_state)\n V_prime_batch.append(v_prime_value)\n log_probs = torch.log(self.actor_network(states_batch))\n selected_log_probs = rewards_batch * log_probs[np.arange(len(\n actions_batch)), actions_batch]\n actor_loss = -selected_log_probs.mean()\n self.actor_optimizer.zero_grad()\n actor_loss.backward()\n self.actor_optimizer.step()\n V_prime_batch = torch.stack(V_prime_batch)\n V_batch = torch.stack(V_batch)\n advantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch\n critic_loss = (V_batch - (rewards_batch + self.critic_gamma *\n V_prime_batch)).pow(2).mean()\n self.critic_optimizer.zero_grad()\n critic_loss.backward()\n self.critic_optimizer.step()\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Oct 25 19:21:32 2019\n\n@author: Nikos\n\"\"\"\n\nimport torch\nimport torch.optim as optim\nfrom utilities import *\nfrom model import *\nfrom torch.autograd import Variable\nimport numpy as np\nimport random\n\nclass A2C_agent(object):\n\tdef __init__(self, env, actor_hidden_size, actor_lr, actor_batch_size, \n\t\tcritic_gamma, mem_size, critic_hidden_size, critic_lr, critic_batch_size):\n \n\t\tself.env = env\n\t\tself.actor_hidden_size = actor_hidden_size\n\t\tself.actor_lr = actor_lr\n\t\tself.actor_batch_size = actor_batch_size\n\n\t\tself.critic_hidden_size = critic_hidden_size\n\t\tself.critic_lr = critic_lr\n\t\tself.critic_batch_size = critic_batch_size\n\t\tself.critic_gamma = critic_gamma\n\n\t\tself.mem_size = mem_size\n \n\t\tself.num_of_states = env.observation_space.shape[0]\n\t\tself.num_of_actions = env.action_space.n\n\n\t\tself.experience_replay_buffer = ReplayBuffer(self.mem_size)\n \n # initialize the Actor network (policy)\n\t\tself.actor_network = ActorNet(self.num_of_states, self.actor_hidden_size, self.num_of_actions)\n \n\t\tself.actor_optimizer = optim.Adam(self.actor_network.parameters(), lr = self.actor_lr) \n\n\t\t# initialize the Critic network (v-learning)\n\t\t# The difference between the critic in A2C (here) and the \n\t# critic int he \"vanilla\" Actor-Critic version is that the\n\t# critic in A2C models the value function, hence it needs\n\t# to only output the value of each state and not the Q-value\n\t# for each (state, action) pair. 
Therefore, the output size\n\t# here needs to be a scalar.\n\t\tself.critic_network = CriticNet(self.num_of_states, self.critic_hidden_size, 1)\n \n\t\tself.critic_optimizer = optim.Adam(self.critic_network.parameters(), lr = self.critic_lr) \n \n\tdef act(self, state):\n \t# compute the action distribution based on the current state via the policy net\n\t\taction_distribution = self.actor_network.forward(state)\n\n # pick an action based on that distribution\n\t\taction = np.random.choice(self.num_of_actions, p = action_distribution.detach().numpy())\n\t\treturn action\n\t\t\n\tdef memorize(self, state, action, new_state, reward, done):\n # this function takes a transition (state, action, new_state, reward, done)\n # and stores it into the experience memory buffer\n\t\tself.experience_replay_buffer.push(state, action, new_state, reward, done)\n\n\tdef learn(self, rewards_batch, states_batch, actions_batch, new_states_batch, new_actions_batch):\n\n\t\t#states_batch = torch.tensor(states_batch, dtype=torch.float)\n\t\tstates_batch = np.asarray(states_batch)\n\t\tactions_batch = torch.tensor(actions_batch, dtype=torch.long)\n\t\trewards_batch = torch.tensor(rewards_batch, dtype=torch.float)\n\t\tnew_states_batch = np.asarray(states_batch)\n\t\tnew_actions_batch = torch.tensor(actions_batch, dtype=torch.long)\n\t\tV_batch = []\n\t\tV_prime_batch = []\n\n\t\tfor state, new_state, new_action in zip(states_batch,\\\n\t\t\tnew_states_batch, new_actions_batch):\n\t\t\tstate = torch.Tensor(state)\n\n\t\t\tv_value = self.critic_network.forward(state)\n\t\t\t# get q-value for specific action\n\t\t\t#Q = q_values.gather(-1, action)\n\t\t\tV_batch.append(v_value)\n\n\t\t\tnew_state = torch.Tensor(new_state)\n\t\t\tv_prime_value = self.critic_network.forward(new_state)\n\t\t\t#V_prime = q_prime_values.gather(-1, new_action)\n\t\t\tV_prime_batch.append(v_prime_value)\n \n # compute the log of the probabilities that the policy outputs for each state\n\t\tlog_probs = 
torch.log(self.actor_network(states_batch))\n # pick those log probabilities that correspond to the actions that were selected\n\t\tselected_log_probs = rewards_batch * log_probs[np.arange(len(actions_batch)), actions_batch]\n # compute the monte-carlo estimate by averaging the losses and then form the optimization\n # criterion, which will be the negative log probs.\n\t\tactor_loss = -selected_log_probs.mean()\n\t\tself.actor_optimizer.zero_grad()\n\t\tactor_loss.backward()\n \n # if we need smooth updates we clip the grads between -1 and 1\n #for param in self.online_dqn_network.parameters():\n # param.grad.data.clamp_(-1,1)\n\t\tself.actor_optimizer.step()\n\n\t\t# Compute TD error for V network\n\t\tV_prime_batch = torch.stack(V_prime_batch)\n\t\tV_batch = torch.stack(V_batch)\n\t\t# A(s, a) = r_prime + gamma * V_prime - V\n\t\tadvantage = rewards_batch + self.critic_gamma * V_prime_batch - V_batch\n\t\t#print(deltas)\n\n\t\tcritic_loss = (V_batch - (rewards_batch + self.critic_gamma * V_prime_batch)).pow(2).mean()\n\t\t#print(critic_loss)\n\t\tself.critic_optimizer.zero_grad()\n\t\tcritic_loss.backward()\n\t\tself.critic_optimizer.step()\n\n\n\t\t#return loss",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
setup(name='qn', version='0.2.2', description=
'Handy functions I use everyday.', url='https://github.com/frlender/qn',
author='Qiaonan Duan', author_email='geonann@gmail.com', license='MIT',
packages=find_packages(), zip_safe=False)
<|reserved_special_token_1|>
from setuptools import setup, find_packages
setup(name='qn', version='0.2.2', description=
'Handy functions I use everyday.', url='https://github.com/frlender/qn',
author='Qiaonan Duan', author_email='geonann@gmail.com', license='MIT',
packages=find_packages(), zip_safe=False)
<|reserved_special_token_1|>
from setuptools import setup, find_packages

# Packaging metadata for the `qn` utility library.
setup(
    name='qn',
    version='0.2.2',
    description='Handy functions I use everyday.',
    url='https://github.com/frlender/qn',
    author='Qiaonan Duan',
    author_email='geonann@gmail.com',
    license='MIT',
    packages=find_packages(),
    zip_safe=False,
)
|
flexible
|
{
"blob_id": "3b307ae7f8b8b25c93eb2dc54b2603b1291b6232",
"index": 1789,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='qn', version='0.2.2', description=\n 'Handy functions I use everyday.', url='https://github.com/frlender/qn',\n author='Qiaonan Duan', author_email='geonann@gmail.com', license='MIT',\n packages=find_packages(), zip_safe=False)\n",
"step-3": "from setuptools import setup, find_packages\nsetup(name='qn', version='0.2.2', description=\n 'Handy functions I use everyday.', url='https://github.com/frlender/qn',\n author='Qiaonan Duan', author_email='geonann@gmail.com', license='MIT',\n packages=find_packages(), zip_safe=False)\n",
"step-4": "from setuptools import setup, find_packages\n\nsetup(name='qn',\n version='0.2.2',\n description='Handy functions I use everyday.',\n url='https://github.com/frlender/qn',\n author='Qiaonan Duan',\n author_email='geonann@gmail.com',\n license='MIT',\n packages=find_packages(),\n # install_requires=[\n # 'matplotlib',\n # 'seaborn',\n # 'numpy',\n # 'scipy',\n # 'pandas',\n # 'PyYAML',\n # 'matplotlib-venn',\n # 'scikit-learn'\n # ],\n zip_safe=False)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def load_stations(filename):
with open(filename, 'r') as f:
sta_data = f.readlines()
sta_list = []
for l in range(1, len(sta_data)):
sta_info = sta_data[l]
net_name = re.split(',', sta_info)[0]
sta_name = re.split(',', sta_info)[1]
chan_name = re.split(',', sta_info)[2]
sta_list.append([net_name, sta_name, chan_name])
return sta_list
<|reserved_special_token_0|>
def obspy_download_parallel(data_center, startday, endday, sta_file,
out_path, cores=1):
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
f.write('The number of stations is: ' + str(len(sta_list)) + '\n')
day = startday
while day <= endday:
t_b = time.time()
with open('download.log', 'a') as f:
f.write('Day: ' + str(day) + '\n')
print(day)
starttime = day
endtime = day + 86400
client = Client(data_center)
if cores == 1:
for i in range(len(sta_list)):
sta = sta_list[i]
print(sta)
net_name = sta[0]
sta_name = sta[1]
chan_name = sta[2]
obspy_download(client, net_name, sta_name, chan_name,
starttime, endtime, out_path)
else:
pass
t_e = time.time()
with open('download.log', 'a') as f:
f.write('Using time: ' + str(t_e - t_b) + '\n')
day = day + 86400
return None
<|reserved_special_token_0|>
def stp_download_parallel(startday, endday, sta_file, out_path, cores=1):
"""
:param startday: obspy.core.utcdatetime.UTCDateTime
:param endday: obspy.core.utcdatetime.UTCDateTime
:param sta_file: Network,Station,Channel,Latitude,Longitude
:param out_path:
:param cores:
:return:
"""
if os.path.exists('download.log'):
os.remove('download.log')
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
pool = multiprocessing.Pool(processes=cores)
tasks = []
day = startday
while day <= endday:
print(day)
stp_run_download(sta_list, day, out_path)
day = day + 86400
'\n # chunksize is how many tasks will be processed by one processor\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\n # close() & join() is necessary\n # No more work\n pool.close()\n\n # simple progress bar\n while (True):\n remaining = rs._number_left\n print("finished:{0}/{1}".format(len(tasks) - remaining, len(tasks)),\n end=\'\r\') # \'\r\' means remove the last line\n if (rs.ready()):\n break\n time.sleep(0.5)\n\n # Wait for completion\n pool.join()\n '
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_stations(filename):
with open(filename, 'r') as f:
sta_data = f.readlines()
sta_list = []
for l in range(1, len(sta_data)):
sta_info = sta_data[l]
net_name = re.split(',', sta_info)[0]
sta_name = re.split(',', sta_info)[1]
chan_name = re.split(',', sta_info)[2]
sta_list.append([net_name, sta_name, chan_name])
return sta_list
def set_folders(out_path, startday, endday):
day = startday
while day <= endday:
year_folder = str(day.year).zfill(4)
day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day
.day).zfill(2)
out_folder = os.path.join(out_path, year_folder, day_folder)
if not os.path.exists(out_folder):
os.makedirs(out_folder)
day = day + 86400
return None
<|reserved_special_token_0|>
def obspy_download_parallel(data_center, startday, endday, sta_file,
out_path, cores=1):
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
f.write('The number of stations is: ' + str(len(sta_list)) + '\n')
day = startday
while day <= endday:
t_b = time.time()
with open('download.log', 'a') as f:
f.write('Day: ' + str(day) + '\n')
print(day)
starttime = day
endtime = day + 86400
client = Client(data_center)
if cores == 1:
for i in range(len(sta_list)):
sta = sta_list[i]
print(sta)
net_name = sta[0]
sta_name = sta[1]
chan_name = sta[2]
obspy_download(client, net_name, sta_name, chan_name,
starttime, endtime, out_path)
else:
pass
t_e = time.time()
with open('download.log', 'a') as f:
f.write('Using time: ' + str(t_e - t_b) + '\n')
day = day + 86400
return None
def stp_run_download(sta_list, download_date, out_path):
with open('download.log', 'a') as f:
f.write(str(download_date) + '\n')
tb = time.time()
year = str(download_date.year).zfill(4)
month = str(download_date.month).zfill(2)
day = str(download_date.day).zfill(2)
day_folder = year + month + day
out_folder = os.path.join(out_path, year, day_folder)
out_folder_old = os.path.join(out_path + '_old', year, day_folder)
p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)
s = 'MSEED \n'
for i in range(len(sta_list)):
sta = sta_list[i]
net_name = sta[0]
sta_name = sta[1]
chan_name = sta[2]
out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %
(net_name, sta_name, chan_name)))
if len(out_sta_file) == 0:
s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \n'.format(net_name,
sta_name, chan_name, year, month, day)
s += 'quit \n'
p.communicate(s.encode())
out_files = glob.glob('%s%s%s*.*' % (year, month, day))
for out_file in out_files:
shutil.move(out_file, out_folder)
te = time.time()
with open('download.log', 'a') as f:
f.write('Using time: ' + str(te - tb) + '\n')
def stp_download_parallel(startday, endday, sta_file, out_path, cores=1):
"""
:param startday: obspy.core.utcdatetime.UTCDateTime
:param endday: obspy.core.utcdatetime.UTCDateTime
:param sta_file: Network,Station,Channel,Latitude,Longitude
:param out_path:
:param cores:
:return:
"""
if os.path.exists('download.log'):
os.remove('download.log')
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
pool = multiprocessing.Pool(processes=cores)
tasks = []
day = startday
while day <= endday:
print(day)
stp_run_download(sta_list, day, out_path)
day = day + 86400
'\n # chunksize is how many tasks will be processed by one processor\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\n # close() & join() is necessary\n # No more work\n pool.close()\n\n # simple progress bar\n while (True):\n remaining = rs._number_left\n print("finished:{0}/{1}".format(len(tasks) - remaining, len(tasks)),\n end=\'\r\') # \'\r\' means remove the last line\n if (rs.ready()):\n break\n time.sleep(0.5)\n\n # Wait for completion\n pool.join()\n '
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_stations(filename):
with open(filename, 'r') as f:
sta_data = f.readlines()
sta_list = []
for l in range(1, len(sta_data)):
sta_info = sta_data[l]
net_name = re.split(',', sta_info)[0]
sta_name = re.split(',', sta_info)[1]
chan_name = re.split(',', sta_info)[2]
sta_list.append([net_name, sta_name, chan_name])
return sta_list
def set_folders(out_path, startday, endday):
day = startday
while day <= endday:
year_folder = str(day.year).zfill(4)
day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day
.day).zfill(2)
out_folder = os.path.join(out_path, year_folder, day_folder)
if not os.path.exists(out_folder):
os.makedirs(out_folder)
day = day + 86400
return None
def obspy_download(client, net_name, sta_name, chn_name, starttime, endtime,
out_path, time_thre=10):
year_folder = str(starttime.year)
day_folder = str(starttime.year).zfill(4) + str(starttime.month).zfill(2
) + str(starttime.day).zfill(2)
out_folder = os.path.join(out_path, year_folder, day_folder)
outfile = os.path.join(out_folder, net_name + '.' + sta_name + '.' +
chn_name + '.mseed')
if not os.path.exists(outfile):
t = 0
flag = False
while flag == False and t < time_thre:
try:
client.get_waveforms(network=net_name, station=sta_name,
location='--', channel=chn_name, starttime=starttime,
endtime=endtime, filename=outfile)
flag = True
except BaseException:
pass
time.sleep(0.5)
t += 1
if not flag:
with open('download.log', 'a') as f:
f.write('No data: ' + outfile + '\n')
return None
def obspy_download_parallel(data_center, startday, endday, sta_file,
out_path, cores=1):
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
f.write('The number of stations is: ' + str(len(sta_list)) + '\n')
day = startday
while day <= endday:
t_b = time.time()
with open('download.log', 'a') as f:
f.write('Day: ' + str(day) + '\n')
print(day)
starttime = day
endtime = day + 86400
client = Client(data_center)
if cores == 1:
for i in range(len(sta_list)):
sta = sta_list[i]
print(sta)
net_name = sta[0]
sta_name = sta[1]
chan_name = sta[2]
obspy_download(client, net_name, sta_name, chan_name,
starttime, endtime, out_path)
else:
pass
t_e = time.time()
with open('download.log', 'a') as f:
f.write('Using time: ' + str(t_e - t_b) + '\n')
day = day + 86400
return None
def stp_run_download(sta_list, download_date, out_path):
with open('download.log', 'a') as f:
f.write(str(download_date) + '\n')
tb = time.time()
year = str(download_date.year).zfill(4)
month = str(download_date.month).zfill(2)
day = str(download_date.day).zfill(2)
day_folder = year + month + day
out_folder = os.path.join(out_path, year, day_folder)
out_folder_old = os.path.join(out_path + '_old', year, day_folder)
p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)
s = 'MSEED \n'
for i in range(len(sta_list)):
sta = sta_list[i]
net_name = sta[0]
sta_name = sta[1]
chan_name = sta[2]
out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %
(net_name, sta_name, chan_name)))
if len(out_sta_file) == 0:
s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \n'.format(net_name,
sta_name, chan_name, year, month, day)
s += 'quit \n'
p.communicate(s.encode())
out_files = glob.glob('%s%s%s*.*' % (year, month, day))
for out_file in out_files:
shutil.move(out_file, out_folder)
te = time.time()
with open('download.log', 'a') as f:
f.write('Using time: ' + str(te - tb) + '\n')
def stp_download_parallel(startday, endday, sta_file, out_path, cores=1):
"""
:param startday: obspy.core.utcdatetime.UTCDateTime
:param endday: obspy.core.utcdatetime.UTCDateTime
:param sta_file: Network,Station,Channel,Latitude,Longitude
:param out_path:
:param cores:
:return:
"""
if os.path.exists('download.log'):
os.remove('download.log')
with open('download.log', 'a') as f:
f.write('>>> ' + str(time.localtime(time.time())) + '\n')
set_folders(out_path, startday, endday)
sta_list = load_stations(sta_file)
pool = multiprocessing.Pool(processes=cores)
tasks = []
day = startday
while day <= endday:
print(day)
stp_run_download(sta_list, day, out_path)
day = day + 86400
'\n # chunksize is how many tasks will be processed by one processor\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\n # close() & join() is necessary\n # No more work\n pool.close()\n\n # simple progress bar\n while (True):\n remaining = rs._number_left\n print("finished:{0}/{1}".format(len(tasks) - remaining, len(tasks)),\n end=\'\r\') # \'\r\' means remove the last line\n if (rs.ready()):\n break\n time.sleep(0.5)\n\n # Wait for completion\n pool.join()\n '
return None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_stations(filename):
    """Parse a station list file and return its station entries.

    The file is comma separated with one header line, e.g.
    ``Network,Station,Channel,Latitude,Longitude``; the header is skipped.

    :param filename: path to the station list file
    :return: list of ``[network, station, channel]`` string triples
    """
    with open(filename, 'r') as f:
        rows = f.readlines()
    station_entries = []
    # First line is the header row; data starts at index 1.
    for row in rows[1:]:
        fields = row.split(',')
        station_entries.append([fields[0], fields[1], fields[2]])
    return station_entries
def set_folders(out_path, startday, endday):
    """Create the ``<out_path>/<YYYY>/<YYYYMMDD>`` folder for every day
    between startday and endday (inclusive).

    :param out_path: root of the output directory tree
    :param startday: obspy UTCDateTime-like object, first day
    :param endday: obspy UTCDateTime-like object, last day
    :return: None
    """
    day = startday
    while day <= endday:
        year_folder = str(day.year).zfill(4)
        day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day
            .day).zfill(2)
        out_folder = os.path.join(out_path, year_folder, day_folder)
        # Idempotent: only create folders that do not exist yet.
        if not os.path.exists(out_folder):
            os.makedirs(out_folder)
        # UTCDateTime supports adding seconds; 86400 s = one day.
        day = day + 86400
    return None
def obspy_download(client, net_name, sta_name, chn_name, starttime, endtime,
    out_path, time_thre=10):
    """Download one window of waveform data for a single channel via FDSN.

    Output is written to ``<out_path>/<YYYY>/<YYYYMMDD>/<net>.<sta>.<chn>.mseed``.
    Downloads are incremental: an existing output file is never re-fetched.
    Each request is retried up to ``time_thre`` times (0.5 s pause between
    attempts); persistent failures are recorded in download.log.

    :param client: obspy.clients.fdsn.Client instance
    :param net_name: network code
    :param sta_name: station code
    :param chn_name: channel code
    :param starttime: obspy UTCDateTime, start of the request window
    :param endtime: obspy UTCDateTime, end of the request window
    :param out_path: root of the output directory tree
    :param time_thre: maximum number of download attempts
    :return: None
    """
    year_folder = str(starttime.year)
    day_folder = str(starttime.year).zfill(4) + str(starttime.month).zfill(2
        ) + str(starttime.day).zfill(2)
    out_folder = os.path.join(out_path, year_folder, day_folder)
    outfile = os.path.join(out_folder, net_name + '.' + sta_name + '.' +
        chn_name + '.mseed')
    if not os.path.exists(outfile):
        t = 0
        flag = False
        while not flag and t < time_thre:
            try:
                client.get_waveforms(network=net_name, station=sta_name,
                    location='--', channel=chn_name, starttime=starttime,
                    endtime=endtime, filename=outfile)
                flag = True
            except Exception:
                # FIX: was ``except BaseException``, which also swallowed
                # KeyboardInterrupt/SystemExit. Transient server or network
                # errors are deliberately ignored here; we simply retry.
                pass
            time.sleep(0.5)
            t += 1
        if not flag:
            with open('download.log', 'a') as f:
                f.write('No data: ' + outfile + '\n')
    return None
def obspy_download_parallel(data_center, startday, endday, sta_file,
    out_path, cores=1):
    """Download day-long waveforms for every station in sta_file, day by day.

    :param data_center: FDSN data-center name passed to obspy's Client
        (e.g. 'SCEDC')
    :param startday: obspy.core.utcdatetime.UTCDateTime, first day (inclusive)
    :param endday: obspy.core.utcdatetime.UTCDateTime, last day (inclusive)
    :param sta_file: station list file (Network,Station,Channel,... per line)
    :param out_path: root of the output directory tree
    :param cores: number of workers; only the serial ``cores == 1`` path is
        implemented — any other value silently downloads nothing (see the
        ``else`` branch below)
    :return: None
    """
    set_folders(out_path, startday, endday)
    sta_list = load_stations(sta_file)
    with open('download.log', 'a') as f:
        f.write('>>> ' + str(time.localtime(time.time())) + '\n')
        f.write('The number of stations is: ' + str(len(sta_list)) + '\n')
    day = startday
    while day <= endday:
        t_b = time.time()
        with open('download.log', 'a') as f:
            f.write('Day: ' + str(day) + '\n')
        print(day)
        starttime = day
        # One-day request window: UTCDateTime + seconds (86400 s = 1 day).
        endtime = day + 86400
        # A fresh client per day; presumably to avoid stale connections —
        # TODO confirm this is intentional rather than hoisting it out.
        client = Client(data_center)
        if cores == 1:
            for i in range(len(sta_list)):
                sta = sta_list[i]
                print(sta)
                net_name = sta[0]
                sta_name = sta[1]
                chan_name = sta[2]
                obspy_download(client, net_name, sta_name, chan_name,
                    starttime, endtime, out_path)
        else:
            # NOTE(review): the parallel path is not implemented;
            # cores > 1 performs no downloads at all.
            pass
        t_e = time.time()
        with open('download.log', 'a') as f:
            f.write('Using time: ' + str(t_e - t_b) + '\n')
        day = day + 86400
    return None
def stp_run_download(sta_list, download_date, out_path):
    """Download one day of data for all stations via the ``stp`` client.

    Builds an stp command script (MSEED output format, one WIN request per
    station not already archived under ``<out_path>_old``) and pipes it to a
    spawned ``stp`` subprocess, then moves the files stp wrote into the
    current working directory to the day folder under out_path. Timing is
    appended to download.log.

    :param sta_list: list of ``[network, station, channel]`` entries
    :param download_date: obspy UTCDateTime-like object with year/month/day
    :param out_path: root of the output directory tree
    """
    with open('download.log', 'a') as f:
        f.write(str(download_date) + '\n')
    tb = time.time()
    year = str(download_date.year).zfill(4)
    month = str(download_date.month).zfill(2)
    day = str(download_date.day).zfill(2)
    day_folder = year + month + day
    out_folder = os.path.join(out_path, year, day_folder)
    # Archive of previously downloaded data, used for incremental downloads.
    out_folder_old = os.path.join(out_path + '_old', year, day_folder)
    p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)
    # stp command script: set MSEED output, then one WIN request per channel.
    s = 'MSEED \n'
    for i in range(len(sta_list)):
        sta = sta_list[i]
        net_name = sta[0]
        sta_name = sta[1]
        chan_name = sta[2]
        # Skip channels that already exist in the old archive.
        out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %
            (net_name, sta_name, chan_name)))
        if len(out_sta_file) == 0:
            s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \n'.format(net_name,
                sta_name, chan_name, year, month, day)
    s += 'quit \n'
    # Feed the whole script to stp over stdin and wait for it to finish.
    p.communicate(s.encode())
    # stp writes files named <YYYYMMDD>* into the CWD; move them into place.
    out_files = glob.glob('%s%s%s*.*' % (year, month, day))
    for out_file in out_files:
        shutil.move(out_file, out_folder)
    te = time.time()
    with open('download.log', 'a') as f:
        f.write('Using time: ' + str(te - tb) + '\n')
def stp_download_parallel(startday, endday, sta_file, out_path, cores=1):
    """Download waveforms day by day through the ``stp`` command-line client.

    :param startday: obspy.core.utcdatetime.UTCDateTime, first day (inclusive)
    :param endday: obspy.core.utcdatetime.UTCDateTime, last day (inclusive)
    :param sta_file: station file (Network,Station,Channel,Latitude,Longitude)
    :param out_path: root of the output directory tree
    :param cores: kept for interface compatibility; downloads currently run
        serially (the former multiprocessing path was dead code)
    :return: None
    """
    # Start a fresh log for this run.
    if os.path.exists('download.log'):
        os.remove('download.log')
    with open('download.log', 'a') as f:
        f.write('>>> ' + str(time.localtime(time.time())) + '\n')
    set_folders(out_path, startday, endday)
    sta_list = load_stations(sta_file)
    # FIX: the original created a multiprocessing.Pool(processes=cores) and a
    # ``tasks`` list that were never used — the pool was never closed, so its
    # worker processes leaked on every call. The actual download loop below
    # (serial, one day at a time) is unchanged.
    day = startday
    while day <= endday:
        print(day)
        stp_run_download(sta_list, day, out_path)
        day = day + 86400
    return None
if __name__ == '__main__':
    # Project root depends on which machine this script runs on.
    LOCAL_PATH = (
        '/Users/yunnaidan/Project/Dynamic_Triggering/Workspace/Central_California'
        )
    REMOTE_PATH = '/home/yunnd/Workspace/Dynamic_triggering/Central_California'
    # Darwin == macOS laptop; Linux == remote workstation.
    if platform.system() == 'Darwin':
        ROOT_PATH = LOCAL_PATH
    if platform.system() == 'Linux':
        ROOT_PATH = REMOTE_PATH
    # Download window (inclusive on both ends).
    startday = UTCDateTime('2009-01-03')
    endday = UTCDateTime('2009-01-05')
    sta_file = os.path.join(ROOT_PATH,
        'data/station_info/stations_CI_selected_for_download_BH.txt')
    out_path = os.path.join(ROOT_PATH, 'data/time_series/raw_data/mseed')
    # FDSN data-center name understood by obspy's Client.
    data_center = 'SCEDC'
    obspy_download_parallel(data_center, startday, endday, sta_file,
        out_path, cores=1)
    pass
<|reserved_special_token_1|>
"""
@version:
author:yunnaidan
@time: 2019/07/22
@file: download_mseed.py
@function:
"""
from obspy.clients.fdsn import Client
from obspy.core import UTCDateTime
import numpy as np
import obspy
import os
import re
import time
import glob
import shutil
import platform
import subprocess
import multiprocessing
def load_stations(filename):
    """Read a station list file and return ``[[network, station, channel], ...]``.

    The file is comma-separated with a one-line header:
    ``Network,Station,Channel,Latitude,Longitude``.  Only the first three
    columns of each data row are returned.

    :param filename: path to the station CSV file
    :return: list of ``[network, station, channel]`` lists
    """
    with open(filename, 'r') as f:
        sta_data = f.readlines()
    sta_list = []
    # Skip the header row; split each line once instead of running a regex
    # split three times per row.
    for sta_info in sta_data[1:]:
        fields = sta_info.split(',')
        sta_list.append([fields[0], fields[1], fields[2]])
    return sta_list
def set_folders(out_path, startday, endday):
    """Create one output folder per day: ``<out_path>/<YYYY>/<YYYYMMDD>``.

    :param out_path: root output directory
    :param startday: first day (UTCDateTime-like: needs ``.year``/``.month``/
        ``.day``, ``+ 86400`` and ``<=``)
    :param endday: last day, inclusive
    :return: None
    """
    day = startday
    while day <= endday:
        year_folder = str(day.year).zfill(4)
        day_folder = str(day.year).zfill(
            4) + str(day.month).zfill(2) + str(day.day).zfill(2)
        out_folder = os.path.join(out_path, year_folder, day_folder)
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists() guard and makes reruns harmless.
        os.makedirs(out_folder, exist_ok=True)
        # UTCDateTime arithmetic is in seconds; 86400 s = one day.
        day = day + 86400
    return None
def obspy_download(
        client,
        net_name,
        sta_name,
        chn_name,
        starttime,
        endtime,
        out_path,
        time_thre=10):
    """Download one station/channel/day of MiniSEED data via an FDSN client.

    Retries up to ``time_thre`` times on failure, sleeping 0.5 s between
    attempts.  If the file already exists nothing is downloaded (incremental
    mode).  Permanent failures are appended to ``download.log`` in the CWD.

    :param client: obspy FDSN ``Client`` (or any object with a compatible
        ``get_waveforms(..., filename=...)`` method)
    :param net_name: network code
    :param sta_name: station code
    :param chn_name: channel code
    :param starttime: window start (UTCDateTime-like)
    :param endtime: window end
    :param out_path: root output directory (folders made by set_folders)
    :param time_thre: maximum number of download attempts
    :return: None
    """
    # zfill(4) keeps the year folder name consistent with set_folders().
    year_folder = str(starttime.year).zfill(4)
    day_folder = str(starttime.year).zfill(
        4) + str(starttime.month).zfill(2) + str(starttime.day).zfill(2)
    out_folder = os.path.join(out_path, year_folder, day_folder)
    outfile = os.path.join(
        out_folder, net_name + '.' + sta_name + '.' + chn_name + '.mseed')
    # Incremental download: skip files that were fetched on a previous run.
    if not os.path.exists(outfile):
        flag = False
        for _ in range(time_thre):
            try:
                client.get_waveforms(
                    network=net_name,
                    station=sta_name,
                    location='--',
                    channel=chn_name,
                    starttime=starttime,
                    endtime=endtime,
                    filename=outfile)
                flag = True
                break
            except Exception:
                # Catch Exception (not BaseException) so Ctrl-C still works;
                # back off briefly before retrying transient server errors.
                time.sleep(0.5)
        if not flag:
            with open('download.log', 'a') as f:
                f.write('No data: ' + outfile + '\n')
    return None
def obspy_download_parallel(
        data_center,
        startday,
        endday,
        sta_file,
        out_path,
        cores=1):
    """Download every listed station for each day in [startday, endday].

    Folder layout is prepared by ``set_folders`` and station triples come
    from ``load_stations``.  Only the serial path (``cores == 1``) is
    implemented; any other value currently does nothing for a given day.
    Timing information is appended to ``download.log`` in the CWD.

    :param data_center: FDSN data-center name passed to ``Client``
    :param startday: first day (UTCDateTime), inclusive
    :param endday: last day (UTCDateTime), inclusive
    :param sta_file: station CSV (Network,Station,Channel,...)
    :param out_path: root output directory
    :param cores: kept for interface compatibility
    :return: None
    """
    set_folders(out_path, startday, endday)
    sta_list = load_stations(sta_file)

    with open('download.log', 'a') as f:
        f.write('>>> ' + str(time.localtime(time.time())) + '\n')
        f.write('The number of stations is: ' + str(len(sta_list)) + '\n')

    day = startday
    while day <= endday:
        t_b = time.time()
        with open('download.log', 'a') as f:
            f.write('Day: ' + str(day) + '\n')
        print(day)

        starttime, endtime = day, day + 86400
        # A fresh client per day, as before (long sessions can go stale).
        client = Client(data_center)

        if cores == 1:
            for sta in sta_list:
                print(sta)
                net_name, sta_name, chan_name = sta[0], sta[1], sta[2]
                obspy_download(
                    client,
                    net_name,
                    sta_name,
                    chan_name,
                    starttime,
                    endtime,
                    out_path)
        else:
            pass  # parallel path not implemented

        t_e = time.time()
        with open('download.log', 'a') as f:
            f.write('Using time: ' + str(t_e - t_b) + '\n')
        day = day + 86400

    return None
def stp_run_download(sta_list, download_date, out_path):
    """Fetch one day of MiniSEED for every station through the STP client.

    Builds a single STP command script (``MSEED``, one ``WIN`` line per
    station not already present under ``<out_path>_old``, then ``quit``),
    feeds it to an ``stp`` subprocess on stdin, and moves the files STP
    writes into the current directory to ``<out_path>/<YYYY>/<YYYYMMDD>``.
    Timing is appended to ``download.log``.

    :param sta_list: list of ``[network, station, channel]`` triples
    :param download_date: UTCDateTime of the day to fetch
    :param out_path: root output directory
    """
    with open('download.log', 'a') as f:
        f.write(str(download_date) + '\n')

    tb = time.time()
    year = str(download_date.year).zfill(4)
    month = str(download_date.month).zfill(2)
    day = str(download_date.day).zfill(2)
    day_folder = year + month + day
    out_folder = os.path.join(out_path, year, day_folder)
    out_folder_old = os.path.join(out_path + '_old', year, day_folder)

    proc = subprocess.Popen(['stp'], stdin=subprocess.PIPE)

    # Assemble the whole STP script up front, then send it in one write.
    commands = ["MSEED \n"]
    for net_name, sta_name, chan_name in sta_list:
        already_there = glob.glob(
            os.path.join(
                out_folder_old, '*%s.%s.%s*' %
                (net_name, sta_name, chan_name)))
        if not already_there:
            commands.append("WIN {} {} {} {}/{}/{},00:00:00 +1d \n".format(
                net_name, sta_name, chan_name, year, month, day))
    commands.append("quit \n")
    proc.communicate(''.join(commands).encode())

    # STP writes into the CWD; move its output into the day folder.
    for out_file in glob.glob('%s%s%s*.*' % (year, month, day)):
        shutil.move(out_file, out_folder)

    te = time.time()
    with open('download.log', 'a') as f:
        f.write('Using time: ' + str(te - tb) + '\n')
def stp_download_parallel(startday, endday, sta_file, out_path, cores=1):
    """Download MiniSEED day files with the STP client over a date range.

    :param startday: obspy.core.utcdatetime.UTCDateTime
    :param endday: obspy.core.utcdatetime.UTCDateTime
    :param sta_file: Network,Station,Channel,Latitude,Longitude
    :param out_path: root output directory
    :param cores: kept for interface compatibility; the parallel path is
        currently disabled and days are processed serially.
    :return: None
    """
    # Start a fresh log for every run.
    if os.path.exists('download.log'):
        os.remove('download.log')
    with open('download.log', 'a') as f:
        f.write('>>> ' + str(time.localtime(time.time())) + '\n')

    set_folders(out_path, startday, endday)
    sta_list = load_stations(sta_file)

    # BUGFIX: previously a multiprocessing.Pool(processes=cores) was created
    # here but never used (the starmap code below is disabled), leaking
    # `cores` idle worker processes on every call.  Create the pool inside
    # the disabled block if/when it is re-enabled.
    day = startday
    while day <= endday:
        print(day)
        stp_run_download(sta_list, day, out_path)
        day = day + 86400

    # Disabled parallel implementation, kept for reference:
    #
    #     tasks = []
    #     day = startday
    #     while day <= endday:
    #         tasks.append((sta_list, day, out_path))
    #         day = day + 86400
    #     pool = multiprocessing.Pool(processes=cores)
    #     # chunksize is how many tasks will be processed by one processor
    #     rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)
    #     pool.close()  # no more work
    #     # simple progress bar; '\r' rewrites the last line
    #     while not rs.ready():
    #         remaining = rs._number_left
    #         print("finished:{0}/{1}".format(len(tasks) - remaining, len(tasks)),
    #               end='\r')
    #         time.sleep(0.5)
    #     pool.join()  # wait for completion

    return None
if __name__ == '__main__':
    # Example driver: fetch three days of CI broadband data from SCEDC.
    LOCAL_PATH = '/Users/yunnaidan/Project/Dynamic_Triggering/Workspace/Central_California'
    REMOTE_PATH = '/home/yunnd/Workspace/Dynamic_triggering/Central_California'
    # Select the project root by host platform (macOS laptop vs. Linux server).
    # NOTE(review): on any other platform ROOT_PATH stays undefined and the
    # os.path.join below raises NameError — confirm this is acceptable.
    if platform.system() == 'Darwin':
        ROOT_PATH = LOCAL_PATH
    if platform.system() == 'Linux':
        ROOT_PATH = REMOTE_PATH

    # Inclusive date range to download, one folder per day.
    startday = UTCDateTime('2009-01-03')
    endday = UTCDateTime('2009-01-05')

    # Station list: CSV with header Network,Station,Channel,Latitude,Longitude.
    sta_file = os.path.join(
        ROOT_PATH,
        'data/station_info/stations_CI_selected_for_download_BH.txt')

    out_path = os.path.join(ROOT_PATH, 'data/time_series/raw_data/mseed')
    data_center = 'SCEDC'
    # Serial FDSN download; switch to the STP line below for SCEDC's STP tool.
    obspy_download_parallel(
        data_center,
        startday,
        endday,
        sta_file,
        out_path,
        cores=1)
    # stp_download_parallel(startday, endday, sta_file, out_path, cores=15)

    pass
|
flexible
|
{
"blob_id": "34db3c9998e1d7647dd954e82e18147504cc74fc",
"index": 6736,
"step-1": "<mask token>\n\n\ndef load_stations(filename):\n with open(filename, 'r') as f:\n sta_data = f.readlines()\n sta_list = []\n for l in range(1, len(sta_data)):\n sta_info = sta_data[l]\n net_name = re.split(',', sta_info)[0]\n sta_name = re.split(',', sta_info)[1]\n chan_name = re.split(',', sta_info)[2]\n sta_list.append([net_name, sta_name, chan_name])\n return sta_list\n\n\n<mask token>\n\n\ndef obspy_download_parallel(data_center, startday, endday, sta_file,\n out_path, cores=1):\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n f.write('The number of stations is: ' + str(len(sta_list)) + '\\n')\n day = startday\n while day <= endday:\n t_b = time.time()\n with open('download.log', 'a') as f:\n f.write('Day: ' + str(day) + '\\n')\n print(day)\n starttime = day\n endtime = day + 86400\n client = Client(data_center)\n if cores == 1:\n for i in range(len(sta_list)):\n sta = sta_list[i]\n print(sta)\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n obspy_download(client, net_name, sta_name, chan_name,\n starttime, endtime, out_path)\n else:\n pass\n t_e = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(t_e - t_b) + '\\n')\n day = day + 86400\n return None\n\n\n<mask token>\n\n\ndef stp_download_parallel(startday, endday, sta_file, out_path, cores=1):\n \"\"\"\n\n :param startday: obspy.core.utcdatetime.UTCDateTime\n :param endday: obspy.core.utcdatetime.UTCDateTime\n :param sta_file: Network,Station,Channel,Latitude,Longitude\n :param out_path:\n :param cores:\n :return:\n \"\"\"\n if os.path.exists('download.log'):\n os.remove('download.log')\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n pool = multiprocessing.Pool(processes=cores)\n tasks = []\n 
day = startday\n while day <= endday:\n print(day)\n stp_run_download(sta_list, day, out_path)\n day = day + 86400\n '\\n # chunksize is how many tasks will be processed by one processor\\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\\n # close() & join() is necessary\\n # No more work\\n pool.close()\\n\\n # simple progress bar\\n while (True):\\n remaining = rs._number_left\\n print(\"finished:{0}/{1}\".format(len(tasks) - remaining, len(tasks)),\\n end=\\'\\r\\') # \\'\\r\\' means remove the last line\\n if (rs.ready()):\\n break\\n time.sleep(0.5)\\n\\n # Wait for completion\\n pool.join()\\n '\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_stations(filename):\n with open(filename, 'r') as f:\n sta_data = f.readlines()\n sta_list = []\n for l in range(1, len(sta_data)):\n sta_info = sta_data[l]\n net_name = re.split(',', sta_info)[0]\n sta_name = re.split(',', sta_info)[1]\n chan_name = re.split(',', sta_info)[2]\n sta_list.append([net_name, sta_name, chan_name])\n return sta_list\n\n\ndef set_folders(out_path, startday, endday):\n day = startday\n while day <= endday:\n year_folder = str(day.year).zfill(4)\n day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day\n .day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n if not os.path.exists(out_folder):\n os.makedirs(out_folder)\n day = day + 86400\n return None\n\n\n<mask token>\n\n\ndef obspy_download_parallel(data_center, startday, endday, sta_file,\n out_path, cores=1):\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n f.write('The number of stations is: ' + str(len(sta_list)) + '\\n')\n day = startday\n while day <= endday:\n t_b = time.time()\n with open('download.log', 'a') as f:\n f.write('Day: ' + str(day) + '\\n')\n print(day)\n starttime = day\n endtime = day + 86400\n client = Client(data_center)\n if cores == 1:\n for i in range(len(sta_list)):\n sta = sta_list[i]\n print(sta)\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n obspy_download(client, net_name, sta_name, chan_name,\n starttime, endtime, out_path)\n else:\n pass\n t_e = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(t_e - t_b) + '\\n')\n day = day + 86400\n return None\n\n\ndef stp_run_download(sta_list, download_date, out_path):\n with open('download.log', 'a') as f:\n f.write(str(download_date) + '\\n')\n tb = time.time()\n year = str(download_date.year).zfill(4)\n month = str(download_date.month).zfill(2)\n day = 
str(download_date.day).zfill(2)\n day_folder = year + month + day\n out_folder = os.path.join(out_path, year, day_folder)\n out_folder_old = os.path.join(out_path + '_old', year, day_folder)\n p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)\n s = 'MSEED \\n'\n for i in range(len(sta_list)):\n sta = sta_list[i]\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %\n (net_name, sta_name, chan_name)))\n if len(out_sta_file) == 0:\n s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \\n'.format(net_name,\n sta_name, chan_name, year, month, day)\n s += 'quit \\n'\n p.communicate(s.encode())\n out_files = glob.glob('%s%s%s*.*' % (year, month, day))\n for out_file in out_files:\n shutil.move(out_file, out_folder)\n te = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(te - tb) + '\\n')\n\n\ndef stp_download_parallel(startday, endday, sta_file, out_path, cores=1):\n \"\"\"\n\n :param startday: obspy.core.utcdatetime.UTCDateTime\n :param endday: obspy.core.utcdatetime.UTCDateTime\n :param sta_file: Network,Station,Channel,Latitude,Longitude\n :param out_path:\n :param cores:\n :return:\n \"\"\"\n if os.path.exists('download.log'):\n os.remove('download.log')\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n pool = multiprocessing.Pool(processes=cores)\n tasks = []\n day = startday\n while day <= endday:\n print(day)\n stp_run_download(sta_list, day, out_path)\n day = day + 86400\n '\\n # chunksize is how many tasks will be processed by one processor\\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\\n # close() & join() is necessary\\n # No more work\\n pool.close()\\n\\n # simple progress bar\\n while (True):\\n remaining = rs._number_left\\n print(\"finished:{0}/{1}\".format(len(tasks) - remaining, len(tasks)),\\n 
end=\\'\\r\\') # \\'\\r\\' means remove the last line\\n if (rs.ready()):\\n break\\n time.sleep(0.5)\\n\\n # Wait for completion\\n pool.join()\\n '\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_stations(filename):\n with open(filename, 'r') as f:\n sta_data = f.readlines()\n sta_list = []\n for l in range(1, len(sta_data)):\n sta_info = sta_data[l]\n net_name = re.split(',', sta_info)[0]\n sta_name = re.split(',', sta_info)[1]\n chan_name = re.split(',', sta_info)[2]\n sta_list.append([net_name, sta_name, chan_name])\n return sta_list\n\n\ndef set_folders(out_path, startday, endday):\n day = startday\n while day <= endday:\n year_folder = str(day.year).zfill(4)\n day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day\n .day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n if not os.path.exists(out_folder):\n os.makedirs(out_folder)\n day = day + 86400\n return None\n\n\ndef obspy_download(client, net_name, sta_name, chn_name, starttime, endtime,\n out_path, time_thre=10):\n year_folder = str(starttime.year)\n day_folder = str(starttime.year).zfill(4) + str(starttime.month).zfill(2\n ) + str(starttime.day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n outfile = os.path.join(out_folder, net_name + '.' + sta_name + '.' 
+\n chn_name + '.mseed')\n if not os.path.exists(outfile):\n t = 0\n flag = False\n while flag == False and t < time_thre:\n try:\n client.get_waveforms(network=net_name, station=sta_name,\n location='--', channel=chn_name, starttime=starttime,\n endtime=endtime, filename=outfile)\n flag = True\n except BaseException:\n pass\n time.sleep(0.5)\n t += 1\n if not flag:\n with open('download.log', 'a') as f:\n f.write('No data: ' + outfile + '\\n')\n return None\n\n\ndef obspy_download_parallel(data_center, startday, endday, sta_file,\n out_path, cores=1):\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n f.write('The number of stations is: ' + str(len(sta_list)) + '\\n')\n day = startday\n while day <= endday:\n t_b = time.time()\n with open('download.log', 'a') as f:\n f.write('Day: ' + str(day) + '\\n')\n print(day)\n starttime = day\n endtime = day + 86400\n client = Client(data_center)\n if cores == 1:\n for i in range(len(sta_list)):\n sta = sta_list[i]\n print(sta)\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n obspy_download(client, net_name, sta_name, chan_name,\n starttime, endtime, out_path)\n else:\n pass\n t_e = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(t_e - t_b) + '\\n')\n day = day + 86400\n return None\n\n\ndef stp_run_download(sta_list, download_date, out_path):\n with open('download.log', 'a') as f:\n f.write(str(download_date) + '\\n')\n tb = time.time()\n year = str(download_date.year).zfill(4)\n month = str(download_date.month).zfill(2)\n day = str(download_date.day).zfill(2)\n day_folder = year + month + day\n out_folder = os.path.join(out_path, year, day_folder)\n out_folder_old = os.path.join(out_path + '_old', year, day_folder)\n p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)\n s = 'MSEED \\n'\n for i in range(len(sta_list)):\n sta = sta_list[i]\n 
net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %\n (net_name, sta_name, chan_name)))\n if len(out_sta_file) == 0:\n s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \\n'.format(net_name,\n sta_name, chan_name, year, month, day)\n s += 'quit \\n'\n p.communicate(s.encode())\n out_files = glob.glob('%s%s%s*.*' % (year, month, day))\n for out_file in out_files:\n shutil.move(out_file, out_folder)\n te = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(te - tb) + '\\n')\n\n\ndef stp_download_parallel(startday, endday, sta_file, out_path, cores=1):\n \"\"\"\n\n :param startday: obspy.core.utcdatetime.UTCDateTime\n :param endday: obspy.core.utcdatetime.UTCDateTime\n :param sta_file: Network,Station,Channel,Latitude,Longitude\n :param out_path:\n :param cores:\n :return:\n \"\"\"\n if os.path.exists('download.log'):\n os.remove('download.log')\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n pool = multiprocessing.Pool(processes=cores)\n tasks = []\n day = startday\n while day <= endday:\n print(day)\n stp_run_download(sta_list, day, out_path)\n day = day + 86400\n '\\n # chunksize is how many tasks will be processed by one processor\\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\\n # close() & join() is necessary\\n # No more work\\n pool.close()\\n\\n # simple progress bar\\n while (True):\\n remaining = rs._number_left\\n print(\"finished:{0}/{1}\".format(len(tasks) - remaining, len(tasks)),\\n end=\\'\\r\\') # \\'\\r\\' means remove the last line\\n if (rs.ready()):\\n break\\n time.sleep(0.5)\\n\\n # Wait for completion\\n pool.join()\\n '\n return None\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef load_stations(filename):\n with open(filename, 'r') as f:\n sta_data = f.readlines()\n sta_list = []\n for l in range(1, len(sta_data)):\n sta_info = sta_data[l]\n net_name = re.split(',', sta_info)[0]\n sta_name = re.split(',', sta_info)[1]\n chan_name = re.split(',', sta_info)[2]\n sta_list.append([net_name, sta_name, chan_name])\n return sta_list\n\n\ndef set_folders(out_path, startday, endday):\n day = startday\n while day <= endday:\n year_folder = str(day.year).zfill(4)\n day_folder = str(day.year).zfill(4) + str(day.month).zfill(2) + str(day\n .day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n if not os.path.exists(out_folder):\n os.makedirs(out_folder)\n day = day + 86400\n return None\n\n\ndef obspy_download(client, net_name, sta_name, chn_name, starttime, endtime,\n out_path, time_thre=10):\n year_folder = str(starttime.year)\n day_folder = str(starttime.year).zfill(4) + str(starttime.month).zfill(2\n ) + str(starttime.day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n outfile = os.path.join(out_folder, net_name + '.' + sta_name + '.' 
+\n chn_name + '.mseed')\n if not os.path.exists(outfile):\n t = 0\n flag = False\n while flag == False and t < time_thre:\n try:\n client.get_waveforms(network=net_name, station=sta_name,\n location='--', channel=chn_name, starttime=starttime,\n endtime=endtime, filename=outfile)\n flag = True\n except BaseException:\n pass\n time.sleep(0.5)\n t += 1\n if not flag:\n with open('download.log', 'a') as f:\n f.write('No data: ' + outfile + '\\n')\n return None\n\n\ndef obspy_download_parallel(data_center, startday, endday, sta_file,\n out_path, cores=1):\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n f.write('The number of stations is: ' + str(len(sta_list)) + '\\n')\n day = startday\n while day <= endday:\n t_b = time.time()\n with open('download.log', 'a') as f:\n f.write('Day: ' + str(day) + '\\n')\n print(day)\n starttime = day\n endtime = day + 86400\n client = Client(data_center)\n if cores == 1:\n for i in range(len(sta_list)):\n sta = sta_list[i]\n print(sta)\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n obspy_download(client, net_name, sta_name, chan_name,\n starttime, endtime, out_path)\n else:\n pass\n t_e = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(t_e - t_b) + '\\n')\n day = day + 86400\n return None\n\n\ndef stp_run_download(sta_list, download_date, out_path):\n with open('download.log', 'a') as f:\n f.write(str(download_date) + '\\n')\n tb = time.time()\n year = str(download_date.year).zfill(4)\n month = str(download_date.month).zfill(2)\n day = str(download_date.day).zfill(2)\n day_folder = year + month + day\n out_folder = os.path.join(out_path, year, day_folder)\n out_folder_old = os.path.join(out_path + '_old', year, day_folder)\n p = subprocess.Popen(['stp'], stdin=subprocess.PIPE)\n s = 'MSEED \\n'\n for i in range(len(sta_list)):\n sta = sta_list[i]\n 
net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n out_sta_file = glob.glob(os.path.join(out_folder_old, '*%s.%s.%s*' %\n (net_name, sta_name, chan_name)))\n if len(out_sta_file) == 0:\n s += 'WIN {} {} {} {}/{}/{},00:00:00 +1d \\n'.format(net_name,\n sta_name, chan_name, year, month, day)\n s += 'quit \\n'\n p.communicate(s.encode())\n out_files = glob.glob('%s%s%s*.*' % (year, month, day))\n for out_file in out_files:\n shutil.move(out_file, out_folder)\n te = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(te - tb) + '\\n')\n\n\ndef stp_download_parallel(startday, endday, sta_file, out_path, cores=1):\n \"\"\"\n\n :param startday: obspy.core.utcdatetime.UTCDateTime\n :param endday: obspy.core.utcdatetime.UTCDateTime\n :param sta_file: Network,Station,Channel,Latitude,Longitude\n :param out_path:\n :param cores:\n :return:\n \"\"\"\n if os.path.exists('download.log'):\n os.remove('download.log')\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n pool = multiprocessing.Pool(processes=cores)\n tasks = []\n day = startday\n while day <= endday:\n print(day)\n stp_run_download(sta_list, day, out_path)\n day = day + 86400\n '\\n # chunksize is how many tasks will be processed by one processor\\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\\n # close() & join() is necessary\\n # No more work\\n pool.close()\\n\\n # simple progress bar\\n while (True):\\n remaining = rs._number_left\\n print(\"finished:{0}/{1}\".format(len(tasks) - remaining, len(tasks)),\\n end=\\'\\r\\') # \\'\\r\\' means remove the last line\\n if (rs.ready()):\\n break\\n time.sleep(0.5)\\n\\n # Wait for completion\\n pool.join()\\n '\n return None\n\n\nif __name__ == '__main__':\n LOCAL_PATH = (\n '/Users/yunnaidan/Project/Dynamic_Triggering/Workspace/Central_California'\n )\n REMOTE_PATH = 
'/home/yunnd/Workspace/Dynamic_triggering/Central_California'\n if platform.system() == 'Darwin':\n ROOT_PATH = LOCAL_PATH\n if platform.system() == 'Linux':\n ROOT_PATH = REMOTE_PATH\n startday = UTCDateTime('2009-01-03')\n endday = UTCDateTime('2009-01-05')\n sta_file = os.path.join(ROOT_PATH,\n 'data/station_info/stations_CI_selected_for_download_BH.txt')\n out_path = os.path.join(ROOT_PATH, 'data/time_series/raw_data/mseed')\n data_center = 'SCEDC'\n obspy_download_parallel(data_center, startday, endday, sta_file,\n out_path, cores=1)\n pass\n",
"step-5": "\"\"\"\n@version:\nauthor:yunnaidan\n@time: 2019/07/22\n@file: download_mseed.py\n@function:\n\"\"\"\nfrom obspy.clients.fdsn import Client\nfrom obspy.core import UTCDateTime\nimport numpy as np\nimport obspy\nimport os\nimport re\nimport time\nimport glob\nimport shutil\nimport platform\nimport subprocess\nimport multiprocessing\n\n\ndef load_stations(filename):\n with open(filename, 'r') as f:\n sta_data = f.readlines()\n sta_list = []\n for l in range(1, len(sta_data)):\n sta_info = sta_data[l]\n net_name = re.split(',', sta_info)[0]\n sta_name = re.split(',', sta_info)[1]\n chan_name = re.split(',', sta_info)[2]\n sta_list.append([net_name, sta_name, chan_name])\n\n return sta_list\n\n\ndef set_folders(out_path, startday, endday):\n day = startday\n while day <= endday:\n year_folder = str(day.year).zfill(4)\n day_folder = str(day.year).zfill(\n 4) + str(day.month).zfill(2) + str(day.day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n if not os.path.exists(out_folder):\n os.makedirs(out_folder)\n\n day = day + 86400\n\n return None\n\n\ndef obspy_download(\n client,\n net_name,\n sta_name,\n chn_name,\n starttime,\n endtime,\n out_path,\n time_thre=10):\n year_folder = str(starttime.year)\n day_folder = str(starttime.year).zfill(\n 4) + str(starttime.month).zfill(2) + str(starttime.day).zfill(2)\n out_folder = os.path.join(out_path, year_folder, day_folder)\n\n outfile = os.path.join(\n out_folder, net_name + '.' + sta_name + '.' 
+ chn_name + '.mseed')\n # Incremental download\n if not os.path.exists(outfile):\n t = 0\n flag = False\n while flag == False and t < time_thre:\n try:\n client.get_waveforms(\n network=net_name,\n station=sta_name,\n location='--',\n channel=chn_name,\n starttime=starttime,\n endtime=endtime,\n filename=outfile)\n flag = True\n except BaseException:\n pass\n time.sleep(0.5)\n t += 1\n\n if not flag:\n with open('download.log', 'a') as f:\n f.write('No data: ' + outfile + '\\n')\n\n return None\n\n\ndef obspy_download_parallel(\n data_center,\n startday,\n endday,\n sta_file,\n out_path,\n cores=1):\n\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n f.write('The number of stations is: ' + str(len(sta_list)) + '\\n')\n\n day = startday\n while day <= endday:\n t_b = time.time()\n with open('download.log', 'a') as f:\n f.write('Day: ' + str(day) + '\\n')\n print(day)\n starttime = day\n endtime = day + 86400\n\n client = Client(data_center)\n\n if cores == 1:\n for i in range(len(sta_list)):\n sta = sta_list[i]\n print (sta)\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n obspy_download(\n client,\n net_name,\n sta_name,\n chan_name,\n starttime,\n endtime,\n out_path)\n else:\n pass\n\n t_e = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(t_e - t_b) + '\\n')\n day = day + 86400\n\n return None\n\n\ndef stp_run_download(sta_list, download_date, out_path):\n with open('download.log', 'a') as f:\n f.write(str(download_date) + '\\n')\n\n tb = time.time()\n year = str(download_date.year).zfill(4)\n month = str(download_date.month).zfill(2)\n day = str(download_date.day).zfill(2)\n day_folder = year + month + day\n out_folder = os.path.join(out_path, year, day_folder)\n\n out_folder_old = os.path.join(out_path + '_old', year, day_folder)\n\n p = subprocess.Popen(['stp'], 
stdin=subprocess.PIPE)\n s = \"MSEED \\n\"\n\n for i in range(len(sta_list)):\n\n sta = sta_list[i]\n net_name = sta[0]\n sta_name = sta[1]\n chan_name = sta[2]\n\n out_sta_file = glob.glob(\n os.path.join(\n out_folder_old, '*%s.%s.%s*' %\n (net_name, sta_name, chan_name)))\n\n if len(out_sta_file) == 0:\n s += \"WIN {} {} {} {}/{}/{},00:00:00 +1d \\n\".format(\n net_name, sta_name, chan_name, year, month, day)\n\n s += \"quit \\n\"\n p.communicate(s.encode())\n\n out_files = glob.glob('%s%s%s*.*' % (year, month, day))\n for out_file in out_files:\n shutil.move(out_file, out_folder)\n\n te = time.time()\n with open('download.log', 'a') as f:\n f.write('Using time: ' + str(te - tb) + '\\n')\n\n\ndef stp_download_parallel(startday, endday, sta_file, out_path, cores=1):\n '''\n\n :param startday: obspy.core.utcdatetime.UTCDateTime\n :param endday: obspy.core.utcdatetime.UTCDateTime\n :param sta_file: Network,Station,Channel,Latitude,Longitude\n :param out_path:\n :param cores:\n :return:\n '''\n if os.path.exists('download.log'):\n os.remove('download.log')\n with open('download.log', 'a') as f:\n f.write('>>> ' + str(time.localtime(time.time())) + '\\n')\n\n set_folders(out_path, startday, endday)\n sta_list = load_stations(sta_file)\n\n pool = multiprocessing.Pool(processes=cores)\n tasks = []\n\n day = startday\n while day <= endday:\n print(day)\n # tasks.append((sta_list, day, out_path))\n stp_run_download(sta_list, day, out_path)\n day = day + 86400\n\n '''\n # chunksize is how many tasks will be processed by one processor\n rs = pool.starmap_async(stp_run_download, tasks, chunksize=1)\n # close() & join() is necessary\n # No more work\n pool.close()\n\n # simple progress bar\n while (True):\n remaining = rs._number_left\n print(\"finished:{0}/{1}\".format(len(tasks) - remaining, len(tasks)),\n end='\\r') # '\\r' means remove the last line\n if (rs.ready()):\n break\n time.sleep(0.5)\n\n # Wait for completion\n pool.join()\n '''\n\n return None\n\n\nif __name__ 
== '__main__':\n LOCAL_PATH = '/Users/yunnaidan/Project/Dynamic_Triggering/Workspace/Central_California'\n REMOTE_PATH = '/home/yunnd/Workspace/Dynamic_triggering/Central_California'\n if platform.system() == 'Darwin':\n ROOT_PATH = LOCAL_PATH\n if platform.system() == 'Linux':\n ROOT_PATH = REMOTE_PATH\n\n startday = UTCDateTime('2009-01-03')\n endday = UTCDateTime('2009-01-05')\n\n sta_file = os.path.join(\n ROOT_PATH,\n 'data/station_info/stations_CI_selected_for_download_BH.txt')\n\n out_path = os.path.join(ROOT_PATH, 'data/time_series/raw_data/mseed')\n data_center = 'SCEDC'\n obspy_download_parallel(\n data_center,\n startday,\n endday,\n sta_file,\n out_path,\n cores=1)\n # stp_download_parallel(startday, endday, sta_file, out_path, cores=15)\n\n pass\n",
"step-ids": [
3,
5,
6,
7,
9
]
}
|
[
3,
5,
6,
7,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(args):
    """Run the selected Hail benchmarks locally and emit a JSON report.

    ``args`` is the parsed argparse namespace produced by ``register_main``.
    Results go to ``args.output`` if given, otherwise to stdout.
    """
    init_logging()
    records = []

    # Each finished benchmark reports its stats through this callback.
    def handler(stats):
        records.append(stats)
    # Benchmark data directory: CLI flag, then env var, then a tmp default.
    data_dir = args.data_dir or os.environ.get('HAIL_BENCHMARK_DIR'
        ) or '/tmp/hail_benchmark_data'
    profiler_path = os.environ.get('ASYNC_PROFILER_HOME')
    # --profile requires async-profiler to be installed and located.
    if args.profile and profiler_path is None:
        raise KeyError(
            'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'
            )
    config = RunConfig(args.n_iter, handler, noisy=not args.quiet, timeout=
        args.timeout, dry_run=args.dry_run, data_dir=data_dir, cores=args.
        cores, verbose=args.verbose, log=args.log, profiler_path=
        profiler_path, profile=args.profile, prof_fmt=args.prof_fmt)
    # --tests and --pattern may both be given; if neither is, run everything.
    if args.tests:
        run_list(args.tests.split(','), config)
    if args.pattern:
        run_pattern(args.pattern, config)
    if not args.pattern and not args.tests:
        run_all(config)
    # A dry run only prints what would execute; nothing to report.
    if args.dry_run:
        return
    data = {'config': {'cores': args.cores, 'version': hl.__version__,
        'timestamp': str(datetime.datetime.now()), 'system': sys.platform},
        'benchmarks': records}
    if args.output:
        with open(args.output, 'w') as out:
            json.dump(data, out)
    else:
        print(json.dumps(data))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(args):
    """Run the selected Hail benchmarks locally and emit a JSON report.

    ``args`` is the parsed argparse namespace produced by ``register_main``.
    Results go to ``args.output`` if given, otherwise to stdout.
    """
    init_logging()

    records = []

    def handler(stats):
        # Collect per-benchmark statistics as they complete.
        records.append(stats)

    # Benchmark data directory: CLI flag, then env var, then a tmp default.
    data_dir = (args.data_dir
                or os.environ.get('HAIL_BENCHMARK_DIR')
                or '/tmp/hail_benchmark_data')
    profiler_path = os.environ.get('ASYNC_PROFILER_HOME')
    # --profile requires async-profiler to be installed and located.
    if args.profile and profiler_path is None:
        raise KeyError(
            'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'
            )

    config = RunConfig(
        args.n_iter,
        handler,
        noisy=not args.quiet,
        timeout=args.timeout,
        dry_run=args.dry_run,
        data_dir=data_dir,
        cores=args.cores,
        verbose=args.verbose,
        log=args.log,
        profiler_path=profiler_path,
        profile=args.profile,
        prof_fmt=args.prof_fmt)

    # --tests and --pattern may both be given; if neither is, run everything.
    if args.tests:
        run_list(args.tests.split(','), config)
    if args.pattern:
        run_pattern(args.pattern, config)
    if not args.pattern and not args.tests:
        run_all(config)

    # A dry run only prints what would execute; nothing to report.
    if args.dry_run:
        return

    data = {
        'config': {
            'cores': args.cores,
            'version': hl.__version__,
            'timestamp': str(datetime.datetime.now()),
            'system': sys.platform,
        },
        'benchmarks': records,
    }
    if args.output:
        with open(args.output, 'w') as out:
            json.dump(data, out)
    else:
        print(json.dumps(data))
def register_main(subparser) -> 'None':
    """Attach the ``run`` subcommand and all of its CLI options to *subparser*."""
    parser = subparser.add_parser(
        'run',
        help='Run Hail benchmarks locally.',
        description='Run Hail benchmarks locally.')
    # (flags, keyword arguments) for each option, in registration order.
    options = [
        (('--tests', '-t'), dict(type=str, required=False,
            help='Run specific comma-delimited tests instead of running all tests.')),
        (('--cores', '-c'), dict(type=int, default=1,
            help='Number of cores to use.')),
        (('--pattern', '-k'), dict(type=str, required=False,
            help='Run all tests that substring match the pattern')),
        (('--n-iter', '-n'), dict(type=int, default=3,
            help='Number of iterations for each test.')),
        (('--log', '-l'), dict(type=str, help='Log file path')),
        (('--quiet', '-q'), dict(action='store_true',
            help='Do not print testing information to stderr in real time.')),
        (('--verbose', '-v'), dict(action='store_true',
            help='Do not silence Hail logging to standard output.')),
        (('--output', '-o'), dict(type=str, help='Output file path.')),
        (('--data-dir', '-d'), dict(type=str, help='Data directory.')),
        (('--timeout',), dict(type=int, default=1800,
            help='Timeout in seconds after which benchmarks will be interrupted')),
        (('--dry-run',), dict(action='store_true',
            help='Print benchmarks to execute, but do not run.')),
        (('--profile', '-p'), dict(choices=['cpu', 'alloc', 'itimer'],
            nargs='?', const='cpu', help='Run with async-profiler.')),
        (('--prof-fmt', '-f'), dict(choices=['html', 'flame', 'jfr'],
            default='html', help='Choose profiler output.')),
    ]
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)
    # Dispatch target invoked by the top-level CLI driver.
    parser.set_defaults(main=main)
<|reserved_special_token_1|>
import argparse
import datetime
import json
import os
import sys
import hail as hl
from .utils import run_all, run_pattern, run_list, RunConfig
from .. import init_logging
def main(args):
    """Run the selected Hail benchmarks locally and emit a JSON report.

    Selection comes from ``args.tests`` (explicit comma-delimited list),
    ``args.pattern`` (substring match), or neither (run everything).
    The report is written to ``args.output`` if given, otherwise printed
    to stdout as JSON.  Raises KeyError when --profile is requested but
    ASYNC_PROFILER_HOME is not set.
    """
    init_logging()
    collected = []

    def on_stats(stats):
        # Accumulate per-benchmark statistics as each run completes.
        collected.append(stats)

    # Data directory: CLI flag, then env var, then a fixed temp location.
    data_dir = (args.data_dir
                or os.environ.get('HAIL_BENCHMARK_DIR')
                or '/tmp/hail_benchmark_data')
    profiler_path = os.environ.get('ASYNC_PROFILER_HOME')
    if args.profile and profiler_path is None:
        raise KeyError(
            'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'
        )
    config = RunConfig(
        args.n_iter,
        on_stats,
        noisy=not args.quiet,
        timeout=args.timeout,
        dry_run=args.dry_run,
        data_dir=data_dir,
        cores=args.cores,
        verbose=args.verbose,
        log=args.log,
        profiler_path=profiler_path,
        profile=args.profile,
        prof_fmt=args.prof_fmt,
    )
    if args.tests:
        run_list(args.tests.split(','), config)
    if args.pattern:
        run_pattern(args.pattern, config)
    if not args.pattern and not args.tests:
        run_all(config)
    if args.dry_run:
        return
    data = {
        'config': {
            'cores': args.cores,
            'version': hl.__version__,
            'timestamp': str(datetime.datetime.now()),
            'system': sys.platform,
        },
        'benchmarks': collected,
    }
    if args.output:
        with open(args.output, 'w') as out:
            json.dump(data, out)
    else:
        print(json.dumps(data))
def register_main(subparser) -> 'None':
    """Attach the ``run`` subcommand and all of its CLI options to *subparser*."""
    parser = subparser.add_parser(
        'run',
        help='Run Hail benchmarks locally.',
        description='Run Hail benchmarks locally.')
    # (flags, keyword arguments) for each option, in registration order.
    options = [
        (('--tests', '-t'), dict(type=str, required=False,
            help='Run specific comma-delimited tests instead of running all tests.')),
        (('--cores', '-c'), dict(type=int, default=1,
            help='Number of cores to use.')),
        (('--pattern', '-k'), dict(type=str, required=False,
            help='Run all tests that substring match the pattern')),
        (('--n-iter', '-n'), dict(type=int, default=3,
            help='Number of iterations for each test.')),
        (('--log', '-l'), dict(type=str, help='Log file path')),
        (('--quiet', '-q'), dict(action='store_true',
            help='Do not print testing information to stderr in real time.')),
        (('--verbose', '-v'), dict(action='store_true',
            help='Do not silence Hail logging to standard output.')),
        (('--output', '-o'), dict(type=str, help='Output file path.')),
        (('--data-dir', '-d'), dict(type=str, help='Data directory.')),
        (('--timeout',), dict(type=int, default=1800,
            help='Timeout in seconds after which benchmarks will be interrupted')),
        (('--dry-run',), dict(action='store_true',
            help='Print benchmarks to execute, but do not run.')),
        (('--profile', '-p'), dict(choices=['cpu', 'alloc', 'itimer'],
            nargs='?', const='cpu', help='Run with async-profiler.')),
        (('--prof-fmt', '-f'), dict(choices=['html', 'flame', 'jfr'],
            default='html', help='Choose profiler output.')),
    ]
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)
    # Dispatch target invoked by the top-level CLI driver.
    parser.set_defaults(main=main)
<|reserved_special_token_1|>
import argparse
import datetime
import json
import os
import sys
import hail as hl
from .utils import run_all, run_pattern, run_list, RunConfig
from .. import init_logging
def main(args):
    """Run the selected Hail benchmarks locally and emit a JSON report.

    Selection comes from ``args.tests`` (explicit comma-delimited list),
    ``args.pattern`` (substring match), or neither (run everything).
    The report is written to ``args.output`` if given, otherwise printed
    to stdout as JSON.  Raises KeyError when --profile is requested but
    ASYNC_PROFILER_HOME is not set.
    """
    init_logging()
    collected = []

    def on_stats(stats):
        # Accumulate per-benchmark statistics as each run completes.
        collected.append(stats)

    # Data directory: CLI flag, then env var, then a fixed temp location.
    data_dir = (args.data_dir
                or os.environ.get('HAIL_BENCHMARK_DIR')
                or '/tmp/hail_benchmark_data')
    profiler_path = os.environ.get('ASYNC_PROFILER_HOME')
    if args.profile and profiler_path is None:
        raise KeyError(
            'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'
        )
    config = RunConfig(
        args.n_iter,
        on_stats,
        noisy=not args.quiet,
        timeout=args.timeout,
        dry_run=args.dry_run,
        data_dir=data_dir,
        cores=args.cores,
        verbose=args.verbose,
        log=args.log,
        profiler_path=profiler_path,
        profile=args.profile,
        prof_fmt=args.prof_fmt,
    )
    if args.tests:
        run_list(args.tests.split(','), config)
    if args.pattern:
        run_pattern(args.pattern, config)
    if not args.pattern and not args.tests:
        run_all(config)
    if args.dry_run:
        return
    data = {
        'config': {
            'cores': args.cores,
            'version': hl.__version__,
            'timestamp': str(datetime.datetime.now()),
            'system': sys.platform,
        },
        'benchmarks': collected,
    }
    if args.output:
        with open(args.output, 'w') as out:
            json.dump(data, out)
    else:
        print(json.dumps(data))
def register_main(subparser) -> 'None':
    """Attach the ``run`` subcommand and all of its CLI options to *subparser*."""
    parser = subparser.add_parser(
        'run',
        help='Run Hail benchmarks locally.',
        description='Run Hail benchmarks locally.')
    # (flags, keyword arguments) for each option, in registration order.
    options = [
        (('--tests', '-t'), dict(type=str, required=False,
            help='Run specific comma-delimited tests instead of running all tests.')),
        (('--cores', '-c'), dict(type=int, default=1,
            help='Number of cores to use.')),
        (('--pattern', '-k'), dict(type=str, required=False,
            help='Run all tests that substring match the pattern')),
        (('--n-iter', '-n'), dict(type=int, default=3,
            help='Number of iterations for each test.')),
        (('--log', '-l'), dict(type=str, help='Log file path')),
        (('--quiet', '-q'), dict(action='store_true',
            help='Do not print testing information to stderr in real time.')),
        (('--verbose', '-v'), dict(action='store_true',
            help='Do not silence Hail logging to standard output.')),
        (('--output', '-o'), dict(type=str, help='Output file path.')),
        (('--data-dir', '-d'), dict(type=str, help='Data directory.')),
        (('--timeout',), dict(type=int, default=1800,
            help='Timeout in seconds after which benchmarks will be interrupted')),
        (('--dry-run',), dict(action='store_true',
            help='Print benchmarks to execute, but do not run.')),
        (('--profile', '-p'), dict(choices=['cpu', 'alloc', 'itimer'],
            nargs='?', const='cpu', help='Run with async-profiler.')),
        (('--prof-fmt', '-f'), dict(choices=['html', 'flame', 'jfr'],
            default='html', help='Choose profiler output.')),
    ]
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)
    # Dispatch target invoked by the top-level CLI driver.
    parser.set_defaults(main=main)
|
flexible
|
{
"blob_id": "d4625dd743dd6648044e40b02743ae80f4caea36",
"index": 9572,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(args):\n init_logging()\n records = []\n\n def handler(stats):\n records.append(stats)\n data_dir = args.data_dir or os.environ.get('HAIL_BENCHMARK_DIR'\n ) or '/tmp/hail_benchmark_data'\n profiler_path = os.environ.get('ASYNC_PROFILER_HOME')\n if args.profile and profiler_path is None:\n raise KeyError(\n 'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'\n )\n config = RunConfig(args.n_iter, handler, noisy=not args.quiet, timeout=\n args.timeout, dry_run=args.dry_run, data_dir=data_dir, cores=args.\n cores, verbose=args.verbose, log=args.log, profiler_path=\n profiler_path, profile=args.profile, prof_fmt=args.prof_fmt)\n if args.tests:\n run_list(args.tests.split(','), config)\n if args.pattern:\n run_pattern(args.pattern, config)\n if not args.pattern and not args.tests:\n run_all(config)\n if args.dry_run:\n return\n data = {'config': {'cores': args.cores, 'version': hl.__version__,\n 'timestamp': str(datetime.datetime.now()), 'system': sys.platform},\n 'benchmarks': records}\n if args.output:\n with open(args.output, 'w') as out:\n json.dump(data, out)\n else:\n print(json.dumps(data))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main(args):\n init_logging()\n records = []\n\n def handler(stats):\n records.append(stats)\n data_dir = args.data_dir or os.environ.get('HAIL_BENCHMARK_DIR'\n ) or '/tmp/hail_benchmark_data'\n profiler_path = os.environ.get('ASYNC_PROFILER_HOME')\n if args.profile and profiler_path is None:\n raise KeyError(\n 'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'\n )\n config = RunConfig(args.n_iter, handler, noisy=not args.quiet, timeout=\n args.timeout, dry_run=args.dry_run, data_dir=data_dir, cores=args.\n cores, verbose=args.verbose, log=args.log, profiler_path=\n profiler_path, profile=args.profile, prof_fmt=args.prof_fmt)\n if args.tests:\n run_list(args.tests.split(','), config)\n if args.pattern:\n run_pattern(args.pattern, config)\n if not args.pattern and not args.tests:\n run_all(config)\n if args.dry_run:\n return\n data = {'config': {'cores': args.cores, 'version': hl.__version__,\n 'timestamp': str(datetime.datetime.now()), 'system': sys.platform},\n 'benchmarks': records}\n if args.output:\n with open(args.output, 'w') as out:\n json.dump(data, out)\n else:\n print(json.dumps(data))\n\n\ndef register_main(subparser) ->'None':\n parser = subparser.add_parser('run', help=\n 'Run Hail benchmarks locally.', description=\n 'Run Hail benchmarks locally.')\n parser.add_argument('--tests', '-t', type=str, required=False, help=\n 'Run specific comma-delimited tests instead of running all tests.')\n parser.add_argument('--cores', '-c', type=int, default=1, help=\n 'Number of cores to use.')\n parser.add_argument('--pattern', '-k', type=str, required=False, help=\n 'Run all tests that substring match the pattern')\n parser.add_argument('--n-iter', '-n', type=int, default=3, help=\n 'Number of iterations for each test.')\n parser.add_argument('--log', '-l', type=str, help='Log file path')\n parser.add_argument('--quiet', '-q', action='store_true', help=\n 'Do not print testing information to 
stderr in real time.')\n parser.add_argument('--verbose', '-v', action='store_true', help=\n 'Do not silence Hail logging to standard output.')\n parser.add_argument('--output', '-o', type=str, help='Output file path.')\n parser.add_argument('--data-dir', '-d', type=str, help='Data directory.')\n parser.add_argument('--timeout', type=int, default=1800, help=\n 'Timeout in seconds after which benchmarks will be interrupted')\n parser.add_argument('--dry-run', action='store_true', help=\n 'Print benchmarks to execute, but do not run.')\n parser.add_argument('--profile', '-p', choices=['cpu', 'alloc',\n 'itimer'], nargs='?', const='cpu', help='Run with async-profiler.')\n parser.add_argument('--prof-fmt', '-f', choices=['html', 'flame', 'jfr'\n ], default='html', help='Choose profiler output.')\n parser.set_defaults(main=main)\n",
"step-4": "import argparse\nimport datetime\nimport json\nimport os\nimport sys\nimport hail as hl\nfrom .utils import run_all, run_pattern, run_list, RunConfig\nfrom .. import init_logging\n\n\ndef main(args):\n init_logging()\n records = []\n\n def handler(stats):\n records.append(stats)\n data_dir = args.data_dir or os.environ.get('HAIL_BENCHMARK_DIR'\n ) or '/tmp/hail_benchmark_data'\n profiler_path = os.environ.get('ASYNC_PROFILER_HOME')\n if args.profile and profiler_path is None:\n raise KeyError(\n 'In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`'\n )\n config = RunConfig(args.n_iter, handler, noisy=not args.quiet, timeout=\n args.timeout, dry_run=args.dry_run, data_dir=data_dir, cores=args.\n cores, verbose=args.verbose, log=args.log, profiler_path=\n profiler_path, profile=args.profile, prof_fmt=args.prof_fmt)\n if args.tests:\n run_list(args.tests.split(','), config)\n if args.pattern:\n run_pattern(args.pattern, config)\n if not args.pattern and not args.tests:\n run_all(config)\n if args.dry_run:\n return\n data = {'config': {'cores': args.cores, 'version': hl.__version__,\n 'timestamp': str(datetime.datetime.now()), 'system': sys.platform},\n 'benchmarks': records}\n if args.output:\n with open(args.output, 'w') as out:\n json.dump(data, out)\n else:\n print(json.dumps(data))\n\n\ndef register_main(subparser) ->'None':\n parser = subparser.add_parser('run', help=\n 'Run Hail benchmarks locally.', description=\n 'Run Hail benchmarks locally.')\n parser.add_argument('--tests', '-t', type=str, required=False, help=\n 'Run specific comma-delimited tests instead of running all tests.')\n parser.add_argument('--cores', '-c', type=int, default=1, help=\n 'Number of cores to use.')\n parser.add_argument('--pattern', '-k', type=str, required=False, help=\n 'Run all tests that substring match the pattern')\n parser.add_argument('--n-iter', '-n', type=int, default=3, help=\n 'Number of iterations for each test.')\n 
parser.add_argument('--log', '-l', type=str, help='Log file path')\n parser.add_argument('--quiet', '-q', action='store_true', help=\n 'Do not print testing information to stderr in real time.')\n parser.add_argument('--verbose', '-v', action='store_true', help=\n 'Do not silence Hail logging to standard output.')\n parser.add_argument('--output', '-o', type=str, help='Output file path.')\n parser.add_argument('--data-dir', '-d', type=str, help='Data directory.')\n parser.add_argument('--timeout', type=int, default=1800, help=\n 'Timeout in seconds after which benchmarks will be interrupted')\n parser.add_argument('--dry-run', action='store_true', help=\n 'Print benchmarks to execute, but do not run.')\n parser.add_argument('--profile', '-p', choices=['cpu', 'alloc',\n 'itimer'], nargs='?', const='cpu', help='Run with async-profiler.')\n parser.add_argument('--prof-fmt', '-f', choices=['html', 'flame', 'jfr'\n ], default='html', help='Choose profiler output.')\n parser.set_defaults(main=main)\n",
"step-5": "import argparse\nimport datetime\nimport json\nimport os\nimport sys\n\nimport hail as hl\n\nfrom .utils import run_all, run_pattern, run_list, RunConfig\nfrom .. import init_logging\n\n\ndef main(args):\n init_logging()\n\n records = []\n\n def handler(stats):\n records.append(stats)\n\n data_dir = args.data_dir or os.environ.get('HAIL_BENCHMARK_DIR') or '/tmp/hail_benchmark_data'\n profiler_path = os.environ.get('ASYNC_PROFILER_HOME')\n\n if args.profile and profiler_path is None:\n raise KeyError(\"In order to use --profile, you must download async-profiler and set `ASYNC_PROFILER_HOME`\")\n\n config = RunConfig(args.n_iter, handler, noisy=not args.quiet, timeout=args.timeout, dry_run=args.dry_run,\n data_dir=data_dir, cores=args.cores, verbose=args.verbose, log=args.log,\n profiler_path=profiler_path, profile=args.profile, prof_fmt=args.prof_fmt)\n if args.tests:\n run_list(args.tests.split(','), config)\n if args.pattern:\n run_pattern(args.pattern, config)\n if not args.pattern and not args.tests:\n run_all(config)\n\n if args.dry_run:\n return\n\n data = {'config': {'cores': args.cores,\n 'version': hl.__version__,\n 'timestamp': str(datetime.datetime.now()),\n 'system': sys.platform},\n 'benchmarks': records}\n if args.output:\n with open(args.output, 'w') as out:\n json.dump(data, out)\n else:\n print(json.dumps(data))\n\n\ndef register_main(subparser) -> 'None':\n parser = subparser.add_parser(\n 'run',\n help='Run Hail benchmarks locally.',\n description='Run Hail benchmarks locally.'\n )\n parser.add_argument('--tests', '-t',\n type=str,\n required=False,\n help='Run specific comma-delimited tests instead of running all tests.')\n parser.add_argument('--cores', '-c',\n type=int,\n default=1,\n help='Number of cores to use.')\n parser.add_argument('--pattern', '-k', type=str, required=False,\n help='Run all tests that substring match the pattern')\n parser.add_argument(\"--n-iter\", \"-n\",\n type=int,\n default=3,\n help='Number of iterations 
for each test.')\n parser.add_argument(\"--log\", \"-l\",\n type=str,\n help='Log file path')\n parser.add_argument(\"--quiet\", \"-q\",\n action=\"store_true\",\n help=\"Do not print testing information to stderr in real time.\")\n parser.add_argument(\"--verbose\", \"-v\",\n action=\"store_true\",\n help=\"Do not silence Hail logging to standard output.\")\n parser.add_argument(\"--output\", \"-o\",\n type=str,\n help=\"Output file path.\")\n parser.add_argument(\"--data-dir\", \"-d\",\n type=str,\n help=\"Data directory.\")\n parser.add_argument('--timeout',\n type=int,\n default=1800,\n help=\"Timeout in seconds after which benchmarks will be interrupted\")\n parser.add_argument('--dry-run',\n action='store_true',\n help='Print benchmarks to execute, but do not run.')\n parser.add_argument('--profile', '-p',\n choices=['cpu', 'alloc', 'itimer'],\n nargs='?', const='cpu',\n help='Run with async-profiler.')\n parser.add_argument('--prof-fmt', '-f',\n choices=['html', 'flame', 'jfr'],\n default='html',\n help='Choose profiler output.')\n parser.set_defaults(main=main)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#Main program:
#reads IMU data from arduino uart
#receives PS3 Controller input
#Mantains Controller input frequency with CST
#!/usr/bin/env python
from map import mapControllerToDeg
from map import constrain
from map import wrap_180
from map import motorOutputLimitHandler
from uart1 import IMUDevice
import socket
from controlStateTable2 import ControlStateTable
from map import arduino_map
from pid import PID
import time
import pdb
def setup(pids):
	"""Load the gain constants into each axis PID controller.

	pids: dict of PID instances keyed 'PITCH', 'ROLL', 'YAW'.
	Only pitch currently gets a non-zero proportional gain; roll is
	zeroed and yaw is left at the PID class defaults (the commented
	lines below are earlier tuning attempts kept for reference).
	"""
	# PID Configuration
	#pids['PITCH'].set_Kpid(6.5,0.1,1.2)
	#pids['ROLL'].set_Kpid(6.5,0.1,1.2)
	#pids['PITCH'].set_Kpid(6.5,0.1,1.2)
	#pids['ROLL'].set_Kpid(6.5,0.1,1.2)
	#pids['YAW'].set_Kpid(2.7,1,0)
	pids['PITCH'].set_Kpid(6.5,0,0)
	pids['ROLL'].set_Kpid(0,0,0)
	#pids['YAW'].set_Kpid(0,0,0)
def print_IMU_CST_Streams():
	# Debug helper: dump the current control state table to stdout.
	# (IMU line dump disabled below.)
	print CST.strTable()
	#print "IMU reading" + IMU.getLine()
def convert_IMU_CST_to_Degrees():
	"""Refresh the IMU attitude and convert controller input to degrees.

	Side effects: IMU.getYPR() updates IMU.Yaw/Pitch/Roll from the latest
	serial line, and the module-global C_YPR is rebound with the
	controller setpoints mapped to degrees via mapControllerToDeg().
	"""
	global C_YPR
	#sets Y,P,R in IMU class
	IMU.getYPR(IMU.getLine())
	#converts the control values for YPR to degrees
	C_YPR = mapControllerToDeg(CST.getTable())
	#print "IMU DATA:"
	#print IMU.Yaw, IMU.Pitch, IMU.Roll
	print "CONTROL DEG DATA: ", str(C_YPR)
def calculatePIDs():
	"""Run one PID update per axis against the latest IMU attitude.

	Stores results in the module-global pitch_output / roll_output /
	yaw_output.  Pitch and roll are clamped to [-250, 250], yaw to
	[-360, 360]; yaw setpoint and measurement are wrapped to +/-180 deg
	first.  The third argument to update_pid_std is 10000 — presumably
	the timestep in microseconds; TODO confirm against the PID class.
	"""
	global pitch_output, roll_output, yaw_output, thr, pids
	#PID CODE
	#print "PID _____ PITCH: "
	pitch_output = constrain(pids['PITCH'].update_pid_std(C_YPR['P'], IMU.Pitch, 10000),-250, 250)
	#print "PID _____ ROLL: "
	roll_output = constrain(pids['ROLL'].update_pid_std(C_YPR['R'], IMU.Roll, 10000),-250, 250)
	#print "PID _____ YAW: "
	yaw_output = constrain(pids['YAW'].update_pid_std(wrap_180(C_YPR['Y']), wrap_180(IMU.Yaw), 10000),-360, 360)
	#get thrust
	#thr = float(CST.getTable()['THRUST'])
def calculateMotorThrust():
	"""Mix the axis PID outputs into four motor commands and send them.

	Each motor gets the base thrust plus/minus the roll, pitch and yaw
	corrections (the commented-out block is the previous sign
	convention).  Commands are range-limited by motorOutputLimitHandler,
	then written to the Arduino over serial as "BR,BL,FR,FL" integers.
	"""
	global pitch_output, roll_output, yaw_output, thr, motor, pidStatusEnable
	#motor['FL'] = thr + roll_output + pitch_output - yaw_output
	#motor['BL'] = thr + roll_output - pitch_output + yaw_output
	#motor['FR'] = thr - roll_output + pitch_output + yaw_output
	#motor['BR'] = thr - roll_output - pitch_output - yaw_output
	motor['FL'] = thr + roll_output - pitch_output + yaw_output
	motor['BL'] = thr + roll_output + pitch_output - yaw_output
	motor['FR'] = thr - roll_output - pitch_output - yaw_output
	motor['BR'] = thr - roll_output + pitch_output + yaw_output
	# Clamp each command into the safe output window before sending.
	motorOutputLimitHandler(motor)
	sep= ","
	tuple1 = str(int(motor['BR']))+sep+str(int(motor['BL']))+sep+str(int(motor['FR']))+sep+str(int(motor['FL']))
	# NOTE(review): writeResult is never checked — a failed serial write
	# is silently ignored (error handling below is commented out).
	writeResult = IMU.writeSerialPort(tuple1)
	#except Exception, e:
	#	raise
	#	if writeResult == -1:
	#print "Could not write motor value............."
	print "Motor: ", str(motor)
	print "--------"
def sleep():
	"""Pause for one control-loop period.

	NOTE(review): despite the names, this sleeps
	seconds/microseconds_unit = 10000/1e6 = 0.01 s — confirm the
	intended loop rate before renaming.
	"""
	global seconds, microseconds_unit
	time.sleep(seconds/microseconds_unit)
def stabilizationCode():
	"""One stabilization iteration: read sensors, run PIDs, drive motors,
	then sleep for the loop period."""
	print_IMU_CST_Streams()
	convert_IMU_CST_to_Degrees()
	calculatePIDs()
	#try:
	calculateMotorThrust()
	#except Exception, e:
	#	raise
	sleep()
# --- Loop timing constants (see sleep(): actual delay is 0.01 s) ---
seconds = 10000
microseconds_unit = 1000000.0
unblocking = 0 #unblocks socket (0 => non-blocking recv on both connections)
#verify client and server ip are = to interface ip
#TCP_IP = '192.168.1.7'
TCP_IP='192.168.1.101'
TCP_PORT = 5005
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
#To store degrees from PS3 Controller
C_YPR = {}
#PID dictionary: one controller per rotation axis
pids = { 'PITCH': PID(),
         'ROLL': PID(),
         'YAW': PID() }
#motor dictionary: Front/Back x Left/Right motor commands
motor = { 'FL':0, 'BL':0,
          'FR':0, 'BR':0 }
# Axis PID outputs, updated in place by calculatePIDs()
pitch_output=0
roll_output=0
yaw_output=0
thr = 0
# Thrust limits — NOTE(review): defined here but the main loop gates on a
# hard-coded 1150 instead of THR_MIN.
THR_MIN = 1100
THR_MAX = 2000
# Serial link to the Arduino that streams IMU data and drives the motors.
IMU = IMUDevice()
IMU.openSerialPort()
IMU.openSampleFile()
CST = ControlStateTable()
############################## open wireless constants set port ##################
TCP_IP2='192.168.1.101'
TCP_PORT2 = 5008
s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.setblocking(unblocking)
server_address2 = (TCP_IP2, TCP_PORT2)
s2.bind(server_address2)
s2.listen(1)
# Blocks until the PID-tuning client connects.
conn2, addr2 = s2.accept()
print 'Connection address:', addr2
################################ open ps3 socket port #####################
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.setblocking(unblocking)
server_address = (TCP_IP, TCP_PORT)
s.bind(server_address)
s.listen(1)
# Blocks until the PS3 controller client connects.
conn, addr = s.accept()
print 'Connection address:', addr
conn.setblocking(unblocking) #does not wait for packets
conn2.setblocking(unblocking)
#configure PID gains before entering the control loop
setup(pids)
def setPidConstantsWireless(data):
	"""Update one PID gain from a wireless tuning packet.

	Packet format (ASCII): data[0] selects the axis ('p'=pitch,
	'r'=roll, 'y'=yaw), data[1] selects the constant ('p', 'i' or 'd'),
	and the remainder of the string is the new float value.  After the
	update, the full gain set for all three axes is echoed back to the
	tuning client on conn2.  Raises ValueError if the value part is not
	a valid float.
	"""
	#pdb.set_trace()
	global conn2
	global pids
	keyf = data[0]
	keyPidC=data[1]
	value = data[2:]
	value = float(value)
	p=pids['PITCH']
	r=pids['ROLL']
	y=pids['YAW']
	if keyf == 'p':
		# Start from the current gains so only the selected one changes.
		pKp = p.m_Kp
		pKd = p.m_Kd
		pKi = p.m_Ki
		if keyPidC == 'p':
			pKp = value
		elif keyPidC == 'i':
			pKi = value
		elif keyPidC == 'd':
			pKd = value
		pids['PITCH'].set_Kpid(pKp, pKi, pKd)
	if keyf == 'r':
		rKp = r.m_Kp
		rKd = r.m_Kd
		rKi = r.m_Ki
		if keyPidC == 'p':
			rKp = value
		elif keyPidC == 'i':
			rKi = value
		elif keyPidC == 'd':
			rKd = value
		pids['ROLL'].set_Kpid(rKp, rKi, rKd)
	if keyf == 'y':
		yKp = y.m_Kp
		yKd = y.m_Kd
		yKi = y.m_Ki
		if keyPidC == 'p':
			yKp = value
		elif keyPidC == 'i':
			yKi = value
		elif keyPidC == 'd':
			yKd = value
		pids['YAW'].set_Kpid(yKp, yKi, yKd)
	# Echo the current constants back to the tuning client.
	ptitle="| Pitch: kp, kd, ki= "
	pData =str(p.m_Kp)+","+ str(p.m_Kd)+","+ str(p.m_Ki)
	rtitle="| Roll: kp, kd, ki= "
	rData=str(r.m_Kp)+","+ str(r.m_Kd)+","+ str(r.m_Ki)
	ytitle="| Yaw: kp, kd, ki= "
	yData=str(y.m_Kp)+","+ str(y.m_Kd)+","+ str(y.m_Ki)
	conn2.send(ptitle+pData+rtitle+rData+ytitle+yData)
	#print "DATA RX:"
	#print data
	#print ptitle + pData
	#exit()
################################## Main loop ######################################
# Non-blocking poll of the tuning socket (conn2) and the PS3 controller
# socket (conn).  Both sockets are non-blocking, so recv() raises when no
# data is pending; the bare except clauses below are relied on for that
# "no data yet" path.  NOTE(review): they also swallow every other error.
while 1:
	#pdb.set_trace()
	try:
		data2 = conn2.recv(BUFFER_SIZE)
		#pdb.set_trace()
		if data2 not in ['', None]: #no data
			setPidConstantsWireless(data2)
		#if no data is rx continue
	except:
		#if no data is received from ps3 controller then continue
		try:
			data = conn.recv(BUFFER_SIZE)
		except:
			#controller is connected but no data has been received
			#send data from CST and IMU when no PS3 input received
			if thr >= 1150: #only run pid if thrust is over 1100
				#try:
				stabilizationCode()
				#except Exception, e:
				#	continue
			#else:
				#due to anomalies in the data stream at the beginning of reading
				#the serial buffer we need to start releasing it before our thrust
				#is good for flight
				#buffRelease=IMU.getLine()
			continue
		"""if data in ['',None]: #enable for testing
	#controller is not connected
	#send data from CST and IMU when no PS3 input received
	stabilizationCode()
	continue"""
		#PS3 data received
		#print "received data:"+ data
		key,value=CST.decode(data)
		#print key, value
		if key == 'EXIT': #shutdown pid
			# Safety stop: command minimum throttle on all motors, then quit.
			tuple1 = "1000,1000,1000,1000"
			writeResult = IMU.writeSerialPort(tuple1)
			conn.close()
			conn2.close()
			exit()
		CST.updateStateTable(key,value)
		thr = float(CST.getTable()['THRUST'])
		if thr >= 1150: #only run pid if thrust is over 1100
			#try:
			stabilizationCode()
			#except Exception, e:
			#continue
		#else:
			#due to anomalies in the data stream at the beginning of reading
			#the serial buffer we need to start releasing it before our thrust
			#is good for flight
			#buffRelease=IMU.getLine()
		conn.send(data) #echo
		#conn.close()
|
normal
|
{
"blob_id": "5626e5a4a448630fbbbc92a67ae08f3ed24e1b9e",
"index": 4417,
"step-1": "#Main program:\n#reads IMU data from arduino uart\n#receives PS3 Controller input\n#Mantains Controller input frequency with CST\n\n#!/usr/bin/env python\nfrom map import mapControllerToDeg\nfrom map import constrain\nfrom map import wrap_180\nfrom map import motorOutputLimitHandler\nfrom uart1 import IMUDevice\nimport socket\nfrom controlStateTable2 import ControlStateTable\nfrom map import arduino_map\nfrom pid import PID\nimport time\nimport pdb\ndef setup(pids):\n\t# PID Configuration\n\t\n\t#pids['PITCH'].set_Kpid(6.5,0.1,1.2)\n\t#pids['ROLL'].set_Kpid(6.5,0.1,1.2)\n\n\t#pids['PITCH'].set_Kpid(6.5,0.1,1.2)\n\t#pids['ROLL'].set_Kpid(6.5,0.1,1.2)\n\t#pids['YAW'].set_Kpid(2.7,1,0)\n\n\tpids['PITCH'].set_Kpid(6.5,0,0)\n\tpids['ROLL'].set_Kpid(0,0,0)\n\t#pids['YAW'].set_Kpid(0,0,0)\n\ndef print_IMU_CST_Streams():\n\tprint CST.strTable()\n\t#print \"IMU reading\" + IMU.getLine()\n\ndef convert_IMU_CST_to_Degrees():\n\tglobal C_YPR\n\t#sets Y,P,R in IMU class\n\n\tIMU.getYPR(IMU.getLine())\n\t#converts the control values for YPR to degrees\n\tC_YPR = mapControllerToDeg(CST.getTable())\t\n\t#print \"IMU DATA:\"\n\t#print IMU.Yaw, IMU.Pitch, IMU.Roll\n\tprint \"CONTROL DEG DATA: \", str(C_YPR)\n\t\n\ndef calculatePIDs():\n\tglobal pitch_output, roll_output, yaw_output, thr, pids\n\t#PID CODE\n\t#print \"PID _____ PITCH: \"\n\tpitch_output = constrain(pids['PITCH'].update_pid_std(C_YPR['P'], IMU.Pitch, 10000),-250, 250)\n\t#print \"PID _____ ROLL: \"\n\troll_output = constrain(pids['ROLL'].update_pid_std(C_YPR['R'], IMU.Roll, 10000),-250, 250)\n\t#print \"PID _____ YAW: \"\n\tyaw_output = constrain(pids['YAW'].update_pid_std(wrap_180(C_YPR['Y']), wrap_180(IMU.Yaw), 10000),-360, 360)\n\t#get thrust\n\t#thr = float(CST.getTable()['THRUST'])\n\ndef calculateMotorThrust():\t\t\n\tglobal pitch_output, roll_output, yaw_output, thr, motor, pidStatusEnable\n\n\t#motor['FL'] = thr + roll_output + pitch_output - yaw_output\n\t#motor['BL'] = thr + roll_output - 
pitch_output + yaw_output\n\t#motor['FR'] = thr - roll_output + pitch_output + yaw_output\n\t#motor['BR'] = thr - roll_output - pitch_output - yaw_output\n\tmotor['FL'] = thr + roll_output - pitch_output + yaw_output\n\tmotor['BL'] = thr + roll_output + pitch_output - yaw_output\n\tmotor['FR'] = thr - roll_output - pitch_output - yaw_output\n\tmotor['BR'] = thr - roll_output + pitch_output + yaw_output\n\n\tmotorOutputLimitHandler(motor)\n\tsep= \",\"\n\ttuple1 = str(int(motor['BR']))+sep+str(int(motor['BL']))+sep+str(int(motor['FR']))+sep+str(int(motor['FL']))\n\t\n\n\twriteResult = IMU.writeSerialPort(tuple1)\n\n\t\t#except Exception, e:\n\t#\traise\n#\tif writeResult == -1:\n\t\t#print \"Could not write motor value.............\"\n\tprint \"Motor: \", str(motor)\n\tprint \"--------\"\n\ndef sleep():\n\tglobal seconds, microseconds_unit\n\ttime.sleep(seconds/microseconds_unit)\n\ndef stabilizationCode():\n\tprint_IMU_CST_Streams()\n\tconvert_IMU_CST_to_Degrees()\n\tcalculatePIDs()\n\t#try:\n\tcalculateMotorThrust()\n\t#except Exception, e:\n\t#\traise\n\tsleep()\n\nseconds = 10000\nmicroseconds_unit = 1000000.0\nunblocking = 0 #unblocks socket\n#verify client and server ip are = to interface ip\n#TCP_IP = '192.168.1.7'\nTCP_IP='192.168.1.101'\nTCP_PORT = 5005\nBUFFER_SIZE = 20 # Normally 1024, but we want fast response\n#To store degrees from PS3 Controller\nC_YPR = {}\n#PID dictionary\npids = { 'PITCH': PID(), \n\t'ROLL': PID(),\n\t'YAW': PID() }\n \n#motor dictionary\nmotor = { 'FL':0, 'BL':0,\n \t 'FR':0, 'BR':0 }\n\n\npitch_output=0\nroll_output=0\nyaw_output=0\nthr = 0\n\nTHR_MIN = 1100\nTHR_MAX = 2000\n\nIMU = IMUDevice()\nIMU.openSerialPort()\nIMU.openSampleFile()\nCST = ControlStateTable()\n\n\n############################## open wireless constants set port ##################\nTCP_IP2='192.168.1.101'\nTCP_PORT2 = 5008\ns2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n#s.setblocking(unblocking)\nserver_address2 = (TCP_IP2, 
TCP_PORT2)\ns2.bind(server_address2)\ns2.listen(1)\n\nconn2, addr2 = s2.accept()\n\nprint 'Connection address:', addr2\n################################ open ps3 socket port #####################\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n#s.setblocking(unblocking)\nserver_address = (TCP_IP, TCP_PORT)\ns.bind(server_address)\ns.listen(1)\n\n\nconn, addr = s.accept()\nprint 'Connection address:', addr\n\nconn.setblocking(unblocking) #does not wait for packets\nconn2.setblocking(unblocking)\n#configure PID\nsetup(pids)\ndef setPidConstantsWireless(data):\n\t#pdb.set_trace()\n\tglobal conn2\n\tglobal pids\n\tkeyf = data[0]\n\tkeyPidC=data[1]\n\tvalue = data[2:]\n\tvalue = float(value)\n\tp=pids['PITCH']\n\tr=pids['ROLL']\n\ty=pids['YAW']\n\n\tif keyf == 'p':\n\t\tpKp = p.m_Kp\n\t\tpKd = p.m_Kd\n\t\tpKi = p.m_Ki\t\n\t\tif keyPidC == 'p':\n\t\t\tpKp = value\t\n\t\telif keyPidC == 'i':\n\t\t\tpKi = value\n\t\telif keyPidC == 'd':\n\t\t\tpKd = value\n\t\tpids['PITCH'].set_Kpid(pKp, pKi, pKd)\n\n\tif keyf == 'r':\n\t\trKp = r.m_Kp\n\t\trKd = r.m_Kd\n\t\trKi = r.m_Ki\t\n\t\tif keyPidC == 'p':\n\t\t\trKp = value\t\n\t\telif keyPidC == 'i':\n\t\t\trKi = value\n\t\telif keyPidC == 'd':\n\t\t\trKd = value\n\t\tpids['ROLL'].set_Kpid(rKp, rKi, rKd)\n\n\tif keyf == 'y':\n\t\tyKp = y.m_Kp\n\t\tyKd = y.m_Kd\n\t\tyKi = y.m_Ki\t\n\t\tif keyPidC == 'p':\n\t\t\tyKp = value\t\n\t\telif keyPidC == 'i':\n\t\t\tyKi = value\n\t\telif keyPidC == 'd':\n\t\t\tyKd = value\n\t\tpids['YAW'].set_Kpid(yKp, yKi, yKd)\n\n\tptitle=\"| Pitch: kp, kd, ki= \"\n\tpData =str(p.m_Kp)+\",\"+ str(p.m_Kd)+\",\"+ str(p.m_Ki)\n\trtitle=\"| Roll: kp, kd, ki= \"\n\trData=str(r.m_Kp)+\",\"+ str(r.m_Kd)+\",\"+ str(r.m_Ki)\n\tytitle=\"| Yaw: kp, kd, ki= \"\n\tyData=str(y.m_Kp)+\",\"+ str(y.m_Kd)+\",\"+ str(y.m_Ki)\n\tconn2.send(ptitle+pData+rtitle+rData+ytitle+yData)\n\t#print \"DATA RX:\"\n\t#print data\n\t#print ptitle + pData\n\t#exit()\n\n################################## Main loop 
######################################\nwhile 1:\n\t#pdb.set_trace()\n\ttry:\t\n\t\tdata2 = conn2.recv(BUFFER_SIZE)\n\t\t#pdb.set_trace()\n\t\tif data2 not in ['', None]: #no data\n\t\t\tsetPidConstantsWireless(data2)\t\t\n\t#if no data is rx continue\t\t\n\texcept:\t\n\t\t#if no data is received from ps3 controller then continue\n\t\ttry:\n\t\t\tdata = conn.recv(BUFFER_SIZE)\n\t\texcept: \n\t\t\t#controller is connected but no data has been received\n\t\t\t#send data from CST and IMU when no PS3 input received\n\t\t\tif thr >= 1150: #only run pid if thrust is over 1100\n\t\t\t\t#try:\n\t\t\t\tstabilizationCode()\n\t\t\t\t#except Exception, e:\n\t\t\t\t#\tcontinue\n\t\t\t#else:\n\t\t\t\t#due to anomalies in the data stream at the beginning of reading\n\t\t\t\t#the serial buffer we need to start releasing it before our thrust\n\t\t\t\t#is good for flight\n\t\t\t\t#buffRelease=IMU.getLine()\n\t\t\tcontinue\n\t\t\"\"\"if data in ['',None]: #enable for testing \n\t\t\t#controller is not connected\n\t\t\t#send data from CST and IMU when no PS3 input received\n\t\t\tstabilizationCode()\n\t\t\tcontinue\"\"\"\n\t\t#PS3 data received \n\t\t#print \"received data:\"+ data\n\t\tkey,value=CST.decode(data)\n\t\t\n\t\t#print key, value\n\t\tif key == 'EXIT': #shutdown pid\n\t\t\ttuple1 = \"1000,1000,1000,1000\"\n\t\t\twriteResult = IMU.writeSerialPort(tuple1)\n\t\t\tconn.close()\n\t\t\tconn2.close()\n\t\t\texit()\n\n\t\tCST.updateStateTable(key,value)\n\t\t\n\t\tthr = float(CST.getTable()['THRUST'])\n\t\tif thr >= 1150: #only run pid if thrust is over 1100\n\t\t\t#try:\n\t\t\tstabilizationCode()\n\t\t\t#except Exception, e:\n\t\t\t\t#continue\n\t\t#else:\n\t\t\t#due to anomalies in the data stream at the beginning of reading\n\t\t\t#the serial buffer we need to start releasing it before our thrust\n\t\t\t#is good for flight\n\t\t\t#buffRelease=IMU.getLine()\n\n\t\tconn.send(data) #echo\n\t#conn.close()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
def check(root, a, b):
    """Return False if the values a and b appear as siblings (children of
    the same parent) anywhere in the tree rooted at `root`, True otherwise.

    Fix: the original compared the child *nodes* against the values
    (`root.left == a`), which is always False for plain node objects --
    `isCousin` clearly treats a and b as node .data values (`tmp.add(n.data)`)
    -- so sibling pairs were never detected and same-level siblings were
    wrongly reported as cousins.  Compare the children's .data instead.
    """
    if root:
        # Child data values; None marks a missing child.
        left = root.left.data if root.left else None
        right = root.right.data if root.right else None
        if (left == a and right == b) or (left == b and right == a):
            return False
        return check(root.left, a, b) and check(root.right, a, b)
    return True
def isCousin(root, a, b):
    """Return True when values a and b are cousins: they sit at the same
    depth in the tree but do not share a parent."""
    # Cousins must not be siblings anywhere in the tree.
    if not check(root, a, b):
        return False
    # Level-order walk; at each depth, test whether both values occur.
    level = [root]
    while level:
        values = {node.data for node in level}
        if a in values and b in values:
            return True
        next_level = []
        for node in level:
            if node.left:
                next_level.append(node.left)
            if node.right:
                next_level.append(node.right)
        level = next_level
    return False
|
normal
|
{
"blob_id": "96cfee85194c9c30b3d74bbddc2a31b6933eb032",
"index": 2226,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef isCousin(root, a, b):\n if check(root, a, b) == False:\n return False\n q = []\n q.insert(0, root)\n tmp = set()\n while len(q):\n l = len(q)\n for i in range(l):\n n = q.pop()\n tmp.add(n.data)\n if n.left:\n q.insert(0, n.left)\n if n.right:\n q.insert(0, n.right)\n if a in tmp and b in tmp:\n return True\n tmp.clear()\n return False\n",
"step-3": "def check(root, a, b):\n if root:\n if (root.left == a and root.right == b or root.left == b and root.\n right == a):\n return False\n return check(root.left, a, b) and check(root.right, a, b)\n return True\n\n\ndef isCousin(root, a, b):\n if check(root, a, b) == False:\n return False\n q = []\n q.insert(0, root)\n tmp = set()\n while len(q):\n l = len(q)\n for i in range(l):\n n = q.pop()\n tmp.add(n.data)\n if n.left:\n q.insert(0, n.left)\n if n.right:\n q.insert(0, n.right)\n if a in tmp and b in tmp:\n return True\n tmp.clear()\n return False\n",
"step-4": "def check(root, a, b):\n if root:\n if (root.left == a and root.right == b) or (root.left ==b and root.right==a):\n return False\n return check(root.left, a, b) and check(root.right, a, b)\n return True\ndef isCousin(root, a, b):\n # Your code here\n if check(root, a, b)==False:\n return False\n q=[]\n q.insert(0, root)\n tmp=set()\n while(len(q)):\n l = len(q)\n for i in range(l):\n n = q.pop()\n tmp.add(n.data)\n if n.left:\n q.insert(0, n.left)\n if n.right:\n q.insert(0, n.right)\n if a in tmp and b in tmp:\n return True\n tmp.clear()\n return False",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import BlockDeviceHandler
import json
import LocalMachine
import os
""" This module automaticly format the disk based on diskconf.json """
def module_print(text):
    """Print *text* prefixed with this module's log tag."""
    print("[ autoformat disk ] " + str(text))
def parse_config_file_from_disk(path, confname="diskconf.json"):
    """Load and parse <path>/<confname>, the per-disk format config.

    Args:
        path: mount point (or premount point) of the disk.
        confname: name of the JSON config file stored on the disk.

    Returns:
        The parsed dict on success, or None when the file is missing or
        cannot be parsed as JSON.
    """
    # os.path.join instead of manual "+ '/' +" string concatenation.
    json_path = os.path.join(str(path), str(confname))
    if not os.path.exists(json_path):
        module_print("\tPath not exists: " + str(json_path))
        return None
    try:
        with open(json_path, "r") as f:
            data = json.load(f)
        module_print("config: " + str(confname) + " => " + str(data))
    except Exception as e:
        # Broad on purpose: any read/parse failure means "no usable config".
        module_print("Json parse error: " + str(e))
        return None
    return data
def write_state_config_file_from_disk(path, data, confname="diskconf.json"):
    """Persist is_formatted="True" back into <path>/<confname> after a format.

    Bug fix: the original opened the file with mode "w" *before* checking
    is_formatted, so open() truncated the config file immediately; in the
    "State already set" branch nothing was written back and the file was
    left empty.  The file is now only opened (and rewritten) when the state
    actually changes.

    Args:
        path: mount point of the freshly formatted disk.
        data: the parsed config dict (mutated: is_formatted set to "True").
        confname: name of the JSON config file on the disk.
    """
    json_path = os.path.join(str(path), str(confname))
    try:
        if os.path.exists(json_path):
            module_print("\tWrite back format state to " + str(json_path))
            if str(data['is_formatted']).lower() == "false":
                data['is_formatted'] = "True"
                with open(json_path, "w") as f:
                    json.dump(data, f, indent=2)
                module_print("\t\tSUCCESS")
            else:
                module_print("State already set")
        else:
            module_print("diskconf not exists: " + str(json_path))
    except Exception as e:
        module_print("\t\tFAILED")
        module_print("Write back format state to disk failed:" + str(e))
def save_diskconf_file(path, confname="diskconf.json"):
    """Copy the disk's config file to /tmp so it survives a reformat."""
    source = "{}/{}".format(path, confname)
    backup_dir = "/tmp"
    command = "sudo cp {} {}".format(source, backup_dir)
    code, out, err = LocalMachine.run_command(command)
    BlockDeviceHandler.check_exitcode(command, code, err)
def restore_diskconf_file(path, confname="diskconf.json"):
    """Copy the saved config file from /tmp back onto the freshly formatted
    disk, then remove the temporary copy."""
    target = "{}/{}".format(path, confname)
    backup = "/tmp/" + str(confname)
    # Restore first, then clean up the /tmp copy -- same order as before.
    for command in ("sudo cp {} {}".format(backup, target),
                    "sudo rm -f {}".format(backup)):
        code, out, err = LocalMachine.run_command(command)
        BlockDeviceHandler.check_exitcode(command, code, err)
def safe_format_disk_check_force_mode(json_data, dev):
    """Decide whether *dev* should be (re)formatted.

    Returns True when the config says the disk is not yet formatted AND
    either the requested label/filesystem differ from the device's current
    ones, or force mode is enabled.  Returns False otherwise.
    """
    current = BlockDeviceHandler.get_device_info_data(dev)
    # Did the requested label or filesystem diverge from the device state?
    params_changed = (json_data['label'] != current['label'] or
                      json_data['format'] != current['filesystem'])
    if str(json_data['is_formatted']).lower() != "false":
        module_print("[i] [is_formatted:True] Blockdevice already formatted.")
        return False
    if params_changed:
        module_print("[i] [format] Requested block device parameter(s) changed - format")
        return True
    if str(json_data['force']).lower() == "true":
        module_print("[i] [format] Block device paramaters not changed but force mode is ON")
        return True
    module_print("[i] [Skip format] Blockdevice format not needed - label and system not changed")
    return False
def format_device_based_on_config_file(dev, premount_path):
    """Format *dev* if its on-disk diskconf.json requests it, then mount it.

    Flow: parse the config from the premounted path; if formatting is
    required, back up the config to /tmp, unmount, format ext4, remount,
    restore the config, and mark it is_formatted=True.  The device is
    (re)mounted at the end in every case.
    """
    module_print("Format device")
    diskconf_path = premount_path
    data = parse_config_file_from_disk(diskconf_path)
    if data is not None:
        if safe_format_disk_check_force_mode(data, dev):
            # Formatting wipes the disk, so stash the config file first.
            module_print("\tSave disk config file before formatting")
            save_diskconf_file(diskconf_path)
            module_print("\tUnmount device before formatting")
            BlockDeviceHandler.unmount_device(dev)
            module_print("\tFormat device")
            BlockDeviceHandler.format_ex4(dev, data['label'])
            module_print("\tMount formatted device")
            mount_point = BlockDeviceHandler.mount_device(dev)
            # Bring the config back and record that formatting is done.
            module_print("\tRestore config file to disk after formating")
            restore_diskconf_file(mount_point)
            module_print("\tSave back the the config file with the new state")
            write_state_config_file_from_disk(mount_point, data)
        else:
            module_print("\tDisk already formatted: {}:{}".format(dev, premount_path))
    # Mount unconditionally -- even when no config was found on the disk.
    module_print("mount device: " + str(dev))
    mount_point = BlockDeviceHandler.mount_device(dev)
def prepare_block_device():
    """Premount every attached block device, format each one according to
    its diskconf.json, then clean up the temporary premounts."""
    if not BlockDeviceHandler.is_any_device_avaible():
        return
    module_print("Block device exists")
    for device in BlockDeviceHandler.list_connected_devices():
        mount_path = BlockDeviceHandler.premount_device(device)
        format_device_based_on_config_file(device, mount_path)
    BlockDeviceHandler.unmount_all_premounted_devices()
if __name__ == "__main__":
    # Script entry point: inspect and auto-format connected block devices.
    prepare_block_device()
    #BlockDeviceHandler.unmount_all_devices(del_mount_point=True)
|
normal
|
{
"blob_id": "927470fe0087b17e5fe67a9b8b3cc13a40d8be1a",
"index": 7554,
"step-1": "<mask token>\n\n\ndef parse_config_file_from_disk(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n if not os.path.exists(json_path):\n module_print('\\tPath not exists: ' + str(json_path))\n return None\n try:\n with open(json_path, 'r') as f:\n data = json.load(f)\n module_print('config: ' + str(confname) + ' => ' + str(data))\n except Exception as e:\n module_print('Json parse error: ' + str(e))\n return None\n return data\n\n\ndef write_state_config_file_from_disk(path, data, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n try:\n if os.path.exists(json_path):\n module_print('\\tWrite back format state to ' + str(json_path))\n with open(json_path, 'w') as f:\n if str(data['is_formatted']).lower() == 'false':\n data['is_formatted'] = 'True'\n json.dump(data, f, indent=2)\n module_print('\\t\\tSUCCESS')\n else:\n module_print('State already set')\n else:\n module_print('diskconf not exists: ' + str(json_path))\n except Exception as e:\n module_print('\\t\\tFAILED')\n module_print('Write back format state to disk failed:' + str(e))\n\n\ndef save_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp'\n cmd = 'sudo cp {} {}'.format(json_path, save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef restore_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp/' + str(confname)\n cmd = 'sudo cp {} {}'.format(save_path, json_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n cmd = 'sudo rm -f {}'.format(save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef safe_format_disk_check_force_mode(json_data, dev):\n dev_data_modified = False\n dev_data = 
BlockDeviceHandler.get_device_info_data(dev)\n if json_data['label'] != dev_data['label']:\n dev_data_modified = True\n if json_data['format'] != dev_data['filesystem']:\n dev_data_modified = True\n if str(json_data['is_formatted']).lower() == 'false':\n if str(json_data['force']).lower(\n ) == 'true' and dev_data_modified is False:\n module_print(\n '[i] [format] Block device paramaters not changed but force mode is ON'\n )\n return True\n elif dev_data_modified is True:\n module_print(\n '[i] [format] Requested block device parameter(s) changed - format'\n )\n return True\n else:\n module_print(\n '[i] [Skip format] Blockdevice format not needed - label and system not changed'\n )\n return False\n else:\n module_print('[i] [is_formatted:True] Blockdevice already formatted.')\n return False\n\n\ndef format_device_based_on_config_file(dev, premount_path):\n module_print('Format device')\n diskconf_path = premount_path\n data = parse_config_file_from_disk(diskconf_path)\n if data is not None:\n if safe_format_disk_check_force_mode(data, dev):\n module_print('\\tSave disk config file before formatting')\n save_diskconf_file(diskconf_path)\n module_print('\\tUnmount device before formatting')\n BlockDeviceHandler.unmount_device(dev)\n module_print('\\tFormat device')\n BlockDeviceHandler.format_ex4(dev, data['label'])\n module_print('\\tMount formatted device')\n mount_point = BlockDeviceHandler.mount_device(dev)\n module_print('\\tRestore config file to disk after formating')\n restore_diskconf_file(mount_point)\n module_print('\\tSave back the the config file with the new state')\n write_state_config_file_from_disk(mount_point, data)\n else:\n module_print('\\tDisk already formatted: {}:{}'.format(dev,\n premount_path))\n module_print('mount device: ' + str(dev))\n mount_point = BlockDeviceHandler.mount_device(dev)\n\n\ndef prepare_block_device():\n if BlockDeviceHandler.is_any_device_avaible():\n module_print('Block device exists')\n devices = 
BlockDeviceHandler.list_connected_devices()\n for dev in devices:\n premount_path = BlockDeviceHandler.premount_device(dev)\n format_device_based_on_config_file(dev, premount_path)\n BlockDeviceHandler.unmount_all_premounted_devices()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef module_print(text):\n print_text = '[ autoformat disk ] ' + str(text)\n print(print_text)\n\n\ndef parse_config_file_from_disk(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n if not os.path.exists(json_path):\n module_print('\\tPath not exists: ' + str(json_path))\n return None\n try:\n with open(json_path, 'r') as f:\n data = json.load(f)\n module_print('config: ' + str(confname) + ' => ' + str(data))\n except Exception as e:\n module_print('Json parse error: ' + str(e))\n return None\n return data\n\n\ndef write_state_config_file_from_disk(path, data, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n try:\n if os.path.exists(json_path):\n module_print('\\tWrite back format state to ' + str(json_path))\n with open(json_path, 'w') as f:\n if str(data['is_formatted']).lower() == 'false':\n data['is_formatted'] = 'True'\n json.dump(data, f, indent=2)\n module_print('\\t\\tSUCCESS')\n else:\n module_print('State already set')\n else:\n module_print('diskconf not exists: ' + str(json_path))\n except Exception as e:\n module_print('\\t\\tFAILED')\n module_print('Write back format state to disk failed:' + str(e))\n\n\ndef save_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp'\n cmd = 'sudo cp {} {}'.format(json_path, save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef restore_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp/' + str(confname)\n cmd = 'sudo cp {} {}'.format(save_path, json_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n cmd = 'sudo rm -f {}'.format(save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, 
stderr)\n\n\ndef safe_format_disk_check_force_mode(json_data, dev):\n dev_data_modified = False\n dev_data = BlockDeviceHandler.get_device_info_data(dev)\n if json_data['label'] != dev_data['label']:\n dev_data_modified = True\n if json_data['format'] != dev_data['filesystem']:\n dev_data_modified = True\n if str(json_data['is_formatted']).lower() == 'false':\n if str(json_data['force']).lower(\n ) == 'true' and dev_data_modified is False:\n module_print(\n '[i] [format] Block device paramaters not changed but force mode is ON'\n )\n return True\n elif dev_data_modified is True:\n module_print(\n '[i] [format] Requested block device parameter(s) changed - format'\n )\n return True\n else:\n module_print(\n '[i] [Skip format] Blockdevice format not needed - label and system not changed'\n )\n return False\n else:\n module_print('[i] [is_formatted:True] Blockdevice already formatted.')\n return False\n\n\ndef format_device_based_on_config_file(dev, premount_path):\n module_print('Format device')\n diskconf_path = premount_path\n data = parse_config_file_from_disk(diskconf_path)\n if data is not None:\n if safe_format_disk_check_force_mode(data, dev):\n module_print('\\tSave disk config file before formatting')\n save_diskconf_file(diskconf_path)\n module_print('\\tUnmount device before formatting')\n BlockDeviceHandler.unmount_device(dev)\n module_print('\\tFormat device')\n BlockDeviceHandler.format_ex4(dev, data['label'])\n module_print('\\tMount formatted device')\n mount_point = BlockDeviceHandler.mount_device(dev)\n module_print('\\tRestore config file to disk after formating')\n restore_diskconf_file(mount_point)\n module_print('\\tSave back the the config file with the new state')\n write_state_config_file_from_disk(mount_point, data)\n else:\n module_print('\\tDisk already formatted: {}:{}'.format(dev,\n premount_path))\n module_print('mount device: ' + str(dev))\n mount_point = BlockDeviceHandler.mount_device(dev)\n\n\ndef prepare_block_device():\n if 
BlockDeviceHandler.is_any_device_avaible():\n module_print('Block device exists')\n devices = BlockDeviceHandler.list_connected_devices()\n for dev in devices:\n premount_path = BlockDeviceHandler.premount_device(dev)\n format_device_based_on_config_file(dev, premount_path)\n BlockDeviceHandler.unmount_all_premounted_devices()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef module_print(text):\n print_text = '[ autoformat disk ] ' + str(text)\n print(print_text)\n\n\ndef parse_config_file_from_disk(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n if not os.path.exists(json_path):\n module_print('\\tPath not exists: ' + str(json_path))\n return None\n try:\n with open(json_path, 'r') as f:\n data = json.load(f)\n module_print('config: ' + str(confname) + ' => ' + str(data))\n except Exception as e:\n module_print('Json parse error: ' + str(e))\n return None\n return data\n\n\ndef write_state_config_file_from_disk(path, data, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n try:\n if os.path.exists(json_path):\n module_print('\\tWrite back format state to ' + str(json_path))\n with open(json_path, 'w') as f:\n if str(data['is_formatted']).lower() == 'false':\n data['is_formatted'] = 'True'\n json.dump(data, f, indent=2)\n module_print('\\t\\tSUCCESS')\n else:\n module_print('State already set')\n else:\n module_print('diskconf not exists: ' + str(json_path))\n except Exception as e:\n module_print('\\t\\tFAILED')\n module_print('Write back format state to disk failed:' + str(e))\n\n\ndef save_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp'\n cmd = 'sudo cp {} {}'.format(json_path, save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef restore_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp/' + str(confname)\n cmd = 'sudo cp {} {}'.format(save_path, json_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n cmd = 'sudo rm -f {}'.format(save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, 
stderr)\n\n\ndef safe_format_disk_check_force_mode(json_data, dev):\n dev_data_modified = False\n dev_data = BlockDeviceHandler.get_device_info_data(dev)\n if json_data['label'] != dev_data['label']:\n dev_data_modified = True\n if json_data['format'] != dev_data['filesystem']:\n dev_data_modified = True\n if str(json_data['is_formatted']).lower() == 'false':\n if str(json_data['force']).lower(\n ) == 'true' and dev_data_modified is False:\n module_print(\n '[i] [format] Block device paramaters not changed but force mode is ON'\n )\n return True\n elif dev_data_modified is True:\n module_print(\n '[i] [format] Requested block device parameter(s) changed - format'\n )\n return True\n else:\n module_print(\n '[i] [Skip format] Blockdevice format not needed - label and system not changed'\n )\n return False\n else:\n module_print('[i] [is_formatted:True] Blockdevice already formatted.')\n return False\n\n\ndef format_device_based_on_config_file(dev, premount_path):\n module_print('Format device')\n diskconf_path = premount_path\n data = parse_config_file_from_disk(diskconf_path)\n if data is not None:\n if safe_format_disk_check_force_mode(data, dev):\n module_print('\\tSave disk config file before formatting')\n save_diskconf_file(diskconf_path)\n module_print('\\tUnmount device before formatting')\n BlockDeviceHandler.unmount_device(dev)\n module_print('\\tFormat device')\n BlockDeviceHandler.format_ex4(dev, data['label'])\n module_print('\\tMount formatted device')\n mount_point = BlockDeviceHandler.mount_device(dev)\n module_print('\\tRestore config file to disk after formating')\n restore_diskconf_file(mount_point)\n module_print('\\tSave back the the config file with the new state')\n write_state_config_file_from_disk(mount_point, data)\n else:\n module_print('\\tDisk already formatted: {}:{}'.format(dev,\n premount_path))\n module_print('mount device: ' + str(dev))\n mount_point = BlockDeviceHandler.mount_device(dev)\n\n\ndef prepare_block_device():\n if 
BlockDeviceHandler.is_any_device_avaible():\n module_print('Block device exists')\n devices = BlockDeviceHandler.list_connected_devices()\n for dev in devices:\n premount_path = BlockDeviceHandler.premount_device(dev)\n format_device_based_on_config_file(dev, premount_path)\n BlockDeviceHandler.unmount_all_premounted_devices()\n\n\nif __name__ == '__main__':\n prepare_block_device()\n",
"step-4": "import BlockDeviceHandler\nimport json\nimport LocalMachine\nimport os\n<mask token>\n\n\ndef module_print(text):\n print_text = '[ autoformat disk ] ' + str(text)\n print(print_text)\n\n\ndef parse_config_file_from_disk(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n if not os.path.exists(json_path):\n module_print('\\tPath not exists: ' + str(json_path))\n return None\n try:\n with open(json_path, 'r') as f:\n data = json.load(f)\n module_print('config: ' + str(confname) + ' => ' + str(data))\n except Exception as e:\n module_print('Json parse error: ' + str(e))\n return None\n return data\n\n\ndef write_state_config_file_from_disk(path, data, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n try:\n if os.path.exists(json_path):\n module_print('\\tWrite back format state to ' + str(json_path))\n with open(json_path, 'w') as f:\n if str(data['is_formatted']).lower() == 'false':\n data['is_formatted'] = 'True'\n json.dump(data, f, indent=2)\n module_print('\\t\\tSUCCESS')\n else:\n module_print('State already set')\n else:\n module_print('diskconf not exists: ' + str(json_path))\n except Exception as e:\n module_print('\\t\\tFAILED')\n module_print('Write back format state to disk failed:' + str(e))\n\n\ndef save_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp'\n cmd = 'sudo cp {} {}'.format(json_path, save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef restore_diskconf_file(path, confname='diskconf.json'):\n json_path = str(path) + '/' + str(confname)\n save_path = '/tmp/' + str(confname)\n cmd = 'sudo cp {} {}'.format(save_path, json_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n cmd = 'sudo rm -f {}'.format(save_path)\n exitcode, stdout, stderr = 
LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\n\ndef safe_format_disk_check_force_mode(json_data, dev):\n dev_data_modified = False\n dev_data = BlockDeviceHandler.get_device_info_data(dev)\n if json_data['label'] != dev_data['label']:\n dev_data_modified = True\n if json_data['format'] != dev_data['filesystem']:\n dev_data_modified = True\n if str(json_data['is_formatted']).lower() == 'false':\n if str(json_data['force']).lower(\n ) == 'true' and dev_data_modified is False:\n module_print(\n '[i] [format] Block device paramaters not changed but force mode is ON'\n )\n return True\n elif dev_data_modified is True:\n module_print(\n '[i] [format] Requested block device parameter(s) changed - format'\n )\n return True\n else:\n module_print(\n '[i] [Skip format] Blockdevice format not needed - label and system not changed'\n )\n return False\n else:\n module_print('[i] [is_formatted:True] Blockdevice already formatted.')\n return False\n\n\ndef format_device_based_on_config_file(dev, premount_path):\n module_print('Format device')\n diskconf_path = premount_path\n data = parse_config_file_from_disk(diskconf_path)\n if data is not None:\n if safe_format_disk_check_force_mode(data, dev):\n module_print('\\tSave disk config file before formatting')\n save_diskconf_file(diskconf_path)\n module_print('\\tUnmount device before formatting')\n BlockDeviceHandler.unmount_device(dev)\n module_print('\\tFormat device')\n BlockDeviceHandler.format_ex4(dev, data['label'])\n module_print('\\tMount formatted device')\n mount_point = BlockDeviceHandler.mount_device(dev)\n module_print('\\tRestore config file to disk after formating')\n restore_diskconf_file(mount_point)\n module_print('\\tSave back the the config file with the new state')\n write_state_config_file_from_disk(mount_point, data)\n else:\n module_print('\\tDisk already formatted: {}:{}'.format(dev,\n premount_path))\n module_print('mount device: ' + str(dev))\n mount_point = 
BlockDeviceHandler.mount_device(dev)\n\n\ndef prepare_block_device():\n if BlockDeviceHandler.is_any_device_avaible():\n module_print('Block device exists')\n devices = BlockDeviceHandler.list_connected_devices()\n for dev in devices:\n premount_path = BlockDeviceHandler.premount_device(dev)\n format_device_based_on_config_file(dev, premount_path)\n BlockDeviceHandler.unmount_all_premounted_devices()\n\n\nif __name__ == '__main__':\n prepare_block_device()\n",
"step-5": "import BlockDeviceHandler\nimport json\nimport LocalMachine\nimport os\n\n\"\"\" This module automaticly format the disk based on diskconf.json \"\"\"\n\ndef module_print(text):\n print_text = \"[ autoformat disk ] \" + str(text)\n print(print_text)\n\ndef parse_config_file_from_disk(path, confname=\"diskconf.json\"):\n json_path = str(path) + \"/\" + str(confname)\n if not os.path.exists(json_path):\n module_print(\"\\tPath not exists: \" + str(json_path))\n return None\n try:\n with open(json_path, \"r\") as f:\n data = json.load(f)\n module_print(\"config: \" + str(confname) + \" => \" + str(data))\n except Exception as e:\n module_print(\"Json parse error: \" + str(e))\n return None\n return data\n\ndef write_state_config_file_from_disk(path, data, confname=\"diskconf.json\"):\n json_path = str(path) + \"/\" + str(confname)\n try:\n if os.path.exists(json_path):\n module_print(\"\\tWrite back format state to \" + str(json_path))\n with open(json_path, \"w\") as f:\n if str(data['is_formatted']).lower() == \"false\":\n data['is_formatted'] = \"True\"\n json.dump(data, f, indent=2)\n module_print(\"\\t\\tSUCCESS\")\n else:\n module_print(\"State already set\")\n else:\n module_print(\"diskconf not exists: \" + str(json_path))\n except Exception as e:\n module_print(\"\\t\\tFAILED\")\n module_print(\"Write back format state to disk failed:\" + str(e))\n\ndef save_diskconf_file(path, confname=\"diskconf.json\"):\n json_path = str(path) + \"/\" + str(confname)\n save_path = \"/tmp\"\n cmd = \"sudo cp {} {}\".format(json_path, save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\ndef restore_diskconf_file(path, confname=\"diskconf.json\"):\n json_path = str(path) + \"/\" + str(confname)\n save_path = \"/tmp/\" + str(confname)\n cmd = \"sudo cp {} {}\".format(save_path, json_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, 
exitcode, stderr)\n cmd = \"sudo rm -f {}\".format(save_path)\n exitcode, stdout, stderr = LocalMachine.run_command(cmd)\n BlockDeviceHandler.check_exitcode(cmd, exitcode, stderr)\n\ndef safe_format_disk_check_force_mode(json_data, dev):\n dev_data_modified = False\n # disk is not formatted\n dev_data = BlockDeviceHandler.get_device_info_data(dev)\n if json_data['label'] != dev_data['label']:\n dev_data_modified = True\n if json_data['format'] != dev_data['filesystem']:\n dev_data_modified = True\n\n if str(json_data['is_formatted']).lower() == \"false\":\n if str(json_data['force']).lower() == \"true\" and dev_data_modified is False:\n module_print(\"[i] [format] Block device paramaters not changed but force mode is ON\")\n return True\n elif dev_data_modified is True:\n module_print(\"[i] [format] Requested block device parameter(s) changed - format\")\n return True\n else:\n module_print(\"[i] [Skip format] Blockdevice format not needed - label and system not changed\")\n return False\n else:\n module_print(\"[i] [is_formatted:True] Blockdevice already formatted.\")\n return False\n\ndef format_device_based_on_config_file(dev, premount_path):\n module_print(\"Format device\")\n diskconf_path = premount_path\n data = parse_config_file_from_disk(diskconf_path)\n if data is not None:\n if safe_format_disk_check_force_mode(data, dev):\n module_print(\"\\tSave disk config file before formatting\")\n save_diskconf_file(diskconf_path)\n module_print(\"\\tUnmount device before formatting\")\n BlockDeviceHandler.unmount_device(dev)\n module_print(\"\\tFormat device\")\n BlockDeviceHandler.format_ex4(dev, data['label'])\n module_print(\"\\tMount formatted device\")\n mount_point = BlockDeviceHandler.mount_device(dev)\n module_print(\"\\tRestore config file to disk after formating\")\n restore_diskconf_file(mount_point)\n module_print(\"\\tSave back the the config file with the new state\")\n write_state_config_file_from_disk(mount_point, data)\n else:\n 
module_print(\"\\tDisk already formatted: {}:{}\".format(dev, premount_path))\n module_print(\"mount device: \" + str(dev))\n mount_point = BlockDeviceHandler.mount_device(dev)\n\ndef prepare_block_device():\n if BlockDeviceHandler.is_any_device_avaible():\n module_print(\"Block device exists\")\n devices = BlockDeviceHandler.list_connected_devices()\n for dev in devices:\n premount_path = BlockDeviceHandler.premount_device(dev)\n format_device_based_on_config_file(dev, premount_path)\n BlockDeviceHandler.unmount_all_premounted_devices()\n\nif __name__ == \"__main__\":\n prepare_block_device()\n #BlockDeviceHandler.unmount_all_devices(del_mount_point=True)\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
class Function:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def foward(self, x):
raise NotImplementedError()
class Square(Function):
def foward(self, x):
return x ** 2
class Exp(Function):
def foward(self, x):
return np.exp(x)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Variable:
def __init__(self, data):
self.data = data
class Function:
"""
Base class
specific functions are implemented in the inherited class
"""
def __call__(self, input):
x = input.data
y = self.foward(x)
output = Variable(y)
return output
def foward(self, x):
raise NotImplementedError()
class Square(Function):
def foward(self, x):
return x ** 2
class Exp(Function):
def foward(self, x):
return np.exp(x)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Variable:
def __init__(self, data):
self.data = data
class Function:
"""
Base class
specific functions are implemented in the inherited class
"""
def __call__(self, input):
x = input.data
y = self.foward(x)
output = Variable(y)
return output
def foward(self, x):
raise NotImplementedError()
class Square(Function):
def foward(self, x):
return x ** 2
class Exp(Function):
def foward(self, x):
return np.exp(x)
<|reserved_special_token_0|>
print(y.data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Variable:
def __init__(self, data):
self.data = data
class Function:
"""
Base class
specific functions are implemented in the inherited class
"""
def __call__(self, input):
x = input.data
y = self.foward(x)
output = Variable(y)
return output
def foward(self, x):
raise NotImplementedError()
class Square(Function):
def foward(self, x):
return x ** 2
class Exp(Function):
def foward(self, x):
return np.exp(x)
square = Square()
exp = Exp()
x = Variable(np.array(0.5))
a = square(x)
b = exp(a)
y = square(b)
print(y.data)
<|reserved_special_token_1|>
#implement variable!
import numpy as np
class Variable:
def __init__(self, data):
self.data = data
class Function:
'''
Base class
specific functions are implemented in the inherited class
'''
def __call__(self, input):
x = input.data #data extract
y = self.foward(x)
output = Variable(y) #here! is key point
return output
def foward(self, x):
raise NotImplementedError()
class Square(Function):
def foward(self, x):
return x ** 2
class Exp(Function):
def foward(self, x):
return np.exp(x)
# input/output of a Function.__call__ is unified as a variable instance.
square = Square()
exp = Exp()
# like a composite function
# x -> [Square] -> a -> [Exp] -> b -> [Square] -> y
x = Variable(np.array(0.5))
a = square(x)
b = exp(a)
y = square(b)
print(y.data)
|
flexible
|
{
"blob_id": "9efd83524ebb598f30c8fb6c0f9f0c65333578e6",
"index": 6292,
"step-1": "<mask token>\n\n\nclass Function:\n <mask token>\n <mask token>\n\n def foward(self, x):\n raise NotImplementedError()\n\n\nclass Square(Function):\n\n def foward(self, x):\n return x ** 2\n\n\nclass Exp(Function):\n\n def foward(self, x):\n return np.exp(x)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Variable:\n\n def __init__(self, data):\n self.data = data\n\n\nclass Function:\n \"\"\"\n Base class\n specific functions are implemented in the inherited class\n \"\"\"\n\n def __call__(self, input):\n x = input.data\n y = self.foward(x)\n output = Variable(y)\n return output\n\n def foward(self, x):\n raise NotImplementedError()\n\n\nclass Square(Function):\n\n def foward(self, x):\n return x ** 2\n\n\nclass Exp(Function):\n\n def foward(self, x):\n return np.exp(x)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Variable:\n\n def __init__(self, data):\n self.data = data\n\n\nclass Function:\n \"\"\"\n Base class\n specific functions are implemented in the inherited class\n \"\"\"\n\n def __call__(self, input):\n x = input.data\n y = self.foward(x)\n output = Variable(y)\n return output\n\n def foward(self, x):\n raise NotImplementedError()\n\n\nclass Square(Function):\n\n def foward(self, x):\n return x ** 2\n\n\nclass Exp(Function):\n\n def foward(self, x):\n return np.exp(x)\n\n\n<mask token>\nprint(y.data)\n",
"step-4": "<mask token>\n\n\nclass Variable:\n\n def __init__(self, data):\n self.data = data\n\n\nclass Function:\n \"\"\"\n Base class\n specific functions are implemented in the inherited class\n \"\"\"\n\n def __call__(self, input):\n x = input.data\n y = self.foward(x)\n output = Variable(y)\n return output\n\n def foward(self, x):\n raise NotImplementedError()\n\n\nclass Square(Function):\n\n def foward(self, x):\n return x ** 2\n\n\nclass Exp(Function):\n\n def foward(self, x):\n return np.exp(x)\n\n\nsquare = Square()\nexp = Exp()\nx = Variable(np.array(0.5))\na = square(x)\nb = exp(a)\ny = square(b)\nprint(y.data)\n",
"step-5": "#implement variable!\nimport numpy as np\n\nclass Variable:\n def __init__(self, data):\n self.data = data\n\nclass Function:\n '''\n Base class\n specific functions are implemented in the inherited class\n '''\n def __call__(self, input): \n x = input.data #data extract\n y = self.foward(x)\n output = Variable(y) #here! is key point\n return output\n\n def foward(self, x):\n raise NotImplementedError()\n\nclass Square(Function):\n def foward(self, x):\n return x ** 2\n\nclass Exp(Function):\n def foward(self, x):\n return np.exp(x)\n\n# input/output of a Function.__call__ is unified as a variable instance.\nsquare = Square()\nexp = Exp()\n\n# like a composite function\n# x -> [Square] -> a -> [Exp] -> b -> [Square] -> y\nx = Variable(np.array(0.5))\na = square(x)\nb = exp(a)\ny = square(b)\nprint(y.data)\n\n\n",
"step-ids": [
6,
10,
11,
12,
14
]
}
|
[
6,
10,
11,
12,
14
] |
# 라이브러리 환경
import pandas as pd
import numpy as np
# sklearn 테이터셋에서 iris 데이터셋 로딩
from sklearn import datasets
iris = datasets.load_iris()
# iris 데이터셋은 딕셔너리 형태이므로, key 값 확인
'''
print(iris.keys())
print(iris['DESCR'])
print("데이터 셋 크기:", iris['target'])
print("데이터 셋 내용:\n", iris['target'])
'''
# data 속성의 데이터셋 크기
print("데이터 셋 크기:", iris['data'].shape)
# data 속성의 데이터셋 내용(첫 7개 행 추출)
data1 = ['a', 'b', 'c', 'd', 'e']
print(type(data1))
sr1 = pd.Series(data1)
# print(type(sr1))
data2 = (1, 2, 3.14, 100, -10)
sr2 = pd.Series(data2)
dict_data = {'c1':data1, 'c2':data2}
df = pd.DataFrame(dict_data)
print(df)
# 열(columns)과 행(index)이름 바꾸기
df.columns = ['string1', 'string2']
df.index = ['r1', 'r2', 'r3', 'r4', 'r5']
# print(df.loc['r2':'r4', 'string1':'string2'])
print('데이터셋 내용:\n', iris['data'][:7, :])
df = pd.DataFrame(iris['data'], columns=iris['feature_names'])
print('데이터 프레임의 형태:', df.shape)
df.columns = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width']
print(df.head(2))
df['Target'] = iris['target']
print(df.head())
x = [2, 1, 13, 4, 15, 26]
y = [0, 4, 31, 2, 42, 54]
df = pd.DataFrame({'X':x, 'Y':y})
print(df)
|
normal
|
{
"blob_id": "dc2c9293040204f0ec2156c41b8be624f4e5cf99",
"index": 8389,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('데이터 셋 크기:', iris['data'].shape)\n<mask token>\nprint(type(data1))\n<mask token>\nprint(df)\n<mask token>\nprint('데이터셋 내용:\\n', iris['data'][:7, :])\n<mask token>\nprint('데이터 프레임의 형태:', df.shape)\n<mask token>\nprint(df.head(2))\n<mask token>\nprint(df.head())\n<mask token>\nprint(df)\n",
"step-3": "<mask token>\niris = datasets.load_iris()\n<mask token>\nprint('데이터 셋 크기:', iris['data'].shape)\ndata1 = ['a', 'b', 'c', 'd', 'e']\nprint(type(data1))\nsr1 = pd.Series(data1)\ndata2 = 1, 2, 3.14, 100, -10\nsr2 = pd.Series(data2)\ndict_data = {'c1': data1, 'c2': data2}\ndf = pd.DataFrame(dict_data)\nprint(df)\ndf.columns = ['string1', 'string2']\ndf.index = ['r1', 'r2', 'r3', 'r4', 'r5']\nprint('데이터셋 내용:\\n', iris['data'][:7, :])\ndf = pd.DataFrame(iris['data'], columns=iris['feature_names'])\nprint('데이터 프레임의 형태:', df.shape)\ndf.columns = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width']\nprint(df.head(2))\ndf['Target'] = iris['target']\nprint(df.head())\nx = [2, 1, 13, 4, 15, 26]\ny = [0, 4, 31, 2, 42, 54]\ndf = pd.DataFrame({'X': x, 'Y': y})\nprint(df)\n",
"step-4": "import pandas as pd\nimport numpy as np\nfrom sklearn import datasets\niris = datasets.load_iris()\n<mask token>\nprint('데이터 셋 크기:', iris['data'].shape)\ndata1 = ['a', 'b', 'c', 'd', 'e']\nprint(type(data1))\nsr1 = pd.Series(data1)\ndata2 = 1, 2, 3.14, 100, -10\nsr2 = pd.Series(data2)\ndict_data = {'c1': data1, 'c2': data2}\ndf = pd.DataFrame(dict_data)\nprint(df)\ndf.columns = ['string1', 'string2']\ndf.index = ['r1', 'r2', 'r3', 'r4', 'r5']\nprint('데이터셋 내용:\\n', iris['data'][:7, :])\ndf = pd.DataFrame(iris['data'], columns=iris['feature_names'])\nprint('데이터 프레임의 형태:', df.shape)\ndf.columns = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width']\nprint(df.head(2))\ndf['Target'] = iris['target']\nprint(df.head())\nx = [2, 1, 13, 4, 15, 26]\ny = [0, 4, 31, 2, 42, 54]\ndf = pd.DataFrame({'X': x, 'Y': y})\nprint(df)\n",
"step-5": "# 라이브러리 환경\nimport pandas as pd\nimport numpy as np\n\n# sklearn 테이터셋에서 iris 데이터셋 로딩\nfrom sklearn import datasets\niris = datasets.load_iris()\n\n# iris 데이터셋은 딕셔너리 형태이므로, key 값 확인\n'''\nprint(iris.keys())\nprint(iris['DESCR'])\nprint(\"데이터 셋 크기:\", iris['target'])\nprint(\"데이터 셋 내용:\\n\", iris['target'])\n'''\n\n# data 속성의 데이터셋 크기\nprint(\"데이터 셋 크기:\", iris['data'].shape)\n\n# data 속성의 데이터셋 내용(첫 7개 행 추출)\ndata1 = ['a', 'b', 'c', 'd', 'e']\nprint(type(data1))\nsr1 = pd.Series(data1)\n# print(type(sr1))\ndata2 = (1, 2, 3.14, 100, -10)\nsr2 = pd.Series(data2)\n\ndict_data = {'c1':data1, 'c2':data2}\ndf = pd.DataFrame(dict_data)\nprint(df)\n\n\n# 열(columns)과 행(index)이름 바꾸기\ndf.columns = ['string1', 'string2']\ndf.index = ['r1', 'r2', 'r3', 'r4', 'r5']\n\n# print(df.loc['r2':'r4', 'string1':'string2'])\n\nprint('데이터셋 내용:\\n', iris['data'][:7, :])\ndf = pd.DataFrame(iris['data'], columns=iris['feature_names'])\n\nprint('데이터 프레임의 형태:', df.shape)\ndf.columns = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width']\nprint(df.head(2))\n\ndf['Target'] = iris['target']\nprint(df.head())\n\nx = [2, 1, 13, 4, 15, 26]\ny = [0, 4, 31, 2, 42, 54]\n\ndf = pd.DataFrame({'X':x, 'Y':y})\nprint(df)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import *
from PyQt5.QtCore import *
class Ui_MainWindow(object):
    """Qt-Designer-style UI builder for the digit-drawing main window.

    Exposes the two conventional methods of generated Qt UI classes:
    ``setupUi`` builds the widget tree and ``retranslateUi`` applies the
    (translatable) user-visible strings.
    """

    def setupUi(self, MainWindow):
        """Create and lay out the widgets on *MainWindow*.

        :param MainWindow: the top-level ``QMainWindow`` to populate.
        """
        MainWindow.setObjectName("MainWindow")
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        MainWindow.setCentralWidget(self.centralwidget)
        MainWindow.setWindowIcon(QIcon('data/nn.png'))
        MainWindow.resize(800, 800)

        # Confirmation button; fix: dropped the stray C-style trailing ';'.
        self.OK = QtWidgets.QPushButton(self.centralwidget)
        self.OK.setStyleSheet("background-color:#18BDFF; border-radius: 5px;")
        self.OK.setIcon(QIcon("data/ok.png"))
        self.OK.setIconSize(QSize(40, 40))
        # NOTE(review): y=820 places the button below the 800-px-high window
        # set by resize() above — confirm the window is enlarged elsewhere.
        self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.OK.setFont(font)
        self.OK.setAutoFillBackground(True)
        self.OK.setObjectName("OK")

        self.retranslateUi(MainWindow)
        # Auto-connect slots named on_<objectName>_<signal> by convention.
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply translatable text to the window title and the OK button."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Drawing digits"))
        self.OK.setText(_translate("MainWindow", " OK"))
|
normal
|
{
"blob_id": "65264f52f641b67c707b6a827ecfe1bf417748e8",
"index": 2379,
"step-1": "<mask token>\n\n\nclass Ui_MainWindow(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))\n self.OK.setText(_translate('MainWindow', ' OK'))\n",
"step-4": "from PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import *\nfrom PyQt5.QtCore import *\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))\n self.OK.setText(_translate('MainWindow', ' OK'))\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import *\nfrom PyQt5.QtCore import *\n\nclass Ui_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n \n \n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet(\"background-color:#18BDFF; border-radius: 5px;\");\n self.OK.setIcon(QIcon(\"data/ok.png\"))\n self.OK.setIconSize(QSize(40, 40)) \n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName(\"OK\")\n \n \n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n \n \n \n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"Drawing digits\"))\n self.OK.setText(_translate(\"MainWindow\", \" OK\"))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# hw.shin@konantech.com
#leekiljae@ogqcorp.com
|
normal
|
{
"blob_id": "193d48237b4b1e406eb565943cf01f0423449fca",
"index": 3682,
"step-1": "# hw.shin@konantech.com\n#leekiljae@ogqcorp.com",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DashboardArgs', 'Dashboard']
@pulumi.input_type
class DashboardArgs:
    def __init__(__self__, *,
                 dashboard_definition: pulumi.Input[str],
                 dashboard_description: pulumi.Input[str],
                 dashboard_name: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]] = None):
        """
        The set of arguments for constructing a Dashboard resource.

        :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.
        :param pulumi.Input[str] dashboard_description: A description for the dashboard.
        :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.
        :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.
        :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.
        """
        # Required properties are recorded unconditionally; optional ones are
        # only recorded when supplied, so pulumi can distinguish "unset" from
        # an explicit None.
        pulumi.set(__self__, "dashboard_definition", dashboard_definition)
        pulumi.set(__self__, "dashboard_description", dashboard_description)
        if dashboard_name is not None:
            pulumi.set(__self__, "dashboard_name", dashboard_name)
        if project_id is not None:
            pulumi.set(__self__, "project_id", project_id)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    # Each property below maps the snake_case Python attribute to the
    # camelCase wire name via @pulumi.getter(name=...).
    @property
    @pulumi.getter(name="dashboardDefinition")
    def dashboard_definition(self) -> pulumi.Input[str]:
        """
        The dashboard definition specified in a JSON literal.
        """
        return pulumi.get(self, "dashboard_definition")
    @dashboard_definition.setter
    def dashboard_definition(self, value: pulumi.Input[str]):
        pulumi.set(self, "dashboard_definition", value)
    @property
    @pulumi.getter(name="dashboardDescription")
    def dashboard_description(self) -> pulumi.Input[str]:
        """
        A description for the dashboard.
        """
        return pulumi.get(self, "dashboard_description")
    @dashboard_description.setter
    def dashboard_description(self, value: pulumi.Input[str]):
        pulumi.set(self, "dashboard_description", value)
    @property
    @pulumi.getter(name="dashboardName")
    def dashboard_name(self) -> Optional[pulumi.Input[str]]:
        """
        A friendly name for the dashboard.
        """
        return pulumi.get(self, "dashboard_name")
    @dashboard_name.setter
    def dashboard_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dashboard_name", value)
    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which to create the dashboard.
        """
        return pulumi.get(self, "project_id")
    @project_id.setter
    def project_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_id", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]:
        """
        A list of key-value pairs that contain metadata for the dashboard.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]):
        pulumi.set(self, "tags", value)
class Dashboard(pulumi.CustomResource):
    # The two @overload stubs below only document the alternative call
    # signatures (keyword args vs. a DashboardArgs bundle); the concrete
    # __init__ further down dispatches both forms to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 dashboard_definition: Optional[pulumi.Input[str]] = None,
                 dashboard_description: Optional[pulumi.Input[str]] = None,
                 dashboard_name: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,
                 __props__=None):
        """
        Resource schema for AWS::IoTSiteWise::Dashboard

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.
        :param pulumi.Input[str] dashboard_description: A description for the dashboard.
        :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.
        :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DashboardArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Resource schema for AWS::IoTSiteWise::Dashboard

        :param str resource_name: The name of the resource.
        :param DashboardArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Figure out which overload the caller used, then forward to the
        # single real initializer.
        resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 dashboard_definition: Optional[pulumi.Input[str]] = None,
                 dashboard_description: Optional[pulumi.Input[str]] = None,
                 dashboard_name: Optional[pulumi.Input[str]] = None,
                 project_id: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,
                 __props__=None):
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        # opts.id set means we are adopting an existing resource (see get());
        # only a fresh resource needs its properties validated and assembled.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            # __new__ bypasses DashboardArgs.__init__ so a partially-filled
            # props bag can be built directly via __dict__.
            __props__ = DashboardArgs.__new__(DashboardArgs)

            # opts.urn set means the engine already knows the resource, so the
            # required-property checks are skipped.
            if dashboard_definition is None and not opts.urn:
                raise TypeError("Missing required property 'dashboard_definition'")
            __props__.__dict__["dashboard_definition"] = dashboard_definition
            if dashboard_description is None and not opts.urn:
                raise TypeError("Missing required property 'dashboard_description'")
            __props__.__dict__["dashboard_description"] = dashboard_description
            __props__.__dict__["dashboard_name"] = dashboard_name
            __props__.__dict__["project_id"] = project_id
            __props__.__dict__["tags"] = tags
            # Output-only properties start as None and are resolved by the
            # engine after the provider creates the resource.
            __props__.__dict__["dashboard_arn"] = None
            __props__.__dict__["dashboard_id"] = None
        super(Dashboard, __self__).__init__(
            'aws-native:iotsitewise:Dashboard',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Dashboard':
        """
        Get an existing Dashboard resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All properties start as None; the engine hydrates them from the
        # provider's recorded state for the given id.
        __props__ = DashboardArgs.__new__(DashboardArgs)

        __props__.__dict__["dashboard_arn"] = None
        __props__.__dict__["dashboard_definition"] = None
        __props__.__dict__["dashboard_description"] = None
        __props__.__dict__["dashboard_id"] = None
        __props__.__dict__["dashboard_name"] = None
        __props__.__dict__["project_id"] = None
        __props__.__dict__["tags"] = None
        return Dashboard(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="dashboardArn")
    def dashboard_arn(self) -> pulumi.Output[str]:
        """
        The ARN of the dashboard.
        """
        return pulumi.get(self, "dashboard_arn")

    @property
    @pulumi.getter(name="dashboardDefinition")
    def dashboard_definition(self) -> pulumi.Output[str]:
        """
        The dashboard definition specified in a JSON literal.
        """
        return pulumi.get(self, "dashboard_definition")

    @property
    @pulumi.getter(name="dashboardDescription")
    def dashboard_description(self) -> pulumi.Output[str]:
        """
        A description for the dashboard.
        """
        return pulumi.get(self, "dashboard_description")

    @property
    @pulumi.getter(name="dashboardId")
    def dashboard_id(self) -> pulumi.Output[str]:
        """
        The ID of the dashboard.
        """
        return pulumi.get(self, "dashboard_id")

    @property
    @pulumi.getter(name="dashboardName")
    def dashboard_name(self) -> pulumi.Output[str]:
        """
        A friendly name for the dashboard.
        """
        return pulumi.get(self, "dashboard_name")

    @property
    @pulumi.getter(name="projectId")
    def project_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the project in which to create the dashboard.
        """
        return pulumi.get(self, "project_id")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:
        """
        A list of key-value pairs that contain metadata for the dashboard.
        """
        return pulumi.get(self, "tags")
|
normal
|
{
"blob_id": "2332783c96b24caa383bf47d82384e1c40a48e94",
"index": 8566,
"step-1": "<mask token>\n\n\n@pulumi.input_type\nclass DashboardArgs:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n 
pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing 
Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n 
The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-2": "<mask token>\n\n\n@pulumi.input_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n 
def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this 
resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, 
__self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def 
dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-3": "<mask token>\n\n\n@pulumi.input_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n <mask token>\n\n @dashboard_name.setter\n def dashboard_name(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'dashboard_name', value)\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of 
the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n 
:param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n 
__props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n 
return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-4": "<mask token>\n\n\n@pulumi.input_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @dashboard_name.setter\n def dashboard_name(self, value: 
Optional[pulumi.Input[str]]):\n pulumi.set(self, 'dashboard_name', value)\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n 
...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n 
__props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the 
dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-5": "# coding=utf-8\n# *** WARNING: this file was generated by the Pulumi SDK Generator. ***\n# *** Do not edit by hand unless you're certain you know what you are doing! ***\n\nimport copy\nimport warnings\nimport pulumi\nimport pulumi.runtime\nfrom typing import Any, Mapping, Optional, Sequence, Union, overload\nfrom .. import _utilities\nfrom . import outputs\nfrom ._inputs import *\n\n__all__ = ['DashboardArgs', 'Dashboard']\n\n@pulumi.input_type\nclass DashboardArgs:\n def __init__(__self__, *,\n dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str],\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]] = None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, \"dashboard_definition\", dashboard_definition)\n pulumi.set(__self__, \"dashboard_description\", dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, \"dashboard_name\", dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, \"project_id\", project_id)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n\n @property\n @pulumi.getter(name=\"dashboardDefinition\")\n def dashboard_definition(self) -> pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, \"dashboard_definition\")\n\n 
@dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, \"dashboard_definition\", value)\n\n @property\n @pulumi.getter(name=\"dashboardDescription\")\n def dashboard_description(self) -> pulumi.Input[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_description\")\n\n @dashboard_description.setter\n def dashboard_description(self, value: pulumi.Input[str]):\n pulumi.set(self, \"dashboard_description\", value)\n\n @property\n @pulumi.getter(name=\"dashboardName\")\n def dashboard_name(self) -> Optional[pulumi.Input[str]]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_name\")\n\n @dashboard_name.setter\n def dashboard_name(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, \"dashboard_name\", value)\n\n @property\n @pulumi.getter(name=\"projectId\")\n def project_id(self) -> Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, \"project_id\")\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, \"project_id\", value)\n\n @property\n @pulumi.getter\n def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, \"tags\")\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]):\n pulumi.set(self, \"tags\", value)\n\n\nclass Dashboard(pulumi.CustomResource):\n @overload\n def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n dashboard_definition: Optional[pulumi.Input[str]] = None,\n dashboard_description: Optional[pulumi.Input[str]] = None,\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: 
Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n @overload\n def __init__(__self__,\n resource_name: str,\n args: DashboardArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.__dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n dashboard_definition: Optional[pulumi.Input[str]] = None,\n dashboard_description: Optional[pulumi.Input[str]] = None,\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: 
Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')\n __props__ = DashboardArgs.__new__(DashboardArgs)\n\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\"Missing required property 'dashboard_definition'\")\n __props__.__dict__[\"dashboard_definition\"] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\"Missing required property 'dashboard_description'\")\n __props__.__dict__[\"dashboard_description\"] = dashboard_description\n __props__.__dict__[\"dashboard_name\"] = dashboard_name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"dashboard_arn\"] = None\n __props__.__dict__[\"dashboard_id\"] = None\n super(Dashboard, __self__).__init__(\n 'aws-native:iotsitewise:Dashboard',\n resource_name,\n __props__,\n opts)\n\n @staticmethod\n def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = DashboardArgs.__new__(DashboardArgs)\n\n __props__.__dict__[\"dashboard_arn\"] = None\n 
__props__.__dict__[\"dashboard_definition\"] = None\n __props__.__dict__[\"dashboard_description\"] = None\n __props__.__dict__[\"dashboard_id\"] = None\n __props__.__dict__[\"dashboard_name\"] = None\n __props__.__dict__[\"project_id\"] = None\n __props__.__dict__[\"tags\"] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name=\"dashboardArn\")\n def dashboard_arn(self) -> pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_arn\")\n\n @property\n @pulumi.getter(name=\"dashboardDefinition\")\n def dashboard_definition(self) -> pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, \"dashboard_definition\")\n\n @property\n @pulumi.getter(name=\"dashboardDescription\")\n def dashboard_description(self) -> pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_description\")\n\n @property\n @pulumi.getter(name=\"dashboardId\")\n def dashboard_id(self) -> pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_id\")\n\n @property\n @pulumi.getter(name=\"dashboardName\")\n def dashboard_name(self) -> pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_name\")\n\n @property\n @pulumi.getter(name=\"projectId\")\n def project_id(self) -> pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, \"project_id\")\n\n @property\n @pulumi.getter\n def tags(self) -> pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, \"tags\")\n\n",
"step-ids": [
14,
21,
22,
23,
28
]
}
|
[
14,
21,
22,
23,
28
] |
from numpy.testing import assert_almost_equal
from fastats.maths.norm_cdf import norm_cdf
def test_norm_cdf_basic_sanity():
    """The CDF of any normal distribution evaluated at its mean is 0.5."""
    expected = 0.5
    observed = norm_cdf(0.0, 0, 1)
    assert_almost_equal(expected, observed)
def test_norm_cdf_dartmouth():
    """
    Examples taken from:
    https://math.dartmouth.edu/archive/m20f12/public_html/matlabnormal
    stored in literature directory as dartmouth_normcdf_norminv.pdf
    """
    observed = norm_cdf(90, 100, 4)
    assert_almost_equal(0.0062, observed, decimal=4)
if __name__ == '__main__':
    # Allow running this test module directly; delegates to pytest so the
    # same discovery/reporting applies as a normal test run.
    import pytest
    pytest.main([__file__])
|
normal
|
{
"blob_id": "0229783467b8bcd0361baf6be07e3261f34220c7",
"index": 6581,
"step-1": "<mask token>\n\n\ndef test_norm_cdf_dartmouth():\n \"\"\"\n Examples taken from:\n https://math.dartmouth.edu/archive/m20f12/public_html/matlabnormal\n stored in literature directory as dartmouth_normcdf_norminv.pdf\n \"\"\"\n assert_almost_equal(0.0062, norm_cdf(90, 100, 4), decimal=4)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_norm_cdf_basic_sanity():\n assert_almost_equal(0.5, norm_cdf(0.0, 0, 1))\n\n\ndef test_norm_cdf_dartmouth():\n \"\"\"\n Examples taken from:\n https://math.dartmouth.edu/archive/m20f12/public_html/matlabnormal\n stored in literature directory as dartmouth_normcdf_norminv.pdf\n \"\"\"\n assert_almost_equal(0.0062, norm_cdf(90, 100, 4), decimal=4)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_norm_cdf_basic_sanity():\n assert_almost_equal(0.5, norm_cdf(0.0, 0, 1))\n\n\ndef test_norm_cdf_dartmouth():\n \"\"\"\n Examples taken from:\n https://math.dartmouth.edu/archive/m20f12/public_html/matlabnormal\n stored in literature directory as dartmouth_normcdf_norminv.pdf\n \"\"\"\n assert_almost_equal(0.0062, norm_cdf(90, 100, 4), decimal=4)\n\n\nif __name__ == '__main__':\n import pytest\n pytest.main([__file__])\n",
"step-4": "from numpy.testing import assert_almost_equal\nfrom fastats.maths.norm_cdf import norm_cdf\n\n\ndef test_norm_cdf_basic_sanity():\n assert_almost_equal(0.5, norm_cdf(0.0, 0, 1))\n\n\ndef test_norm_cdf_dartmouth():\n \"\"\"\n Examples taken from:\n https://math.dartmouth.edu/archive/m20f12/public_html/matlabnormal\n stored in literature directory as dartmouth_normcdf_norminv.pdf\n \"\"\"\n assert_almost_equal(0.0062, norm_cdf(90, 100, 4), decimal=4)\n\n\nif __name__ == '__main__':\n import pytest\n pytest.main([__file__])\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import cv2
import imageio
import pandas as pd
import glob, os
import numpy as np
fileDir = os.getcwd()
# os.chdir("./train-jpg")
# there are 40480 training examples
# we will allocate 39000 for training
# and the remaining 1480 will be for validation
# Model hyperparameters: a 256x256 grayscale image is flattened into a
# 65536-vector and passed through four shrinking hidden layers to one unit.
input_size = 65536 # 256^2
hidden_size = 20
hidden_size_1 = 15
hidden_size_2 = 10
hidden_size_3 = 5
num_classes = 1
learning_rate = 0.001
num_epochs = 5
# Small subset used for this run; the full-dataset split stays commented out.
train_num = 1000
test_num = 148
# train_num = 39000
# test_num = 1480
# %% Load data--for clouds and non-clouds
images = []
for file in glob.glob("*.jpg"):
    images.append(file)
# Sort numerically by the index embedded in the filename (characters 6..-4)
# so image order matches the CSV rows.
# NOTE(review): this assumes every filename has the digits at exactly that
# slice (e.g. "train_123.jpg") — confirm against the dataset's naming.
images = sorted(images, key=lambda filename: int(filename[6: -4])) # string splicing so that the images are in order
train_images = []
test_images = []
train_labels = []
test_labels = []
labels = pd.read_csv("./train_v2.csv") # labels are whether or not image is any sort of cloudy or haze
# Label 1 = neither "cloudy" nor "haze" appears in the tag string; label 0
# otherwise. Substring matching on the raw tags column, not exact-tag match.
for i in range(train_num + test_num):
    tags = labels.iloc[i]["tags"]
    if i < train_num:
        train_images.append(imageio.imread(images[i], as_gray=True).flatten())
        train_labels.append(int("cloudy" not in tags and "haze" not in tags))
        # train_labels.append(int("water" not in tags))
    else:
        test_images.append(imageio.imread(images[i], as_gray=True).flatten())
        test_labels.append(int("cloudy" not in tags and "haze" not in tags))
        # test_labels.append(int("water" not in tags))
class Net(nn.Module):
    """Five-layer fully-connected binary classifier with sigmoid activations.

    Maps a flattened grayscale image vector to a single value in (0, 1)
    through four hidden layers of decreasing width.
    """

    def __init__(self, input_size, hidden_size, num_classes,
                 hidden_size_1=15, hidden_size_2=10, hidden_size_3=5):
        """Build the layer stack.

        The three inner widths were previously read from module-level
        globals; they are now keyword parameters whose defaults equal the
        original global values, so existing callers are unaffected.
        """
        super(Net, self).__init__()
        self.h1 = nn.Linear(input_size, hidden_size)
        self.h2 = nn.Linear(hidden_size, hidden_size_1)
        self.h3 = nn.Linear(hidden_size_1, hidden_size_2)
        self.h4 = nn.Linear(hidden_size_2, hidden_size_3)
        self.o = nn.Linear(hidden_size_3, num_classes)

    def forward(self, x):
        """Forward pass; every layer's output is squashed with a sigmoid."""
        x = torch.sigmoid(self.h1(x))
        x = torch.sigmoid(self.h2(x))
        x = torch.sigmoid(self.h3(x))
        x = torch.sigmoid(self.h4(x))
        x = torch.sigmoid(self.o(x))
        return x
# %%
model = Net(input_size, hidden_size, num_classes) # no device configuration here
# NOTE(review): per the PyTorch docs, SoftMarginLoss expects targets in
# {-1, +1}, but the labels built above are {0, 1}; a 0 target contributes a
# constant log(2) term with zero gradient. BCELoss (targets in [0, 1]) may be
# the intended criterion — confirm before changing.
criterion = nn.SoftMarginLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
# Optional snippets for resuming from a saved checkpoint ("model.ckpt"):
# model = TheModelClass(*args, **kwargs)
# model.load_state_dict(torch.load("model.ckpt"))
# model.eval()
# optimizer = TheOptimizerClass(*args, **kwargs)
# checkpoint = torch.load('./model.ckpt')
# model.load_state_dict(checkpoint['model_state_dict'])
# optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
# epoch = checkpoint['epoch']
# loss = checkpoint['loss']
total_step = len(train_images)
# Training: each example is fed as a batch of one, shape (1, 65536).
for epoch in range(num_epochs):
    for i, image in enumerate(train_images):
        image = torch.Tensor(train_images[i]).reshape(1, 65536)
        label = torch.Tensor([int(train_labels[i])])
        # label = label.long()
        # label = label.reshape(1,1)
        # label = label.squeeze()
        # Forward pass
        outputs = model(image)
        outputs = outputs.squeeze(0)
        # outputs.reshape(1,)
        loss = criterion(outputs, label)
        # Backward and optimize
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # Progress report every 100 examples.
        if (i+1) % 100 == 0:
            print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
                   .format(epoch+1, num_epochs, i+1, total_step, loss.item()))
# %%
# Evaluation: threshold the single sigmoid output at 0.5 and count matches
# against the integer test labels. Gradients disabled for inference.
with torch.no_grad():
    correct = 0
    total = 0  # NOTE(review): never updated or read below — dead variable.
    for i, image in enumerate(test_images):
        image = torch.Tensor(test_images[i]).reshape(1, 65536)
        label = torch.Tensor([int(test_labels[i])])
        outputs = model(image)
        outputs = outputs.squeeze(0)
        # Binarize the prediction.
        outputs = 1 if torch.sum(outputs) >= 0.5 else 0
        if outputs == torch.sum(label):
            correct += 1
        elif outputs == 0:
            # Print mismatches where the model predicted 0 (label was 1).
            print("#############")
            print(i,outputs, torch.sum(label))
        # _, predicted = torch.max(outputs.data, 1)
        # correct += (predicted == labels).sum().item()
    print('Accuracy of the network on the {} test images: {} %'.format(len(test_images), 100 * correct / len(test_images)))
# %%
# Persist the trained weights for later reuse.
torch.save(model.state_dict(), 'model.ckpt')
# %%
|
normal
|
{
"blob_id": "a4deb67d277538e61c32381da0fe4886016dae33",
"index": 85,
"step-1": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size, num_classes):\n super(Net, self).__init__()\n self.h1 = nn.Linear(input_size, hidden_size)\n self.h2 = nn.Linear(hidden_size, hidden_size_1)\n self.h3 = nn.Linear(hidden_size_1, hidden_size_2)\n self.h4 = nn.Linear(hidden_size_2, hidden_size_3)\n self.o = nn.Linear(hidden_size_3, num_classes)\n\n def forward(self, x):\n x = torch.sigmoid(self.h1(x))\n x = torch.sigmoid(self.h2(x))\n x = torch.sigmoid(self.h3(x))\n x = torch.sigmoid(self.h4(x))\n x = torch.sigmoid(self.o(x))\n return x\n\n\n<mask token>\n",
"step-2": "<mask token>\nfor file in glob.glob('*.jpg'):\n images.append(file)\n<mask token>\nfor i in range(train_num + test_num):\n tags = labels.iloc[i]['tags']\n if i < train_num:\n train_images.append(imageio.imread(images[i], as_gray=True).flatten())\n train_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n else:\n test_images.append(imageio.imread(images[i], as_gray=True).flatten())\n test_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size, num_classes):\n super(Net, self).__init__()\n self.h1 = nn.Linear(input_size, hidden_size)\n self.h2 = nn.Linear(hidden_size, hidden_size_1)\n self.h3 = nn.Linear(hidden_size_1, hidden_size_2)\n self.h4 = nn.Linear(hidden_size_2, hidden_size_3)\n self.o = nn.Linear(hidden_size_3, num_classes)\n\n def forward(self, x):\n x = torch.sigmoid(self.h1(x))\n x = torch.sigmoid(self.h2(x))\n x = torch.sigmoid(self.h3(x))\n x = torch.sigmoid(self.h4(x))\n x = torch.sigmoid(self.o(x))\n return x\n\n\n<mask token>\nfor epoch in range(num_epochs):\n for i, image in enumerate(train_images):\n image = torch.Tensor(train_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(train_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n loss = criterion(outputs, label)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 100 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(epoch +\n 1, num_epochs, i + 1, total_step, loss.item()))\nwith torch.no_grad():\n correct = 0\n total = 0\n for i, image in enumerate(test_images):\n image = torch.Tensor(test_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(test_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n outputs = 1 if torch.sum(outputs) >= 0.5 else 0\n if outputs == torch.sum(label):\n correct += 1\n elif outputs == 0:\n print('#############')\n print(i, outputs, torch.sum(label))\n print('Accuracy of the 
network on the {} test images: {} %'.format(len(\n test_images), 100 * correct / len(test_images)))\ntorch.save(model.state_dict(), 'model.ckpt')\n",
"step-3": "<mask token>\nfileDir = os.getcwd()\ninput_size = 65536\nhidden_size = 20\nhidden_size_1 = 15\nhidden_size_2 = 10\nhidden_size_3 = 5\nnum_classes = 1\nlearning_rate = 0.001\nnum_epochs = 5\ntrain_num = 1000\ntest_num = 148\nimages = []\nfor file in glob.glob('*.jpg'):\n images.append(file)\nimages = sorted(images, key=lambda filename: int(filename[6:-4]))\ntrain_images = []\ntest_images = []\ntrain_labels = []\ntest_labels = []\nlabels = pd.read_csv('./train_v2.csv')\nfor i in range(train_num + test_num):\n tags = labels.iloc[i]['tags']\n if i < train_num:\n train_images.append(imageio.imread(images[i], as_gray=True).flatten())\n train_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n else:\n test_images.append(imageio.imread(images[i], as_gray=True).flatten())\n test_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size, num_classes):\n super(Net, self).__init__()\n self.h1 = nn.Linear(input_size, hidden_size)\n self.h2 = nn.Linear(hidden_size, hidden_size_1)\n self.h3 = nn.Linear(hidden_size_1, hidden_size_2)\n self.h4 = nn.Linear(hidden_size_2, hidden_size_3)\n self.o = nn.Linear(hidden_size_3, num_classes)\n\n def forward(self, x):\n x = torch.sigmoid(self.h1(x))\n x = torch.sigmoid(self.h2(x))\n x = torch.sigmoid(self.h3(x))\n x = torch.sigmoid(self.h4(x))\n x = torch.sigmoid(self.o(x))\n return x\n\n\nmodel = Net(input_size, hidden_size, num_classes)\ncriterion = nn.SoftMarginLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\ntotal_step = len(train_images)\nfor epoch in range(num_epochs):\n for i, image in enumerate(train_images):\n image = torch.Tensor(train_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(train_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n loss = criterion(outputs, label)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 100 == 0:\n print('Epoch 
[{}/{}], Step [{}/{}], Loss: {:.4f}'.format(epoch +\n 1, num_epochs, i + 1, total_step, loss.item()))\nwith torch.no_grad():\n correct = 0\n total = 0\n for i, image in enumerate(test_images):\n image = torch.Tensor(test_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(test_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n outputs = 1 if torch.sum(outputs) >= 0.5 else 0\n if outputs == torch.sum(label):\n correct += 1\n elif outputs == 0:\n print('#############')\n print(i, outputs, torch.sum(label))\n print('Accuracy of the network on the {} test images: {} %'.format(len(\n test_images), 100 * correct / len(test_images)))\ntorch.save(model.state_dict(), 'model.ckpt')\n",
"step-4": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport cv2\nimport imageio\nimport pandas as pd\nimport glob, os\nimport numpy as np\nfileDir = os.getcwd()\ninput_size = 65536\nhidden_size = 20\nhidden_size_1 = 15\nhidden_size_2 = 10\nhidden_size_3 = 5\nnum_classes = 1\nlearning_rate = 0.001\nnum_epochs = 5\ntrain_num = 1000\ntest_num = 148\nimages = []\nfor file in glob.glob('*.jpg'):\n images.append(file)\nimages = sorted(images, key=lambda filename: int(filename[6:-4]))\ntrain_images = []\ntest_images = []\ntrain_labels = []\ntest_labels = []\nlabels = pd.read_csv('./train_v2.csv')\nfor i in range(train_num + test_num):\n tags = labels.iloc[i]['tags']\n if i < train_num:\n train_images.append(imageio.imread(images[i], as_gray=True).flatten())\n train_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n else:\n test_images.append(imageio.imread(images[i], as_gray=True).flatten())\n test_labels.append(int('cloudy' not in tags and 'haze' not in tags))\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size, num_classes):\n super(Net, self).__init__()\n self.h1 = nn.Linear(input_size, hidden_size)\n self.h2 = nn.Linear(hidden_size, hidden_size_1)\n self.h3 = nn.Linear(hidden_size_1, hidden_size_2)\n self.h4 = nn.Linear(hidden_size_2, hidden_size_3)\n self.o = nn.Linear(hidden_size_3, num_classes)\n\n def forward(self, x):\n x = torch.sigmoid(self.h1(x))\n x = torch.sigmoid(self.h2(x))\n x = torch.sigmoid(self.h3(x))\n x = torch.sigmoid(self.h4(x))\n x = torch.sigmoid(self.o(x))\n return x\n\n\nmodel = Net(input_size, hidden_size, num_classes)\ncriterion = nn.SoftMarginLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\ntotal_step = len(train_images)\nfor epoch in range(num_epochs):\n for i, image in enumerate(train_images):\n image = torch.Tensor(train_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(train_labels[i])])\n outputs = 
model(image)\n outputs = outputs.squeeze(0)\n loss = criterion(outputs, label)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 100 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(epoch +\n 1, num_epochs, i + 1, total_step, loss.item()))\nwith torch.no_grad():\n correct = 0\n total = 0\n for i, image in enumerate(test_images):\n image = torch.Tensor(test_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(test_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n outputs = 1 if torch.sum(outputs) >= 0.5 else 0\n if outputs == torch.sum(label):\n correct += 1\n elif outputs == 0:\n print('#############')\n print(i, outputs, torch.sum(label))\n print('Accuracy of the network on the {} test images: {} %'.format(len(\n test_images), 100 * correct / len(test_images)))\ntorch.save(model.state_dict(), 'model.ckpt')\n",
"step-5": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport cv2\nimport imageio\nimport pandas as pd\nimport glob, os\nimport numpy as np\n\nfileDir = os.getcwd()\n# os.chdir(\"./train-jpg\")\n\n# there are 40480 training examples\n# we will allocate 39000 for training\n# and the remaining 1480 will be for validation\n\ninput_size = 65536 # 256^2\nhidden_size = 20\nhidden_size_1 = 15\nhidden_size_2 = 10\nhidden_size_3 = 5\nnum_classes = 1\nlearning_rate = 0.001\nnum_epochs = 5\n\ntrain_num = 1000\ntest_num = 148\n\n# train_num = 39000\n# test_num = 1480\n\n# %% Load data--for clouds and non-clouds\nimages = []\n\nfor file in glob.glob(\"*.jpg\"):\n images.append(file)\nimages = sorted(images, key=lambda filename: int(filename[6: -4])) # string splicing so that the images are in order\n\ntrain_images = []\ntest_images = []\n\ntrain_labels = []\ntest_labels = []\nlabels = pd.read_csv(\"./train_v2.csv\") # labels are whether or not image is any sort of cloudy or haze\n\nfor i in range(train_num + test_num):\n tags = labels.iloc[i][\"tags\"]\n if i < train_num:\n train_images.append(imageio.imread(images[i], as_gray=True).flatten())\n train_labels.append(int(\"cloudy\" not in tags and \"haze\" not in tags))\n # train_labels.append(int(\"water\" not in tags))\n else:\n test_images.append(imageio.imread(images[i], as_gray=True).flatten())\n test_labels.append(int(\"cloudy\" not in tags and \"haze\" not in tags))\n # test_labels.append(int(\"water\" not in tags))\n \nclass Net(nn.Module):\n def __init__(self, input_size, hidden_size, num_classes):\n super(Net, self).__init__()\n \n # parameters\n \n # weights\n # self.h1 = nn.Sigmoid() # input_size, hidden_size\n # self.o = nn.Sigmoid() # hidden_size, num_classes\n\n self.h1 = nn.Linear(input_size, hidden_size) \n self.h2 = nn.Linear(hidden_size, hidden_size_1)\n self.h3 = nn.Linear(hidden_size_1, hidden_size_2)\n self.h4 = nn.Linear(hidden_size_2, hidden_size_3)\n 
self.o = nn.Linear(hidden_size_3, num_classes) \n\n def forward(self, x):\n x = torch.sigmoid(self.h1(x))\n # print(\"doing x: {}\".format(x.shape))\n x = torch.sigmoid(self.h2(x))\n x = torch.sigmoid(self.h3(x))\n x = torch.sigmoid(self.h4(x))\n x = torch.sigmoid(self.o(x))\n return x\n\n# %%\n\nmodel = Net(input_size, hidden_size, num_classes) # no device configuration here\ncriterion = nn.SoftMarginLoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) \n# model = TheModelClass(*args, **kwargs)\n# model.load_state_dict(torch.load(\"model.ckpt\"))\n# model.eval()\n# optimizer = TheOptimizerClass(*args, **kwargs)\n\n# checkpoint = torch.load('./model.ckpt')\n# model.load_state_dict(checkpoint['model_state_dict'])\n# optimizer.load_state_dict(checkpoint['optimizer_state_dict'])\n# epoch = checkpoint['epoch']\n# loss = checkpoint['loss']\n\n\ntotal_step = len(train_images)\nfor epoch in range(num_epochs):\n for i, image in enumerate(train_images): \n\n image = torch.Tensor(train_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(train_labels[i])])\n # label = label.long()\n # label = label.reshape(1,1)\n # label = label.squeeze()\n \n # Forward pass\n outputs = model(image)\n outputs = outputs.squeeze(0)\n # outputs.reshape(1,)\n loss = criterion(outputs, label)\n \n # Backward and optimize\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n \n if (i+1) % 100 == 0:\n print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}' \n .format(epoch+1, num_epochs, i+1, total_step, loss.item()))\n\n\n# %%\n\nwith torch.no_grad():\n correct = 0\n total = 0\n for i, image in enumerate(test_images):\n image = torch.Tensor(test_images[i]).reshape(1, 65536)\n label = torch.Tensor([int(test_labels[i])])\n outputs = model(image)\n outputs = outputs.squeeze(0)\n outputs = 1 if torch.sum(outputs) >= 0.5 else 0\n if outputs == torch.sum(label):\n correct += 1\n elif outputs == 0: \n print(\"#############\")\n print(i,outputs, torch.sum(label))\n # _, 
predicted = torch.max(outputs.data, 1)\n # correct += (predicted == labels).sum().item()\n\n print('Accuracy of the network on the {} test images: {} %'.format(len(test_images), 100 * correct / len(test_images)))\n\n\n\n# %%\n\ntorch.save(model.state_dict(), 'model.ckpt')\n\n# %%\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Formatters example
#
# Requirements:
# Go to the ../hello_world directory and do: python prepare_data.py
#
# Instructions:
#
# Just run this file:
#
# python table.py
# Output:
# * standard input – text table
# * table.html
# * cross_table.html
#
from cubes import Workspace, create_formatter
# Workspace is configured from the slicer.ini file next to this script.
workspace = Workspace("slicer.ini")
# Create formatters
text_formatter = create_formatter("text_table")
html_formatter = create_formatter("simple_html_table")
html_cross_formatter = create_formatter("html_cross_table")
# Get the browser and data
browser = workspace.browser("irbd_balance")
result = browser.aggregate(drilldown=["item"])
result = result.cached()
#
# 1. Create text output
#
# NOTE: this script uses Python 2 print statements; it will not run under
# Python 3 without converting them to print() calls.
print "Text output"
print "-----------"
print text_formatter(result, "item")
#
# 2. Create HTML output (see table.html)
#
with open("table.html", "w") as f:
    data = html_formatter(result, "item")
    f.write(data)
#
# 3. Create cross-table to cross_table.html
#
# Drill down by item and year so years become rows and item category labels
# become columns of the cross table.
result = browser.aggregate(drilldown=["item", "year"])
with open("cross_table.html", "w") as f:
    data = html_cross_formatter(result,
            onrows=["year"],
            oncolumns=["item.category_label"])
    f.write(data)
print "Check also table.html and cross_table.html files"
|
normal
|
{
"blob_id": "55e743cb027d27cc6b668424c1584f27a8e8c51a",
"index": 5707,
"step-1": "# Formatters example\n#\n# Requirements:\n# Go to the ../hello_world directory and do: python prepare_data.py\n#\n# Instructions:\n#\n# Just run this file:\n#\n# python table.py\n# Output:\n# * standard input – text table\n# * table.html\n# * cross_table.html\n#\n\nfrom cubes import Workspace, create_formatter\n\nworkspace = Workspace(\"slicer.ini\")\n\n# Create formatters\ntext_formatter = create_formatter(\"text_table\")\nhtml_formatter = create_formatter(\"simple_html_table\")\nhtml_cross_formatter = create_formatter(\"html_cross_table\")\n\n# Get the browser and data\n\nbrowser = workspace.browser(\"irbd_balance\")\n\nresult = browser.aggregate(drilldown=[\"item\"])\nresult = result.cached()\n\n#\n# 1. Create text output\n#\nprint \"Text output\"\nprint \"-----------\"\n\nprint text_formatter(result, \"item\")\n\n\n#\n# 2. Create HTML output (see table.html)\n#\nwith open(\"table.html\", \"w\") as f:\n data = html_formatter(result, \"item\")\n f.write(data)\n\n#\n# 3. Create cross-table to cross_table.html\n#\nresult = browser.aggregate(drilldown=[\"item\", \"year\"])\nwith open(\"cross_table.html\", \"w\") as f:\n data = html_cross_formatter(result,\n onrows=[\"year\"],\n oncolumns=[\"item.category_label\"])\n f.write(data)\n\nprint \"Check also table.html and cross_table.html files\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def manage_prev_page():
    """Remember the page the user navigated from in the session.

    Stores ``request.referrer`` under ``session['prev_page']`` unless the
    referrer is one of the account/password pages, so that a later redirect
    (e.g. after editing the profile) returns to real content instead of
    bouncing between auth pages.
    """
    # `session` and `request` are Flask context-local proxies imported at
    # module level; the original `global session, request` statement was a
    # no-op (nothing is ever rebound here) and has been dropped.
    # NOTE(review): request.referrer can be None on direct navigation; the
    # membership tests below would then raise TypeError, exactly as the
    # original code did — confirm this is only called from in-app links.
    auth_pages = ('profile', 'change_password', 'forgot_password',
                  'request_password')
    if not any(page in request.referrer for page in auth_pages):
        session['prev_page'] = request.referrer
@main.route('/')
def homepage():
    """Render the public landing page."""
    return render_template('main/homepage.html')
@main.route('/about', methods=['GET', 'POST'])
def about():
    """Render the static "about" page."""
    # NOTE(review): POST is accepted but the handler has no form logic —
    # confirm whether methods=['GET', 'POST'] is intentional.
    return render_template('main/about.html')
<|reserved_special_token_0|>
@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET',
    'POST'])
def post(post_id, post_url):
    """Display a single blog post with its tags and comments.

    On POST, publishes a new comment for authenticated users, then redirects
    back to the same page (post/redirect/get) so refreshes do not resubmit.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = CommentForm()
    # Newest comments first.
    comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.
        date.desc())
    # Fix: the original issued an extra Tag query filtered by Tag.id instead
    # of Tag.post_id and immediately overwrote the result; the dead (and
    # wrongly-filtered) query has been removed.
    tags = Tag.query.filter_by(post_id=post_id)
    if form.validate_on_submit():
        if current_user.is_authenticated:
            comment = Comment(content=form.content.data, post_id=post_id,
                author_id=current_user.id)
            db.session.add(comment)
            db.session.commit()
            flash('Your comment has been published')
            return redirect(url_for('main.post', post_id=post_id, post_url=
                post_url))
        else:
            flash('You need to get logged in to comment')
    return render_template('main/post.html', post=post, form=form, comments
        =comments, tags=tags, post_id=post_id, post_url=post_url)
@main.route('/profile', methods=['GET', 'POST'])
@decorators.login_required
def profile():
    """Show and update the logged-in user's account details."""
    # On a plain GET, remember where the user came from so the POST branch
    # can redirect back there after a successful update.
    if request.method == 'GET':
        manage_prev_page()
    form = ProfileForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.username = form.username.data
            current_user.email = form.email.data
            db.session.commit()
            flash('Your account has been changed!')
            # Return to the page recorded before the form was shown.
            return redirect(session['prev_page'])
        else:
            flash('Please check your data!')
    return render_template('main/profile.html', form=form)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def manage_prev_page():
global session, request
if ('profile' not in request.referrer and 'change_password' not in
request.referrer and 'forgot_password' not in request.referrer and
'request_password' not in request.referrer):
session['prev_page'] = request.referrer
@main.route('/')
def homepage():
return render_template('main/homepage.html')
@main.route('/about', methods=['GET', 'POST'])
def about():
return render_template('main/about.html')
@main.route('/blog')
def blog():
    """List blog posts, five per page, with the full tag collection."""
    # ?page=N selects the page; defaults to the first page.
    page = request.args.get('page', 1, type=int)
    posts = Post.query.paginate(page=page, per_page=5)
    tags = Tag.query.all()
    # slugify is handed to the template so it can build post URLs.
    return render_template('main/blog.html', posts=posts, slugify=slugify,
        tags=tags)
@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET', 'POST'])
def post(post_id, post_url):
    """Show one blog post with its tags and comments; accept new comments.

    The comment form (POST) requires an authenticated user; on success the
    comment is stored and the client is redirected back to this post
    (post/redirect/get), otherwise they are asked to log in.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = CommentForm()
    # Comments for this post, newest first.
    comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.date.desc())
    # Fix: the original first ran Tag.query.filter_by(id=post_id) -- filtering
    # tags by their own primary key -- and immediately overwrote it; only the
    # correct post_id filter is kept.
    tags = Tag.query.filter_by(post_id=post_id)
    if form.validate_on_submit():
        if current_user.is_authenticated:
            comment = Comment(content=form.content.data, post_id=post_id,
                              author_id=current_user.id)
            db.session.add(comment)
            db.session.commit()
            flash('Your comment has been published')
            return redirect(url_for('main.post', post_id=post_id,
                                    post_url=post_url))
        else:
            flash('You need to get logged in to comment')
    return render_template('main/post.html', post=post, form=form,
                           comments=comments, tags=tags, post_id=post_id,
                           post_url=post_url)
@main.route('/profile', methods=['GET', 'POST'])
@decorators.login_required
def profile():
    """Show and update the logged-in user's profile (username/email)."""
    if request.method == 'GET':
        # Remember where the user came from so we can return there after saving.
        manage_prev_page()
    form = ProfileForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.username = form.username.data
            current_user.email = form.email.data
            db.session.commit()
            flash('Your account has been changed!')
            # NOTE(review): assumes manage_prev_page() ran on a prior GET; a
            # direct POST without 'prev_page' in the session raises KeyError.
            return redirect(session['prev_page'])
        else:
            flash('Please check your data!')
    return render_template('main/profile.html', form=form)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Blueprint collecting the public-facing routes; registered on the app elsewhere.
main = Blueprint('main', __name__)
def manage_prev_page():
    """Remember the referring page in the session so profile/password views
    can redirect back to it, excluding the auth-related pages themselves.
    """
    # Fix: request.referrer can be None (direct navigation or a stripped
    # Referer header); the original `'x' not in None` raised TypeError.
    # The no-op `global session, request` was dropped: both are module-level
    # imports that are only read here, never rebound.
    referrer = request.referrer
    if referrer and all(
        fragment not in referrer
        for fragment in ('profile', 'change_password',
                         'forgot_password', 'request_password')
    ):
        session['prev_page'] = referrer
@main.route('/')
def homepage():
    """Render the site landing page."""
    template_name = 'main/homepage.html'
    return render_template(template_name)
@main.route('/about', methods=['GET', 'POST'])
def about():
    """Render the static 'about' page."""
    template_name = 'main/about.html'
    return render_template(template_name)
@main.route('/blog')
def blog():
    """Render the paginated blog index (5 posts per page) plus all tags."""
    current_page = request.args.get('page', 1, type=int)
    paginated_posts = Post.query.paginate(page=current_page, per_page=5)
    all_tags = Tag.query.all()
    return render_template('main/blog.html', posts=paginated_posts,
                           slugify=slugify, tags=all_tags)
@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET', 'POST'])
def post(post_id, post_url):
    """Show one blog post with its tags and comments; accept new comments.

    The comment form (POST) requires an authenticated user; on success the
    comment is stored and the client is redirected back to this post
    (post/redirect/get), otherwise they are asked to log in.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = CommentForm()
    # Comments for this post, newest first.
    comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.date.desc())
    # Fix: the original first ran Tag.query.filter_by(id=post_id) -- filtering
    # tags by their own primary key -- and immediately overwrote it; only the
    # correct post_id filter is kept.
    tags = Tag.query.filter_by(post_id=post_id)
    if form.validate_on_submit():
        if current_user.is_authenticated:
            comment = Comment(content=form.content.data, post_id=post_id,
                              author_id=current_user.id)
            db.session.add(comment)
            db.session.commit()
            flash('Your comment has been published')
            return redirect(url_for('main.post', post_id=post_id,
                                    post_url=post_url))
        else:
            flash('You need to get logged in to comment')
    return render_template('main/post.html', post=post, form=form,
                           comments=comments, tags=tags, post_id=post_id,
                           post_url=post_url)
@main.route('/profile', methods=['GET', 'POST'])
@decorators.login_required
def profile():
    """Show and update the logged-in user's profile (username/email)."""
    if request.method == 'GET':
        # Remember where the user came from so we can return there after saving.
        manage_prev_page()
    form = ProfileForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.username = form.username.data
            current_user.email = form.email.data
            db.session.commit()
            flash('Your account has been changed!')
            # NOTE(review): assumes manage_prev_page() ran on a prior GET; a
            # direct POST without 'prev_page' in the session raises KeyError.
            return redirect(session['prev_page'])
        else:
            flash('Please check your data!')
    return render_template('main/profile.html', form=form)
<|reserved_special_token_1|>
from flask import Blueprint, render_template, flash, redirect, url_for, request, current_app, g, session
from flask_login import current_user
from app import decorators
from app.models import User, Post, Comment, Tag
from slugify import slugify
from app.main.forms import CommentForm, TagForm, ProfileForm, ContactForm
from app import db
from flask_mail import Message
from app import mail
# Blueprint collecting the public-facing routes; registered on the app elsewhere.
main = Blueprint('main', __name__)
def manage_prev_page():
    """Remember the referring page in the session so profile/password views
    can redirect back to it, excluding the auth-related pages themselves.
    """
    # Fix: request.referrer can be None (direct navigation or a stripped
    # Referer header); the original `'x' not in None` raised TypeError.
    # The no-op `global session, request` was dropped: both are module-level
    # imports that are only read here, never rebound.
    referrer = request.referrer
    if referrer and all(
        fragment not in referrer
        for fragment in ('profile', 'change_password',
                         'forgot_password', 'request_password')
    ):
        session['prev_page'] = referrer
@main.route('/')
def homepage():
    """Render the site landing page."""
    template_name = 'main/homepage.html'
    return render_template(template_name)
@main.route('/about', methods=['GET', 'POST'])
def about():
    """Render the static 'about' page."""
    template_name = 'main/about.html'
    return render_template(template_name)
@main.route('/blog')
def blog():
    """Render the paginated blog index (5 posts per page) plus all tags."""
    current_page = request.args.get('page', 1, type=int)
    paginated_posts = Post.query.paginate(page=current_page, per_page=5)
    all_tags = Tag.query.all()
    return render_template('main/blog.html', posts=paginated_posts,
                           slugify=slugify, tags=all_tags)
@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET', 'POST'])
def post(post_id, post_url):
    """Show one blog post with its tags and comments; accept new comments.

    The comment form (POST) requires an authenticated user; on success the
    comment is stored and the client is redirected back to this post
    (post/redirect/get), otherwise they are asked to log in.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = CommentForm()
    # Comments for this post, newest first.
    comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.date.desc())
    # Fix: the original first ran Tag.query.filter_by(id=post_id) -- filtering
    # tags by their own primary key -- and immediately overwrote it; only the
    # correct post_id filter is kept.
    tags = Tag.query.filter_by(post_id=post_id)
    if form.validate_on_submit():
        if current_user.is_authenticated:
            comment = Comment(content=form.content.data, post_id=post_id,
                              author_id=current_user.id)
            db.session.add(comment)
            db.session.commit()
            flash('Your comment has been published')
            return redirect(url_for('main.post', post_id=post_id,
                                    post_url=post_url))
        else:
            flash('You need to get logged in to comment')
    return render_template('main/post.html', post=post, form=form,
                           comments=comments, tags=tags, post_id=post_id,
                           post_url=post_url)
@main.route('/profile', methods=['GET', 'POST'])
@decorators.login_required
def profile():
    """Show and update the logged-in user's profile (username/email)."""
    if request.method == 'GET':
        # Remember where the user came from so we can return there after saving.
        manage_prev_page()
    form = ProfileForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.username = form.username.data
            current_user.email = form.email.data
            db.session.commit()
            flash('Your account has been changed!')
            # NOTE(review): assumes manage_prev_page() ran on a prior GET; a
            # direct POST without 'prev_page' in the session raises KeyError.
            return redirect(session['prev_page'])
        else:
            flash('Please check your data!')
    return render_template('main/profile.html', form=form)
<|reserved_special_token_1|>
from flask import Blueprint, render_template, flash, redirect, url_for, request, current_app, g, session
from flask_login import current_user
from app import decorators
from app.models import User, Post, Comment, Tag
from slugify import slugify
from app.main.forms import CommentForm, TagForm, ProfileForm, ContactForm
from app import db
from flask_mail import Message
from app import mail
# Blueprint collecting the public-facing routes; registered on the app elsewhere.
main = Blueprint('main', __name__)
def manage_prev_page():
    """Remember the referring page in the session so profile/password views
    can redirect back to it, excluding the auth-related pages themselves.
    """
    # Fix: request.referrer can be None (direct navigation or a stripped
    # Referer header); the original `'x' not in None` raised TypeError.
    # The no-op `global session, request` was dropped: both are module-level
    # imports that are only read here, never rebound.
    referrer = request.referrer
    if referrer and all(
        fragment not in referrer
        for fragment in ('profile', 'change_password',
                         'forgot_password', 'request_password')
    ):
        session['prev_page'] = referrer
@main.route('/')
def homepage():
    """Render the site landing page."""
    template_name = 'main/homepage.html'
    return render_template(template_name)
@main.route('/about', methods=['GET', 'POST'])
def about():
    """Render the static 'about' page."""
    template_name = 'main/about.html'
    return render_template(template_name)
@main.route('/blog')
def blog():
    """Render the paginated blog index (5 posts per page) plus all tags."""
    current_page = request.args.get('page', 1, type=int)
    paginated_posts = Post.query.paginate(page=current_page, per_page=5)
    all_tags = Tag.query.all()
    return render_template('main/blog.html', posts=paginated_posts,
                           slugify=slugify, tags=all_tags)
@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET', 'POST'])
def post(post_id, post_url):
    """Show one blog post with its tags and comments; accept new comments.

    The comment form (POST) requires an authenticated user; on success the
    comment is stored and the client is redirected back to this post
    (post/redirect/get), otherwise they are asked to log in.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = CommentForm()
    # Comments for this post, newest first.
    comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.date.desc())
    # Fix: the original first ran Tag.query.filter_by(id=post_id) -- filtering
    # tags by their own primary key -- and immediately overwrote it; only the
    # correct post_id filter is kept.
    tags = Tag.query.filter_by(post_id=post_id)
    if form.validate_on_submit():
        if current_user.is_authenticated:
            comment = Comment(content=form.content.data, post_id=post_id,
                              author_id=current_user.id)
            db.session.add(comment)
            db.session.commit()
            flash('Your comment has been published')
            return redirect(url_for('main.post', post_id=post_id,
                                    post_url=post_url))
        else:
            flash('You need to get logged in to comment')
    return render_template('main/post.html', post=post, form=form,
                           comments=comments, tags=tags, post_id=post_id,
                           post_url=post_url)
@main.route('/profile', methods=['GET', 'POST'])
@decorators.login_required
def profile():
    """Show and update the logged-in user's profile (username/email)."""
    if request.method == 'GET':
        # Remember where the user came from so we can return there after saving.
        manage_prev_page()
    form = ProfileForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.username = form.username.data
            current_user.email = form.email.data
            db.session.commit()
            flash('Your account has been changed!')
            # NOTE(review): assumes manage_prev_page() ran on a prior GET; a
            # direct POST without 'prev_page' in the session raises KeyError.
            return redirect(session['prev_page'])
        else:
            flash('Please check your data!')
    return render_template('main/profile.html', form=form)
|
flexible
|
{
"blob_id": "4e66fe0485d987da590d11c848009b2e1665b3dc",
"index": 5445,
"step-1": "<mask token>\n\n\ndef manage_prev_page():\n global session, request\n if ('profile' not in request.referrer and 'change_password' not in\n request.referrer and 'forgot_password' not in request.referrer and \n 'request_password' not in request.referrer):\n session['prev_page'] = request.referrer\n\n\n@main.route('/')\ndef homepage():\n return render_template('main/homepage.html')\n\n\n@main.route('/about', methods=['GET', 'POST'])\ndef about():\n return render_template('main/about.html')\n\n\n<mask token>\n\n\n@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET',\n 'POST'])\ndef post(post_id, post_url):\n post = Post.query.filter_by(id=post_id).first()\n tags = Tag.query.filter_by(id=post_id)\n form = CommentForm()\n comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.\n date.desc())\n tags = Tag.query.filter_by(post_id=post_id)\n if form.validate_on_submit():\n if current_user.is_authenticated:\n comment = Comment(content=form.content.data, post_id=post_id,\n author_id=current_user.id)\n db.session.add(comment)\n db.session.commit()\n flash('Your comment has been published')\n return redirect(url_for('main.post', post_id=post_id, post_url=\n post_url))\n else:\n flash('You need to get logged in to comment')\n return render_template('main/post.html', post=post, form=form, comments\n =comments, tags=tags, post_id=post_id, post_url=post_url)\n\n\n@main.route('/profile', methods=['GET', 'POST'])\n@decorators.login_required\ndef profile():\n if request.method == 'GET':\n manage_prev_page()\n form = ProfileForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('Your account has been changed!')\n return redirect(session['prev_page'])\n else:\n flash('Please check your data!')\n return render_template('main/profile.html', form=form)\n",
"step-2": "<mask token>\n\n\ndef manage_prev_page():\n global session, request\n if ('profile' not in request.referrer and 'change_password' not in\n request.referrer and 'forgot_password' not in request.referrer and \n 'request_password' not in request.referrer):\n session['prev_page'] = request.referrer\n\n\n@main.route('/')\ndef homepage():\n return render_template('main/homepage.html')\n\n\n@main.route('/about', methods=['GET', 'POST'])\ndef about():\n return render_template('main/about.html')\n\n\n@main.route('/blog')\ndef blog():\n page = request.args.get('page', 1, type=int)\n posts = Post.query.paginate(page=page, per_page=5)\n tags = Tag.query.all()\n return render_template('main/blog.html', posts=posts, slugify=slugify,\n tags=tags)\n\n\n@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET',\n 'POST'])\ndef post(post_id, post_url):\n post = Post.query.filter_by(id=post_id).first()\n tags = Tag.query.filter_by(id=post_id)\n form = CommentForm()\n comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.\n date.desc())\n tags = Tag.query.filter_by(post_id=post_id)\n if form.validate_on_submit():\n if current_user.is_authenticated:\n comment = Comment(content=form.content.data, post_id=post_id,\n author_id=current_user.id)\n db.session.add(comment)\n db.session.commit()\n flash('Your comment has been published')\n return redirect(url_for('main.post', post_id=post_id, post_url=\n post_url))\n else:\n flash('You need to get logged in to comment')\n return render_template('main/post.html', post=post, form=form, comments\n =comments, tags=tags, post_id=post_id, post_url=post_url)\n\n\n@main.route('/profile', methods=['GET', 'POST'])\n@decorators.login_required\ndef profile():\n if request.method == 'GET':\n manage_prev_page()\n form = ProfileForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('Your 
account has been changed!')\n return redirect(session['prev_page'])\n else:\n flash('Please check your data!')\n return render_template('main/profile.html', form=form)\n",
"step-3": "<mask token>\nmain = Blueprint('main', __name__)\n\n\ndef manage_prev_page():\n global session, request\n if ('profile' not in request.referrer and 'change_password' not in\n request.referrer and 'forgot_password' not in request.referrer and \n 'request_password' not in request.referrer):\n session['prev_page'] = request.referrer\n\n\n@main.route('/')\ndef homepage():\n return render_template('main/homepage.html')\n\n\n@main.route('/about', methods=['GET', 'POST'])\ndef about():\n return render_template('main/about.html')\n\n\n@main.route('/blog')\ndef blog():\n page = request.args.get('page', 1, type=int)\n posts = Post.query.paginate(page=page, per_page=5)\n tags = Tag.query.all()\n return render_template('main/blog.html', posts=posts, slugify=slugify,\n tags=tags)\n\n\n@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET',\n 'POST'])\ndef post(post_id, post_url):\n post = Post.query.filter_by(id=post_id).first()\n tags = Tag.query.filter_by(id=post_id)\n form = CommentForm()\n comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.\n date.desc())\n tags = Tag.query.filter_by(post_id=post_id)\n if form.validate_on_submit():\n if current_user.is_authenticated:\n comment = Comment(content=form.content.data, post_id=post_id,\n author_id=current_user.id)\n db.session.add(comment)\n db.session.commit()\n flash('Your comment has been published')\n return redirect(url_for('main.post', post_id=post_id, post_url=\n post_url))\n else:\n flash('You need to get logged in to comment')\n return render_template('main/post.html', post=post, form=form, comments\n =comments, tags=tags, post_id=post_id, post_url=post_url)\n\n\n@main.route('/profile', methods=['GET', 'POST'])\n@decorators.login_required\ndef profile():\n if request.method == 'GET':\n manage_prev_page()\n form = ProfileForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n current_user.username = form.username.data\n current_user.email = form.email.data\n 
db.session.commit()\n flash('Your account has been changed!')\n return redirect(session['prev_page'])\n else:\n flash('Please check your data!')\n return render_template('main/profile.html', form=form)\n",
"step-4": "from flask import Blueprint, render_template, flash, redirect, url_for, request, current_app, g, session\nfrom flask_login import current_user\nfrom app import decorators\nfrom app.models import User, Post, Comment, Tag\nfrom slugify import slugify\nfrom app.main.forms import CommentForm, TagForm, ProfileForm, ContactForm\nfrom app import db\nfrom flask_mail import Message\nfrom app import mail\nmain = Blueprint('main', __name__)\n\n\ndef manage_prev_page():\n global session, request\n if ('profile' not in request.referrer and 'change_password' not in\n request.referrer and 'forgot_password' not in request.referrer and \n 'request_password' not in request.referrer):\n session['prev_page'] = request.referrer\n\n\n@main.route('/')\ndef homepage():\n return render_template('main/homepage.html')\n\n\n@main.route('/about', methods=['GET', 'POST'])\ndef about():\n return render_template('main/about.html')\n\n\n@main.route('/blog')\ndef blog():\n page = request.args.get('page', 1, type=int)\n posts = Post.query.paginate(page=page, per_page=5)\n tags = Tag.query.all()\n return render_template('main/blog.html', posts=posts, slugify=slugify,\n tags=tags)\n\n\n@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET',\n 'POST'])\ndef post(post_id, post_url):\n post = Post.query.filter_by(id=post_id).first()\n tags = Tag.query.filter_by(id=post_id)\n form = CommentForm()\n comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.\n date.desc())\n tags = Tag.query.filter_by(post_id=post_id)\n if form.validate_on_submit():\n if current_user.is_authenticated:\n comment = Comment(content=form.content.data, post_id=post_id,\n author_id=current_user.id)\n db.session.add(comment)\n db.session.commit()\n flash('Your comment has been published')\n return redirect(url_for('main.post', post_id=post_id, post_url=\n post_url))\n else:\n flash('You need to get logged in to comment')\n return render_template('main/post.html', post=post, form=form, 
comments\n =comments, tags=tags, post_id=post_id, post_url=post_url)\n\n\n@main.route('/profile', methods=['GET', 'POST'])\n@decorators.login_required\ndef profile():\n if request.method == 'GET':\n manage_prev_page()\n form = ProfileForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('Your account has been changed!')\n return redirect(session['prev_page'])\n else:\n flash('Please check your data!')\n return render_template('main/profile.html', form=form)\n",
"step-5": "from flask import Blueprint, render_template, flash, redirect, url_for, request, current_app, g, session\nfrom flask_login import current_user\nfrom app import decorators\nfrom app.models import User, Post, Comment, Tag\nfrom slugify import slugify\nfrom app.main.forms import CommentForm, TagForm, ProfileForm, ContactForm\nfrom app import db\nfrom flask_mail import Message\nfrom app import mail\n\n\nmain = Blueprint('main', __name__)\n\ndef manage_prev_page():\n global session, request\n if 'profile' not in request.referrer and 'change_password' not in request.referrer \\\n and 'forgot_password' not in request.referrer \\\n and 'request_password' not in request.referrer:\n session['prev_page'] = request.referrer\n\n@main.route('/')\ndef homepage():\n return render_template('main/homepage.html')\n\n@main.route('/about', methods=['GET', 'POST'])\ndef about():\n return render_template('main/about.html')\n\n\n@main.route('/blog')\ndef blog():\n page = request.args.get('page', 1, type=int)\n posts = Post.query.paginate(page=page, per_page=5)\n tags = Tag.query.all()\n return render_template('main/blog.html', posts=posts, slugify=slugify, tags=tags)\n\n\n@main.route('/blog/post/<int:post_id>/<string:post_url>', methods=['GET', 'POST'])\ndef post(post_id, post_url):\n post = Post.query.filter_by(id=post_id).first()\n tags = Tag.query.filter_by(id=post_id)\n form = CommentForm()\n comments = Comment.query.filter_by(post_id=post_id).order_by(Comment.date.desc())\n tags = Tag.query.filter_by(post_id=post_id)\n if form.validate_on_submit():\n if current_user.is_authenticated:\n comment = Comment(content=form.content.data,\n post_id=post_id, author_id=current_user.id)\n db.session.add(comment)\n db.session.commit()\n flash('Your comment has been published')\n return redirect(url_for('main.post', post_id=post_id, post_url=post_url))\n else: \n flash('You need to get logged in to comment')\n return render_template('main/post.html', post=post, form=form, 
comments=comments, tags=tags, post_id=post_id, post_url=post_url )\n\n@main.route('/profile', methods=['GET', 'POST'])\n@decorators.login_required\ndef profile():\n if request.method == 'GET':\n manage_prev_page()\n form = ProfileForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('Your account has been changed!')\n return redirect(session['prev_page'])\n else:\n flash('Please check your data!')\n return render_template('main/profile.html', form=form)\n\n\n\n \n\n\n\n ",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# No build slaves configured for this master.
slaves = []
<|reserved_special_token_1|>
# -*- python -*-
# ex: set syntax=python:
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# See master.experimental/slaves.cfg for documentation.
slaves = [
# NOTE(review): every entry below is commented out, so this master currently
# runs with an EMPTY slave list; the blocks are kept as configuration
# templates for re-enabling individual machines.
################################################################################
# Linux
################################################################################
# {
# 'master': 'Chromium',
# 'hostname': 'build59-m1',
# 'builder': 'Linux Builder x64',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '64',
# },
# {
# 'master': 'Chromium',
# 'hostname': 'vm119-m1',
# 'builder': 'Linux Tests x64',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '64',
# },
# {
# 'master': 'Chromium',
# 'builder': 'Linux (aura)',
# 'hostname': 'vm80-m1',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '32',
# },
# {
# 'master': 'Chromium',
# 'hostname': 'build13-m1',
# 'builder': 'Linux Builder (dbg)',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '32',
# },
# {
# 'master': 'Chromium',
# 'hostname': 'vm128-m1',
# 'builder': 'Linux Tests (dbg)(1)',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '32',
# },
# {
# 'master': 'Chromium',
# 'hostname': 'vm129-m1',
# 'builder': 'Linux Tests (dbg)(2)',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '32',
# },
# {
# 'master': 'Chromium',
# 'builder': 'Linux Sync',
# 'hostname': 'vm121-m1',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '64',
# },
# {
# 'master': 'Chromium',
# 'builder': 'Linux Clang (dbg)',
# 'hostname': 'vm79-m1',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '64',
# },
# ################################################################################
# # Android
# ################################################################################
# {
# 'master': 'Chromium',
# 'hostname': 'vm138-m1',
# 'builder': 'Android Builder',
# 'os': 'linux',
# 'version': 'lucid',
# 'bits': '64',
# },
]
|
flexible
|
{
"blob_id": "e807cef534226f3efb4a8df471598727fa068f02",
"index": 3805,
"step-1": "<mask token>\n",
"step-2": "slaves = []\n",
"step-3": "# -*- python -*-\n# ex: set syntax=python:\n\n# Copyright (c) 2012 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n# See master.experimental/slaves.cfg for documentation.\n\n\nslaves = [\n################################################################################\n# Linux\n################################################################################\n# {\n# 'master': 'Chromium',\n# 'hostname': 'build59-m1',\n# 'builder': 'Linux Builder x64',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '64',\n# },\n# {\n# 'master': 'Chromium',\n# 'hostname': 'vm119-m1',\n# 'builder': 'Linux Tests x64',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '64',\n# },\n# {\n# 'master': 'Chromium',\n# 'builder': 'Linux (aura)',\n# 'hostname': 'vm80-m1',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '32',\n# },\n# {\n# 'master': 'Chromium',\n# 'hostname': 'build13-m1',\n# 'builder': 'Linux Builder (dbg)',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '32',\n# },\n# {\n# 'master': 'Chromium',\n# 'hostname': 'vm128-m1',\n# 'builder': 'Linux Tests (dbg)(1)',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '32',\n# },\n# {\n# 'master': 'Chromium',\n# 'hostname': 'vm129-m1',\n# 'builder': 'Linux Tests (dbg)(2)',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '32',\n# },\n# {\n# 'master': 'Chromium',\n# 'builder': 'Linux Sync',\n# 'hostname': 'vm121-m1',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '64',\n# },\n# {\n# 'master': 'Chromium',\n# 'builder': 'Linux Clang (dbg)',\n# 'hostname': 'vm79-m1',\n# 'os': 'linux',\n# 'version': 'lucid',\n# 'bits': '64',\n# },\n# ################################################################################\n# # Android\n# ################################################################################\n# {\n# 'master': 'Chromium',\n# 'hostname': 'vm138-m1',\n# 'builder': 'Android Builder',\n# 'os': 
'linux',\n# 'version': 'lucid',\n# 'bits': '64',\n# },\n]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import json
import time
from typing import Dict
import threading
"""
Note: każdy request uruchamia osobny wątek.
Przegląd: `top -H -p <process_id>`
"""
from flask import Flask, jsonify, request
app = Flask(__name__)
# https://www.tutorialspoint.com/flask/flask_http_methods.htm
# ładowanie konfiguracji aplikacji (opcjonalne, ale to dobry pomysł);
# po zbudowaniu aplikacji (poniżej) file "config.json" powinien się znajdować w folderze aplikacji
with open('config.json', 'r') as f:
loaded = json.load(f)
magic = loaded['magic']
@app.route('/status')
def get_json_data():
    """Liveness endpoint; echoes the `magic` value loaded from config.json."""
    payload = {'comment': f'App działa OK; magic:{magic}'}
    return jsonify(payload)
# Available at: http://localhost:5001/compute?a=10&b=0
@app.route('/compute')
def compute():
    """Arithmetic demo endpoint; the sleep makes per-request threading visible.

    Returns sum/difference/division of query params a and b, HTTP 400 when
    the params are missing/non-numeric or when b == 0.
    """
    # type=int makes Flask yield None for a missing or non-numeric param.
    a = request.args.get('a', type=int)
    b = request.args.get('b', type=int)
    if a is None or b is None:
        # Fix: int(request.args.get('a')) raised TypeError/ValueError (HTTP
        # 500) when a param was missing or malformed; report a client error.
        return jsonify({'comment': 'query params a and b must be integers'}), 400
    print(f'request a={a}, thread:{threading.current_thread().name}')
    time.sleep(10.0)
    if b == 0:
        # Return an error message together with HTTP 400 (BAD_REQUEST).
        return jsonify({'comment': 'b==0, cannot divide'}), 400
    return jsonify({'sum': a + b, 'difference': a - b, 'division': a / b})
# Available at: http://localhost:5001/welcome/roadrunner/suffix/nice%20to%20meet%20you
@app.route('/welcome/<username>/suffix/<message>')
def welcome(username, message):
    """Greet *username* with a custom *message*, both taken from the URL."""
    greeting = {'comment': f'Hello {username}, {message}!'}
    return jsonify(greeting)
class Auth:
    """Simple credential record.

    The parameter names double as the JSON keys accepted by /user/create,
    which constructs instances via Auth(**request.json).
    """

    def __init__(self, user: str, pass_: str):
        self.user = user
        self.pass_ = pass_
# Task -> collect users in some structure (e.g. a list 'users', or a Dict/Set),
# and return an error when creating a user whose "user" field is already taken.
# Solution: in-memory registry keyed by user name (not persisted).
users: Dict[str, Auth] = {}
# Reachable via Postman (make a POST request):
#   localhost:5001/user/create
# In the "body" section choose "raw -> JSON" and send:
# {
#     "user": "Xi Wuhan",
#     "pass_": "123"
# }
@app.route('/user/create', methods=['POST'])
def create_user():
    """Register a new user; reject an already-taken user name with HTTP 400."""
    data = request.json
    k = Auth(**data)
    # Idiomatic membership test instead of users.keys().__contains__(k.user).
    if k.user in users:
        return jsonify({'comment': 'This user name already exists!'}), 400
    users[k.user] = k
    return jsonify(k.__dict__)
app.run(host='localhost', port=5001, debug=None, load_dotenv=False)  # can skip all args
# Possible compilation to a single executable file:
# `pyinstaller _zero.py -n my_flask_app --onefile`
|
normal
|
{
"blob_id": "8fcc2a13fd5a803e2d755a567c78c8274bd88aad",
"index": 7283,
"step-1": "<mask token>\n\n\nclass Auth:\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/welcome/<username>/suffix/<message>')\ndef welcome(username, message):\n return jsonify({'comment': f'Hello {username}, {message}!'})\n\n\nclass Auth:\n\n def __init__(self, user: str, pass_: str):\n self.user = user\n self.pass_ = pass_\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@app.route('/status')\ndef get_json_data():\n return jsonify({'comment': f'App działa OK; magic:{magic}'})\n\n\n@app.route('/compute')\ndef compute():\n a = int(request.args.get('a'))\n b = int(request.args.get('b'))\n print(f'request a={a}, thread:{threading.current_thread().name}')\n time.sleep(10.0)\n if b == 0:\n return jsonify({'comment': 'b==0, cannot divide'}), 400\n return jsonify({'sum': a + b, 'difference': a - b, 'division': a / b})\n\n\n@app.route('/welcome/<username>/suffix/<message>')\ndef welcome(username, message):\n return jsonify({'comment': f'Hello {username}, {message}!'})\n\n\nclass Auth:\n\n def __init__(self, user: str, pass_: str):\n self.user = user\n self.pass_ = pass_\n\n\n<mask token>\n\n\n@app.route('/user/create', methods=['POST'])\ndef create_user():\n data = request.json\n k = Auth(**data)\n if users.keys().__contains__(k.user):\n return jsonify({'comment': 'This user name already exists!'}), 400\n users[k.user] = k\n return jsonify(k.__dict__)\n\n\n<mask token>\n",
"step-4": "import json\nimport time\nfrom typing import Dict\nimport threading\n<mask token>\nfrom flask import Flask, jsonify, request\napp = Flask(__name__)\nwith open('config.json', 'r') as f:\n loaded = json.load(f)\n magic = loaded['magic']\n\n\n@app.route('/status')\ndef get_json_data():\n return jsonify({'comment': f'App działa OK; magic:{magic}'})\n\n\n@app.route('/compute')\ndef compute():\n a = int(request.args.get('a'))\n b = int(request.args.get('b'))\n print(f'request a={a}, thread:{threading.current_thread().name}')\n time.sleep(10.0)\n if b == 0:\n return jsonify({'comment': 'b==0, cannot divide'}), 400\n return jsonify({'sum': a + b, 'difference': a - b, 'division': a / b})\n\n\n@app.route('/welcome/<username>/suffix/<message>')\ndef welcome(username, message):\n return jsonify({'comment': f'Hello {username}, {message}!'})\n\n\nclass Auth:\n\n def __init__(self, user: str, pass_: str):\n self.user = user\n self.pass_ = pass_\n\n\nusers: Dict[str, Auth] = {}\n\n\n@app.route('/user/create', methods=['POST'])\ndef create_user():\n data = request.json\n k = Auth(**data)\n if users.keys().__contains__(k.user):\n return jsonify({'comment': 'This user name already exists!'}), 400\n users[k.user] = k\n return jsonify(k.__dict__)\n\n\napp.run(host='localhost', port=5001, debug=None, load_dotenv=False)\n",
"step-5": "import json\nimport time\nfrom typing import Dict\nimport threading\n\n\"\"\"\n Note: każdy request uruchamia osobny wątek. \n Przegląd: `top -H -p <process_id>`\n\"\"\"\n\n\nfrom flask import Flask, jsonify, request\n\napp = Flask(__name__)\n\n# https://www.tutorialspoint.com/flask/flask_http_methods.htm\n\n# ładowanie konfiguracji aplikacji (opcjonalne, ale to dobry pomysł);\n# po zbudowaniu aplikacji (poniżej) file \"config.json\" powinien się znajdować w folderze aplikacji\nwith open('config.json', 'r') as f:\n loaded = json.load(f)\n magic = loaded['magic']\n\n\n@app.route('/status')\ndef get_json_data():\n return jsonify({'comment': f'App działa OK; magic:{magic}'})\n\n\n# dostępna pod: http://localhost:5001/compute?a=10&b=0\n@app.route('/compute')\ndef compute():\n a = int(request.args.get('a'))\n b = int(request.args.get('b'))\n print(f'request a={a}, thread:{threading.current_thread().name}')\n time.sleep(10.0)\n if b == 0:\n # teraz zwracamy komunikat o błędzie, oraz http error-code 400 (BAD_REQUEST)\n return jsonify({'comment': 'b==0, cannot divide'}), 400\n return jsonify({'sum': a + b, 'difference': a - b, 'division': a / b})\n\n\n# dostępna pod: http://localhost:5001/welcome/roadrunner/suffix/nice%20to%20meet%20you\n@app.route('/welcome/<username>/suffix/<message>')\ndef welcome(username, message):\n return jsonify({'comment': f'Hello {username}, {message}!'})\n\n\nclass Auth:\n def __init__(self, user: str, pass_: str):\n self.user = user\n self.pass_ = pass_\n\n\n# zadanie -> zbierać userów w jakieś strukturze (np. 
liście 'users', albo Dict lub Set),\n# i zwrócić błąd jeśli tworzymy usera, którego pole \"user\" już zostało \"zajęte\"\n# rozwiązanie:\n\nusers: Dict[str, Auth] = {}\n\n\n# dostępna per Postman (trzeba zrobić zapytanie POST):\n# localhost:5001/user/create\n# w sekcji \"body\" trzba dać \"raw -> JSON\", i w polu JSON dodać:\n# {\n# \t\"user\": \"Xi Wuhan\",\n# \t\"pass_\": \"123\"\n# }\n@app.route('/user/create', methods=['POST'])\ndef create_user():\n data = request.json\n k = Auth(**data)\n if users.keys().__contains__(k.user):\n return jsonify({'comment': 'This user name already exists!'}), 400\n users[k.user] = k\n return jsonify(k.__dict__)\n\n\napp.run(host='localhost', port=5001, debug=None, load_dotenv=False) # can skip all args\n\n# możliwa kompilacja do pojedynczego pliku wykonywalnego:\n# `pyinstaller _zero.py -n my_flask_app --onefile\n",
"step-ids": [
1,
3,
6,
9,
10
]
}
|
[
1,
3,
6,
9,
10
] |
# encoding:utf-8
import tensorflow as tf
# Fix: the original `import p182.py as p182` tries to import submodule `py`
# from package `p182`; import the module directly.
import p182

# Build the file list and create the input-file queue from it. Before calling
# the input pipeline, all raw data must be converted to a uniform format and
# stored in TFRecord files; the pattern below should match every TFRecord
# file that provides training data.
files = tf.train.match_filenames_once("/home/shenxj/tf-work/datasets/file_pattern-*")
filename_queue = tf.train.string_input_producer(files, shuffle=False)

# Parse the TFRecord data as introduced in section 7.1: `image` holds the raw
# image bytes, `label` the example's class, and height/width/channels give
# the image dimensions.
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
    serialized_example,
    features={
        'image': tf.FixedLenFeature([], tf.string),
        'label': tf.FixedLenFeature([], tf.int64),
        'height': tf.FixedLenFeature([], tf.int64),
        # NOTE(review): 'weigth' (sic) is kept as-is -- it must match the key
        # used when the TFRecord files were written; verify against the writer.
        'weigth': tf.FixedLenFeature([], tf.int64),
        'channels': tf.FixedLenFeature([], tf.int64),
    }
)
image, label = features['image'], features['label']
# Fix: the original read features['wigth'], a key that was never parsed and
# would raise KeyError at graph-build time; use the declared key.
height, width = features['height'], features['weigth']
channels = features['channels']

# Decode the raw bytes into a pixel matrix and restore the image shape.
decoded_image = tf.decode_raw(image, tf.uint8)
decoded_image.set_shape([height, width, channels])
# Input image size expected by the network's input layer.
image_size = 299
# preprocess_for_train is the image-preprocessing routine from section 7.2.2.
distorted_image = p182.preprocess_for_train(
    decoded_image, image_size, image_size, None
)

# Group the processed images and labels into the batches the network is
# trained on, via tf.train.shuffle_batch.
min_after_dequeque = 10000
batch_size = 100
capacity = min_after_dequeque + 3 * batch_size
image_batch, label_batch = tf.train.shuffle_batch(
    [distorted_image, label], batch_size=batch_size,
    capacity=capacity, min_after_dequeue=min_after_dequeque
)

# Define the network structure and the optimization step: image_batch feeds
# the input layer, label_batch supplies the ground-truth answers.
# NOTE(review): inference, calc_loss and learning_rate are assumed to be
# defined elsewhere (book-style pseudo-code) -- confirm before running.
logit = inference(image_batch)
loss = calc_loss(logit, label_batch)
train_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss)
with tf.Session() as sess:
# 神经网络训练准备工作。这些工作包括变量初始化、线程启动
tf.initialize_all_variables().run()
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# 神经网络训练过程
for i in range(TRAINING_ROUNDS):
sess.run(train_step)
# 停止所有线程
coord.request_stop()
coord.join(threads)
|
normal
|
{
"blob_id": "1685a2c49bea14e6fcaffb03634f6875f8fa1049",
"index": 3726,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndecoded_image.set_shape([height, width, channels])\n<mask token>\nwith tf.Session() as sess:\n tf.initialize_all_variables().run()\n coord = tf.train.Coordinator()\n threads = tf.train.start_queue_runners(sess=sess, coord=coord)\n for i in range(TRAINING_ROUNDS):\n sess.run(train_step)\n coord.request_stop()\n coord.join(threads)\n",
"step-3": "<mask token>\nfiles = tf.train.match_filenames_once(\n '/home/shenxj/tf-work/datasets/file_pattern-*')\nfilename_queue = tf.train.string_input_producer(files, shuffle=False)\nreader = tf.TFRecordReader()\n_, serialized_example = reader.read(filename_queue)\nfeatures = tf.parse_single_example(serialized_example, features={'image':\n tf.FixedLenFeature([], tf.string), 'label': tf.FixedLenFeature([], tf.\n int64), 'height': tf.FixedLenFeature([], tf.int64), 'weigth': tf.\n FixedLenFeature([], tf.int64), 'channels': tf.FixedLenFeature([], tf.\n int64)})\nimage, label = features['image'], features['label']\nheight, width = features['height'], features['wigth']\nchannels = features['channels']\ndecoded_image = tf.decode_raw(image, tf.uint8)\ndecoded_image.set_shape([height, width, channels])\nimage_size = 299\ndistorted_image = p182.preprocess_for_train(decoded_image, image_size,\n image_size, None)\nmin_after_dequeque = 10000\nbatch_size = 100\ncapacity = min_after_dequeque + 3 * batch_size\nimage_batch, label_batch = tf.train.shuffle_batch([distorted_image, label],\n batch_size=batch_size, capacity=capacity, min_after_dequeue=\n min_after_dequeque)\nlogit = inference(image_batch)\nloss = calc_loss(logit, label_batch)\ntrain_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate\n ).minimize(loss)\nwith tf.Session() as sess:\n tf.initialize_all_variables().run()\n coord = tf.train.Coordinator()\n threads = tf.train.start_queue_runners(sess=sess, coord=coord)\n for i in range(TRAINING_ROUNDS):\n sess.run(train_step)\n coord.request_stop()\n coord.join(threads)\n",
"step-4": "import tensorflow as tf\nimport p182.py as p182\nfiles = tf.train.match_filenames_once(\n '/home/shenxj/tf-work/datasets/file_pattern-*')\nfilename_queue = tf.train.string_input_producer(files, shuffle=False)\nreader = tf.TFRecordReader()\n_, serialized_example = reader.read(filename_queue)\nfeatures = tf.parse_single_example(serialized_example, features={'image':\n tf.FixedLenFeature([], tf.string), 'label': tf.FixedLenFeature([], tf.\n int64), 'height': tf.FixedLenFeature([], tf.int64), 'weigth': tf.\n FixedLenFeature([], tf.int64), 'channels': tf.FixedLenFeature([], tf.\n int64)})\nimage, label = features['image'], features['label']\nheight, width = features['height'], features['wigth']\nchannels = features['channels']\ndecoded_image = tf.decode_raw(image, tf.uint8)\ndecoded_image.set_shape([height, width, channels])\nimage_size = 299\ndistorted_image = p182.preprocess_for_train(decoded_image, image_size,\n image_size, None)\nmin_after_dequeque = 10000\nbatch_size = 100\ncapacity = min_after_dequeque + 3 * batch_size\nimage_batch, label_batch = tf.train.shuffle_batch([distorted_image, label],\n batch_size=batch_size, capacity=capacity, min_after_dequeue=\n min_after_dequeque)\nlogit = inference(image_batch)\nloss = calc_loss(logit, label_batch)\ntrain_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate\n ).minimize(loss)\nwith tf.Session() as sess:\n tf.initialize_all_variables().run()\n coord = tf.train.Coordinator()\n threads = tf.train.start_queue_runners(sess=sess, coord=coord)\n for i in range(TRAINING_ROUNDS):\n sess.run(train_step)\n coord.request_stop()\n coord.join(threads)\n",
"step-5": "# encoding:utf-8\nimport tensorflow as tf\nimport p182.py as p182\n# 创建文件列表,并通过文件列表创建输入文件队列。在调用输入数据处理流程前,需要\n# 统一所有原始数据的格式并将它们存储到TFRcord文件中。下面给出的文件列表应该包含所\n# 有提供训练数据的TFRcord文件\nfiles = tf.train.match_filenames_once(\"/home/shenxj/tf-work/datasets/file_pattern-*\")\nfilename_queue = tf.train.string_input_producer(files, shuffle=False)\n\n# 使用类似7.1节中结婚嫂的方法解析TFRecord文件里的数据。这里假设image中存储的是图像\n# 的原始数据,label为该样例所对应的标签。height,width和channels给出了图像的维度。\nreader = tf.TFRecordReader()\n_, serialized_example = reader.read(filename_queue)\nfeatures = tf.parse_single_example(\n serialized_example,\n features={\n 'image': tf.FixedLenFeature([], tf.string),\n 'label': tf.FixedLenFeature([], tf.int64),\n 'height': tf.FixedLenFeature([], tf.int64),\n 'weigth': tf.FixedLenFeature([], tf.int64),\n 'channels': tf.FixedLenFeature([], tf.int64),\n }\n)\nimage, label = features['image'], features['label']\nheight, width = features['height'], features['wigth']\nchannels = features['channels']\n\n# 从原始图像数据解析出像素矩阵,并根据图像尺寸还原图像\ndecoded_image = tf.decode_raw(image, tf.uint8)\ndecoded_image.set_shape([height, width, channels])\n# 定义神经网络输入层图片的大小。\nimage_size = 299\n# preprocess_for_train为7.2.2小节中介绍的图像预处理程序\ndistorted_image = p182.preprocess_for_train(\n decoded_image, image_size, image_size, None\n)\n\n# 将处理后的图像和标签数据通过tf.train.shuffle_batch整理成神经网络训练时\n# 需要的batch\nmin_after_dequeque = 10000\nbatch_size = 100\ncapacity = min_after_dequeque + 3 * batch_size\nimage_batch, label_batch = tf.train.shuffle_batch(\n [distorted_image, label], batch_size=batch_size,\n capacity=capacity, min_after_dequeue=min_after_dequeque\n)\n\n# 定义神经网络的结构以及优化过程。image_batch可以作为输入提供给神经网络的输入层。\n# label_batch则提供了输入batch中样例的正确答案\nlogit = inference(image_batch)\nloss = calc_loss(logit, label_batch)\ntrain_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss)\n\n# 声明会话并运行神经网络的优化过程\nwith tf.Session() as sess:\n # 神经网络训练准备工作。这些工作包括变量初始化、线程启动\n tf.initialize_all_variables().run()\n coord = 
tf.train.Coordinator()\n threads = tf.train.start_queue_runners(sess=sess, coord=coord)\n\n # 神经网络训练过程\n for i in range(TRAINING_ROUNDS):\n sess.run(train_step)\n\n # 停止所有线程\n coord.request_stop()\n coord.join(threads)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Background(object):
    """A character background: a name, a free-text description, and four
    roll tables (personality traits, ideals, bonds, flaws) that are filled
    in one entry at a time via the add* methods.
    """

    def __init__(self, name):
        """Create an empty background called *name*."""
        self.name = name
        self.description = ''
        # The four roll tables start empty; entries are appended later.
        self.prTraits, self.ideals, self.bonds, self.flaws = [], [], [], []

    def getBackName(self):
        """Return the background's name."""
        return self.name

    def setBackDesc(self, desc):
        """Replace the descriptive blurb with *desc*."""
        self.description = desc

    def getBackDesc(self):
        """Return the descriptive blurb."""
        return self.description

    def addPrTrait(self, trait):
        """Append one personality-trait entry."""
        self.prTraits.append(trait)

    def getPrTraits(self):
        """Return the list of personality traits."""
        return self.prTraits

    def addIdeal(self, ideal):
        """Append one ideal entry."""
        self.ideals.append(ideal)

    def getIdeals(self):
        """Return the list of ideals."""
        return self.ideals

    def addBond(self, bond):
        """Append one bond entry."""
        self.bonds.append(bond)

    def getBonds(self):
        """Return the list of bonds."""
        return self.bonds

    def addFlaw(self, flaw):
        """Append one flaw entry."""
        self.flaws.append(flaw)

    def getFlaws(self):
        """Return the list of flaws."""
        return self.flaws
# Acolyte background: description plus its trait/ideal/bond/flaw roll tables.
acolyte = Background('Acolyte')
acolyte.setBackDesc('You have spent you life in the service of a temple or to a specific god or pantheon of gods. You act as an intermediary between the realm of the holy and the mortal world, performing sacred rites and offering sacrifices in order to conduct worshipers into the presence of the divine.\n')
acolyte.addPrTrait('I idolize a particular hero of my faith and constantly refer to that person\'s deeds and example.\n')
acolyte.addPrTrait('I can find common ground between the fiercest enemies, empathizing with them and always working toward peace.\n')
acolyte.addPrTrait('I see omens in every event and action. The gods try to speak to us, we just need to listen.')
acolyte.addPrTrait('Nothing can shake my optimistic attitude')
acolyte.addPrTrait('I quote (or misquote) sacred texts and proverbs in almost every situation.\n')
acolyte.addPrTrait('I am tolerant (or intolerant) of other faits and respect (or condemn) the worsip of other gods.\n')
acolyte.addPrTrait('I\'ve enjoyed fine food, drink, and high society among my temple\'s elite. Rough living grates on me.\n')
acolyte.addPrTrait('I\'ve spent so long in the temple that I have little practical experience dealing with people in the outside world.\n')
acolyte.addIdeal('The ancient traditions of worship and sacrifice must be preserved and upheld.\n')
acolyte.addIdeal('I alwyas try to help those in need, no matter what the personal cost.\n')
acolyte.addIdeal('We must help bring about the changes the gods are constantly working in the world.\n')
acolyte.addIdeal('I hope to one day rise to the top of my faith\'s religious hierarchy.\n')
acolyte.addIdeal('I trust that my deity will guide my actions. I have faith that if I work hard, things will go well.\n')
acolyte.addIdeal('I seek to prove myself worthy of by god\'s favor by matching my actions against his or her teachings.\n')
acolyte.addBond('I would die to recover an ancient relic of my faith that was lost long ago.\n')
acolyte.addBond('I will someday get revenge on the corrupt temple hierarchy who branded me a heretic.\n')
acolyte.addBond('I owe my life to the priest who took me in whem my parents died.\n')
acolyte.addBond('Everything i do is for the common people.')
acolyte.addBond('I will do anything to protect the temple where I served.')
acolyte.addBond('I seek to preserve a sacred text that my enemies consider heretical and seek to destroy.\n')
acolyte.addFlaw('I judge others harshly, and myself even more severely.\n')
acolyte.addFlaw('I put too much trust in those who wield power within my temple\'s hierarchy.\n')
acolyte.addFlaw('My piety sometimes leads me to blindly trust those that mrofess faith in my god.\n')
acolyte.addFlaw('I am inflexible in my thinking.\n')
acolyte.addFlaw('I am suspicious of strangers and expect the worst of them.\n')
acolyte.addFlaw('Once I pick a goal, I become obsessed with it to the detriment of everything else in my life.\n')
# Charlatan background: description plus its trait/ideal/bond/flaw roll tables.
charlatan = Background('Charlatan')
charlatan.setBackDesc('You have always had a way with people. You know what makes them tick, you can tease out their hearts\' desires after a few minutes of conversation, and with a few leading questions you can read them like they were children\'s books. It\'s a useful talent, and one that you\'re perfectly willing to use for your advantage.\n')
charlatan.addPrTrait('I fall in and out of love easily, and am always pursuing someone.\n')
charlatan.addPrTrait('I have a joke for every occasion, especially occasions where humor is inappropriate.\n')
charlatan.addPrTrait('Flattery is my preferred trick for getting what I want.\n')
charlatan.addPrTrait('I\'m a born gambler who can\'t resist taking a risk for a potential payoff.\n')
charlatan.addPrTrait('I lie about almost everything, even when there\'s no good reason to.\n')
charlatan.addPrTrait('Sarcasm and insults are my weapons of choice.\n')
charlatan.addPrTrait('I keep multiple holy symbols on me and invoke whatever deity might come in useful at any given moment.\n')
charlatan.addPrTrait('I pocket anything i see tha tmight have some value.\n')
charlatan.addIdeal('I am a free spirit—no one tells me what to do.\n')
charlatan.addIdeal('I never target people who can\'t afford to lose a few coins.\n')
charlatan.addIdeal('I distribute the money i acquire to the people who really need it.\n')
charlatan.addIdeal('I never run the same con twice.\n')
charlatan.addIdeal('Material goods come and go. Bonds of friendship last forever.\n')
charlatan.addIdeal('I\'m determined to make something of myself.\n')
charlatan.addBond('I fleeced the wrong person and must work to ensure that this individual never crosses paths with me or those i care about.\n')
charlatan.addBond('I owe everything to my mentor—a horrible person who\'s probably rotting in jail somewhere.\n')
charlatan.addBond('Somewhere out there, I have a child who doesn\'t know me. I\'m making the world better for him or her.\n')
charlatan.addBond('I came from a noble family, and one day I\'ll reclaim my lands and title from those who stole them from me.\n')
charlatan.addBond('A powerful person killed someone I love. Some day soon, I\'ll have my revenge...\n')
charlatan.addBond('I swindled and ruined a person who didn\'t deserve it. I seek to atone for my misdeeds but might never be able to forgive myself.\n')
charlatan.addFlaw('I can\'t resist a pretty face.\n')
charlatan.addFlaw('I\'m always in debt. I spend my ill-gotten gains on decadent luxuries faster than I bring them in.\n')
charlatan.addFlaw('I\'m convinced that no one could ever fool me the way I fool others.\n')
charlatan.addFlaw('I\'m too greedy for my own good. I can\'t resist taking a risk if there\'s money involved.\n')
charlatan.addFlaw('I can\'t resist swindling people who are more powerful than me.\n')
charlatan.addFlaw('I hate to admit it and will hate myself for it, but I\'ll run and preserve my own hide if the going gets tough.\n')
# Criminal background: description plus its trait/ideal/bond/flaw roll tables.
criminal = Background('Criminal')
criminal.setBackDesc('You are an experienced criminal with a history of breaking the law. You have spent a lot of time among other criminals and still have contacts with the criminal underworld. You\'re far closer than most people to the world of murder, theft, and violence that pervades the underbelly of civilization, and you have survived up to this point by flounting the rules and regulations of society.\n')
criminal.addPrTrait('I always have a plan for what to do when things go wrong.\n')
criminal.addPrTrait('I am always calm, no matter what the situation. I never raise my voice or let my emotions control me.\n')
criminal.addPrTrait('The first thign i do in a new place is note the locations of everything valuable—or where cuch things coulg be hidden.\n')
criminal.addPrTrait('I would rather make a new friend than a new enemy.\n')
criminal.addPrTrait('I am incredibly slow to trust. Those who seep the fairest often have the most to hide.\n')
criminal.addPrTrait('I don\'t pay attention to the risks in a situation. Never tell me the odds.\n')
criminal.addPrTrait('The best way to get me to do something is to tell me I can\'t do it.\n')
criminal.addPrTrait('I blow up at the slightest insult.\n')
criminal.addIdeal('I don\'t steal from others in the trade.\n')
criminal.addIdeal('Chains are meant to be broken, as those who would forge them.\n')
criminal.addIdeal('I steal from the wealthy so that i can help people in need.\n')
criminal.addIdeal('I will do whatever it takes to become wealthy.\n')
criminal.addIdeal('I\'m loyal to my friends, not to any ideals, and everyone else can take a trip down the Styx for all I care.\n')
criminal.addIdeal('There\'s a spark of good in everyone.\n')
criminal.addBond('I\'m trying to pay off an old debt I owe to a generous benefactor.\n')
criminal.addBond('My Ill-gotten gains go to support my family.\n')
criminal.addBond('Something important was taken from me, and I aim to steal it back.\n')
criminal.addBond('I will become the greatest thief that had ever lived.\n')
criminal.addBond('I\'m guilty of a terrible crime. I hope i can redeem myself for it.\n')
criminal.addBond('Someone I loved died becoues of a mistake I made. That will never happen again.\n')
criminal.addFlaw('When I see something valuable, I can\'t think about anything but how to steal it.\n')
criminal.addFlaw('When faced with a choice between money and my friends, I usually choose the money.\n')
criminal.addFlaw('If there\'s a plan, I\'ll forget it. If i don\'t forget it, I\'ll ignore it.\n')
criminal.addFlaw('I have a "tell" that reveals when I\'m lying.\n')
criminal.addFlaw('I turn tail and run when things look bad.\n')
criminal.addFlaw('An innocent person is in prison for a crime that I committed. I\'m ok with that.\n')
# Entertainer background: description plus its trait/ideal/bond/flaw roll tables.
entertainer = Background('Entertainer')
entertainer.setBackDesc('You thrive in front of an audience. You know how to entrance them, entertain them, and even inspire them. Your poetics can stir the hearts of those who hear you, awakening greif or joy, laughter or anger. Your music raises the spirits or captures their sorrow. Your dance steps captivate, your humor cuts to the quick. Whatever techniques you use, your art is your life.\n')
entertainer.addPrTrait('I know a story relevant to almost every situation.\n')
entertainer.addPrTrait('Whenever I come to a new place, I collect local rumors and spread gossip.\n')
entertainer.addPrTrait('I’m a hopeless romantic, always searching for that “special someone.”\n')
entertainer.addPrTrait('Nobody stays angry at me or around me for long, since I can defuse any amount of tension.\n')
entertainer.addPrTrait('I love a good insult, even one directed at me.\n')
entertainer.addPrTrait('I get bitter if I’m not the center of attention.\n')
entertainer.addPrTrait('I’ll settle for nothing less than perfection.\n')
entertainer.addPrTrait('I change my mood or my mind as quickly as I change key in a song.\n')
entertainer.addIdeal('When I perform, I make the world better than it was.\n')
entertainer.addIdeal('The stories, legends, and songs of the past must never be forgotten, for they teach us who we are.\n')
entertainer.addIdeal('The world is in need of new ideas and bold action.\n')
entertainer.addIdeal('I\'m only in it for the money and fame.\n')
entertainer.addIdeal('I like seeing the smiles on people\'s faces whei I perform. That\'s all that matters.\n')
entertainer.addIdeal('Art should reflect the soul; it should come from within and reveal who we really are.\n')
entertainer.addBond('My instrument is my most treasured possession, and it reminds me of someone I love.\n')
entertainer.addBond('Someone stoll my precious instrument, and someday I\'ll get it back.\n')
entertainer.addBond('I want to become famous, whatever it takes.\n')
entertainer.addBond('I idolize a hero of the old tales and measures my deeds against that person\'s.\n')
entertainer.addBond('I will do anything to prove myelf superior to my hated rival.\n')
entertainer.addBond('I would do anything for the other members of my old troupe.\n')
entertainer.addFlaw('I\'ll do anything to win fame and renown.\n')
entertainer.addFlaw('I\'m a sucker for a pretty face.\n')
entertainer.addFlaw('A scandal prevents me from ever going home again. That kind of trouble seems to follow me around.\n')
entertainer.addFlaw('I once satirized a noble who still wants my head. It was a mistake that i will likely repeat.\n')
entertainer.addFlaw('I have trouble keeping my feelings hidden. My sharp tongue lands me in trouble.\n')
entertainer.addFlaw('Despite my best efforts, I am unreliable to my friends.\n')
# Folk Hero background: description plus its trait/ideal/bond/flaw roll tables.
folkHero = Background('Folk Hero')
folkHero.setBackDesc('You come from a humble social rank, but you are destined for so much more. Already the people of your home village regard you as their champion, and your destiny calls you to stand against the tyrants and monsters that threaten the common folk everywhere.\n')
folkHero.addPrTrait('I judge people by their actions, not their words.\n')
folkHero.addPrTrait('If someone is in trouble, I’m always ready to lend help.\n')
folkHero.addPrTrait('When I set my mind to something, I follow through no matter what gets in my way.\n')
folkHero.addPrTrait('I have a strong sense of fair play and always try to find the most equitable solution to arguments.\n')
folkHero.addPrTrait('I\'m confident in my own abilities and do what I can to instill confidence in others.\n')
folkHero.addPrTrait('Thinking is for other people. I prefer action.\n')
folkHero.addPrTrait('I misuse long words in an attempt to sound smarter.\n')
folkHero.addPrTrait('I get bored easily. When am I going to get on with my destiny?\n')
folkHero.addIdeal('Peole deserve to be treated with dignity and respect.\n')
folkHero.addIdeal('No one should get preferentail treatment before the law, and no one is above the law.\n')
folkHero.addIdeal('Tyrants must not be allowed to oppress the people\n')
folkHero.addIdeal('If I become strong, I can take what I want—What I deserve.\n')
folkHero.addIdeal('There\'s no good in pretending to be something I\'m not.\n')
folkHero.addIdeal('Nothing and no one can steer me away from my higher calling.\n')
folkHero.addBond('I have a family, but I have no idea where they are. One day, I hope to see them again.\n')
folkHero.addBond('I worked the land, I love the land, and I will protect the land.\n')
folkHero.addBond('A proud noble once gave me a horrible beating, and I will take my revenge on any bully I encounter.\n')
folkHero.addBond('My tools are symbols of my past life, and I carry them so that I will never forget my roots.\n')
folkHero.addBond('I protect those who cannot protect themselves.\n')
folkHero.addBond('I wish my childhood sweetheart had come with me to pursue my destiny.\n')
folkHero.addFlaw('The tyrant who rules my land will stop at nothing to see me killed.\n')
folkHero.addFlaw('I\'m convinced of the significance of my destiny, and blind to my shortcomings and the risk of failure.\n')
folkHero.addFlaw('The people who knew me when I was young know my shameful secret, so I can never go home again.\n')
folkHero.addFlaw('I have a weakness for the vices of the city, especially hard drink.\n')
folkHero.addFlaw('Secretly, I believe that things would be better if I were a tyrant lording over the land.\n')
folkHero.addFlaw('I have trouble trusting my allies.\n')
# Guild Artisan background: description plus its trait/ideal/bond/flaw roll tables.
guildArtisan = Background('Guild Artisan')
guildArtisan.setBackDesc('You are a member of an artisan\'s guild, skilled in a particular field and closely associated with other artisans. You are a well-establishedpart of the mercantile world, freed by talent and wealth from the constraints of a feudal social order. You learned your skills as ans apprentice to a master artisan, under the sponsorship of your guild, untill you became a master in your own right.\n')
guildArtisan.addPrTrait('I believe that anything worth doing is worth doing right. I can\'t help it—I\'m a perfectionist.\n')
guildArtisan.addPrTrait('I\'m a snob who looks down on those who can\'t appreciate fine art.\n')
guildArtisan.addPrTrait('I always want to know how things work and what makes people tick.\n')
guildArtisan.addPrTrait('I\'m full of witty aphorisms and have a proverb for every occasion.\n')
guildArtisan.addPrTrait('I\'m rude to people who lack my commitment to hard work and fair play.\n')
guildArtisan.addPrTrait('I like to talk at length about my profession.\n')
guildArtisan.addPrTrait('I don\'t part with my money easily and will haggle tirelessly to get the best deal possible.\n')
guildArtisan.addPrTrait('I\'m well known for my work, and I want to make sure everyone appreciates it. I\'m always taken aback when people haven\'t heard o f me.\n')
guildArtisan.addIdeal('It is the duty of all civilized people to strengthen the bonds of community and the security of civilization.\n')
guildArtisan.addIdeal('My talents were given to me so that I could use them to benefit the world.\n')
guildArtisan.addIdeal('Everyone should be free to pursue his or her own livelihood.\n')
guildArtisan.addIdeal('I\'m only in it for the money.\n')
guildArtisan.addIdeal('I\'m committed to the people I care about, not to ideals.\n')
guildArtisan.addIdeal('I work hard to be teh best there is at my craft.\n')
guildArtisan.addBond('The workshop where I learned my trade is the most important place in the world to me.\n')
guildArtisan.addBond('I created a great work for someone, and then found them unworthy to receive it. I\'m still looking for someone worthy.\n')
guildArtisan.addBond('I owe my guild a great debt for forging me into the person I am today.\n')
guildArtisan.addBond('I pursue wealth to secure someone\'s love.\n')
guildArtisan.addBond('One day I will return to my guild and prove that I am the greatest artisan of them all.\n')
guildArtisan.addBond('I will get revenge on the evil forces that destroyed my place of business and ruined my livelihood.\n')
guildArtisan.addFlaw('I\'ll do anything to get my hands on something rare or priceless.\n')
guildArtisan.addFlaw('I\'m quick to assume that someone is trying to cheat me.\n')
guildArtisan.addFlaw('No one must ever learn that I once stole money from guild coffers.\n')
guildArtisan.addFlaw('I\'m never satisfied with what I have—I always want more.\n')
guildArtisan.addFlaw('I would kill to acquire a noble title.\n')
guildArtisan.addFlaw('I\'m horribly jealous of anyone who can outshine my handiwork. Everywhere I go, I\'m surrounded by rivals.\n')
# Hermit background: description plus its trait/ideal/bond/flaw roll tables.
hermit = Background('Hermit')
hermit.setBackDesc('You lived in seclusion—either in a sheltered community such as a monastery, or entirely alone—for a formative part of your life. In your time apart from the lcamor of society, you found quiety, solitude, and perhaps some of the answers you were looking for.\n')
hermit.addPrTrait('I\'ve been isolated for so long that I rarely speak, preferring gestures and the occasional grunt.\n')
hermit.addPrTrait('I am utterly serene, even in the face of disaster.\n')
hermit.addPrTrait('The leader of my community had something wise to say on every topic, and I am eager to share that wisdom.\n')
hermit.addPrTrait('I feel tremendous empathy for all who suffer.\n')
hermit.addPrTrait('I\'m oblivious to etiquette and social expectations.\n')
hermit.addPrTrait('I connect everything that happens to me to a grand, cosmic plan.\n')
hermit.addPrTrait('I often get lost in my own thoughts and contemplation, becoming oblivious to my surroundings.\n')
hermit.addPrTrait('I am working on a grand philosophical theory and love sharing my ideas.\n')
hermit.addIdeal('My gifts are meant to be shared with all, not used for my own benefit.\n')
hermit.addIdeal('Emotions must not cloud our sense of what is right and true, or our logical thinking.\n')
hermit.addIdeal('Inquiry and curiosity are the pillars of progress.\n')
hermit.addIdeal('Solitude and contemplation are paths toward mystical or magical power.\n')
hermit.addIdeal('Meddling in the affairs of others only causes trouble.\n')
hermit.addIdeal('If you know yourself, there\'s nothing left to know.\n')
hermit.addBond('Nothing is more important to me than the other members of my hermitage, order, or association.\n')
hermit.addBond('I entered seclusion to hide frome the ones who might still be hunting me. I must someday confront them.\n')
hermit.addBond('I\'m still seeking the enlightenment I pursued in my seclusion, and it still eludes me.\n')
hermit.addBond('I entered seclusion because I loved someone I could not have.\n')
hermit.addBond('Should my discovery come to light, it could bring ruin to the world.\n')
hermit.addBond('My isolation gave me great insight into a great evil that only I can destroy.\n')
hermit.addFlaw('Now that I\'ve returned to the world, I enjoy its delights a little too much.\n')
hermit.addFlaw('I harbor dark, bloodthirsty thoughts that my isolation and meditation failed to quell.\n')
hermit.addFlaw('I am dogmatic in my thoughts and philosophy.\n')
hermit.addFlaw('I let my need to win arguments overshadow friendships and harmony.\n')
hermit.addFlaw('I\'d risk too much to uncover a lost bit of knowledge.\n')
hermit.addFlaw('I like keeping secrets and won\'t share them with anyone.\n')
# Noble background: description plus its trait/ideal/bond/flaw roll tables.
noble = Background('Noble')
noble.setBackDesc('You understand wealth, power, and privilege. You carry a noble title, and your family owns land, collects taxes, and wields significant political influence. You might be a pampered aristocrat unfamiliar with work or discomfort, a former merchant just elevated to the nobility, or a disinherited scoundrel with a disproportionate sense of entitlement. Or you could be an honest, hard-working landowner who cares deeply about the people who live and work on your land, keenly aware of your responsibility to them.\n')
noble.addPrTrait('My eloquent flattery makes everyone I talk to feel like the most wonderful and important person in the world.\n')
noble.addPrTrait('The common folk love me for my kindness and generosity.\n')
noble.addPrTrait('No one could doubt by looking at my regal bearing that I am a cut above the unwashed masses.\n')
noble.addPrTrait('I take great pains to always look my best and follow the latest fashions.\n')
noble.addPrTrait('I don\'t like to get my hands dirty, and I won\'t be caught dead in unsuitable accommodations.\n')
noble.addPrTrait('Despite my noble birth, I do not place myself above other folk. We all have the same blood.\n')
noble.addPrTrait('My favor, once lost, is lost forever.\n')
noble.addPrTrait('If you do me an injury, I will crush you, ruin your name, and salt your fields.\n')
noble.addIdeal('Respect is due to me because of my position, but all people regardless of station deserve to be treated with dignity.\n')
noble.addIdeal('It is my duty to respect the authority of those aboce me, just as those below me must respect mine.\n')
noble.addIdeal('I must prove that I can handle myself without the coddling of my family.\n')
noble.addIdeal('If I can attain more power, no one will tell me what to do.\n')
noble.addIdeal('Blood runs thicker than water.\n')
noble.addIdeal('It is my duty to protect and care for the people beneth me.\n')
noble.addBond('I will face any challenge to win the approval of my family.\n')
noble.addBond('My house\'s alliance with another noble family must be sustained at all costs.\n')
noble.addBond('Nothing is more important than the other members of my family.\n')
noble.addBond('I am in love with the heir of a family that my family despises.\n')
noble.addBond('My loyalty to my soverign is unwabering.\n')
noble.addBond('The common folk must see me as a hero of the people.\n')
noble.addFlaw('I secretly believe that everyone is beneath me.\n')
noble.addFlaw('I hide a truly scandalous secret that could ruin my family forever.\n')
noble.addFlaw('I too often hear veiled insults and threats in every word addressed to me, and I\'m quick to anger.\n')
noble.addFlaw('I have an insatiable desire for carnal pleasures.\n')
noble.addFlaw('In fact, the world does revolve around me.\n')
noble.addFlaw('By my words and actions, I often bring shame to my family.\n')
# --- Outlander background: description, traits, ideals, bonds, flaws ---
# Typo fixes in displayed text: "wheather" -> "whether", "remeber" -> "remember",
# "challange" -> "challenge".
outlander = Background('Outlander')
outlander.setBackDesc('You grew up in the wilds, far from civilization and the comforts of town and technology. You\'ve witnessed the migration of herds larger than forests, survived weather more extreme than any city-dweller could comprehend, and enjoyed the solitude of being the only thinking creature for miles in any direction. The wilds are in your blood, whether you were a nomad, an explorer, a recluse, a hunter-gatherer, or even a marauder. Even in places where you don\'t know the specific features of the terrain, you know the ways of the wild.\n')
outlander.addPrTrait('I\'m driven by a wanderlust that led me away from home.\n')
outlander.addPrTrait('I watch over my friends as if they were a litter of newborn pups.\n')
outlander.addPrTrait('I once ran twenty-five miles without stopping to warn my clan of an approaching orc horde. I\'d do it again if I had to.\n')
outlander.addPrTrait('I have a lesson for every situation, drawn from observing nature.\n')
outlander.addPrTrait('I place no stock in wealthy or well-mannered folk. Money and manners won\'t save you from a hungry owlbear.\n')
outlander.addPrTrait('I\'m always picking things up, absently fiddling with them, and sometimes accidentally breaking them.\n')
outlander.addPrTrait('I feel far more comfortable around animals than people.\n')
outlander.addPrTrait('I was, in fact, raised by wolves.\n')
outlander.addIdeal('Life is like the seasons, in constant change, and we must change with it.\n')
outlander.addIdeal('It is each person\'s responsibility to make the most happiness for the whole tribe.\n')
outlander.addIdeal('If I dishonor myself, then I dishonor my whole clan.\n')
outlander.addIdeal('The strongest are meant to rule.\n')
outlander.addIdeal('The natural world is more important than all the constraints of civilization.\n')
outlander.addIdeal('I must earn glory in battle, for myself and my clan.\n')
outlander.addBond('My family, clan, or tribe is the most important thing in my life, even when they are far from me.\n')
outlander.addBond('An injury to the unspoiled wilderness of my home is an injury to me.\n')
outlander.addBond('I will bring terrible wrath down on the evildoers who destroyed my homeland.\n')
outlander.addBond('I am the last of my tribe, and it is up to me to ensure their names enter legend.\n')
outlander.addBond('I suffer awful visions of a coming disaster and will do anything to prevent it.\n')
outlander.addBond('It is my duty to provide children to sustain my tribe.\n')
outlander.addFlaw('I am too enamored of ale, wine, and other intoxicants.\n')
outlander.addFlaw('There\'s no room for caution in a life lived to the fullest.\n')
outlander.addFlaw('I remember every insult I\'ve received and nurse a silent resentment toward anyone who\'s ever wronged me.\n')
outlander.addFlaw('I am slow to trust members of other races, tribes, and societies.\n')
outlander.addFlaw('Violence is my answer to almost any challenge.\n')
outlander.addFlaw('Don\'t expect me to save those who can\'t save themselves. It is nature\'s way that the strong thrive and the weak perish.\n')
# --- Sage background: description, traits, ideals, bonds, flaws ---
# Fixes: the first bond was missing the terminal ".\n" every sibling entry has;
# typos "studie" -> "studied", "empression" -> "impression",
# "Hothing" -> "Nothing", "existance" -> "existence".
sage = Background('Sage')
sage.setBackDesc('You spent years learning the lore of the multiverse. You scoured manuscripts, studied scrolls, and listened to the greatest experts on the subjects that interest you. Your efforts have made you a master in your fields of study.\n')
sage.addPrTrait('I use polysyllabic words that convey the impression of great erudition.\n')
sage.addPrTrait('I\'ve read every book in the world\'s greatest libraries—or I like to boast that I have.\n')
sage.addPrTrait('I\'m used to helping out those who aren\'t as smart as I am, and I patiently explain anything and everything to others.\n')
sage.addPrTrait('There\'s nothing I like more than a good mystery.\n')
sage.addPrTrait('I\'m willing to listen to every side of an argument before I make my own judgment.\n')
sage.addPrTrait('I . . . speak . . . slowly . . . when talking . . . to idiots, . . . which . . . almost . . . everyone . . . is . . . compared . . . to me.\n')
sage.addPrTrait('I am horribly, horribly awkward in social situations.\n')
sage.addPrTrait('I\'m convinced that people are always trying to steal my secrets.\n')
sage.addIdeal('The path to power and self-improvement is through knowledge.\n')
sage.addIdeal('What is beautiful points us beyond itself toward what is true.\n')
sage.addIdeal('Emotions must not cloud our logical thinking.\n')
sage.addIdeal('Nothing should fetter the infinite possibility inherent in all existence.\n')
sage.addIdeal('Knowledge is the path to power and domination.\n')
sage.addIdeal('The goal of a life of study is the betterment of oneself.\n')
sage.addBond('It is my duty to protect my students.\n')
sage.addBond('I have an ancient text that holds terrible secrets that must not fall into the wrong hands.\n')
sage.addBond('I work to preserve a library, university, scriptorium, or monastery.\n')
sage.addBond('My life\'s work is a series of tomes related to a specific field of lore.\n')
sage.addBond('I\'ve been searching my whole life for the answer to a certain question.\n')
sage.addBond('I sold my soul for knowledge. I hope to do great deeds and win it back.\n')
sage.addFlaw('I am easily distracted by the promise of information.\n')
sage.addFlaw('Most people scream and run when they see a demon, I stop and take notes on its anatomy.\n')
sage.addFlaw('Unlocking an ancient mystery is worth the price of a civilization.\n')
sage.addFlaw('I overlook obvious solutions in favor of complicated ones.\n')
sage.addFlaw('I speak without really thinking through my words, invariably insulting others.\n')
sage.addFlaw('I can\'t keep a secret to save my life, or anyone else\'s.\n')
# --- Sailor background: description, traits, ideals, bonds, flaws ---
# Typo fix in displayed text: "otyuggh" -> "otyugh" (canonical creature spelling).
sailor = Background('Sailor')
sailor.setBackDesc('You sailed on a seagoing vessel for years. In that time, you faced down mighty storms, monsters of the deep, and those who wanted to sink your craft to the bottomless depths. Your first love is the distant line of the horizon, but the time has come to try your hand at something new.\n')
sailor.addPrTrait('My friends know they can rely on me, no matter what.\n')
sailor.addPrTrait('I work hard so that I can play hard when the work is done.\n')
sailor.addPrTrait('I enjoy sailing into new ports and making new friends over a flagon of ale.\n')
sailor.addPrTrait('I stretch the truth for the sake of a good story.\n')
sailor.addPrTrait('To me, a tavern brawl is a nice way to get to know a new city.\n')
sailor.addPrTrait('I never pass up a friendly wager.\n')
sailor.addPrTrait('My language is as foul as an otyugh nest.\n')
sailor.addPrTrait('I like a job well done, especially if I can convince someone else to do it.\n')
sailor.addIdeal('The thing that keeps a ship together is mutual respect between captain and crew.\n')
sailor.addIdeal('We all do the work, so we all share in the rewards.\n')
sailor.addIdeal('The sea is freedom—the freedom to go anywhere and do anything.\n')
sailor.addIdeal('I\'m a predator, and the other ships on the sea are my prey.\n')
sailor.addIdeal('I\'m committed to my crewmates, not to ideals.\n')
sailor.addIdeal('Someday I\'ll own my own ship and chart my own destiny.\n')
sailor.addBond('I\'m loyal to my captain first, everything else second.\n')
sailor.addBond('The ship is most important—crewmates and captains come and go.\n')
sailor.addBond('I\'ll always remember my first ship.\n')
sailor.addBond('In a harbor town, I have a paramour whose eyes nearly stole me from the sea.\n')
sailor.addBond('I was cheated out of my fair share of the profits, and I want to get my due.\n')
sailor.addBond('Ruthless pirates murdered my captain and crewmates, plundered our ship, and left me to die. Vengeance will be mine.\n')
sailor.addFlaw('I follow orders, even if I think they\'re wrong.\n')
sailor.addFlaw('I\'ll say anything to avoid having to do extra work.\n')
sailor.addFlaw('Once someone questions my courage, I never back down no matter how dangerous the situation.\n')
sailor.addFlaw('Once I start drinking, it\'s hard for me to stop.\n')
sailor.addFlaw('I can\'t help but pocket loose coins and other trinkets I come across.\n')
sailor.addFlaw('My pride will probably lead to my destruction.\n')
# --- Soldier background: description only (traits/ideals/bonds/flaws not yet populated) ---
# Typo fix in displayed text: "memver" -> "member".
soldier = Background('Soldier')
soldier.setBackDesc('War has been your life for as long as you care to remember. You trained as a youth, studied the use of weapons and armor, learned basic survival techniques, including how to stay alive on the battlefield. You might have been part of a standing national army or a mercenary company, or perhaps a member of a local militia who rose to prominence during a recent war.\n')
# --- Urchin background: description only ---
urchin = Background('Urchin')
urchin.setBackDesc('You grew up on the streets alone, orphaned, and poor. You had no one to watch over you or to provide for you, so you learned to provide for yourself. You fought fiercely over food and kept a constant watch out for other desperate souls who might steal from you. You slept on rooftops and in alleyways, exposed to the elements, and endured sickness without the advantage of medicine or a place to recuperate. You\'ve survived despite all odds, and did so through cunning, strength, speed, or some combination of each.\n')
# TODO: personality traits, ideals, bonds, and flaws for Urchin are not yet
# populated (placeholder left by the original author).
#urchin.addPrTrait()
backgroundList = [acolyte,charlatan,criminal,entertainer,folkHero,guildArtisan,hermit,noble,outlander,sage,sailor,soldier,urchin]
|
normal
|
{
"blob_id": "45449e728dadd241b00f5c4bfb3fd3950f04037c",
"index": 2627,
"step-1": "class Background(object):\n\n def __init__(self, name):\n self.name = name\n self.description = ''\n self.prTraits = []\n self.ideals = []\n self.bonds = []\n self.flaws = []\n\n def getBackName(self):\n return self.name\n\n def setBackDesc(self, desc):\n self.description = desc\n\n def getBackDesc(self):\n return self.description\n <mask token>\n\n def getPrTraits(self):\n return self.prTraits\n\n def addIdeal(self, ideal):\n self.ideals.append(ideal)\n <mask token>\n\n def addBond(self, bond):\n self.bonds.append(bond)\n\n def getBonds(self):\n return self.bonds\n\n def addFlaw(self, flaw):\n self.flaws.append(flaw)\n\n def getFlaws(self):\n return self.flaws\n\n\n<mask token>\n",
"step-2": "class Background(object):\n\n def __init__(self, name):\n self.name = name\n self.description = ''\n self.prTraits = []\n self.ideals = []\n self.bonds = []\n self.flaws = []\n\n def getBackName(self):\n return self.name\n\n def setBackDesc(self, desc):\n self.description = desc\n\n def getBackDesc(self):\n return self.description\n\n def addPrTrait(self, trait):\n self.prTraits.append(trait)\n\n def getPrTraits(self):\n return self.prTraits\n\n def addIdeal(self, ideal):\n self.ideals.append(ideal)\n\n def getIdeals(self):\n return self.ideals\n\n def addBond(self, bond):\n self.bonds.append(bond)\n\n def getBonds(self):\n return self.bonds\n\n def addFlaw(self, flaw):\n self.flaws.append(flaw)\n\n def getFlaws(self):\n return self.flaws\n\n\n<mask token>\n",
"step-3": "class Background(object):\n\n def __init__(self, name):\n self.name = name\n self.description = ''\n self.prTraits = []\n self.ideals = []\n self.bonds = []\n self.flaws = []\n\n def getBackName(self):\n return self.name\n\n def setBackDesc(self, desc):\n self.description = desc\n\n def getBackDesc(self):\n return self.description\n\n def addPrTrait(self, trait):\n self.prTraits.append(trait)\n\n def getPrTraits(self):\n return self.prTraits\n\n def addIdeal(self, ideal):\n self.ideals.append(ideal)\n\n def getIdeals(self):\n return self.ideals\n\n def addBond(self, bond):\n self.bonds.append(bond)\n\n def getBonds(self):\n return self.bonds\n\n def addFlaw(self, flaw):\n self.flaws.append(flaw)\n\n def getFlaws(self):\n return self.flaws\n\n\n<mask token>\nacolyte.setBackDesc(\n \"\"\"You have spent you life in the service of a temple or to a specific god or pantheon of gods. You act as an intermediary between the realm of the holy and the mortal world, performing sacred rites and offering sacrifices in order to conduct worshipers into the presence of the divine.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I idolize a particular hero of my faith and constantly refer to that person's deeds and example.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I can find common ground between the fiercest enemies, empathizing with them and always working toward peace.\n\"\"\"\n )\nacolyte.addPrTrait(\n 'I see omens in every event and action. The gods try to speak to us, we just need to listen.'\n )\nacolyte.addPrTrait('Nothing can shake my optimistic attitude')\nacolyte.addPrTrait(\n 'I quote (or misquote) sacred texts and proverbs in almost every situation.\\n'\n )\nacolyte.addPrTrait(\n \"\"\"I am tolerant (or intolerant) of other faits and respect (or condemn) the worsip of other gods.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I've enjoyed fine food, drink, and high society among my temple's elite. 
Rough living grates on me.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I've spent so long in the temple that I have little practical experience dealing with people in the outside world.\n\"\"\"\n )\nacolyte.addIdeal(\n 'The ancient traditions of worship and sacrifice must be preserved and upheld.\\n'\n )\nacolyte.addIdeal(\n 'I alwyas try to help those in need, no matter what the personal cost.\\n')\nacolyte.addIdeal(\n \"\"\"We must help bring about the changes the gods are constantly working in the world.\n\"\"\"\n )\nacolyte.addIdeal(\n \"I hope to one day rise to the top of my faith's religious hierarchy.\\n\")\nacolyte.addIdeal(\n \"\"\"I trust that my deity will guide my actions. I have faith that if I work hard, things will go well.\n\"\"\"\n )\nacolyte.addIdeal(\n \"\"\"I seek to prove myself worthy of by god's favor by matching my actions against his or her teachings.\n\"\"\"\n )\nacolyte.addBond(\n 'I would die to recover an ancient relic of my faith that was lost long ago.\\n'\n )\nacolyte.addBond(\n \"\"\"I will someday get revenge on the corrupt temple hierarchy who branded me a heretic.\n\"\"\"\n )\nacolyte.addBond(\n 'I owe my life to the priest who took me in whem my parents died.\\n')\nacolyte.addBond('Everything i do is for the common people.')\nacolyte.addBond('I will do anything to protect the temple where I served.')\nacolyte.addBond(\n \"\"\"I seek to preserve a sacred text that my enemies consider heretical and seek to destroy.\n\"\"\"\n )\nacolyte.addFlaw('I judge others harshly, and myself even more severely.\\n')\nacolyte.addFlaw(\n \"I put too much trust in those who wield power within my temple's hierarchy.\\n\"\n )\nacolyte.addFlaw(\n \"\"\"My piety sometimes leads me to blindly trust those that mrofess faith in my god.\n\"\"\"\n )\nacolyte.addFlaw('I am inflexible in my thinking.\\n')\nacolyte.addFlaw('I am suspicious of strangers and expect the worst of them.\\n')\nacolyte.addFlaw(\n \"\"\"Once I pick a goal, I become obsessed with it to 
the detriment of everything else in my life.\n\"\"\"\n )\n<mask token>\ncharlatan.setBackDesc(\n \"\"\"You have always had a way with people. You know what makes them tick, you can tease out their hearts' desires after a few minutes of conversation, and with a few leading questions you can read them like they were children's books. It's a useful talent, and one that you're perfectly willing to use for your advantage.\n\"\"\"\n )\ncharlatan.addPrTrait(\n 'I fall in and out of love easily, and am always pursuing someone.\\n')\ncharlatan.addPrTrait(\n \"\"\"I have a joke for every occasion, especially occasions where humor is inappropriate.\n\"\"\"\n )\ncharlatan.addPrTrait(\n 'Flattery is my preferred trick for getting what I want.\\n')\ncharlatan.addPrTrait(\n \"I'm a born gambler who can't resist taking a risk for a potential payoff.\\n\"\n )\ncharlatan.addPrTrait(\n \"I lie about almost everything, even when there's no good reason to.\\n\")\ncharlatan.addPrTrait('Sarcasm and insults are my weapons of choice.\\n')\ncharlatan.addPrTrait(\n \"\"\"I keep multiple holy symbols on me and invoke whatever deity might come in useful at any given moment.\n\"\"\"\n )\ncharlatan.addPrTrait('I pocket anything i see tha tmight have some value.\\n')\ncharlatan.addIdeal('I am a free spirit—no one tells me what to do.\\n')\ncharlatan.addIdeal(\n \"I never target people who can't afford to lose a few coins.\\n\")\ncharlatan.addIdeal(\n 'I distribute the money i acquire to the people who really need it.\\n')\ncharlatan.addIdeal('I never run the same con twice.\\n')\ncharlatan.addIdeal(\n 'Material goods come and go. 
Bonds of friendship last forever.\\n')\ncharlatan.addIdeal(\"I'm determined to make something of myself.\\n\")\ncharlatan.addBond(\n \"\"\"I fleeced the wrong person and must work to ensure that this individual never crosses paths with me or those i care about.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I owe everything to my mentor—a horrible person who's probably rotting in jail somewhere.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"Somewhere out there, I have a child who doesn't know me. I'm making the world better for him or her.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I came from a noble family, and one day I'll reclaim my lands and title from those who stole them from me.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"A powerful person killed someone I love. Some day soon, I'll have my revenge...\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I swindled and ruined a person who didn't deserve it. I seek to atone for my misdeeds but might never be able to forgive myself.\n\"\"\"\n )\ncharlatan.addFlaw(\"I can't resist a pretty face.\\n\")\ncharlatan.addFlaw(\n \"\"\"I'm always in debt. I spend my ill-gotten gains on decadent luxuries faster than I bring them in.\n\"\"\"\n )\ncharlatan.addFlaw(\n \"I'm convinced that no one could ever fool me the way I fool others.\\n\")\ncharlatan.addFlaw(\n \"\"\"I'm too greedy for my own good. I can't resist taking a risk if there's money involved.\n\"\"\"\n )\ncharlatan.addFlaw(\n \"I can't resist swindling people who are more powerful than me.\\n\")\ncharlatan.addFlaw(\n \"\"\"I hate to admit it and will hate myself for it, but I'll run and preserve my own hide if the going gets tough.\n\"\"\"\n )\n<mask token>\ncriminal.setBackDesc(\n \"\"\"You are an experienced criminal with a history of breaking the law. You have spent a lot of time among other criminals and still have contacts with the criminal underworld. 
You're far closer than most people to the world of murder, theft, and violence that pervades the underbelly of civilization, and you have survived up to this point by flounting the rules and regulations of society.\n\"\"\"\n )\ncriminal.addPrTrait(\n 'I always have a plan for what to do when things go wrong.\\n')\ncriminal.addPrTrait(\n \"\"\"I am always calm, no matter what the situation. I never raise my voice or let my emotions control me.\n\"\"\"\n )\ncriminal.addPrTrait(\n \"\"\"The first thign i do in a new place is note the locations of everything valuable—or where cuch things coulg be hidden.\n\"\"\"\n )\ncriminal.addPrTrait('I would rather make a new friend than a new enemy.\\n')\ncriminal.addPrTrait(\n \"\"\"I am incredibly slow to trust. Those who seep the fairest often have the most to hide.\n\"\"\"\n )\ncriminal.addPrTrait(\n \"I don't pay attention to the risks in a situation. Never tell me the odds.\\n\"\n )\ncriminal.addPrTrait(\n \"The best way to get me to do something is to tell me I can't do it.\\n\")\ncriminal.addPrTrait('I blow up at the slightest insult.\\n')\ncriminal.addIdeal(\"I don't steal from others in the trade.\\n\")\ncriminal.addIdeal(\n 'Chains are meant to be broken, as those who would forge them.\\n')\ncriminal.addIdeal(\n 'I steal from the wealthy so that i can help people in need.\\n')\ncriminal.addIdeal('I will do whatever it takes to become wealthy.\\n')\ncriminal.addIdeal(\n \"\"\"I'm loyal to my friends, not to any ideals, and everyone else can take a trip down the Styx for all I care.\n\"\"\"\n )\ncriminal.addIdeal(\"There's a spark of good in everyone.\\n\")\ncriminal.addBond(\n \"I'm trying to pay off an old debt I owe to a generous benefactor.\\n\")\ncriminal.addBond('My Ill-gotten gains go to support my family.\\n')\ncriminal.addBond(\n 'Something important was taken from me, and I aim to steal it back.\\n')\ncriminal.addBond('I will become the greatest thief that had ever lived.\\n')\ncriminal.addBond(\n \"I'm guilty 
of a terrible crime. I hope i can redeem myself for it.\\n\")\ncriminal.addBond(\n \"\"\"Someone I loved died becoues of a mistake I made. That will never happen again.\n\"\"\"\n )\ncriminal.addFlaw(\n \"\"\"When I see something valuable, I can't think about anything but how to steal it.\n\"\"\"\n )\ncriminal.addFlaw(\n \"\"\"When faced with a choice between money and my friends, I usually choose the money.\n\"\"\"\n )\ncriminal.addFlaw(\n \"If there's a plan, I'll forget it. If i don't forget it, I'll ignore it.\\n\"\n )\ncriminal.addFlaw('I have a \"tell\" that reveals when I\\'m lying.\\n')\ncriminal.addFlaw('I turn tail and run when things look bad.\\n')\ncriminal.addFlaw(\n \"\"\"An innocent person is in prison for a crime that I committed. I'm ok with that.\n\"\"\"\n )\n<mask token>\nentertainer.setBackDesc(\n \"\"\"You thrive in front of an audience. You know how to entrance them, entertain them, and even inspire them. Your poetics can stir the hearts of those who hear you, awakening greif or joy, laughter or anger. Your music raises the spirits or captures their sorrow. Your dance steps captivate, your humor cuts to the quick. 
Whatever techniques you use, your art is your life.\n\"\"\"\n )\nentertainer.addPrTrait('I know a story relevant to almost every situation.\\n')\nentertainer.addPrTrait(\n \"\"\"Whenever I come to a new place, I collect local rumors and spread gossip.\n\"\"\"\n )\nentertainer.addPrTrait(\n 'I’m a hopeless romantic, always searching for that “special someone.”\\n')\nentertainer.addPrTrait(\n \"\"\"Nobody stays angry at me or around me for long, since I can defuse any amount of tension.\n\"\"\"\n )\nentertainer.addPrTrait('I love a good insult, even one directed at me.\\n')\nentertainer.addPrTrait('I get bitter if I’m not the center of attention.\\n')\nentertainer.addPrTrait('I’ll settle for nothing less than perfection.\\n')\nentertainer.addPrTrait(\n 'I change my mood or my mind as quickly as I change key in a song.\\n')\nentertainer.addIdeal('When I perform, I make the world better than it was.\\n')\nentertainer.addIdeal(\n \"\"\"The stories, legends, and songs of the past must never be forgotten, for they teach us who we are.\n\"\"\"\n )\nentertainer.addIdeal('The world is in need of new ideas and bold action.\\n')\nentertainer.addIdeal(\"I'm only in it for the money and fame.\\n\")\nentertainer.addIdeal(\n \"\"\"I like seeing the smiles on people's faces whei I perform. 
That's all that matters.\n\"\"\"\n )\nentertainer.addIdeal(\n \"\"\"Art should reflect the soul; it should come from within and reveal who we really are.\n\"\"\"\n )\nentertainer.addBond(\n \"\"\"My instrument is my most treasured possession, and it reminds me of someone I love.\n\"\"\"\n )\nentertainer.addBond(\n \"Someone stoll my precious instrument, and someday I'll get it back.\\n\")\nentertainer.addBond('I want to become famous, whatever it takes.\\n')\nentertainer.addBond(\n \"\"\"I idolize a hero of the old tales and measures my deeds against that person's.\n\"\"\"\n )\nentertainer.addBond(\n 'I will do anything to prove myelf superior to my hated rival.\\n')\nentertainer.addBond(\n 'I would do anything for the other members of my old troupe.\\n')\nentertainer.addFlaw(\"I'll do anything to win fame and renown.\\n\")\nentertainer.addFlaw(\"I'm a sucker for a pretty face.\\n\")\nentertainer.addFlaw(\n \"\"\"A scandal prevents me from ever going home again. That kind of trouble seems to follow me around.\n\"\"\"\n )\nentertainer.addFlaw(\n \"\"\"I once satirized a noble who still wants my head. It was a mistake that i will likely repeat.\n\"\"\"\n )\nentertainer.addFlaw(\n \"\"\"I have trouble keeping my feelings hidden. My sharp tongue lands me in trouble.\n\"\"\"\n )\nentertainer.addFlaw('Despite my best efforts, I am unreliable to my friends.\\n'\n )\n<mask token>\nfolkHero.setBackDesc(\n \"\"\"You come from a humble social rank, but you are destined for so much more. 
Already the people of your home village regard you as their champion, and your destiny calls you to stand against the tyrants and monsters that threaten the common folk everywhere.\n\"\"\"\n )\nfolkHero.addPrTrait('I judge people by their actions, not their words.\\n')\nfolkHero.addPrTrait(\n 'If someone is in trouble, I’m always ready to lend help.\\n')\nfolkHero.addPrTrait(\n \"\"\"When I set my mind to something, I follow through no matter what gets in my way.\n\"\"\"\n )\nfolkHero.addPrTrait(\n \"\"\"I have a strong sense of fair play and always try to find the most equitable solution to arguments.\n\"\"\"\n )\nfolkHero.addPrTrait(\n \"\"\"I'm confident in my own abilities and do what I can to instill confidence in others.\n\"\"\"\n )\nfolkHero.addPrTrait('Thinking is for other people. I prefer action.\\n')\nfolkHero.addPrTrait('I misuse long words in an attempt to sound smarter.\\n')\nfolkHero.addPrTrait(\n 'I get bored easily. When am I going to get on with my destiny?\\n')\nfolkHero.addIdeal('Peole deserve to be treated with dignity and respect.\\n')\nfolkHero.addIdeal(\n \"\"\"No one should get preferentail treatment before the law, and no one is above the law.\n\"\"\"\n )\nfolkHero.addIdeal('Tyrants must not be allowed to oppress the people\\n')\nfolkHero.addIdeal(\n 'If I become strong, I can take what I want—What I deserve.\\n')\nfolkHero.addIdeal(\"There's no good in pretending to be something I'm not.\\n\")\nfolkHero.addIdeal(\n 'Nothing and no one can steer me away from my higher calling.\\n')\nfolkHero.addBond(\n \"\"\"I have a family, but I have no idea where they are. 
One day, I hope to see them again.\n\"\"\"\n )\nfolkHero.addBond(\n 'I worked the land, I love the land, and I will protect the land.\\n')\nfolkHero.addBond(\n \"\"\"A proud noble once gave me a horrible beating, and I will take my revenge on any bully I encounter.\n\"\"\"\n )\nfolkHero.addBond(\n \"\"\"My tools are symbols of my past life, and I carry them so that I will never forget my roots.\n\"\"\"\n )\nfolkHero.addBond('I protect those who cannot protect themselves.\\n')\nfolkHero.addBond(\n 'I wish my childhood sweetheart had come with me to pursue my destiny.\\n')\nfolkHero.addFlaw(\n 'The tyrant who rules my land will stop at nothing to see me killed.\\n')\nfolkHero.addFlaw(\n \"\"\"I'm convinced of the significance of my destiny, and blind to my shortcomings and the risk of failure.\n\"\"\"\n )\nfolkHero.addFlaw(\n \"\"\"The people who knew me when I was young know my shameful secret, so I can never go home again.\n\"\"\"\n )\nfolkHero.addFlaw(\n 'I have a weakness for the vices of the city, especially hard drink.\\n')\nfolkHero.addFlaw(\n \"\"\"Secretly, I believe that things would be better if I were a tyrant lording over the land.\n\"\"\"\n )\nfolkHero.addFlaw('I have trouble trusting my allies.\\n')\n<mask token>\nguildArtisan.setBackDesc(\n \"\"\"You are a member of an artisan's guild, skilled in a particular field and closely associated with other artisans. You are a well-establishedpart of the mercantile world, freed by talent and wealth from the constraints of a feudal social order. You learned your skills as ans apprentice to a master artisan, under the sponsorship of your guild, untill you became a master in your own right.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"\"\"I believe that anything worth doing is worth doing right. 
I can't help it—I'm a perfectionist.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"I'm a snob who looks down on those who can't appreciate fine art.\\n\")\nguildArtisan.addPrTrait(\n 'I always want to know how things work and what makes people tick.\\n')\nguildArtisan.addPrTrait(\n \"I'm full of witty aphorisms and have a proverb for every occasion.\\n\")\nguildArtisan.addPrTrait(\n \"I'm rude to people who lack my commitment to hard work and fair play.\\n\")\nguildArtisan.addPrTrait('I like to talk at length about my profession.\\n')\nguildArtisan.addPrTrait(\n \"\"\"I don't part with my money easily and will haggle tirelessly to get the best deal possible.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"\"\"I'm well known for my work, and I want to make sure everyone appreciates it. I'm always taken aback when people haven't heard o f me.\n\"\"\"\n )\nguildArtisan.addIdeal(\n \"\"\"It is the duty of all civilized people to strengthen the bonds of community and the security of civilization.\n\"\"\"\n )\nguildArtisan.addIdeal(\n \"\"\"My talents were given to me so that I could use them to benefit the world.\n\"\"\"\n )\nguildArtisan.addIdeal(\n 'Everyone should be free to pursue his or her own livelihood.\\n')\nguildArtisan.addIdeal(\"I'm only in it for the money.\\n\")\nguildArtisan.addIdeal(\n \"I'm committed to the people I care about, not to ideals.\\n\")\nguildArtisan.addIdeal('I work hard to be teh best there is at my craft.\\n')\nguildArtisan.addBond(\n \"\"\"The workshop where I learned my trade is the most important place in the world to me.\n\"\"\"\n )\nguildArtisan.addBond(\n \"\"\"I created a great work for someone, and then found them unworthy to receive it. 
I'm still looking for someone worthy.\n\"\"\"\n )\nguildArtisan.addBond(\n 'I owe my guild a great debt for forging me into the person I am today.\\n')\nguildArtisan.addBond(\"I pursue wealth to secure someone's love.\\n\")\nguildArtisan.addBond(\n \"\"\"One day I will return to my guild and prove that I am the greatest artisan of them all.\n\"\"\"\n )\nguildArtisan.addBond(\n \"\"\"I will get revenge on the evil forces that destroyed my place of business and ruined my livelihood.\n\"\"\"\n )\nguildArtisan.addFlaw(\n \"I'll do anything to get my hands on something rare or priceless.\\n\")\nguildArtisan.addFlaw(\n \"I'm quick to assume that someone is trying to cheat me.\\n\")\nguildArtisan.addFlaw(\n 'No one must ever learn that I once stole money from guild coffers.\\n')\nguildArtisan.addFlaw(\n \"I'm never satisfied with what I have—I always want more.\\n\")\nguildArtisan.addFlaw('I would kill to acquire a noble title.\\n')\nguildArtisan.addFlaw(\n \"\"\"I'm horribly jealous of anyone who can outshine my handiwork. Everywhere I go, I'm surrounded by rivals.\n\"\"\"\n )\n<mask token>\nhermit.setBackDesc(\n \"\"\"You lived in seclusion—either in a sheltered community such as a monastery, or entirely alone—for a formative part of your life. 
In your time apart from the lcamor of society, you found quiety, solitude, and perhaps some of the answers you were looking for.\n\"\"\"\n )\nhermit.addPrTrait(\n \"\"\"I've been isolated for so long that I rarely speak, preferring gestures and the occasional grunt.\n\"\"\"\n )\nhermit.addPrTrait('I am utterly serene, even in the face of disaster.\\n')\nhermit.addPrTrait(\n \"\"\"The leader of my community had something wise to say on every topic, and I am eager to share that wisdom.\n\"\"\"\n )\nhermit.addPrTrait('I feel tremendous empathy for all who suffer.\\n')\nhermit.addPrTrait(\"I'm oblivious to etiquette and social expectations.\\n\")\nhermit.addPrTrait(\n 'I connect everything that happens to me to a grand, cosmic plan.\\n')\nhermit.addPrTrait(\n \"\"\"I often get lost in my own thoughts and contemplation, becoming oblivious to my surroundings.\n\"\"\"\n )\nhermit.addPrTrait(\n 'I am working on a grand philosophical theory and love sharing my ideas.\\n'\n )\nhermit.addIdeal(\n 'My gifts are meant to be shared with all, not used for my own benefit.\\n')\nhermit.addIdeal(\n \"\"\"Emotions must not cloud our sense of what is right and true, or our logical thinking.\n\"\"\"\n )\nhermit.addIdeal('Inquiry and curiosity are the pillars of progress.\\n')\nhermit.addIdeal(\n 'Solitude and contemplation are paths toward mystical or magical power.\\n')\nhermit.addIdeal('Meddling in the affairs of others only causes trouble.\\n')\nhermit.addIdeal(\"If you know yourself, there's nothing left to know.\\n\")\nhermit.addBond(\n \"\"\"Nothing is more important to me than the other members of my hermitage, order, or association.\n\"\"\"\n )\nhermit.addBond(\n \"\"\"I entered seclusion to hide frome the ones who might still be hunting me. 
I must someday confront them.\n\"\"\"\n )\nhermit.addBond(\n \"\"\"I'm still seeking the enlightenment I pursued in my seclusion, and it still eludes me.\n\"\"\"\n )\nhermit.addBond(\n 'I entered seclusion because I loved someone I could not have.\\n')\nhermit.addBond(\n 'Should my discovery come to light, it could bring ruin to the world.\\n')\nhermit.addBond(\n 'My isolation gave me great insight into a great evil that only I can destroy.\\n'\n )\nhermit.addFlaw(\n \"Now that I've returned to the world, I enjoy its delights a little too much.\\n\"\n )\nhermit.addFlaw(\n \"\"\"I harbor dark, bloodthirsty thoughts that my isolation and meditation failed to quell.\n\"\"\"\n )\nhermit.addFlaw('I am dogmatic in my thoughts and philosophy.\\n')\nhermit.addFlaw(\n 'I let my need to win arguments overshadow friendships and harmony.\\n')\nhermit.addFlaw(\"I'd risk too much to uncover a lost bit of knowledge.\\n\")\nhermit.addFlaw(\"I like keeping secrets and won't share them with anyone.\\n\")\n<mask token>\nnoble.setBackDesc(\n \"\"\"You understand wealth, power, and privilege. You carry a noble title, and your family owns land, collects taxes, and wields significant political influence. You might be a pampered aristocrat unfamiliar with work or discomfort, a former merchant just elevated to the nobility, or a disinherited scoundrel with a disproportionate sense of entitlement. 
Or you could be an honest, hard-working landowner who cares deeply about the people who live and work on your land, keenly aware of your responsibility to them.\n\"\"\"\n )\nnoble.addPrTrait(\n \"\"\"My eloquent flattery makes everyone I talk to feel like the most wonderful and important person in the world.\n\"\"\"\n )\nnoble.addPrTrait('The common folk love me for my kindness and generosity.\\n')\nnoble.addPrTrait(\n \"\"\"No one could doubt by looking at my regal bearing that I am a cut above the unwashed masses.\n\"\"\"\n )\nnoble.addPrTrait(\n 'I take great pains to always look my best and follow the latest fashions.\\n'\n )\nnoble.addPrTrait(\n \"\"\"I don't like to get my hands dirty, and I won't be caught dead in unsuitable accommodations.\n\"\"\"\n )\nnoble.addPrTrait(\n \"\"\"Despite my noble birth, I do not place myself above other folk. We all have the same blood.\n\"\"\"\n )\nnoble.addPrTrait('My favor, once lost, is lost forever.\\n')\nnoble.addPrTrait(\n \"\"\"If you do me an injury, I will crush you, ruin your name, and salt your fields.\n\"\"\"\n )\nnoble.addIdeal(\n \"\"\"Respect is due to me because of my position, but all people regardless of station deserve to be treated with dignity.\n\"\"\"\n )\nnoble.addIdeal(\n \"\"\"It is my duty to respect the authority of those aboce me, just as those below me must respect mine.\n\"\"\"\n )\nnoble.addIdeal(\n 'I must prove that I can handle myself without the coddling of my family.\\n'\n )\nnoble.addIdeal('If I can attain more power, no one will tell me what to do.\\n')\nnoble.addIdeal('Blood runs thicker than water.\\n')\nnoble.addIdeal('It is my duty to protect and care for the people beneth me.\\n')\nnoble.addBond('I will face any challenge to win the approval of my family.\\n')\nnoble.addBond(\n \"My house's alliance with another noble family must be sustained at all costs.\\n\"\n )\nnoble.addBond(\n 'Nothing is more important than the other members of my family.\\n')\nnoble.addBond(\n 'I am in love 
with the heir of a family that my family despises.\\n')\nnoble.addBond('My loyalty to my soverign is unwabering.\\n')\nnoble.addBond('The common folk must see me as a hero of the people.\\n')\nnoble.addFlaw('I secretly believe that everyone is beneath me.\\n')\nnoble.addFlaw(\n 'I hide a truly scandalous secret that could ruin my family forever.\\n')\nnoble.addFlaw(\n \"\"\"I too often hear veiled insults and threats in every word addressed to me, and I'm quick to anger.\n\"\"\"\n )\nnoble.addFlaw('I have an insatiable desire for carnal pleasures.\\n')\nnoble.addFlaw('In fact, the world does revolve around me.\\n')\nnoble.addFlaw('By my words and actions, I often bring shame to my family.\\n')\n<mask token>\noutlander.setBackDesc(\n \"\"\"You grew up in the wilds, far from civilization and the comforts of town and technology. You've witnessed the migration of herds larger than forests, survived weather more extreme than any city-dweller could comprehend, and enjoyed the solitude of being the only thinking creature for miles in any direction. The wilds are in your blood, wheather you were a nomad, an explorer, a recluse, a hunter-gatherer, or even a marauder. Even in places where you don't know the specific features of the terrain, you know the ways of the wild.\n\"\"\"\n )\noutlander.addPrTrait(\"I'm driven by a wanderlust that led me away from home.\\n\"\n )\noutlander.addPrTrait(\n 'I watch over my friends as if they were a litter of newborn pups.\\n')\noutlander.addPrTrait(\n \"\"\"I once ran twenty-five miles without stopping to warn my clan of an approaching orc horde. I'd do it again if I had to.\n\"\"\"\n )\noutlander.addPrTrait(\n 'I have a lesson for every situation, drawn from observing nature.\\n')\noutlander.addPrTrait(\n \"\"\"I place no stock in wealthy or well-mannered folk. 
Money and manners won't save you from a hungry owlbear.\n\"\"\"\n )\noutlander.addPrTrait(\n \"\"\"I'm always picking things up, absently fiddling with them, and sometimes accidentally breaking them.\n\"\"\"\n )\noutlander.addPrTrait(\n 'I feel far more comfortable around animals than people.\\n')\noutlander.addPrTrait('I was, in fact, raised by wolves.\\n')\noutlander.addIdeal(\n 'Life is like the seasons, in constant change, and we must change with it.\\n'\n )\noutlander.addIdeal(\n \"\"\"It is each person's responsibility to make the most happiness for the whole tribe.\n\"\"\"\n )\noutlander.addIdeal('If I dishonor myself, then I dishonor my whole clan.\\n')\noutlander.addIdeal('The strongest are meant to rule.\\n')\noutlander.addIdeal(\n \"\"\"The natural world is more important than all the constraints of civilization.\n\"\"\"\n )\noutlander.addIdeal('I must earn glory in battle, for myself and my clan.\\n')\noutlander.addBond(\n \"\"\"My family, clan, or tribe is the most important thing in my life, even when they are far from me.\n\"\"\"\n )\noutlander.addBond(\n 'An injury to the unspoiled wilderness of my home is an injury to me.\\n')\noutlander.addBond(\n 'I will bring terrible wrath down on the evildoers who destroyed my homeland.\\n'\n )\noutlander.addBond(\n \"\"\"I am the last of my tribe, and it is up to me to ensure their names enter legend.\n\"\"\"\n )\noutlander.addBond(\n \"\"\"I suffer awful visions of a coming disaster and will do anything to prevent it.\n\"\"\"\n )\noutlander.addBond('It is my duty to provide children to sustain my tribe.\\n')\noutlander.addFlaw('I am too enamored of ale, wine, and other intoxicants.\\n')\noutlander.addFlaw(\n \"There's no room for caution in a life lived to the fullest.\\n\")\noutlander.addFlaw(\n \"\"\"I remeber every insult I've received and nurse a silent resentment toward anyone who's ever wronged me.\n\"\"\"\n )\noutlander.addFlaw(\n 'I am slow to trust members of other races, tribes, and 
societies.\\n')\noutlander.addFlaw('Violence is my answer to almost any challange.\\n')\noutlander.addFlaw(\n \"\"\"Don't expect me to save those who can't save themselves. It is nature's way that the strong thrive and the weak perish.\n\"\"\"\n )\n<mask token>\nsage.setBackDesc(\n \"\"\"You spent years learning the lore of the multiverse. You scoured manuscripts, studie scrolls, and listened to the greatest experts on the subjects that interest you. Your efforts have made you a master in your fields of study.\n\"\"\"\n )\nsage.addPrTrait(\n 'I use polysyllabic words that convey the empression of great erudition.\\n'\n )\nsage.addPrTrait(\n \"\"\"I've read every book in the world's greatest libraries—or I like to boast that I have.\n\"\"\"\n )\nsage.addPrTrait(\n \"\"\"I'm used to helping out those who aren't as smart as I am, and I patiently explain anything and everything to others.\n\"\"\"\n )\nsage.addPrTrait(\"There's nothing I like more than a good mystery.\\n\")\nsage.addPrTrait(\n \"\"\"I'm willing to listen to every side of an argument before I make my own judgment.\n\"\"\"\n )\nsage.addPrTrait(\n \"\"\"I . . . speak . . . slowly . . . when talking . . . to idiots, . . . which . . . almost . . . everyone . . . is . . . compared . . . 
to me.\n\"\"\"\n )\nsage.addPrTrait('I am horribly, horribly awkward in social situations.\\n')\nsage.addPrTrait(\n \"I'm convinced that people are always trying to steal my secrets.\\n\")\nsage.addIdeal('The path to power and self-improvement is through knowledge.\\n')\nsage.addIdeal(\n 'What is beautiful points us beyond itself toward what is true.\\n')\nsage.addIdeal('Emotions must not cloud our logical thinking.\\n')\nsage.addIdeal(\n 'Hothing should fetter the infinite possibility inherent in all existance.\\n'\n )\nsage.addIdeal('Knowledge is the path to power and domination.\\n')\nsage.addIdeal('The goal of a life of study is the betterment of oneself.\\n')\nsage.addBond('It is my duty to protect my students')\nsage.addBond(\n \"\"\"I have an ancient text that holds terrible secrets that must not fall into the wrong hands.\n\"\"\"\n )\nsage.addBond(\n 'I work to preserve a library, university, scriptorium, or monastery.\\n')\nsage.addBond(\n \"My life's work is a series of tomes related to a specific field of lore.\\n\"\n )\nsage.addBond(\n \"I've been searching my whole life for the answer to a certain question.\\n\"\n )\nsage.addBond(\n 'I sold my soul for knowledge. I hope to do great deeds and win it back.\\n'\n )\nsage.addFlaw('I am easily distracted by the promise of information.\\n')\nsage.addFlaw(\n \"\"\"Most people scream and run when they see a demon, I stop and take notes on its anatomy.\n\"\"\"\n )\nsage.addFlaw(\n 'Unlocking an ancient mystery is worth the price of a civilization.\\n')\nsage.addFlaw('I overlook obvious solutions in favor of complicated ones.\\n')\nsage.addFlaw(\n 'I speak without really thinking through my words, invariably insulting others.\\n'\n )\nsage.addFlaw(\"I can't keep a secret to save my life, or anyone else's.\\n\")\n<mask token>\nsailor.setBackDesc(\n \"\"\"You sailed on a seagoing vessel for years. 
In that time, you faced down mighty storms, monsters of the deep, and those who wanted to sink your craft to the bottomless depths. Your first love is the distant line of the horizon, but the time has come to try your hand at something new.\n\"\"\"\n )\nsailor.addPrTrait('My friends know they can rely on me, no matter what.\\n')\nsailor.addPrTrait(\n 'I work hard so that I can play hard when the work is done.\\n')\nsailor.addPrTrait(\n 'I enjoy sailing into new ports and making new friends over a flagon of ale.\\n'\n )\nsailor.addPrTrait('I stretch the truth for the sake of a good story.\\n')\nsailor.addPrTrait(\n 'To me, a tavern brawl is a nice way to get to know a new city.\\n')\nsailor.addPrTrait('I never pass up a friendly wager.\\n')\nsailor.addPrTrait('My language is as foul as an otyuggh nest.\\n')\nsailor.addPrTrait(\n 'I like a job well done, especially if I can convince someone else to do it.\\n'\n )\nsailor.addIdeal(\n \"\"\"The thing that keeps a ship together is mutual respect between captain and crew.\n\"\"\"\n )\nsailor.addIdeal('We all do the work, so we all share in the rewards.\\n')\nsailor.addIdeal(\n 'The sea is freedom—the freedom to go anywhere and do anything.\\n')\nsailor.addIdeal(\"I'm a predator, and the other ships on the sea are my prey.\\n\"\n )\nsailor.addIdeal(\"I'm committed to my crewmates, not to ideals.\\n\")\nsailor.addIdeal(\"Someday I'll own my own ship and chart my own destiny.\\n\")\nsailor.addBond(\"I'm loyal to my captain first, everything else second.\\n\")\nsailor.addBond(\n 'The ship is most important—crewmates and captains come and go.\\n')\nsailor.addBond(\"I'll always remember my first ship.\\n\")\nsailor.addBond(\n 'In a harbor town, I have a paramour whose eyes nearly stole me from the sea.\\n'\n )\nsailor.addBond(\n 'I was cheated out of my fair share of the profits, and I want to get my due.\\n'\n )\nsailor.addBond(\n \"\"\"Ruthless pirates murdered my captain and crewmates, plundered our ship, and left me to 
die. Vengeance will be mine.\n\"\"\"\n )\nsailor.addFlaw(\"I follow orders, even if I think they're wrong.\\n\")\nsailor.addFlaw(\"I'll say anything to avoid having to do extra work.\\n\")\nsailor.addFlaw(\n \"\"\"Once someone questions my courage, I never back down no matter how dangerous the situation.\n\"\"\"\n )\nsailor.addFlaw(\"Once I start drinking, it's hard for me to stop.\\n\")\nsailor.addFlaw(\n \"I can't help but pocket loose coins and other trinkets I come across.\\n\")\nsailor.addFlaw('My pride will probably lead to my destruction.\\n')\n<mask token>\nsoldier.setBackDesc(\n \"\"\"War has been your life for as long as you care to remember. You trained as a youth, studied the use of weapons and armor, learned basic survival techniques, including how to stay alive on the battlefield. You might have been part of a standing national army or a mercenary company, or perhaps a memver of a local militia who rose to prominence during a recent war.\n\"\"\"\n )\n<mask token>\nurchin.setBackDesc(\n \"\"\"You grew up on the streets alone, orphaned, and poor. You had no one to watch over you or to provide for you, so you learned to provide for yourself. You fought fiercely over food and kept a constant watch out for other desperate souls who might steal from you. You slept on rooftops and in alleyways, exposed to the elements, and endured sickness without the advantage of medicine or a place to recuperate. You've survived despite all odds, and did so through cunning, strength, speed, or some combination of each.\n\"\"\"\n )\n<mask token>\n",
"step-4": "class Background(object):\n\n def __init__(self, name):\n self.name = name\n self.description = ''\n self.prTraits = []\n self.ideals = []\n self.bonds = []\n self.flaws = []\n\n def getBackName(self):\n return self.name\n\n def setBackDesc(self, desc):\n self.description = desc\n\n def getBackDesc(self):\n return self.description\n\n def addPrTrait(self, trait):\n self.prTraits.append(trait)\n\n def getPrTraits(self):\n return self.prTraits\n\n def addIdeal(self, ideal):\n self.ideals.append(ideal)\n\n def getIdeals(self):\n return self.ideals\n\n def addBond(self, bond):\n self.bonds.append(bond)\n\n def getBonds(self):\n return self.bonds\n\n def addFlaw(self, flaw):\n self.flaws.append(flaw)\n\n def getFlaws(self):\n return self.flaws\n\n\nacolyte = Background('Acolyte')\nacolyte.setBackDesc(\n \"\"\"You have spent you life in the service of a temple or to a specific god or pantheon of gods. You act as an intermediary between the realm of the holy and the mortal world, performing sacred rites and offering sacrifices in order to conduct worshipers into the presence of the divine.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I idolize a particular hero of my faith and constantly refer to that person's deeds and example.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I can find common ground between the fiercest enemies, empathizing with them and always working toward peace.\n\"\"\"\n )\nacolyte.addPrTrait(\n 'I see omens in every event and action. The gods try to speak to us, we just need to listen.'\n )\nacolyte.addPrTrait('Nothing can shake my optimistic attitude')\nacolyte.addPrTrait(\n 'I quote (or misquote) sacred texts and proverbs in almost every situation.\\n'\n )\nacolyte.addPrTrait(\n \"\"\"I am tolerant (or intolerant) of other faits and respect (or condemn) the worsip of other gods.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I've enjoyed fine food, drink, and high society among my temple's elite. 
Rough living grates on me.\n\"\"\"\n )\nacolyte.addPrTrait(\n \"\"\"I've spent so long in the temple that I have little practical experience dealing with people in the outside world.\n\"\"\"\n )\nacolyte.addIdeal(\n 'The ancient traditions of worship and sacrifice must be preserved and upheld.\\n'\n )\nacolyte.addIdeal(\n 'I alwyas try to help those in need, no matter what the personal cost.\\n')\nacolyte.addIdeal(\n \"\"\"We must help bring about the changes the gods are constantly working in the world.\n\"\"\"\n )\nacolyte.addIdeal(\n \"I hope to one day rise to the top of my faith's religious hierarchy.\\n\")\nacolyte.addIdeal(\n \"\"\"I trust that my deity will guide my actions. I have faith that if I work hard, things will go well.\n\"\"\"\n )\nacolyte.addIdeal(\n \"\"\"I seek to prove myself worthy of by god's favor by matching my actions against his or her teachings.\n\"\"\"\n )\nacolyte.addBond(\n 'I would die to recover an ancient relic of my faith that was lost long ago.\\n'\n )\nacolyte.addBond(\n \"\"\"I will someday get revenge on the corrupt temple hierarchy who branded me a heretic.\n\"\"\"\n )\nacolyte.addBond(\n 'I owe my life to the priest who took me in whem my parents died.\\n')\nacolyte.addBond('Everything i do is for the common people.')\nacolyte.addBond('I will do anything to protect the temple where I served.')\nacolyte.addBond(\n \"\"\"I seek to preserve a sacred text that my enemies consider heretical and seek to destroy.\n\"\"\"\n )\nacolyte.addFlaw('I judge others harshly, and myself even more severely.\\n')\nacolyte.addFlaw(\n \"I put too much trust in those who wield power within my temple's hierarchy.\\n\"\n )\nacolyte.addFlaw(\n \"\"\"My piety sometimes leads me to blindly trust those that mrofess faith in my god.\n\"\"\"\n )\nacolyte.addFlaw('I am inflexible in my thinking.\\n')\nacolyte.addFlaw('I am suspicious of strangers and expect the worst of them.\\n')\nacolyte.addFlaw(\n \"\"\"Once I pick a goal, I become obsessed with it to 
the detriment of everything else in my life.\n\"\"\"\n )\ncharlatan = Background('Charlatan')\ncharlatan.setBackDesc(\n \"\"\"You have always had a way with people. You know what makes them tick, you can tease out their hearts' desires after a few minutes of conversation, and with a few leading questions you can read them like they were children's books. It's a useful talent, and one that you're perfectly willing to use for your advantage.\n\"\"\"\n )\ncharlatan.addPrTrait(\n 'I fall in and out of love easily, and am always pursuing someone.\\n')\ncharlatan.addPrTrait(\n \"\"\"I have a joke for every occasion, especially occasions where humor is inappropriate.\n\"\"\"\n )\ncharlatan.addPrTrait(\n 'Flattery is my preferred trick for getting what I want.\\n')\ncharlatan.addPrTrait(\n \"I'm a born gambler who can't resist taking a risk for a potential payoff.\\n\"\n )\ncharlatan.addPrTrait(\n \"I lie about almost everything, even when there's no good reason to.\\n\")\ncharlatan.addPrTrait('Sarcasm and insults are my weapons of choice.\\n')\ncharlatan.addPrTrait(\n \"\"\"I keep multiple holy symbols on me and invoke whatever deity might come in useful at any given moment.\n\"\"\"\n )\ncharlatan.addPrTrait('I pocket anything i see tha tmight have some value.\\n')\ncharlatan.addIdeal('I am a free spirit—no one tells me what to do.\\n')\ncharlatan.addIdeal(\n \"I never target people who can't afford to lose a few coins.\\n\")\ncharlatan.addIdeal(\n 'I distribute the money i acquire to the people who really need it.\\n')\ncharlatan.addIdeal('I never run the same con twice.\\n')\ncharlatan.addIdeal(\n 'Material goods come and go. 
Bonds of friendship last forever.\\n')\ncharlatan.addIdeal(\"I'm determined to make something of myself.\\n\")\ncharlatan.addBond(\n \"\"\"I fleeced the wrong person and must work to ensure that this individual never crosses paths with me or those i care about.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I owe everything to my mentor—a horrible person who's probably rotting in jail somewhere.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"Somewhere out there, I have a child who doesn't know me. I'm making the world better for him or her.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I came from a noble family, and one day I'll reclaim my lands and title from those who stole them from me.\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"A powerful person killed someone I love. Some day soon, I'll have my revenge...\n\"\"\"\n )\ncharlatan.addBond(\n \"\"\"I swindled and ruined a person who didn't deserve it. I seek to atone for my misdeeds but might never be able to forgive myself.\n\"\"\"\n )\ncharlatan.addFlaw(\"I can't resist a pretty face.\\n\")\ncharlatan.addFlaw(\n \"\"\"I'm always in debt. I spend my ill-gotten gains on decadent luxuries faster than I bring them in.\n\"\"\"\n )\ncharlatan.addFlaw(\n \"I'm convinced that no one could ever fool me the way I fool others.\\n\")\ncharlatan.addFlaw(\n \"\"\"I'm too greedy for my own good. I can't resist taking a risk if there's money involved.\n\"\"\"\n )\ncharlatan.addFlaw(\n \"I can't resist swindling people who are more powerful than me.\\n\")\ncharlatan.addFlaw(\n \"\"\"I hate to admit it and will hate myself for it, but I'll run and preserve my own hide if the going gets tough.\n\"\"\"\n )\ncriminal = Background('Criminal')\ncriminal.setBackDesc(\n \"\"\"You are an experienced criminal with a history of breaking the law. You have spent a lot of time among other criminals and still have contacts with the criminal underworld. 
You're far closer than most people to the world of murder, theft, and violence that pervades the underbelly of civilization, and you have survived up to this point by flounting the rules and regulations of society.\n\"\"\"\n )\ncriminal.addPrTrait(\n 'I always have a plan for what to do when things go wrong.\\n')\ncriminal.addPrTrait(\n \"\"\"I am always calm, no matter what the situation. I never raise my voice or let my emotions control me.\n\"\"\"\n )\ncriminal.addPrTrait(\n \"\"\"The first thign i do in a new place is note the locations of everything valuable—or where cuch things coulg be hidden.\n\"\"\"\n )\ncriminal.addPrTrait('I would rather make a new friend than a new enemy.\\n')\ncriminal.addPrTrait(\n \"\"\"I am incredibly slow to trust. Those who seep the fairest often have the most to hide.\n\"\"\"\n )\ncriminal.addPrTrait(\n \"I don't pay attention to the risks in a situation. Never tell me the odds.\\n\"\n )\ncriminal.addPrTrait(\n \"The best way to get me to do something is to tell me I can't do it.\\n\")\ncriminal.addPrTrait('I blow up at the slightest insult.\\n')\ncriminal.addIdeal(\"I don't steal from others in the trade.\\n\")\ncriminal.addIdeal(\n 'Chains are meant to be broken, as those who would forge them.\\n')\ncriminal.addIdeal(\n 'I steal from the wealthy so that i can help people in need.\\n')\ncriminal.addIdeal('I will do whatever it takes to become wealthy.\\n')\ncriminal.addIdeal(\n \"\"\"I'm loyal to my friends, not to any ideals, and everyone else can take a trip down the Styx for all I care.\n\"\"\"\n )\ncriminal.addIdeal(\"There's a spark of good in everyone.\\n\")\ncriminal.addBond(\n \"I'm trying to pay off an old debt I owe to a generous benefactor.\\n\")\ncriminal.addBond('My Ill-gotten gains go to support my family.\\n')\ncriminal.addBond(\n 'Something important was taken from me, and I aim to steal it back.\\n')\ncriminal.addBond('I will become the greatest thief that had ever lived.\\n')\ncriminal.addBond(\n \"I'm guilty 
of a terrible crime. I hope i can redeem myself for it.\\n\")\ncriminal.addBond(\n \"\"\"Someone I loved died becoues of a mistake I made. That will never happen again.\n\"\"\"\n )\ncriminal.addFlaw(\n \"\"\"When I see something valuable, I can't think about anything but how to steal it.\n\"\"\"\n )\ncriminal.addFlaw(\n \"\"\"When faced with a choice between money and my friends, I usually choose the money.\n\"\"\"\n )\ncriminal.addFlaw(\n \"If there's a plan, I'll forget it. If i don't forget it, I'll ignore it.\\n\"\n )\ncriminal.addFlaw('I have a \"tell\" that reveals when I\\'m lying.\\n')\ncriminal.addFlaw('I turn tail and run when things look bad.\\n')\ncriminal.addFlaw(\n \"\"\"An innocent person is in prison for a crime that I committed. I'm ok with that.\n\"\"\"\n )\nentertainer = Background('Entertainer')\nentertainer.setBackDesc(\n \"\"\"You thrive in front of an audience. You know how to entrance them, entertain them, and even inspire them. Your poetics can stir the hearts of those who hear you, awakening greif or joy, laughter or anger. Your music raises the spirits or captures their sorrow. Your dance steps captivate, your humor cuts to the quick. 
Whatever techniques you use, your art is your life.\n\"\"\"\n )\nentertainer.addPrTrait('I know a story relevant to almost every situation.\\n')\nentertainer.addPrTrait(\n \"\"\"Whenever I come to a new place, I collect local rumors and spread gossip.\n\"\"\"\n )\nentertainer.addPrTrait(\n 'I’m a hopeless romantic, always searching for that “special someone.”\\n')\nentertainer.addPrTrait(\n \"\"\"Nobody stays angry at me or around me for long, since I can defuse any amount of tension.\n\"\"\"\n )\nentertainer.addPrTrait('I love a good insult, even one directed at me.\\n')\nentertainer.addPrTrait('I get bitter if I’m not the center of attention.\\n')\nentertainer.addPrTrait('I’ll settle for nothing less than perfection.\\n')\nentertainer.addPrTrait(\n 'I change my mood or my mind as quickly as I change key in a song.\\n')\nentertainer.addIdeal('When I perform, I make the world better than it was.\\n')\nentertainer.addIdeal(\n \"\"\"The stories, legends, and songs of the past must never be forgotten, for they teach us who we are.\n\"\"\"\n )\nentertainer.addIdeal('The world is in need of new ideas and bold action.\\n')\nentertainer.addIdeal(\"I'm only in it for the money and fame.\\n\")\nentertainer.addIdeal(\n \"\"\"I like seeing the smiles on people's faces whei I perform. 
That's all that matters.\n\"\"\"\n )\nentertainer.addIdeal(\n \"\"\"Art should reflect the soul; it should come from within and reveal who we really are.\n\"\"\"\n )\nentertainer.addBond(\n \"\"\"My instrument is my most treasured possession, and it reminds me of someone I love.\n\"\"\"\n )\nentertainer.addBond(\n \"Someone stoll my precious instrument, and someday I'll get it back.\\n\")\nentertainer.addBond('I want to become famous, whatever it takes.\\n')\nentertainer.addBond(\n \"\"\"I idolize a hero of the old tales and measures my deeds against that person's.\n\"\"\"\n )\nentertainer.addBond(\n 'I will do anything to prove myelf superior to my hated rival.\\n')\nentertainer.addBond(\n 'I would do anything for the other members of my old troupe.\\n')\nentertainer.addFlaw(\"I'll do anything to win fame and renown.\\n\")\nentertainer.addFlaw(\"I'm a sucker for a pretty face.\\n\")\nentertainer.addFlaw(\n \"\"\"A scandal prevents me from ever going home again. That kind of trouble seems to follow me around.\n\"\"\"\n )\nentertainer.addFlaw(\n \"\"\"I once satirized a noble who still wants my head. It was a mistake that i will likely repeat.\n\"\"\"\n )\nentertainer.addFlaw(\n \"\"\"I have trouble keeping my feelings hidden. My sharp tongue lands me in trouble.\n\"\"\"\n )\nentertainer.addFlaw('Despite my best efforts, I am unreliable to my friends.\\n'\n )\nfolkHero = Background('Folk Hero')\nfolkHero.setBackDesc(\n \"\"\"You come from a humble social rank, but you are destined for so much more. 
Already the people of your home village regard you as their champion, and your destiny calls you to stand against the tyrants and monsters that threaten the common folk everywhere.\n\"\"\"\n )\nfolkHero.addPrTrait('I judge people by their actions, not their words.\\n')\nfolkHero.addPrTrait(\n 'If someone is in trouble, I’m always ready to lend help.\\n')\nfolkHero.addPrTrait(\n \"\"\"When I set my mind to something, I follow through no matter what gets in my way.\n\"\"\"\n )\nfolkHero.addPrTrait(\n \"\"\"I have a strong sense of fair play and always try to find the most equitable solution to arguments.\n\"\"\"\n )\nfolkHero.addPrTrait(\n \"\"\"I'm confident in my own abilities and do what I can to instill confidence in others.\n\"\"\"\n )\nfolkHero.addPrTrait('Thinking is for other people. I prefer action.\\n')\nfolkHero.addPrTrait('I misuse long words in an attempt to sound smarter.\\n')\nfolkHero.addPrTrait(\n 'I get bored easily. When am I going to get on with my destiny?\\n')\nfolkHero.addIdeal('Peole deserve to be treated with dignity and respect.\\n')\nfolkHero.addIdeal(\n \"\"\"No one should get preferentail treatment before the law, and no one is above the law.\n\"\"\"\n )\nfolkHero.addIdeal('Tyrants must not be allowed to oppress the people\\n')\nfolkHero.addIdeal(\n 'If I become strong, I can take what I want—What I deserve.\\n')\nfolkHero.addIdeal(\"There's no good in pretending to be something I'm not.\\n\")\nfolkHero.addIdeal(\n 'Nothing and no one can steer me away from my higher calling.\\n')\nfolkHero.addBond(\n \"\"\"I have a family, but I have no idea where they are. 
One day, I hope to see them again.\n\"\"\"\n )\nfolkHero.addBond(\n 'I worked the land, I love the land, and I will protect the land.\\n')\nfolkHero.addBond(\n \"\"\"A proud noble once gave me a horrible beating, and I will take my revenge on any bully I encounter.\n\"\"\"\n )\nfolkHero.addBond(\n \"\"\"My tools are symbols of my past life, and I carry them so that I will never forget my roots.\n\"\"\"\n )\nfolkHero.addBond('I protect those who cannot protect themselves.\\n')\nfolkHero.addBond(\n 'I wish my childhood sweetheart had come with me to pursue my destiny.\\n')\nfolkHero.addFlaw(\n 'The tyrant who rules my land will stop at nothing to see me killed.\\n')\nfolkHero.addFlaw(\n \"\"\"I'm convinced of the significance of my destiny, and blind to my shortcomings and the risk of failure.\n\"\"\"\n )\nfolkHero.addFlaw(\n \"\"\"The people who knew me when I was young know my shameful secret, so I can never go home again.\n\"\"\"\n )\nfolkHero.addFlaw(\n 'I have a weakness for the vices of the city, especially hard drink.\\n')\nfolkHero.addFlaw(\n \"\"\"Secretly, I believe that things would be better if I were a tyrant lording over the land.\n\"\"\"\n )\nfolkHero.addFlaw('I have trouble trusting my allies.\\n')\nguildArtisan = Background('Guild Artisan')\nguildArtisan.setBackDesc(\n \"\"\"You are a member of an artisan's guild, skilled in a particular field and closely associated with other artisans. You are a well-establishedpart of the mercantile world, freed by talent and wealth from the constraints of a feudal social order. You learned your skills as ans apprentice to a master artisan, under the sponsorship of your guild, untill you became a master in your own right.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"\"\"I believe that anything worth doing is worth doing right. 
I can't help it—I'm a perfectionist.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"I'm a snob who looks down on those who can't appreciate fine art.\\n\")\nguildArtisan.addPrTrait(\n 'I always want to know how things work and what makes people tick.\\n')\nguildArtisan.addPrTrait(\n \"I'm full of witty aphorisms and have a proverb for every occasion.\\n\")\nguildArtisan.addPrTrait(\n \"I'm rude to people who lack my commitment to hard work and fair play.\\n\")\nguildArtisan.addPrTrait('I like to talk at length about my profession.\\n')\nguildArtisan.addPrTrait(\n \"\"\"I don't part with my money easily and will haggle tirelessly to get the best deal possible.\n\"\"\"\n )\nguildArtisan.addPrTrait(\n \"\"\"I'm well known for my work, and I want to make sure everyone appreciates it. I'm always taken aback when people haven't heard o f me.\n\"\"\"\n )\nguildArtisan.addIdeal(\n \"\"\"It is the duty of all civilized people to strengthen the bonds of community and the security of civilization.\n\"\"\"\n )\nguildArtisan.addIdeal(\n \"\"\"My talents were given to me so that I could use them to benefit the world.\n\"\"\"\n )\nguildArtisan.addIdeal(\n 'Everyone should be free to pursue his or her own livelihood.\\n')\nguildArtisan.addIdeal(\"I'm only in it for the money.\\n\")\nguildArtisan.addIdeal(\n \"I'm committed to the people I care about, not to ideals.\\n\")\nguildArtisan.addIdeal('I work hard to be teh best there is at my craft.\\n')\nguildArtisan.addBond(\n \"\"\"The workshop where I learned my trade is the most important place in the world to me.\n\"\"\"\n )\nguildArtisan.addBond(\n \"\"\"I created a great work for someone, and then found them unworthy to receive it. 
I'm still looking for someone worthy.\n\"\"\"\n )\nguildArtisan.addBond(\n 'I owe my guild a great debt for forging me into the person I am today.\\n')\nguildArtisan.addBond(\"I pursue wealth to secure someone's love.\\n\")\nguildArtisan.addBond(\n \"\"\"One day I will return to my guild and prove that I am the greatest artisan of them all.\n\"\"\"\n )\nguildArtisan.addBond(\n \"\"\"I will get revenge on the evil forces that destroyed my place of business and ruined my livelihood.\n\"\"\"\n )\nguildArtisan.addFlaw(\n \"I'll do anything to get my hands on something rare or priceless.\\n\")\nguildArtisan.addFlaw(\n \"I'm quick to assume that someone is trying to cheat me.\\n\")\nguildArtisan.addFlaw(\n 'No one must ever learn that I once stole money from guild coffers.\\n')\nguildArtisan.addFlaw(\n \"I'm never satisfied with what I have—I always want more.\\n\")\nguildArtisan.addFlaw('I would kill to acquire a noble title.\\n')\nguildArtisan.addFlaw(\n \"\"\"I'm horribly jealous of anyone who can outshine my handiwork. Everywhere I go, I'm surrounded by rivals.\n\"\"\"\n )\nhermit = Background('Hermit')\nhermit.setBackDesc(\n \"\"\"You lived in seclusion—either in a sheltered community such as a monastery, or entirely alone—for a formative part of your life. 
In your time apart from the lcamor of society, you found quiety, solitude, and perhaps some of the answers you were looking for.\n\"\"\"\n )\nhermit.addPrTrait(\n \"\"\"I've been isolated for so long that I rarely speak, preferring gestures and the occasional grunt.\n\"\"\"\n )\nhermit.addPrTrait('I am utterly serene, even in the face of disaster.\\n')\nhermit.addPrTrait(\n \"\"\"The leader of my community had something wise to say on every topic, and I am eager to share that wisdom.\n\"\"\"\n )\nhermit.addPrTrait('I feel tremendous empathy for all who suffer.\\n')\nhermit.addPrTrait(\"I'm oblivious to etiquette and social expectations.\\n\")\nhermit.addPrTrait(\n 'I connect everything that happens to me to a grand, cosmic plan.\\n')\nhermit.addPrTrait(\n \"\"\"I often get lost in my own thoughts and contemplation, becoming oblivious to my surroundings.\n\"\"\"\n )\nhermit.addPrTrait(\n 'I am working on a grand philosophical theory and love sharing my ideas.\\n'\n )\nhermit.addIdeal(\n 'My gifts are meant to be shared with all, not used for my own benefit.\\n')\nhermit.addIdeal(\n \"\"\"Emotions must not cloud our sense of what is right and true, or our logical thinking.\n\"\"\"\n )\nhermit.addIdeal('Inquiry and curiosity are the pillars of progress.\\n')\nhermit.addIdeal(\n 'Solitude and contemplation are paths toward mystical or magical power.\\n')\nhermit.addIdeal('Meddling in the affairs of others only causes trouble.\\n')\nhermit.addIdeal(\"If you know yourself, there's nothing left to know.\\n\")\nhermit.addBond(\n \"\"\"Nothing is more important to me than the other members of my hermitage, order, or association.\n\"\"\"\n )\nhermit.addBond(\n \"\"\"I entered seclusion to hide frome the ones who might still be hunting me. 
I must someday confront them.\n\"\"\"\n )\nhermit.addBond(\n \"\"\"I'm still seeking the enlightenment I pursued in my seclusion, and it still eludes me.\n\"\"\"\n )\nhermit.addBond(\n 'I entered seclusion because I loved someone I could not have.\\n')\nhermit.addBond(\n 'Should my discovery come to light, it could bring ruin to the world.\\n')\nhermit.addBond(\n 'My isolation gave me great insight into a great evil that only I can destroy.\\n'\n )\nhermit.addFlaw(\n \"Now that I've returned to the world, I enjoy its delights a little too much.\\n\"\n )\nhermit.addFlaw(\n \"\"\"I harbor dark, bloodthirsty thoughts that my isolation and meditation failed to quell.\n\"\"\"\n )\nhermit.addFlaw('I am dogmatic in my thoughts and philosophy.\\n')\nhermit.addFlaw(\n 'I let my need to win arguments overshadow friendships and harmony.\\n')\nhermit.addFlaw(\"I'd risk too much to uncover a lost bit of knowledge.\\n\")\nhermit.addFlaw(\"I like keeping secrets and won't share them with anyone.\\n\")\nnoble = Background('Noble')\nnoble.setBackDesc(\n \"\"\"You understand wealth, power, and privilege. You carry a noble title, and your family owns land, collects taxes, and wields significant political influence. You might be a pampered aristocrat unfamiliar with work or discomfort, a former merchant just elevated to the nobility, or a disinherited scoundrel with a disproportionate sense of entitlement. 
Or you could be an honest, hard-working landowner who cares deeply about the people who live and work on your land, keenly aware of your responsibility to them.\n\"\"\"\n )\nnoble.addPrTrait(\n \"\"\"My eloquent flattery makes everyone I talk to feel like the most wonderful and important person in the world.\n\"\"\"\n )\nnoble.addPrTrait('The common folk love me for my kindness and generosity.\\n')\nnoble.addPrTrait(\n \"\"\"No one could doubt by looking at my regal bearing that I am a cut above the unwashed masses.\n\"\"\"\n )\nnoble.addPrTrait(\n 'I take great pains to always look my best and follow the latest fashions.\\n'\n )\nnoble.addPrTrait(\n \"\"\"I don't like to get my hands dirty, and I won't be caught dead in unsuitable accommodations.\n\"\"\"\n )\nnoble.addPrTrait(\n \"\"\"Despite my noble birth, I do not place myself above other folk. We all have the same blood.\n\"\"\"\n )\nnoble.addPrTrait('My favor, once lost, is lost forever.\\n')\nnoble.addPrTrait(\n \"\"\"If you do me an injury, I will crush you, ruin your name, and salt your fields.\n\"\"\"\n )\nnoble.addIdeal(\n \"\"\"Respect is due to me because of my position, but all people regardless of station deserve to be treated with dignity.\n\"\"\"\n )\nnoble.addIdeal(\n \"\"\"It is my duty to respect the authority of those aboce me, just as those below me must respect mine.\n\"\"\"\n )\nnoble.addIdeal(\n 'I must prove that I can handle myself without the coddling of my family.\\n'\n )\nnoble.addIdeal('If I can attain more power, no one will tell me what to do.\\n')\nnoble.addIdeal('Blood runs thicker than water.\\n')\nnoble.addIdeal('It is my duty to protect and care for the people beneth me.\\n')\nnoble.addBond('I will face any challenge to win the approval of my family.\\n')\nnoble.addBond(\n \"My house's alliance with another noble family must be sustained at all costs.\\n\"\n )\nnoble.addBond(\n 'Nothing is more important than the other members of my family.\\n')\nnoble.addBond(\n 'I am in love 
with the heir of a family that my family despises.\\n')\nnoble.addBond('My loyalty to my soverign is unwabering.\\n')\nnoble.addBond('The common folk must see me as a hero of the people.\\n')\nnoble.addFlaw('I secretly believe that everyone is beneath me.\\n')\nnoble.addFlaw(\n 'I hide a truly scandalous secret that could ruin my family forever.\\n')\nnoble.addFlaw(\n \"\"\"I too often hear veiled insults and threats in every word addressed to me, and I'm quick to anger.\n\"\"\"\n )\nnoble.addFlaw('I have an insatiable desire for carnal pleasures.\\n')\nnoble.addFlaw('In fact, the world does revolve around me.\\n')\nnoble.addFlaw('By my words and actions, I often bring shame to my family.\\n')\noutlander = Background('Outlander')\noutlander.setBackDesc(\n \"\"\"You grew up in the wilds, far from civilization and the comforts of town and technology. You've witnessed the migration of herds larger than forests, survived weather more extreme than any city-dweller could comprehend, and enjoyed the solitude of being the only thinking creature for miles in any direction. The wilds are in your blood, wheather you were a nomad, an explorer, a recluse, a hunter-gatherer, or even a marauder. Even in places where you don't know the specific features of the terrain, you know the ways of the wild.\n\"\"\"\n )\noutlander.addPrTrait(\"I'm driven by a wanderlust that led me away from home.\\n\"\n )\noutlander.addPrTrait(\n 'I watch over my friends as if they were a litter of newborn pups.\\n')\noutlander.addPrTrait(\n \"\"\"I once ran twenty-five miles without stopping to warn my clan of an approaching orc horde. I'd do it again if I had to.\n\"\"\"\n )\noutlander.addPrTrait(\n 'I have a lesson for every situation, drawn from observing nature.\\n')\noutlander.addPrTrait(\n \"\"\"I place no stock in wealthy or well-mannered folk. 
Money and manners won't save you from a hungry owlbear.\n\"\"\"\n )\noutlander.addPrTrait(\n \"\"\"I'm always picking things up, absently fiddling with them, and sometimes accidentally breaking them.\n\"\"\"\n )\noutlander.addPrTrait(\n 'I feel far more comfortable around animals than people.\\n')\noutlander.addPrTrait('I was, in fact, raised by wolves.\\n')\noutlander.addIdeal(\n 'Life is like the seasons, in constant change, and we must change with it.\\n'\n )\noutlander.addIdeal(\n \"\"\"It is each person's responsibility to make the most happiness for the whole tribe.\n\"\"\"\n )\noutlander.addIdeal('If I dishonor myself, then I dishonor my whole clan.\\n')\noutlander.addIdeal('The strongest are meant to rule.\\n')\noutlander.addIdeal(\n \"\"\"The natural world is more important than all the constraints of civilization.\n\"\"\"\n )\noutlander.addIdeal('I must earn glory in battle, for myself and my clan.\\n')\noutlander.addBond(\n \"\"\"My family, clan, or tribe is the most important thing in my life, even when they are far from me.\n\"\"\"\n )\noutlander.addBond(\n 'An injury to the unspoiled wilderness of my home is an injury to me.\\n')\noutlander.addBond(\n 'I will bring terrible wrath down on the evildoers who destroyed my homeland.\\n'\n )\noutlander.addBond(\n \"\"\"I am the last of my tribe, and it is up to me to ensure their names enter legend.\n\"\"\"\n )\noutlander.addBond(\n \"\"\"I suffer awful visions of a coming disaster and will do anything to prevent it.\n\"\"\"\n )\noutlander.addBond('It is my duty to provide children to sustain my tribe.\\n')\noutlander.addFlaw('I am too enamored of ale, wine, and other intoxicants.\\n')\noutlander.addFlaw(\n \"There's no room for caution in a life lived to the fullest.\\n\")\noutlander.addFlaw(\n \"\"\"I remeber every insult I've received and nurse a silent resentment toward anyone who's ever wronged me.\n\"\"\"\n )\noutlander.addFlaw(\n 'I am slow to trust members of other races, tribes, and 
societies.\\n')\noutlander.addFlaw('Violence is my answer to almost any challange.\\n')\noutlander.addFlaw(\n \"\"\"Don't expect me to save those who can't save themselves. It is nature's way that the strong thrive and the weak perish.\n\"\"\"\n )\nsage = Background('Sage')\nsage.setBackDesc(\n \"\"\"You spent years learning the lore of the multiverse. You scoured manuscripts, studie scrolls, and listened to the greatest experts on the subjects that interest you. Your efforts have made you a master in your fields of study.\n\"\"\"\n )\nsage.addPrTrait(\n 'I use polysyllabic words that convey the empression of great erudition.\\n'\n )\nsage.addPrTrait(\n \"\"\"I've read every book in the world's greatest libraries—or I like to boast that I have.\n\"\"\"\n )\nsage.addPrTrait(\n \"\"\"I'm used to helping out those who aren't as smart as I am, and I patiently explain anything and everything to others.\n\"\"\"\n )\nsage.addPrTrait(\"There's nothing I like more than a good mystery.\\n\")\nsage.addPrTrait(\n \"\"\"I'm willing to listen to every side of an argument before I make my own judgment.\n\"\"\"\n )\nsage.addPrTrait(\n \"\"\"I . . . speak . . . slowly . . . when talking . . . to idiots, . . . which . . . almost . . . everyone . . . is . . . compared . . . 
to me.\n\"\"\"\n )\nsage.addPrTrait('I am horribly, horribly awkward in social situations.\\n')\nsage.addPrTrait(\n \"I'm convinced that people are always trying to steal my secrets.\\n\")\nsage.addIdeal('The path to power and self-improvement is through knowledge.\\n')\nsage.addIdeal(\n 'What is beautiful points us beyond itself toward what is true.\\n')\nsage.addIdeal('Emotions must not cloud our logical thinking.\\n')\nsage.addIdeal(\n 'Hothing should fetter the infinite possibility inherent in all existance.\\n'\n )\nsage.addIdeal('Knowledge is the path to power and domination.\\n')\nsage.addIdeal('The goal of a life of study is the betterment of oneself.\\n')\nsage.addBond('It is my duty to protect my students')\nsage.addBond(\n \"\"\"I have an ancient text that holds terrible secrets that must not fall into the wrong hands.\n\"\"\"\n )\nsage.addBond(\n 'I work to preserve a library, university, scriptorium, or monastery.\\n')\nsage.addBond(\n \"My life's work is a series of tomes related to a specific field of lore.\\n\"\n )\nsage.addBond(\n \"I've been searching my whole life for the answer to a certain question.\\n\"\n )\nsage.addBond(\n 'I sold my soul for knowledge. I hope to do great deeds and win it back.\\n'\n )\nsage.addFlaw('I am easily distracted by the promise of information.\\n')\nsage.addFlaw(\n \"\"\"Most people scream and run when they see a demon, I stop and take notes on its anatomy.\n\"\"\"\n )\nsage.addFlaw(\n 'Unlocking an ancient mystery is worth the price of a civilization.\\n')\nsage.addFlaw('I overlook obvious solutions in favor of complicated ones.\\n')\nsage.addFlaw(\n 'I speak without really thinking through my words, invariably insulting others.\\n'\n )\nsage.addFlaw(\"I can't keep a secret to save my life, or anyone else's.\\n\")\nsailor = Background('Sailor')\nsailor.setBackDesc(\n \"\"\"You sailed on a seagoing vessel for years. 
In that time, you faced down mighty storms, monsters of the deep, and those who wanted to sink your craft to the bottomless depths. Your first love is the distant line of the horizon, but the time has come to try your hand at something new.\n\"\"\"\n )\nsailor.addPrTrait('My friends know they can rely on me, no matter what.\\n')\nsailor.addPrTrait(\n 'I work hard so that I can play hard when the work is done.\\n')\nsailor.addPrTrait(\n 'I enjoy sailing into new ports and making new friends over a flagon of ale.\\n'\n )\nsailor.addPrTrait('I stretch the truth for the sake of a good story.\\n')\nsailor.addPrTrait(\n 'To me, a tavern brawl is a nice way to get to know a new city.\\n')\nsailor.addPrTrait('I never pass up a friendly wager.\\n')\nsailor.addPrTrait('My language is as foul as an otyuggh nest.\\n')\nsailor.addPrTrait(\n 'I like a job well done, especially if I can convince someone else to do it.\\n'\n )\nsailor.addIdeal(\n \"\"\"The thing that keeps a ship together is mutual respect between captain and crew.\n\"\"\"\n )\nsailor.addIdeal('We all do the work, so we all share in the rewards.\\n')\nsailor.addIdeal(\n 'The sea is freedom—the freedom to go anywhere and do anything.\\n')\nsailor.addIdeal(\"I'm a predator, and the other ships on the sea are my prey.\\n\"\n )\nsailor.addIdeal(\"I'm committed to my crewmates, not to ideals.\\n\")\nsailor.addIdeal(\"Someday I'll own my own ship and chart my own destiny.\\n\")\nsailor.addBond(\"I'm loyal to my captain first, everything else second.\\n\")\nsailor.addBond(\n 'The ship is most important—crewmates and captains come and go.\\n')\nsailor.addBond(\"I'll always remember my first ship.\\n\")\nsailor.addBond(\n 'In a harbor town, I have a paramour whose eyes nearly stole me from the sea.\\n'\n )\nsailor.addBond(\n 'I was cheated out of my fair share of the profits, and I want to get my due.\\n'\n )\nsailor.addBond(\n \"\"\"Ruthless pirates murdered my captain and crewmates, plundered our ship, and left me to 
die. Vengeance will be mine.\n\"\"\"\n )\nsailor.addFlaw(\"I follow orders, even if I think they're wrong.\\n\")\nsailor.addFlaw(\"I'll say anything to avoid having to do extra work.\\n\")\nsailor.addFlaw(\n \"\"\"Once someone questions my courage, I never back down no matter how dangerous the situation.\n\"\"\"\n )\nsailor.addFlaw(\"Once I start drinking, it's hard for me to stop.\\n\")\nsailor.addFlaw(\n \"I can't help but pocket loose coins and other trinkets I come across.\\n\")\nsailor.addFlaw('My pride will probably lead to my destruction.\\n')\nsoldier = Background('Soldier')\nsoldier.setBackDesc(\n \"\"\"War has been your life for as long as you care to remember. You trained as a youth, studied the use of weapons and armor, learned basic survival techniques, including how to stay alive on the battlefield. You might have been part of a standing national army or a mercenary company, or perhaps a memver of a local militia who rose to prominence during a recent war.\n\"\"\"\n )\nurchin = Background('Urchin')\nurchin.setBackDesc(\n \"\"\"You grew up on the streets alone, orphaned, and poor. You had no one to watch over you or to provide for you, so you learned to provide for yourself. You fought fiercely over food and kept a constant watch out for other desperate souls who might steal from you. You slept on rooftops and in alleyways, exposed to the elements, and endured sickness without the advantage of medicine or a place to recuperate. You've survived despite all odds, and did so through cunning, strength, speed, or some combination of each.\n\"\"\"\n )\nbackgroundList = [acolyte, charlatan, criminal, entertainer, folkHero,\n guildArtisan, hermit, noble, outlander, sage, sailor, soldier, urchin]\n",
"step-5": "class Background(object):\n def __init__(self, name):\n self.name = name\n self.description = ''\n self.prTraits = []\n self.ideals = []\n self.bonds = []\n self.flaws = []\n\n def getBackName(self):\n return self.name\n\n def setBackDesc(self,desc):\n self.description = desc\n def getBackDesc(self):\n return self.description\n\n def addPrTrait(self, trait):\n self.prTraits.append(trait)\n def getPrTraits(self):\n return self.prTraits\n\n def addIdeal(self, ideal):\n self.ideals.append(ideal)\n def getIdeals(self):\n return self.ideals\n\n def addBond(self, bond):\n self.bonds.append(bond)\n def getBonds(self):\n return self.bonds\n\n def addFlaw(self, flaw):\n self.flaws.append(flaw)\n def getFlaws(self):\n return self.flaws\n\nacolyte = Background('Acolyte')\nacolyte.setBackDesc('You have spent you life in the service of a temple or to a specific god or pantheon of gods. You act as an intermediary between the realm of the holy and the mortal world, performing sacred rites and offering sacrifices in order to conduct worshipers into the presence of the divine.\\n')\nacolyte.addPrTrait('I idolize a particular hero of my faith and constantly refer to that person\\'s deeds and example.\\n')\nacolyte.addPrTrait('I can find common ground between the fiercest enemies, empathizing with them and always working toward peace.\\n')\nacolyte.addPrTrait('I see omens in every event and action. The gods try to speak to us, we just need to listen.')\nacolyte.addPrTrait('Nothing can shake my optimistic attitude')\nacolyte.addPrTrait('I quote (or misquote) sacred texts and proverbs in almost every situation.\\n')\nacolyte.addPrTrait('I am tolerant (or intolerant) of other faits and respect (or condemn) the worsip of other gods.\\n')\nacolyte.addPrTrait('I\\'ve enjoyed fine food, drink, and high society among my temple\\'s elite. 
Rough living grates on me.\\n')\nacolyte.addPrTrait('I\\'ve spent so long in the temple that I have little practical experience dealing with people in the outside world.\\n')\nacolyte.addIdeal('The ancient traditions of worship and sacrifice must be preserved and upheld.\\n')\nacolyte.addIdeal('I alwyas try to help those in need, no matter what the personal cost.\\n')\nacolyte.addIdeal('We must help bring about the changes the gods are constantly working in the world.\\n')\nacolyte.addIdeal('I hope to one day rise to the top of my faith\\'s religious hierarchy.\\n')\nacolyte.addIdeal('I trust that my deity will guide my actions. I have faith that if I work hard, things will go well.\\n')\nacolyte.addIdeal('I seek to prove myself worthy of by god\\'s favor by matching my actions against his or her teachings.\\n')\nacolyte.addBond('I would die to recover an ancient relic of my faith that was lost long ago.\\n')\nacolyte.addBond('I will someday get revenge on the corrupt temple hierarchy who branded me a heretic.\\n')\nacolyte.addBond('I owe my life to the priest who took me in whem my parents died.\\n')\nacolyte.addBond('Everything i do is for the common people.')\nacolyte.addBond('I will do anything to protect the temple where I served.')\nacolyte.addBond('I seek to preserve a sacred text that my enemies consider heretical and seek to destroy.\\n')\nacolyte.addFlaw('I judge others harshly, and myself even more severely.\\n')\nacolyte.addFlaw('I put too much trust in those who wield power within my temple\\'s hierarchy.\\n')\nacolyte.addFlaw('My piety sometimes leads me to blindly trust those that mrofess faith in my god.\\n')\nacolyte.addFlaw('I am inflexible in my thinking.\\n')\nacolyte.addFlaw('I am suspicious of strangers and expect the worst of them.\\n')\nacolyte.addFlaw('Once I pick a goal, I become obsessed with it to the detriment of everything else in my life.\\n')\n\ncharlatan = Background('Charlatan')\ncharlatan.setBackDesc('You have always had a way 
with people. You know what makes them tick, you can tease out their hearts\\' desires after a few minutes of conversation, and with a few leading questions you can read them like they were children\\'s books. It\\'s a useful talent, and one that you\\'re perfectly willing to use for your advantage.\\n')\ncharlatan.addPrTrait('I fall in and out of love easily, and am always pursuing someone.\\n')\ncharlatan.addPrTrait('I have a joke for every occasion, especially occasions where humor is inappropriate.\\n')\ncharlatan.addPrTrait('Flattery is my preferred trick for getting what I want.\\n')\ncharlatan.addPrTrait('I\\'m a born gambler who can\\'t resist taking a risk for a potential payoff.\\n')\ncharlatan.addPrTrait('I lie about almost everything, even when there\\'s no good reason to.\\n')\ncharlatan.addPrTrait('Sarcasm and insults are my weapons of choice.\\n')\ncharlatan.addPrTrait('I keep multiple holy symbols on me and invoke whatever deity might come in useful at any given moment.\\n')\ncharlatan.addPrTrait('I pocket anything i see tha tmight have some value.\\n')\ncharlatan.addIdeal('I am a free spirit—no one tells me what to do.\\n')\ncharlatan.addIdeal('I never target people who can\\'t afford to lose a few coins.\\n')\ncharlatan.addIdeal('I distribute the money i acquire to the people who really need it.\\n')\ncharlatan.addIdeal('I never run the same con twice.\\n')\ncharlatan.addIdeal('Material goods come and go. Bonds of friendship last forever.\\n')\ncharlatan.addIdeal('I\\'m determined to make something of myself.\\n')\ncharlatan.addBond('I fleeced the wrong person and must work to ensure that this individual never crosses paths with me or those i care about.\\n')\ncharlatan.addBond('I owe everything to my mentor—a horrible person who\\'s probably rotting in jail somewhere.\\n')\ncharlatan.addBond('Somewhere out there, I have a child who doesn\\'t know me. 
I\\'m making the world better for him or her.\\n')\ncharlatan.addBond('I came from a noble family, and one day I\\'ll reclaim my lands and title from those who stole them from me.\\n')\ncharlatan.addBond('A powerful person killed someone I love. Some day soon, I\\'ll have my revenge...\\n')\ncharlatan.addBond('I swindled and ruined a person who didn\\'t deserve it. I seek to atone for my misdeeds but might never be able to forgive myself.\\n')\ncharlatan.addFlaw('I can\\'t resist a pretty face.\\n')\ncharlatan.addFlaw('I\\'m always in debt. I spend my ill-gotten gains on decadent luxuries faster than I bring them in.\\n')\ncharlatan.addFlaw('I\\'m convinced that no one could ever fool me the way I fool others.\\n')\ncharlatan.addFlaw('I\\'m too greedy for my own good. I can\\'t resist taking a risk if there\\'s money involved.\\n')\ncharlatan.addFlaw('I can\\'t resist swindling people who are more powerful than me.\\n')\ncharlatan.addFlaw('I hate to admit it and will hate myself for it, but I\\'ll run and preserve my own hide if the going gets tough.\\n')\n\ncriminal = Background('Criminal')\ncriminal.setBackDesc('You are an experienced criminal with a history of breaking the law. You have spent a lot of time among other criminals and still have contacts with the criminal underworld. You\\'re far closer than most people to the world of murder, theft, and violence that pervades the underbelly of civilization, and you have survived up to this point by flounting the rules and regulations of society.\\n')\ncriminal.addPrTrait('I always have a plan for what to do when things go wrong.\\n')\ncriminal.addPrTrait('I am always calm, no matter what the situation. 
I never raise my voice or let my emotions control me.\\n')\ncriminal.addPrTrait('The first thign i do in a new place is note the locations of everything valuable—or where cuch things coulg be hidden.\\n')\ncriminal.addPrTrait('I would rather make a new friend than a new enemy.\\n')\ncriminal.addPrTrait('I am incredibly slow to trust. Those who seep the fairest often have the most to hide.\\n')\ncriminal.addPrTrait('I don\\'t pay attention to the risks in a situation. Never tell me the odds.\\n')\ncriminal.addPrTrait('The best way to get me to do something is to tell me I can\\'t do it.\\n')\ncriminal.addPrTrait('I blow up at the slightest insult.\\n')\ncriminal.addIdeal('I don\\'t steal from others in the trade.\\n')\ncriminal.addIdeal('Chains are meant to be broken, as those who would forge them.\\n')\ncriminal.addIdeal('I steal from the wealthy so that i can help people in need.\\n')\ncriminal.addIdeal('I will do whatever it takes to become wealthy.\\n')\ncriminal.addIdeal('I\\'m loyal to my friends, not to any ideals, and everyone else can take a trip down the Styx for all I care.\\n')\ncriminal.addIdeal('There\\'s a spark of good in everyone.\\n')\ncriminal.addBond('I\\'m trying to pay off an old debt I owe to a generous benefactor.\\n')\ncriminal.addBond('My Ill-gotten gains go to support my family.\\n')\ncriminal.addBond('Something important was taken from me, and I aim to steal it back.\\n')\ncriminal.addBond('I will become the greatest thief that had ever lived.\\n')\ncriminal.addBond('I\\'m guilty of a terrible crime. I hope i can redeem myself for it.\\n')\ncriminal.addBond('Someone I loved died becoues of a mistake I made. That will never happen again.\\n')\ncriminal.addFlaw('When I see something valuable, I can\\'t think about anything but how to steal it.\\n')\ncriminal.addFlaw('When faced with a choice between money and my friends, I usually choose the money.\\n')\ncriminal.addFlaw('If there\\'s a plan, I\\'ll forget it. 
If i don\\'t forget it, I\\'ll ignore it.\\n')\ncriminal.addFlaw('I have a \"tell\" that reveals when I\\'m lying.\\n')\ncriminal.addFlaw('I turn tail and run when things look bad.\\n')\ncriminal.addFlaw('An innocent person is in prison for a crime that I committed. I\\'m ok with that.\\n')\n\nentertainer = Background('Entertainer')\nentertainer.setBackDesc('You thrive in front of an audience. You know how to entrance them, entertain them, and even inspire them. Your poetics can stir the hearts of those who hear you, awakening greif or joy, laughter or anger. Your music raises the spirits or captures their sorrow. Your dance steps captivate, your humor cuts to the quick. Whatever techniques you use, your art is your life.\\n')\nentertainer.addPrTrait('I know a story relevant to almost every situation.\\n')\nentertainer.addPrTrait('Whenever I come to a new place, I collect local rumors and spread gossip.\\n')\nentertainer.addPrTrait('I’m a hopeless romantic, always searching for that “special someone.”\\n')\nentertainer.addPrTrait('Nobody stays angry at me or around me for long, since I can defuse any amount of tension.\\n')\nentertainer.addPrTrait('I love a good insult, even one directed at me.\\n')\nentertainer.addPrTrait('I get bitter if I’m not the center of attention.\\n')\nentertainer.addPrTrait('I’ll settle for nothing less than perfection.\\n')\nentertainer.addPrTrait('I change my mood or my mind as quickly as I change key in a song.\\n')\nentertainer.addIdeal('When I perform, I make the world better than it was.\\n')\nentertainer.addIdeal('The stories, legends, and songs of the past must never be forgotten, for they teach us who we are.\\n')\nentertainer.addIdeal('The world is in need of new ideas and bold action.\\n')\nentertainer.addIdeal('I\\'m only in it for the money and fame.\\n')\nentertainer.addIdeal('I like seeing the smiles on people\\'s faces whei I perform. 
That\\'s all that matters.\\n')\nentertainer.addIdeal('Art should reflect the soul; it should come from within and reveal who we really are.\\n')\nentertainer.addBond('My instrument is my most treasured possession, and it reminds me of someone I love.\\n')\nentertainer.addBond('Someone stoll my precious instrument, and someday I\\'ll get it back.\\n')\nentertainer.addBond('I want to become famous, whatever it takes.\\n')\nentertainer.addBond('I idolize a hero of the old tales and measures my deeds against that person\\'s.\\n')\nentertainer.addBond('I will do anything to prove myelf superior to my hated rival.\\n')\nentertainer.addBond('I would do anything for the other members of my old troupe.\\n')\nentertainer.addFlaw('I\\'ll do anything to win fame and renown.\\n')\nentertainer.addFlaw('I\\'m a sucker for a pretty face.\\n')\nentertainer.addFlaw('A scandal prevents me from ever going home again. That kind of trouble seems to follow me around.\\n')\nentertainer.addFlaw('I once satirized a noble who still wants my head. It was a mistake that i will likely repeat.\\n')\nentertainer.addFlaw('I have trouble keeping my feelings hidden. My sharp tongue lands me in trouble.\\n')\nentertainer.addFlaw('Despite my best efforts, I am unreliable to my friends.\\n')\n\nfolkHero = Background('Folk Hero')\nfolkHero.setBackDesc('You come from a humble social rank, but you are destined for so much more. 
Already the people of your home village regard you as their champion, and your destiny calls you to stand against the tyrants and monsters that threaten the common folk everywhere.\\n')\nfolkHero.addPrTrait('I judge people by their actions, not their words.\\n')\nfolkHero.addPrTrait('If someone is in trouble, I’m always ready to lend help.\\n')\nfolkHero.addPrTrait('When I set my mind to something, I follow through no matter what gets in my way.\\n')\nfolkHero.addPrTrait('I have a strong sense of fair play and always try to find the most equitable solution to arguments.\\n')\nfolkHero.addPrTrait('I\\'m confident in my own abilities and do what I can to instill confidence in others.\\n')\nfolkHero.addPrTrait('Thinking is for other people. I prefer action.\\n')\nfolkHero.addPrTrait('I misuse long words in an attempt to sound smarter.\\n')\nfolkHero.addPrTrait('I get bored easily. When am I going to get on with my destiny?\\n')\nfolkHero.addIdeal('Peole deserve to be treated with dignity and respect.\\n')\nfolkHero.addIdeal('No one should get preferentail treatment before the law, and no one is above the law.\\n')\nfolkHero.addIdeal('Tyrants must not be allowed to oppress the people\\n')\nfolkHero.addIdeal('If I become strong, I can take what I want—What I deserve.\\n')\nfolkHero.addIdeal('There\\'s no good in pretending to be something I\\'m not.\\n')\nfolkHero.addIdeal('Nothing and no one can steer me away from my higher calling.\\n')\nfolkHero.addBond('I have a family, but I have no idea where they are. 
One day, I hope to see them again.\\n')\nfolkHero.addBond('I worked the land, I love the land, and I will protect the land.\\n')\nfolkHero.addBond('A proud noble once gave me a horrible beating, and I will take my revenge on any bully I encounter.\\n')\nfolkHero.addBond('My tools are symbols of my past life, and I carry them so that I will never forget my roots.\\n')\nfolkHero.addBond('I protect those who cannot protect themselves.\\n')\nfolkHero.addBond('I wish my childhood sweetheart had come with me to pursue my destiny.\\n')\nfolkHero.addFlaw('The tyrant who rules my land will stop at nothing to see me killed.\\n')\nfolkHero.addFlaw('I\\'m convinced of the significance of my destiny, and blind to my shortcomings and the risk of failure.\\n')\nfolkHero.addFlaw('The people who knew me when I was young know my shameful secret, so I can never go home again.\\n')\nfolkHero.addFlaw('I have a weakness for the vices of the city, especially hard drink.\\n')\nfolkHero.addFlaw('Secretly, I believe that things would be better if I were a tyrant lording over the land.\\n')\nfolkHero.addFlaw('I have trouble trusting my allies.\\n')\n\nguildArtisan = Background('Guild Artisan')\nguildArtisan.setBackDesc('You are a member of an artisan\\'s guild, skilled in a particular field and closely associated with other artisans. You are a well-establishedpart of the mercantile world, freed by talent and wealth from the constraints of a feudal social order. You learned your skills as ans apprentice to a master artisan, under the sponsorship of your guild, untill you became a master in your own right.\\n')\nguildArtisan.addPrTrait('I believe that anything worth doing is worth doing right. 
I can\\'t help it—I\\'m a perfectionist.\\n')\nguildArtisan.addPrTrait('I\\'m a snob who looks down on those who can\\'t appreciate fine art.\\n')\nguildArtisan.addPrTrait('I always want to know how things work and what makes people tick.\\n')\nguildArtisan.addPrTrait('I\\'m full of witty aphorisms and have a proverb for every occasion.\\n')\nguildArtisan.addPrTrait('I\\'m rude to people who lack my commitment to hard work and fair play.\\n')\nguildArtisan.addPrTrait('I like to talk at length about my profession.\\n')\nguildArtisan.addPrTrait('I don\\'t part with my money easily and will haggle tirelessly to get the best deal possible.\\n')\nguildArtisan.addPrTrait('I\\'m well known for my work, and I want to make sure everyone appreciates it. I\\'m always taken aback when people haven\\'t heard o f me.\\n')\nguildArtisan.addIdeal('It is the duty of all civilized people to strengthen the bonds of community and the security of civilization.\\n')\nguildArtisan.addIdeal('My talents were given to me so that I could use them to benefit the world.\\n')\nguildArtisan.addIdeal('Everyone should be free to pursue his or her own livelihood.\\n')\nguildArtisan.addIdeal('I\\'m only in it for the money.\\n')\nguildArtisan.addIdeal('I\\'m committed to the people I care about, not to ideals.\\n')\nguildArtisan.addIdeal('I work hard to be teh best there is at my craft.\\n')\nguildArtisan.addBond('The workshop where I learned my trade is the most important place in the world to me.\\n')\nguildArtisan.addBond('I created a great work for someone, and then found them unworthy to receive it. 
I\\'m still looking for someone worthy.\\n')\nguildArtisan.addBond('I owe my guild a great debt for forging me into the person I am today.\\n')\nguildArtisan.addBond('I pursue wealth to secure someone\\'s love.\\n')\nguildArtisan.addBond('One day I will return to my guild and prove that I am the greatest artisan of them all.\\n')\nguildArtisan.addBond('I will get revenge on the evil forces that destroyed my place of business and ruined my livelihood.\\n')\nguildArtisan.addFlaw('I\\'ll do anything to get my hands on something rare or priceless.\\n')\nguildArtisan.addFlaw('I\\'m quick to assume that someone is trying to cheat me.\\n')\nguildArtisan.addFlaw('No one must ever learn that I once stole money from guild coffers.\\n')\nguildArtisan.addFlaw('I\\'m never satisfied with what I have—I always want more.\\n')\nguildArtisan.addFlaw('I would kill to acquire a noble title.\\n')\nguildArtisan.addFlaw('I\\'m horribly jealous of anyone who can outshine my handiwork. Everywhere I go, I\\'m surrounded by rivals.\\n')\n\nhermit = Background('Hermit')\nhermit.setBackDesc('You lived in seclusion—either in a sheltered community such as a monastery, or entirely alone—for a formative part of your life. 
In your time apart from the lcamor of society, you found quiety, solitude, and perhaps some of the answers you were looking for.\\n')\nhermit.addPrTrait('I\\'ve been isolated for so long that I rarely speak, preferring gestures and the occasional grunt.\\n')\nhermit.addPrTrait('I am utterly serene, even in the face of disaster.\\n')\nhermit.addPrTrait('The leader of my community had something wise to say on every topic, and I am eager to share that wisdom.\\n')\nhermit.addPrTrait('I feel tremendous empathy for all who suffer.\\n')\nhermit.addPrTrait('I\\'m oblivious to etiquette and social expectations.\\n')\nhermit.addPrTrait('I connect everything that happens to me to a grand, cosmic plan.\\n')\nhermit.addPrTrait('I often get lost in my own thoughts and contemplation, becoming oblivious to my surroundings.\\n')\nhermit.addPrTrait('I am working on a grand philosophical theory and love sharing my ideas.\\n')\nhermit.addIdeal('My gifts are meant to be shared with all, not used for my own benefit.\\n')\nhermit.addIdeal('Emotions must not cloud our sense of what is right and true, or our logical thinking.\\n')\nhermit.addIdeal('Inquiry and curiosity are the pillars of progress.\\n')\nhermit.addIdeal('Solitude and contemplation are paths toward mystical or magical power.\\n')\nhermit.addIdeal('Meddling in the affairs of others only causes trouble.\\n')\nhermit.addIdeal('If you know yourself, there\\'s nothing left to know.\\n')\nhermit.addBond('Nothing is more important to me than the other members of my hermitage, order, or association.\\n')\nhermit.addBond('I entered seclusion to hide frome the ones who might still be hunting me. 
I must someday confront them.\\n')\nhermit.addBond('I\\'m still seeking the enlightenment I pursued in my seclusion, and it still eludes me.\\n')\nhermit.addBond('I entered seclusion because I loved someone I could not have.\\n')\nhermit.addBond('Should my discovery come to light, it could bring ruin to the world.\\n')\nhermit.addBond('My isolation gave me great insight into a great evil that only I can destroy.\\n')\nhermit.addFlaw('Now that I\\'ve returned to the world, I enjoy its delights a little too much.\\n')\nhermit.addFlaw('I harbor dark, bloodthirsty thoughts that my isolation and meditation failed to quell.\\n')\nhermit.addFlaw('I am dogmatic in my thoughts and philosophy.\\n')\nhermit.addFlaw('I let my need to win arguments overshadow friendships and harmony.\\n')\nhermit.addFlaw('I\\'d risk too much to uncover a lost bit of knowledge.\\n')\nhermit.addFlaw('I like keeping secrets and won\\'t share them with anyone.\\n')\n\nnoble = Background('Noble')\nnoble.setBackDesc('You understand wealth, power, and privilege. You carry a noble title, and your family owns land, collects taxes, and wields significant political influence. You might be a pampered aristocrat unfamiliar with work or discomfort, a former merchant just elevated to the nobility, or a disinherited scoundrel with a disproportionate sense of entitlement. 
Or you could be an honest, hard-working landowner who cares deeply about the people who live and work on your land, keenly aware of your responsibility to them.\\n')\nnoble.addPrTrait('My eloquent flattery makes everyone I talk to feel like the most wonderful and important person in the world.\\n')\nnoble.addPrTrait('The common folk love me for my kindness and generosity.\\n')\nnoble.addPrTrait('No one could doubt by looking at my regal bearing that I am a cut above the unwashed masses.\\n')\nnoble.addPrTrait('I take great pains to always look my best and follow the latest fashions.\\n')\nnoble.addPrTrait('I don\\'t like to get my hands dirty, and I won\\'t be caught dead in unsuitable accommodations.\\n')\nnoble.addPrTrait('Despite my noble birth, I do not place myself above other folk. We all have the same blood.\\n')\nnoble.addPrTrait('My favor, once lost, is lost forever.\\n')\nnoble.addPrTrait('If you do me an injury, I will crush you, ruin your name, and salt your fields.\\n')\nnoble.addIdeal('Respect is due to me because of my position, but all people regardless of station deserve to be treated with dignity.\\n')\nnoble.addIdeal('It is my duty to respect the authority of those aboce me, just as those below me must respect mine.\\n')\nnoble.addIdeal('I must prove that I can handle myself without the coddling of my family.\\n')\nnoble.addIdeal('If I can attain more power, no one will tell me what to do.\\n')\nnoble.addIdeal('Blood runs thicker than water.\\n')\nnoble.addIdeal('It is my duty to protect and care for the people beneth me.\\n')\nnoble.addBond('I will face any challenge to win the approval of my family.\\n')\nnoble.addBond('My house\\'s alliance with another noble family must be sustained at all costs.\\n')\nnoble.addBond('Nothing is more important than the other members of my family.\\n')\nnoble.addBond('I am in love with the heir of a family that my family despises.\\n')\nnoble.addBond('My loyalty to my soverign is 
unwabering.\\n')\nnoble.addBond('The common folk must see me as a hero of the people.\\n')\nnoble.addFlaw('I secretly believe that everyone is beneath me.\\n')\nnoble.addFlaw('I hide a truly scandalous secret that could ruin my family forever.\\n')\nnoble.addFlaw('I too often hear veiled insults and threats in every word addressed to me, and I\\'m quick to anger.\\n')\nnoble.addFlaw('I have an insatiable desire for carnal pleasures.\\n')\nnoble.addFlaw('In fact, the world does revolve around me.\\n')\nnoble.addFlaw('By my words and actions, I often bring shame to my family.\\n')\n\noutlander = Background('Outlander')\noutlander.setBackDesc('You grew up in the wilds, far from civilization and the comforts of town and technology. You\\'ve witnessed the migration of herds larger than forests, survived weather more extreme than any city-dweller could comprehend, and enjoyed the solitude of being the only thinking creature for miles in any direction. The wilds are in your blood, wheather you were a nomad, an explorer, a recluse, a hunter-gatherer, or even a marauder. Even in places where you don\\'t know the specific features of the terrain, you know the ways of the wild.\\n')\noutlander.addPrTrait('I\\'m driven by a wanderlust that led me away from home.\\n')\noutlander.addPrTrait('I watch over my friends as if they were a litter of newborn pups.\\n')\noutlander.addPrTrait('I once ran twenty-five miles without stopping to warn my clan of an approaching orc horde. I\\'d do it again if I had to.\\n')\noutlander.addPrTrait('I have a lesson for every situation, drawn from observing nature.\\n')\noutlander.addPrTrait('I place no stock in wealthy or well-mannered folk. 
Money and manners won\\'t save you from a hungry owlbear.\\n')\noutlander.addPrTrait('I\\'m always picking things up, absently fiddling with them, and sometimes accidentally breaking them.\\n')\noutlander.addPrTrait('I feel far more comfortable around animals than people.\\n')\noutlander.addPrTrait('I was, in fact, raised by wolves.\\n')\noutlander.addIdeal('Life is like the seasons, in constant change, and we must change with it.\\n')\noutlander.addIdeal('It is each person\\'s responsibility to make the most happiness for the whole tribe.\\n')\noutlander.addIdeal('If I dishonor myself, then I dishonor my whole clan.\\n')\noutlander.addIdeal('The strongest are meant to rule.\\n')\noutlander.addIdeal('The natural world is more important than all the constraints of civilization.\\n')\noutlander.addIdeal('I must earn glory in battle, for myself and my clan.\\n')\noutlander.addBond('My family, clan, or tribe is the most important thing in my life, even when they are far from me.\\n')\noutlander.addBond('An injury to the unspoiled wilderness of my home is an injury to me.\\n')\noutlander.addBond('I will bring terrible wrath down on the evildoers who destroyed my homeland.\\n')\noutlander.addBond('I am the last of my tribe, and it is up to me to ensure their names enter legend.\\n')\noutlander.addBond('I suffer awful visions of a coming disaster and will do anything to prevent it.\\n')\noutlander.addBond('It is my duty to provide children to sustain my tribe.\\n')\noutlander.addFlaw('I am too enamored of ale, wine, and other intoxicants.\\n')\noutlander.addFlaw('There\\'s no room for caution in a life lived to the fullest.\\n')\noutlander.addFlaw('I remeber every insult I\\'ve received and nurse a silent resentment toward anyone who\\'s ever wronged me.\\n')\noutlander.addFlaw('I am slow to trust members of other races, tribes, and societies.\\n')\noutlander.addFlaw('Violence is my answer to almost any challange.\\n')\noutlander.addFlaw('Don\\'t expect me to save those 
who can\\'t save themselves. It is nature\\'s way that the strong thrive and the weak perish.\\n')\n\nsage = Background('Sage')\nsage.setBackDesc('You spent years learning the lore of the multiverse. You scoured manuscripts, studie scrolls, and listened to the greatest experts on the subjects that interest you. Your efforts have made you a master in your fields of study.\\n')\nsage.addPrTrait('I use polysyllabic words that convey the empression of great erudition.\\n')\nsage.addPrTrait('I\\'ve read every book in the world\\'s greatest libraries—or I like to boast that I have.\\n')\nsage.addPrTrait('I\\'m used to helping out those who aren\\'t as smart as I am, and I patiently explain anything and everything to others.\\n')\nsage.addPrTrait('There\\'s nothing I like more than a good mystery.\\n')\nsage.addPrTrait('I\\'m willing to listen to every side of an argument before I make my own judgment.\\n')\nsage.addPrTrait('I . . . speak . . . slowly . . . when talking . . . to idiots, . . . which . . . almost . . . everyone . . . is . . . compared . . . 
to me.\\n')\nsage.addPrTrait('I am horribly, horribly awkward in social situations.\\n')\nsage.addPrTrait('I\\'m convinced that people are always trying to steal my secrets.\\n')\nsage.addIdeal('The path to power and self-improvement is through knowledge.\\n')\nsage.addIdeal('What is beautiful points us beyond itself toward what is true.\\n')\nsage.addIdeal('Emotions must not cloud our logical thinking.\\n')\nsage.addIdeal('Hothing should fetter the infinite possibility inherent in all existance.\\n')\nsage.addIdeal('Knowledge is the path to power and domination.\\n')\nsage.addIdeal('The goal of a life of study is the betterment of oneself.\\n')\nsage.addBond('It is my duty to protect my students')\nsage.addBond('I have an ancient text that holds terrible secrets that must not fall into the wrong hands.\\n')\nsage.addBond('I work to preserve a library, university, scriptorium, or monastery.\\n')\nsage.addBond('My life\\'s work is a series of tomes related to a specific field of lore.\\n')\nsage.addBond('I\\'ve been searching my whole life for the answer to a certain question.\\n')\nsage.addBond('I sold my soul for knowledge. I hope to do great deeds and win it back.\\n')\nsage.addFlaw('I am easily distracted by the promise of information.\\n')\nsage.addFlaw('Most people scream and run when they see a demon, I stop and take notes on its anatomy.\\n')\nsage.addFlaw('Unlocking an ancient mystery is worth the price of a civilization.\\n')\nsage.addFlaw('I overlook obvious solutions in favor of complicated ones.\\n')\nsage.addFlaw('I speak without really thinking through my words, invariably insulting others.\\n')\nsage.addFlaw('I can\\'t keep a secret to save my life, or anyone else\\'s.\\n')\n\nsailor = Background('Sailor')\nsailor.setBackDesc('You sailed on a seagoing vessel for years. In that time, you faced down mighty storms, monsters of the deep, and those who wanted to sink your craft to the bottomless depths. 
Your first love is the distant line of the horizon, but the time has come to try your hand at something new.\\n')\nsailor.addPrTrait('My friends know they can rely on me, no matter what.\\n')\nsailor.addPrTrait('I work hard so that I can play hard when the work is done.\\n')\nsailor.addPrTrait('I enjoy sailing into new ports and making new friends over a flagon of ale.\\n')\nsailor.addPrTrait('I stretch the truth for the sake of a good story.\\n')\nsailor.addPrTrait('To me, a tavern brawl is a nice way to get to know a new city.\\n')\nsailor.addPrTrait('I never pass up a friendly wager.\\n')\nsailor.addPrTrait('My language is as foul as an otyuggh nest.\\n')\nsailor.addPrTrait('I like a job well done, especially if I can convince someone else to do it.\\n')\nsailor.addIdeal('The thing that keeps a ship together is mutual respect between captain and crew.\\n')\nsailor.addIdeal('We all do the work, so we all share in the rewards.\\n')\nsailor.addIdeal('The sea is freedom—the freedom to go anywhere and do anything.\\n')\nsailor.addIdeal('I\\'m a predator, and the other ships on the sea are my prey.\\n')\nsailor.addIdeal('I\\'m committed to my crewmates, not to ideals.\\n')\nsailor.addIdeal('Someday I\\'ll own my own ship and chart my own destiny.\\n')\nsailor.addBond('I\\'m loyal to my captain first, everything else second.\\n')\nsailor.addBond('The ship is most important—crewmates and captains come and go.\\n')\nsailor.addBond('I\\'ll always remember my first ship.\\n')\nsailor.addBond('In a harbor town, I have a paramour whose eyes nearly stole me from the sea.\\n')\nsailor.addBond('I was cheated out of my fair share of the profits, and I want to get my due.\\n')\nsailor.addBond('Ruthless pirates murdered my captain and crewmates, plundered our ship, and left me to die. 
Vengeance will be mine.\\n')\nsailor.addFlaw('I follow orders, even if I think they\\'re wrong.\\n')\nsailor.addFlaw('I\\'ll say anything to avoid having to do extra work.\\n')\nsailor.addFlaw('Once someone questions my courage, I never back down no matter how dangerous the situation.\\n')\nsailor.addFlaw('Once I start drinking, it\\'s hard for me to stop.\\n')\nsailor.addFlaw('I can\\'t help but pocket loose coins and other trinkets I come across.\\n')\nsailor.addFlaw('My pride will probably lead to my destruction.\\n')\n\nsoldier = Background('Soldier')\nsoldier.setBackDesc('War has been your life for as long as you care to remember. You trained as a youth, studied the use of weapons and armor, learned basic survival techniques, including how to stay alive on the battlefield. You might have been part of a standing national army or a mercenary company, or perhaps a memver of a local militia who rose to prominence during a recent war.\\n')\n\nurchin = Background('Urchin')\nurchin.setBackDesc('You grew up on the streets alone, orphaned, and poor. You had no one to watch over you or to provide for you, so you learned to provide for yourself. You fought fiercely over food and kept a constant watch out for other desperate souls who might steal from you. You slept on rooftops and in alleyways, exposed to the elements, and endured sickness without the advantage of medicine or a place to recuperate. You\\'ve survived despite all odds, and did so through cunning, strength, speed, or some combination of each.\\n')\n#urchin.addPrTrait()\nbackgroundList = [acolyte,charlatan,criminal,entertainer,folkHero,guildArtisan,hermit,noble,outlander,sage,sailor,soldier,urchin]",
"step-ids": [
11,
13,
14,
15,
16
]
}
|
[
11,
13,
14,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(req: func.HttpRequest) ->func.HttpResponse:
return func.HttpResponse(body=json.dumps(cosmos_client.DB.Goals),
mimetype='application/json', charset='utf-8')
<|reserved_special_token_1|>
import azure.functions as func
import json
from ..common import cosmos_client
def main(req: func.HttpRequest) ->func.HttpResponse:
return func.HttpResponse(body=json.dumps(cosmos_client.DB.Goals),
mimetype='application/json', charset='utf-8')
<|reserved_special_token_1|>
import azure.functions as func
import json
from ..common import cosmos_client
def main(req: func.HttpRequest) -> func.HttpResponse:
return func.HttpResponse(
body = json.dumps(cosmos_client.DB.Goals),
mimetype="application/json",
charset="utf-8"
)
# [
# {'amount':1000, 'description': 'foo bar baz prize'},
# {'amount':2000, 'description': 'foo bar baz prize'}
# ]
|
flexible
|
{
"blob_id": "e38be2890526c640ba8d9db5a376ff57ba9e0aa2",
"index": 8703,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(req: func.HttpRequest) ->func.HttpResponse:\n return func.HttpResponse(body=json.dumps(cosmos_client.DB.Goals),\n mimetype='application/json', charset='utf-8')\n",
"step-3": "import azure.functions as func\nimport json\nfrom ..common import cosmos_client\n\n\ndef main(req: func.HttpRequest) ->func.HttpResponse:\n return func.HttpResponse(body=json.dumps(cosmos_client.DB.Goals),\n mimetype='application/json', charset='utf-8')\n",
"step-4": "import azure.functions as func\nimport json\nfrom ..common import cosmos_client\n\ndef main(req: func.HttpRequest) -> func.HttpResponse:\n \n return func.HttpResponse(\n body = json.dumps(cosmos_client.DB.Goals),\n mimetype=\"application/json\",\n charset=\"utf-8\"\n )\n\n # [\n # {'amount':1000, 'description': 'foo bar baz prize'}, \n # {'amount':2000, 'description': 'foo bar baz prize'}\n # ]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])
return xored
<|reserved_special_token_0|>
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])
return xored
def xorAgainstCharacter(byteArray, character):
str2 = [ord(character)] * len(byteArray)
return xorBytes(byteArray, str2)
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])
return xored
def xorAgainstCharacter(byteArray, character):
str2 = [ord(character)] * len(byteArray)
return xorBytes(byteArray, str2)
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
if __name__ == '__main__':
hexstring = (
'1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')
bytes1 = hexStringtoBytes(hexstring)
scores = []
for x in string.printable:
temp = xorAgainstCharacter(bytes1, x)
print(str(x), str(temp))
scores.append(scoreString(temp))
<|reserved_special_token_1|>
import base64
import string
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])
return xored
def xorAgainstCharacter(byteArray, character):
str2 = [ord(character)] * len(byteArray)
return xorBytes(byteArray, str2)
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
if __name__ == '__main__':
hexstring = (
'1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')
bytes1 = hexStringtoBytes(hexstring)
scores = []
for x in string.printable:
temp = xorAgainstCharacter(bytes1, x)
print(str(x), str(temp))
scores.append(scoreString(temp))
<|reserved_special_token_1|>
import base64
import string
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([x^bytes2[i] for i,x in enumerate(bytes1)])
return xored
def xorAgainstCharacter(byteArray, character):
str2 = [ord(character)] * len(byteArray)
return xorBytes(byteArray,str2)
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
if __name__ == "__main__":
hexstring = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
bytes1 = hexStringtoBytes(hexstring)
scores = []
for x in string.printable:
temp = xorAgainstCharacter(bytes1, x)
print(str(x), str(temp))
scores.append(scoreString(temp))
|
flexible
|
{
"blob_id": "a32fb683f8d46f901e8dcd2d075ace22ee81e076",
"index": 451,
"step-1": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\n<mask token>\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\nif __name__ == '__main__':\n hexstring = (\n '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')\n bytes1 = hexStringtoBytes(hexstring)\n scores = []\n for x in string.printable:\n temp = xorAgainstCharacter(bytes1, x)\n print(str(x), str(temp))\n scores.append(scoreString(temp))\n",
"step-4": "import base64\nimport string\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\nif __name__ == '__main__':\n hexstring = (\n '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')\n bytes1 = hexStringtoBytes(hexstring)\n scores = []\n for x in string.printable:\n temp = xorAgainstCharacter(bytes1, x)\n print(str(x), str(temp))\n scores.append(scoreString(temp))\n",
"step-5": "import base64\r\nimport string\r\ndef hexStringtoBytes(hexstring):\r\n byteArray = bytes.fromhex(hexstring)\r\n return byteArray\r\n\r\ndef xorBytes(bytes1, bytes2):\r\n xored = bytes([x^bytes2[i] for i,x in enumerate(bytes1)])\r\n return xored\r\n\r\ndef xorAgainstCharacter(byteArray, character):\r\n str2 = [ord(character)] * len(byteArray)\r\n return xorBytes(byteArray,str2)\r\n\r\ndef scoreString(input):\r\n arr = [(chr(x) in string.printable) for x in input]\r\n return arr.count(True)\r\n\r\nif __name__ == \"__main__\":\r\n hexstring = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'\r\n bytes1 = hexStringtoBytes(hexstring)\r\n scores = []\r\n for x in string.printable:\r\n temp = xorAgainstCharacter(bytes1, x)\r\n print(str(x), str(temp))\r\n scores.append(scoreString(temp))\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import discord, requests
from random import choice
TOKEN = 'TOKEN'
CONTACT_EMAIL = None #'Contact email for getting 10000 words/day instead of 1000'
translate_command = '$t'
id_start = '<@!'
client = discord.Client()
def unescape(text):
return text.replace(''', '\'').replace('<','<').replace('>', '>') # to improve
@client.event
async def on_ready():
print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
if message.content.startswith(translate_command):
lang = message.content[len(translate_command):message.content.find(' ')]
ttt = message.content[len(translate_command)+len(lang)+1:]
s = ttt.find(id_start)
while s != -1:
e = ttt.find('>',s)
ttt = ttt[:s]+client.get_user(int(ttt[s+len(id_start):e])).name+ttt[e:]
s = ttt.find(id_start)
body = {
'q': ttt,
'langpair': lang+'|en' if len(lang) == 2 else lang[:2]+'|'+lang[2:],
'de': CONTACT_EMAIL
}
r = requests.get('https://api.mymemory.translated.net/get', params=body)
message_sent = await message.channel.send(unescape(r.json()['responseData']['translatedText']))
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '❌'
try:
reaction, user = await client.wait_for('reaction_add', timeout=600.0, check=check)
except asyncio.TimeoutError:
pass
else:
await message_sent.delete()
client.run(TOKEN)
|
normal
|
{
"blob_id": "1ab69874a89311b22220dda541dfe03462a98a55",
"index": 2243,
"step-1": "<mask token>\n\n\ndef unescape(text):\n return text.replace(''', \"'\").replace('<', '<').replace('>', '>')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef unescape(text):\n return text.replace(''', \"'\").replace('<', '<').replace('>', '>')\n\n\n@client.event\nasync def on_ready():\n print(f'{client.user} has connected to Discord!')\n\n\n@client.event\nasync def on_message(message):\n if message.content.startswith(translate_command):\n lang = message.content[len(translate_command):message.content.find(' ')\n ]\n ttt = message.content[len(translate_command) + len(lang) + 1:]\n s = ttt.find(id_start)\n while s != -1:\n e = ttt.find('>', s)\n ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])\n ).name + ttt[e:]\n s = ttt.find(id_start)\n body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else \n lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}\n r = requests.get('https://api.mymemory.translated.net/get', params=body\n )\n message_sent = await message.channel.send(unescape(r.json()[\n 'responseData']['translatedText']))\n\n def check(reaction, user):\n return user == message.author and str(reaction.emoji) == '❌'\n try:\n reaction, user = await client.wait_for('reaction_add', timeout=\n 600.0, check=check)\n except asyncio.TimeoutError:\n pass\n else:\n await message_sent.delete()\n\n\nclient.run(TOKEN)\n",
"step-3": "<mask token>\nTOKEN = 'TOKEN'\nCONTACT_EMAIL = None\ntranslate_command = '$t'\nid_start = '<@!'\nclient = discord.Client()\n\n\ndef unescape(text):\n return text.replace(''', \"'\").replace('<', '<').replace('>', '>')\n\n\n@client.event\nasync def on_ready():\n print(f'{client.user} has connected to Discord!')\n\n\n@client.event\nasync def on_message(message):\n if message.content.startswith(translate_command):\n lang = message.content[len(translate_command):message.content.find(' ')\n ]\n ttt = message.content[len(translate_command) + len(lang) + 1:]\n s = ttt.find(id_start)\n while s != -1:\n e = ttt.find('>', s)\n ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])\n ).name + ttt[e:]\n s = ttt.find(id_start)\n body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else \n lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}\n r = requests.get('https://api.mymemory.translated.net/get', params=body\n )\n message_sent = await message.channel.send(unescape(r.json()[\n 'responseData']['translatedText']))\n\n def check(reaction, user):\n return user == message.author and str(reaction.emoji) == '❌'\n try:\n reaction, user = await client.wait_for('reaction_add', timeout=\n 600.0, check=check)\n except asyncio.TimeoutError:\n pass\n else:\n await message_sent.delete()\n\n\nclient.run(TOKEN)\n",
"step-4": "import discord, requests\nfrom random import choice\nTOKEN = 'TOKEN'\nCONTACT_EMAIL = None\ntranslate_command = '$t'\nid_start = '<@!'\nclient = discord.Client()\n\n\ndef unescape(text):\n return text.replace(''', \"'\").replace('<', '<').replace('>', '>')\n\n\n@client.event\nasync def on_ready():\n print(f'{client.user} has connected to Discord!')\n\n\n@client.event\nasync def on_message(message):\n if message.content.startswith(translate_command):\n lang = message.content[len(translate_command):message.content.find(' ')\n ]\n ttt = message.content[len(translate_command) + len(lang) + 1:]\n s = ttt.find(id_start)\n while s != -1:\n e = ttt.find('>', s)\n ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])\n ).name + ttt[e:]\n s = ttt.find(id_start)\n body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else \n lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}\n r = requests.get('https://api.mymemory.translated.net/get', params=body\n )\n message_sent = await message.channel.send(unescape(r.json()[\n 'responseData']['translatedText']))\n\n def check(reaction, user):\n return user == message.author and str(reaction.emoji) == '❌'\n try:\n reaction, user = await client.wait_for('reaction_add', timeout=\n 600.0, check=check)\n except asyncio.TimeoutError:\n pass\n else:\n await message_sent.delete()\n\n\nclient.run(TOKEN)\n",
"step-5": "import discord, requests\r\nfrom random import choice\r\n\r\nTOKEN = 'TOKEN'\r\nCONTACT_EMAIL = None #'Contact email for getting 10000 words/day instead of 1000'\r\n\r\ntranslate_command = '$t'\r\nid_start = '<@!'\r\n\r\nclient = discord.Client()\r\n\r\ndef unescape(text):\r\n return text.replace(''', '\\'').replace('<','<').replace('>', '>') # to improve\r\n\r\n@client.event\r\nasync def on_ready():\r\n print(f'{client.user} has connected to Discord!')\r\n\r\n@client.event\r\nasync def on_message(message):\r\n if message.content.startswith(translate_command):\r\n lang = message.content[len(translate_command):message.content.find(' ')]\r\n ttt = message.content[len(translate_command)+len(lang)+1:]\r\n s = ttt.find(id_start)\r\n while s != -1:\r\n e = ttt.find('>',s)\r\n ttt = ttt[:s]+client.get_user(int(ttt[s+len(id_start):e])).name+ttt[e:]\r\n s = ttt.find(id_start)\r\n body = {\r\n 'q': ttt,\r\n 'langpair': lang+'|en' if len(lang) == 2 else lang[:2]+'|'+lang[2:],\r\n 'de': CONTACT_EMAIL\r\n }\r\n r = requests.get('https://api.mymemory.translated.net/get', params=body)\r\n \r\n message_sent = await message.channel.send(unescape(r.json()['responseData']['translatedText']))\r\n \r\n def check(reaction, user):\r\n return user == message.author and str(reaction.emoji) == '❌'\r\n \r\n try:\r\n reaction, user = await client.wait_for('reaction_add', timeout=600.0, check=check)\r\n except asyncio.TimeoutError:\r\n pass\r\n else:\r\n await message_sent.delete()\r\n\r\nclient.run(TOKEN)\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class simple_drawing_window1(simple_drawing_window):
<|reserved_special_token_0|>
def paintEvent(self, e):
p = QPainter()
p.begin(self)
"""
p.setPen(QColor(0,0,0))
p.setBrush(QColor(0,127,0))
p.drawPolygon(
[QPoint(70,100), QPoint(100,110),
QPoint(130, 100), QPoint(100,150),]
)
"""
p.setPen(QColor(255, 127, 0))
p.setBrush(QColor(255, 127, 0))
p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),
QPoint(50, 400)])
p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)
p.end()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class simple_drawing_window1(simple_drawing_window):
def __init__(self):
super().__init__()
def paintEvent(self, e):
p = QPainter()
p.begin(self)
"""
p.setPen(QColor(0,0,0))
p.setBrush(QColor(0,127,0))
p.drawPolygon(
[QPoint(70,100), QPoint(100,110),
QPoint(130, 100), QPoint(100,150),]
)
"""
p.setPen(QColor(255, 127, 0))
p.setBrush(QColor(255, 127, 0))
p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),
QPoint(50, 400)])
p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)
p.end()
<|reserved_special_token_1|>
import sys
from PySide6.QtCore import *
from PySide6.QtWidgets import *
from PySide6.QtGui import *
from simple_drawing_window import *
class simple_drawing_window1(simple_drawing_window):
def __init__(self):
super().__init__()
def paintEvent(self, e):
p = QPainter()
p.begin(self)
"""
p.setPen(QColor(0,0,0))
p.setBrush(QColor(0,127,0))
p.drawPolygon(
[QPoint(70,100), QPoint(100,110),
QPoint(130, 100), QPoint(100,150),]
)
"""
p.setPen(QColor(255, 127, 0))
p.setBrush(QColor(255, 127, 0))
p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),
QPoint(50, 400)])
p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)
p.end()
<|reserved_special_token_1|>
import sys
from PySide6.QtCore import *
from PySide6.QtWidgets import *
from PySide6.QtGui import *
from simple_drawing_window import *
class simple_drawing_window1( simple_drawing_window):
def __init__(self):
super().__init__()
def paintEvent(self, e):
p = QPainter()
p.begin(self)
"""
p.setPen(QColor(0,0,0))
p.setBrush(QColor(0,127,0))
p.drawPolygon(
[QPoint(70,100), QPoint(100,110),
QPoint(130, 100), QPoint(100,150),]
)
"""
p.setPen(QColor(255,127,0))
p.setBrush(QColor(255,127,0))
p.drawPolygon(
[QPoint(50,100), QPoint(200,100),QPoint(200,400), QPoint(50,400),]
)
p.drawPixmap(QRect(400,150,200,200), self.rabbit)
p.end()
|
flexible
|
{
"blob_id": "6fc43919f521234d0dc9e167bb72f014e9c0bf17",
"index": 2102,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass simple_drawing_window1(simple_drawing_window):\n <mask token>\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-3": "<mask token>\n\n\nclass simple_drawing_window1(simple_drawing_window):\n\n def __init__(self):\n super().__init__()\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-4": "import sys\nfrom PySide6.QtCore import *\nfrom PySide6.QtWidgets import *\nfrom PySide6.QtGui import *\nfrom simple_drawing_window import *\n\n\nclass simple_drawing_window1(simple_drawing_window):\n\n def __init__(self):\n super().__init__()\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-5": "\nimport sys\nfrom PySide6.QtCore import *\nfrom PySide6.QtWidgets import *\nfrom PySide6.QtGui import *\nfrom simple_drawing_window import *\n\nclass simple_drawing_window1( simple_drawing_window):\n\tdef __init__(self):\n\t\tsuper().__init__()\n \n\tdef paintEvent(self, e):\n\t\tp = QPainter()\n\t\tp.begin(self)\n\t\t\"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n\n\t\tp.setPen(QColor(255,127,0))\n\t\tp.setBrush(QColor(255,127,0))\n \n\t\t\n \n\t\tp.drawPolygon(\n\t\t\t[QPoint(50,100), QPoint(200,100),QPoint(200,400), QPoint(50,400),]\n\t\t)\n\t\t\n\t\tp.drawPixmap(QRect(400,150,200,200), self.rabbit)\n \n\t\tp.end()\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import os
from solid import *
from solid.utils import *
from shapes import *
import sys
# Assumes SolidPython is in site-packages or elsewhwere in sys.path
from solid import *
from solid.utils import *
def voxels():
# shape = cube([1, 1, 1], center=False);
shape = []
for x in range(-5, 4, 1):
for y in range(-5, 4, 1):
for z in range(0, 10, 1):
translate([x, y, z])
new_cube = color([0,0,1, 0.5])(cube([1, 1, 1], center=False));
# shape = (shape+new_cube)
shape.append(new_cube)
return shape
def basic_geometry():
box_functions = [makeRectBeam, makeCubeBeam, makeTriangleBeam,makeNothingBox, makeCylindBeam, makeHollowCylindBeam, makeHollowCone, makeEye]
# cylind_functions = [makeCylindBeam, makeHollowCylindBeam, makeHollowCone, makeEye, makeNothingCylind]
shape_list = []
for bf in box_functions:
for cf in box_functions:
for bf2 in box_functions:
for i in range(2):
shape = union()(
# translate([-2, -3, 0])(
bf(5, 4, 5),
translate([0, 0, 5])(
cf(4, 3, 5)),
translate([0, 0, 10])(
bf2(5, 4, 5))
)
if i == 0:
shapeInner = cylinder(r=0.5, h=20, center=False)
shape = shape - shapeInner
shape_list.append(shape)
return shape_list
def export(shape, filename):
with open(filename + '.scad', 'w+') as f:
f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))
f.closed
print("Success")
if __name__ == '__main__':
out_dir = sys.argv[1] if len(sys.argv) > 1 else os.curdir
file_out = os.path.join(out_dir, 'basic_geometry.scad')
shape_list = basic_geometry()
for i, shape in enumerate(shape_list):
export(shape, "output" + str(i))
print("Created OpenSCAD file...")
print("Compiling STL file...")
|
normal
|
{
"blob_id": "27ca60435c614e4d748917da45fc2fc75ee59f1c",
"index": 1682,
"step-1": "<mask token>\n\n\ndef voxels():\n shape = []\n for x in range(-5, 4, 1):\n for y in range(-5, 4, 1):\n for z in range(0, 10, 1):\n translate([x, y, z])\n new_cube = color([0, 0, 1, 0.5])(cube([1, 1, 1], center=False))\n shape.append(new_cube)\n return shape\n\n\n<mask token>\n\n\ndef export(shape, filename):\n with open(filename + '.scad', 'w+') as f:\n f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))\n f.closed\n print('Success')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef voxels():\n shape = []\n for x in range(-5, 4, 1):\n for y in range(-5, 4, 1):\n for z in range(0, 10, 1):\n translate([x, y, z])\n new_cube = color([0, 0, 1, 0.5])(cube([1, 1, 1], center=False))\n shape.append(new_cube)\n return shape\n\n\ndef basic_geometry():\n box_functions = [makeRectBeam, makeCubeBeam, makeTriangleBeam,\n makeNothingBox, makeCylindBeam, makeHollowCylindBeam,\n makeHollowCone, makeEye]\n shape_list = []\n for bf in box_functions:\n for cf in box_functions:\n for bf2 in box_functions:\n for i in range(2):\n shape = union()(bf(5, 4, 5), translate([0, 0, 5])(cf(4,\n 3, 5)), translate([0, 0, 10])(bf2(5, 4, 5)))\n if i == 0:\n shapeInner = cylinder(r=0.5, h=20, center=False)\n shape = shape - shapeInner\n shape_list.append(shape)\n return shape_list\n\n\ndef export(shape, filename):\n with open(filename + '.scad', 'w+') as f:\n f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))\n f.closed\n print('Success')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef voxels():\n shape = []\n for x in range(-5, 4, 1):\n for y in range(-5, 4, 1):\n for z in range(0, 10, 1):\n translate([x, y, z])\n new_cube = color([0, 0, 1, 0.5])(cube([1, 1, 1], center=False))\n shape.append(new_cube)\n return shape\n\n\ndef basic_geometry():\n box_functions = [makeRectBeam, makeCubeBeam, makeTriangleBeam,\n makeNothingBox, makeCylindBeam, makeHollowCylindBeam,\n makeHollowCone, makeEye]\n shape_list = []\n for bf in box_functions:\n for cf in box_functions:\n for bf2 in box_functions:\n for i in range(2):\n shape = union()(bf(5, 4, 5), translate([0, 0, 5])(cf(4,\n 3, 5)), translate([0, 0, 10])(bf2(5, 4, 5)))\n if i == 0:\n shapeInner = cylinder(r=0.5, h=20, center=False)\n shape = shape - shapeInner\n shape_list.append(shape)\n return shape_list\n\n\ndef export(shape, filename):\n with open(filename + '.scad', 'w+') as f:\n f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))\n f.closed\n print('Success')\n\n\nif __name__ == '__main__':\n out_dir = sys.argv[1] if len(sys.argv) > 1 else os.curdir\n file_out = os.path.join(out_dir, 'basic_geometry.scad')\n shape_list = basic_geometry()\n for i, shape in enumerate(shape_list):\n export(shape, 'output' + str(i))\n print('Created OpenSCAD file...')\n print('Compiling STL file...')\n",
"step-4": "from __future__ import division\nimport os\nfrom solid import *\nfrom solid.utils import *\nfrom shapes import *\nimport sys\nfrom solid import *\nfrom solid.utils import *\n\n\ndef voxels():\n shape = []\n for x in range(-5, 4, 1):\n for y in range(-5, 4, 1):\n for z in range(0, 10, 1):\n translate([x, y, z])\n new_cube = color([0, 0, 1, 0.5])(cube([1, 1, 1], center=False))\n shape.append(new_cube)\n return shape\n\n\ndef basic_geometry():\n box_functions = [makeRectBeam, makeCubeBeam, makeTriangleBeam,\n makeNothingBox, makeCylindBeam, makeHollowCylindBeam,\n makeHollowCone, makeEye]\n shape_list = []\n for bf in box_functions:\n for cf in box_functions:\n for bf2 in box_functions:\n for i in range(2):\n shape = union()(bf(5, 4, 5), translate([0, 0, 5])(cf(4,\n 3, 5)), translate([0, 0, 10])(bf2(5, 4, 5)))\n if i == 0:\n shapeInner = cylinder(r=0.5, h=20, center=False)\n shape = shape - shapeInner\n shape_list.append(shape)\n return shape_list\n\n\ndef export(shape, filename):\n with open(filename + '.scad', 'w+') as f:\n f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))\n f.closed\n print('Success')\n\n\nif __name__ == '__main__':\n out_dir = sys.argv[1] if len(sys.argv) > 1 else os.curdir\n file_out = os.path.join(out_dir, 'basic_geometry.scad')\n shape_list = basic_geometry()\n for i, shape in enumerate(shape_list):\n export(shape, 'output' + str(i))\n print('Created OpenSCAD file...')\n print('Compiling STL file...')\n",
"step-5": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import division\nimport os\nfrom solid import *\nfrom solid.utils import *\n\nfrom shapes import *\nimport sys\n\n# Assumes SolidPython is in site-packages or elsewhwere in sys.path\nfrom solid import *\nfrom solid.utils import *\n\ndef voxels():\n # shape = cube([1, 1, 1], center=False);\n shape = []\n for x in range(-5, 4, 1):\n for y in range(-5, 4, 1):\n for z in range(0, 10, 1):\n translate([x, y, z])\n new_cube = color([0,0,1, 0.5])(cube([1, 1, 1], center=False));\n # shape = (shape+new_cube)\n shape.append(new_cube)\n return shape\n\ndef basic_geometry():\n box_functions = [makeRectBeam, makeCubeBeam, makeTriangleBeam,makeNothingBox, makeCylindBeam, makeHollowCylindBeam, makeHollowCone, makeEye]\n # cylind_functions = [makeCylindBeam, makeHollowCylindBeam, makeHollowCone, makeEye, makeNothingCylind]\n shape_list = []\n for bf in box_functions:\n for cf in box_functions:\n for bf2 in box_functions:\n for i in range(2):\n shape = union()(\n # translate([-2, -3, 0])(\n bf(5, 4, 5),\n translate([0, 0, 5])(\n cf(4, 3, 5)),\n translate([0, 0, 10])(\n bf2(5, 4, 5))\n )\n if i == 0:\n shapeInner = cylinder(r=0.5, h=20, center=False)\n shape = shape - shapeInner\n shape_list.append(shape)\n\n return shape_list\n\ndef export(shape, filename):\n with open(filename + '.scad', 'w+') as f:\n f.write(scad_render(shape, file_header='$fn = %s;' % SEGMENTS))\n\n f.closed\n print(\"Success\")\n\nif __name__ == '__main__':\n out_dir = sys.argv[1] if len(sys.argv) > 1 else os.curdir\n file_out = os.path.join(out_dir, 'basic_geometry.scad')\n\n shape_list = basic_geometry()\n for i, shape in enumerate(shape_list):\n export(shape, \"output\" + str(i))\n print(\"Created OpenSCAD file...\")\n print(\"Compiling STL file...\")",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
@csrf_exempt
def create(request):
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
pythondata = JSONParser().parse(stream)
serializer = StudentSerializer(data=pythondata)
if serializer.is_valid():
serializer.save()
res = {'msg': 'data inserted', 'code': 200}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data)
else:
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def student_detail(request, pk):
stu = Student.objects.get(id=pk)
serializers = StudentSerializer(stu)
return JsonResponse(serializers.data)
<|reserved_special_token_0|>
@csrf_exempt
def create(request):
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
pythondata = JSONParser().parse(stream)
serializer = StudentSerializer(data=pythondata)
if serializer.is_valid():
serializer.save()
res = {'msg': 'data inserted', 'code': 200}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data)
else:
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def student_detail(request, pk):
stu = Student.objects.get(id=pk)
serializers = StudentSerializer(stu)
return JsonResponse(serializers.data)
def student_list(request):
stu = Student.objects.all()
serializers = StudentSerializer(stu, many=True)
return JsonResponse(serializers.data, safe=False)
@csrf_exempt
def create(request):
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
pythondata = JSONParser().parse(stream)
serializer = StudentSerializer(data=pythondata)
if serializer.is_valid():
serializer.save()
res = {'msg': 'data inserted', 'code': 200}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data)
else:
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data)
<|reserved_special_token_1|>
import django
from rest_framework import serializers
from django.shortcuts import render
from .models import Student
from .serializiers import StudentSerializer
from rest_framework.renderers import JSONRenderer
from django.http import HttpResponse, JsonResponse
import io
from rest_framework.parsers import JSONParser
from rest_framework.renderers import JSONRenderer
from django.views.decorators.csrf import csrf_exempt
def student_detail(request, pk):
stu = Student.objects.get(id=pk)
serializers = StudentSerializer(stu)
return JsonResponse(serializers.data)
def student_list(request):
stu = Student.objects.all()
serializers = StudentSerializer(stu, many=True)
return JsonResponse(serializers.data, safe=False)
@csrf_exempt
def create(request):
if request.method == 'POST':
json_data = request.body
stream = io.BytesIO(json_data)
pythondata = JSONParser().parse(stream)
serializer = StudentSerializer(data=pythondata)
if serializer.is_valid():
serializer.save()
res = {'msg': 'data inserted', 'code': 200}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data)
else:
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data)
<|reserved_special_token_1|>
import django
from rest_framework import serializers
from django.shortcuts import render
from .models import Student
from .serializiers import StudentSerializer
from rest_framework.renderers import JSONRenderer
from django.http import HttpResponse,JsonResponse
import io
from rest_framework.parsers import JSONParser
from rest_framework.renderers import JSONRenderer
from django.views.decorators.csrf import csrf_exempt
# Single Model object.
def student_detail(request,pk):
#Student model object
stu = Student.objects.get(id=pk)
#Serializers convert student model object to python dictionary
serializers = StudentSerializer(stu)
#JSONRenderer convert student python dictionary to json object
# json_data = JSONRenderer().render(serializers.data)
# return HttpResponse(json_data,content_type='application/json')
#use simply to reduce the extra line of code
return JsonResponse(serializers.data)
def student_list(request):
#Student model object
stu = Student.objects.all()
#Serializers convert student model object to python dictionary
serializers = StudentSerializer(stu,many=True)
#JSONRenderer convert student python dictionary to json object
# json_data = JSONRenderer().render(serializers.data)
# return HttpResponse(json_data,content_type='application/json')
return JsonResponse(serializers.data,safe=False)
@csrf_exempt
def create(request):
if request.method=='POST':
json_data = request.body
stream = io.BytesIO(json_data)
pythondata = JSONParser().parse(stream)
serializer = StudentSerializer(data=pythondata)
if serializer.is_valid():
serializer.save()
res = {'msg':'data inserted','code':200}
json_data = JSONRenderer().render(res)
return HttpResponse(json_data)
else:
json_data = JSONRenderer().render(serializer.errors)
return HttpResponse(json_data)
|
flexible
|
{
"blob_id": "99785ffb4b594db1fac05ca3d3f5764151b2b7b6",
"index": 103,
"step-1": "<mask token>\n\n\n@csrf_exempt\ndef create(request):\n if request.method == 'POST':\n json_data = request.body\n stream = io.BytesIO(json_data)\n pythondata = JSONParser().parse(stream)\n serializer = StudentSerializer(data=pythondata)\n if serializer.is_valid():\n serializer.save()\n res = {'msg': 'data inserted', 'code': 200}\n json_data = JSONRenderer().render(res)\n return HttpResponse(json_data)\n else:\n json_data = JSONRenderer().render(serializer.errors)\n return HttpResponse(json_data)\n",
"step-2": "<mask token>\n\n\ndef student_detail(request, pk):\n stu = Student.objects.get(id=pk)\n serializers = StudentSerializer(stu)\n return JsonResponse(serializers.data)\n\n\n<mask token>\n\n\n@csrf_exempt\ndef create(request):\n if request.method == 'POST':\n json_data = request.body\n stream = io.BytesIO(json_data)\n pythondata = JSONParser().parse(stream)\n serializer = StudentSerializer(data=pythondata)\n if serializer.is_valid():\n serializer.save()\n res = {'msg': 'data inserted', 'code': 200}\n json_data = JSONRenderer().render(res)\n return HttpResponse(json_data)\n else:\n json_data = JSONRenderer().render(serializer.errors)\n return HttpResponse(json_data)\n",
"step-3": "<mask token>\n\n\ndef student_detail(request, pk):\n stu = Student.objects.get(id=pk)\n serializers = StudentSerializer(stu)\n return JsonResponse(serializers.data)\n\n\ndef student_list(request):\n stu = Student.objects.all()\n serializers = StudentSerializer(stu, many=True)\n return JsonResponse(serializers.data, safe=False)\n\n\n@csrf_exempt\ndef create(request):\n if request.method == 'POST':\n json_data = request.body\n stream = io.BytesIO(json_data)\n pythondata = JSONParser().parse(stream)\n serializer = StudentSerializer(data=pythondata)\n if serializer.is_valid():\n serializer.save()\n res = {'msg': 'data inserted', 'code': 200}\n json_data = JSONRenderer().render(res)\n return HttpResponse(json_data)\n else:\n json_data = JSONRenderer().render(serializer.errors)\n return HttpResponse(json_data)\n",
"step-4": "import django\nfrom rest_framework import serializers\nfrom django.shortcuts import render\nfrom .models import Student\nfrom .serializiers import StudentSerializer\nfrom rest_framework.renderers import JSONRenderer\nfrom django.http import HttpResponse, JsonResponse\nimport io\nfrom rest_framework.parsers import JSONParser\nfrom rest_framework.renderers import JSONRenderer\nfrom django.views.decorators.csrf import csrf_exempt\n\n\ndef student_detail(request, pk):\n stu = Student.objects.get(id=pk)\n serializers = StudentSerializer(stu)\n return JsonResponse(serializers.data)\n\n\ndef student_list(request):\n stu = Student.objects.all()\n serializers = StudentSerializer(stu, many=True)\n return JsonResponse(serializers.data, safe=False)\n\n\n@csrf_exempt\ndef create(request):\n if request.method == 'POST':\n json_data = request.body\n stream = io.BytesIO(json_data)\n pythondata = JSONParser().parse(stream)\n serializer = StudentSerializer(data=pythondata)\n if serializer.is_valid():\n serializer.save()\n res = {'msg': 'data inserted', 'code': 200}\n json_data = JSONRenderer().render(res)\n return HttpResponse(json_data)\n else:\n json_data = JSONRenderer().render(serializer.errors)\n return HttpResponse(json_data)\n",
"step-5": "import django\nfrom rest_framework import serializers\nfrom django.shortcuts import render\nfrom .models import Student\nfrom .serializiers import StudentSerializer\nfrom rest_framework.renderers import JSONRenderer\nfrom django.http import HttpResponse,JsonResponse\n\nimport io\nfrom rest_framework.parsers import JSONParser \nfrom rest_framework.renderers import JSONRenderer\nfrom django.views.decorators.csrf import csrf_exempt\n\n# Single Model object.\n\n\n\ndef student_detail(request,pk):\n \n #Student model object\n stu = Student.objects.get(id=pk) \n #Serializers convert student model object to python dictionary\n serializers = StudentSerializer(stu)\n #JSONRenderer convert student python dictionary to json object\n # json_data = JSONRenderer().render(serializers.data)\n\n # return HttpResponse(json_data,content_type='application/json')\n\n #use simply to reduce the extra line of code\n return JsonResponse(serializers.data)\n\ndef student_list(request):\n\n #Student model object\n stu = Student.objects.all()\n #Serializers convert student model object to python dictionary\n serializers = StudentSerializer(stu,many=True)\n #JSONRenderer convert student python dictionary to json object\n # json_data = JSONRenderer().render(serializers.data)\n\n # return HttpResponse(json_data,content_type='application/json')\n return JsonResponse(serializers.data,safe=False)\n\n@csrf_exempt\ndef create(request):\n if request.method=='POST':\n json_data = request.body\n stream = io.BytesIO(json_data)\n pythondata = JSONParser().parse(stream)\n serializer = StudentSerializer(data=pythondata)\n if serializer.is_valid():\n serializer.save()\n res = {'msg':'data inserted','code':200}\n json_data = JSONRenderer().render(res)\n return HttpResponse(json_data)\n else:\n json_data = JSONRenderer().render(serializer.errors)\n return HttpResponse(json_data)\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from model import *
from data import *
import os
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
data_gen_args = dict(horizontal_flip = True,
vertical_flip = True)
imageTargetSize = (256, 256)
trainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Training'
trainImagePath = 'Selected Images Training'
trainLabelPath = 'Selected Images Label Binarized Training'
#augTrainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/train/aug'
#validationPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/validation'
#validationImagePath = 'Selected Images Resized Validation'
#validationLabelPath = 'Selected Images Label Resized Binarized Validation'
trainGene = trainGenerator(batch_size = 1,
train_path = trainPath,
trainImage_folder = trainImagePath,
trainLabel_folder = trainLabelPath,
aug_dict = data_gen_args,
save_to_dir = None,
target_size = imageTargetSize,
trainImage_color_mode = 'grayscale',
trainLabel_color_mode = 'grayscale',
trainImage_save_prefix = 'Image',
trainLabel_save_prefix = 'Label',
seed = 1,
flag_multi_class = False,
num_class = 2)
""""
validationGene = validationGenerator(validation_path = validationPath,
validationImage_path = validationImagePath,
validationLabel_path = validationLabelPath,
target_size = imageTargetSize,
flag_multi_class = False,
as_gray = True)
"""
model = unet()
model_checkpoint = ModelCheckpoint('unet_testing.hdf5', monitor='loss',verbose=1, save_best_only=True)
trainHistory = model.fit_generator(trainGene,
steps_per_epoch=100,
epochs=7,
callbacks = [model_checkpoint]
)
testImagePath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Test/Selected Images Test'
testGene = testGenerator(test_path = testImagePath,
target_size = imageTargetSize,
flag_multi_class = False,
as_gray = True)
results = model.predict_generator(testGene, len(os.listdir(testImagePath)), verbose = 1)
saveResult("/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here", results)
training_loss = trainHistory.history['loss']
#test_loss = history.history['val_loss']
epoch_count = range(1, len(training_loss)+1)
plt.plot(epoch_count, training_loss, 'r--')
#plt.plot(epoch_count, test_loss, 'b-')
plt.legend(['Training Loss'])
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('U-Net Training Loss Function')
plt.show();
|
normal
|
{
"blob_id": "ba379ed90bccd05d058f69f33a960779f8b8bcd5",
"index": 5632,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsaveResult(\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here'\n , results)\n<mask token>\nplt.plot(epoch_count, training_loss, 'r--')\nplt.legend(['Training Loss'])\nplt.xlabel('Epoch')\nplt.ylabel('Loss')\nplt.title('U-Net Training Loss Function')\nplt.show()\n",
"step-3": "<mask token>\ndata_gen_args = dict(horizontal_flip=True, vertical_flip=True)\nimageTargetSize = 256, 256\ntrainPath = (\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Training'\n )\ntrainImagePath = 'Selected Images Training'\ntrainLabelPath = 'Selected Images Label Binarized Training'\ntrainGene = trainGenerator(batch_size=1, train_path=trainPath,\n trainImage_folder=trainImagePath, trainLabel_folder=trainLabelPath,\n aug_dict=data_gen_args, save_to_dir=None, target_size=imageTargetSize,\n trainImage_color_mode='grayscale', trainLabel_color_mode='grayscale',\n trainImage_save_prefix='Image', trainLabel_save_prefix='Label', seed=1,\n flag_multi_class=False, num_class=2)\n<mask token>\nmodel = unet()\nmodel_checkpoint = ModelCheckpoint('unet_testing.hdf5', monitor='loss',\n verbose=1, save_best_only=True)\ntrainHistory = model.fit_generator(trainGene, steps_per_epoch=100, epochs=7,\n callbacks=[model_checkpoint])\ntestImagePath = (\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Test/Selected Images Test'\n )\ntestGene = testGenerator(test_path=testImagePath, target_size=\n imageTargetSize, flag_multi_class=False, as_gray=True)\nresults = model.predict_generator(testGene, len(os.listdir(testImagePath)),\n verbose=1)\nsaveResult(\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here'\n , results)\ntraining_loss = trainHistory.history['loss']\nepoch_count = range(1, len(training_loss) + 1)\nplt.plot(epoch_count, training_loss, 'r--')\nplt.legend(['Training Loss'])\nplt.xlabel('Epoch')\nplt.ylabel('Loss')\nplt.title('U-Net Training Loss Function')\nplt.show()\n",
"step-4": "from model import *\nfrom data import *\nimport os\nimport matplotlib.pyplot as plt\nfrom sklearn.metrics import confusion_matrix\ndata_gen_args = dict(horizontal_flip=True, vertical_flip=True)\nimageTargetSize = 256, 256\ntrainPath = (\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Training'\n )\ntrainImagePath = 'Selected Images Training'\ntrainLabelPath = 'Selected Images Label Binarized Training'\ntrainGene = trainGenerator(batch_size=1, train_path=trainPath,\n trainImage_folder=trainImagePath, trainLabel_folder=trainLabelPath,\n aug_dict=data_gen_args, save_to_dir=None, target_size=imageTargetSize,\n trainImage_color_mode='grayscale', trainLabel_color_mode='grayscale',\n trainImage_save_prefix='Image', trainLabel_save_prefix='Label', seed=1,\n flag_multi_class=False, num_class=2)\n<mask token>\nmodel = unet()\nmodel_checkpoint = ModelCheckpoint('unet_testing.hdf5', monitor='loss',\n verbose=1, save_best_only=True)\ntrainHistory = model.fit_generator(trainGene, steps_per_epoch=100, epochs=7,\n callbacks=[model_checkpoint])\ntestImagePath = (\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Test/Selected Images Test'\n )\ntestGene = testGenerator(test_path=testImagePath, target_size=\n imageTargetSize, flag_multi_class=False, as_gray=True)\nresults = model.predict_generator(testGene, len(os.listdir(testImagePath)),\n verbose=1)\nsaveResult(\n '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here'\n , results)\ntraining_loss = trainHistory.history['loss']\nepoch_count = range(1, len(training_loss) + 1)\nplt.plot(epoch_count, training_loss, 'r--')\nplt.legend(['Training Loss'])\nplt.xlabel('Epoch')\nplt.ylabel('Loss')\nplt.title('U-Net Training Loss Function')\nplt.show()\n",
"step-5": "from model import *\nfrom data import *\nimport os\nimport matplotlib.pyplot as plt\nfrom sklearn.metrics import confusion_matrix\n\ndata_gen_args = dict(horizontal_flip = True,\n vertical_flip = True)\n\n\nimageTargetSize = (256, 256)\n\n\ntrainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Training'\ntrainImagePath = 'Selected Images Training'\ntrainLabelPath = 'Selected Images Label Binarized Training'\n#augTrainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/train/aug'\n\n#validationPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/validation'\n#validationImagePath = 'Selected Images Resized Validation'\n#validationLabelPath = 'Selected Images Label Resized Binarized Validation'\n\n\ntrainGene = trainGenerator(batch_size = 1,\n train_path = trainPath,\n trainImage_folder = trainImagePath,\n trainLabel_folder = trainLabelPath,\n aug_dict = data_gen_args,\n save_to_dir = None,\n target_size = imageTargetSize,\n trainImage_color_mode = 'grayscale',\n trainLabel_color_mode = 'grayscale',\n trainImage_save_prefix = 'Image',\n trainLabel_save_prefix = 'Label',\n seed = 1,\n flag_multi_class = False,\n num_class = 2)\n\n\"\"\"\"\nvalidationGene = validationGenerator(validation_path = validationPath,\n validationImage_path = validationImagePath,\n validationLabel_path = validationLabelPath,\n target_size = imageTargetSize,\n flag_multi_class = False,\n as_gray = True)\n\"\"\"\n\nmodel = unet()\nmodel_checkpoint = ModelCheckpoint('unet_testing.hdf5', monitor='loss',verbose=1, save_best_only=True)\ntrainHistory = model.fit_generator(trainGene,\n steps_per_epoch=100,\n epochs=7,\n callbacks = [model_checkpoint]\n )\n\n\ntestImagePath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Test/Selected Images 
Test'\ntestGene = testGenerator(test_path = testImagePath,\n target_size = imageTargetSize,\n flag_multi_class = False,\n as_gray = True)\n\n\nresults = model.predict_generator(testGene, len(os.listdir(testImagePath)), verbose = 1)\nsaveResult(\"/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here\", results)\n\n\ntraining_loss = trainHistory.history['loss']\n#test_loss = history.history['val_loss']\n\nepoch_count = range(1, len(training_loss)+1)\n\n\nplt.plot(epoch_count, training_loss, 'r--')\n#plt.plot(epoch_count, test_loss, 'b-')\nplt.legend(['Training Loss'])\nplt.xlabel('Epoch')\nplt.ylabel('Loss')\nplt.title('U-Net Training Loss Function')\nplt.show();\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def update_mode(args):
"""
This method is the main method for running this program in Update mode.
Update mode takes in a specifically formated XLSX file and outputs a JSON
file containing all of the data for races and subraces needed by the
program in run mode
Arguments:
:param args: (dict) A dictionary containing the needed arguments
Returns:
bool: Whether or not the update completed successfully or not
"""
try:
workbook = load_workbook(args['xlsx_file'])
except:
return False
df = DataFrame()
for name in workbook.sheetnames:
if 'Race' in name:
df = DataFrame(workbook[name].values)
if df.empty:
return False
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
end_col = (df.iloc[0, :].values == None).argmax()
df.drop(df.iloc[:, end_col:], axis=1, inplace=True)
df.columns = list(df.iloc[0, :])
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
end_row = (df.iloc[:, 0].values == None).argmax()
df.drop(df[end_row:].index, axis=0, inplace=True)
hyperlink_re = re.compile('(?<=,")(.+)(?=")')
df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is
None else hyperlink_re.search(x).group(1))
df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is
None else hyperlink_re.search(x).group(1))
data = {}
asi_re = re.compile('ASI: ([+-]\\d) \\(x(\\d)\\)(?:\\s{1}\\((.+)\\))?')
for index, row in df.iterrows():
row = dict(row)
race = row['Race']
subrace = row['Subrace']
if subrace:
if race in data:
if 'Subraces' not in data[race]:
data[race]['Subraces'] = {}
data[race]['Subraces'][subrace] = row
else:
data[race] = {'Subraces': {}}
data[race]['Subraces'][subrace] = row
else:
data[race] = row
if row['Additional'] is not None:
matches = asi_re.search(row['Additional'])
if matches:
asi = {'size': matches.group(1), 'number': matches.group(2)}
if matches.group(3):
if '-' in matches.group(3):
asi['not_allowed'] = matches.group(3).split('-')[1]
if '|' in matches.group(3):
asi['allowed'] = [x.capitalize() for x in matches.
group(3).split(' | ')]
if subrace:
data[race]['Subraces'][subrace]['ASI'] = asi
else:
data[race]['ASI'] = asi
with open('race_data.json', 'w') as fp:
json.dump(data, fp, indent=2)
return True
def run_mode(args):
"""
This method is the main method for running this program in Run mode.
This mode goes through the character simulation
Arguments:
:param args: (dict) A dictionary containing the needed arguments
"""
char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',
'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.
HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)
print(char.id)
print(char.stats)
char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',
'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.
HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)
print(char.id)
print(char.stats)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def import_race_data(file_path):
"""
This method imports data from the inputed CSV and returns a dictionary containing
all of the data formated by race and subrace
Arguments:
:param import_data: (str) The filepath to the data
Returns:
dict: The dictionary of all of the data
"""
retval = {}
with open(file_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
race = row['Race']
subrace = row['Subrace']
if subrace:
if race in retval:
if 'Subraces' not in retval[race]:
retval[race]['Subraces'] = {}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = {'Subraces': {}}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = row
return retval
def update_mode(args):
"""
This method is the main method for running this program in Update mode.
Update mode takes in a specifically formated XLSX file and outputs a JSON
file containing all of the data for races and subraces needed by the
program in run mode
Arguments:
:param args: (dict) A dictionary containing the needed arguments
Returns:
bool: Whether or not the update completed successfully or not
"""
try:
workbook = load_workbook(args['xlsx_file'])
except:
return False
df = DataFrame()
for name in workbook.sheetnames:
if 'Race' in name:
df = DataFrame(workbook[name].values)
if df.empty:
return False
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
end_col = (df.iloc[0, :].values == None).argmax()
df.drop(df.iloc[:, end_col:], axis=1, inplace=True)
df.columns = list(df.iloc[0, :])
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
end_row = (df.iloc[:, 0].values == None).argmax()
df.drop(df[end_row:].index, axis=0, inplace=True)
hyperlink_re = re.compile('(?<=,")(.+)(?=")')
df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is
None else hyperlink_re.search(x).group(1))
df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is
None else hyperlink_re.search(x).group(1))
data = {}
asi_re = re.compile('ASI: ([+-]\\d) \\(x(\\d)\\)(?:\\s{1}\\((.+)\\))?')
for index, row in df.iterrows():
row = dict(row)
race = row['Race']
subrace = row['Subrace']
if subrace:
if race in data:
if 'Subraces' not in data[race]:
data[race]['Subraces'] = {}
data[race]['Subraces'][subrace] = row
else:
data[race] = {'Subraces': {}}
data[race]['Subraces'][subrace] = row
else:
data[race] = row
if row['Additional'] is not None:
matches = asi_re.search(row['Additional'])
if matches:
asi = {'size': matches.group(1), 'number': matches.group(2)}
if matches.group(3):
if '-' in matches.group(3):
asi['not_allowed'] = matches.group(3).split('-')[1]
if '|' in matches.group(3):
asi['allowed'] = [x.capitalize() for x in matches.
group(3).split(' | ')]
if subrace:
data[race]['Subraces'][subrace]['ASI'] = asi
else:
data[race]['ASI'] = asi
with open('race_data.json', 'w') as fp:
json.dump(data, fp, indent=2)
return True
def run_mode(args):
"""
This method is the main method for running this program in Run mode.
This mode goes through the character simulation
Arguments:
:param args: (dict) A dictionary containing the needed arguments
"""
char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',
'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.
HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)
print(char.id)
print(char.stats)
char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',
'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.
HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)
print(char.id)
print(char.stats)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def import_race_data(file_path):
"""
This method imports data from the inputed CSV and returns a dictionary containing
all of the data formated by race and subrace
Arguments:
:param import_data: (str) The filepath to the data
Returns:
dict: The dictionary of all of the data
"""
retval = {}
with open(file_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
race = row['Race']
subrace = row['Subrace']
if subrace:
if race in retval:
if 'Subraces' not in retval[race]:
retval[race]['Subraces'] = {}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = {'Subraces': {}}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = row
return retval
def update_mode(args):
    """
    Run the program in Update mode.

    Update mode takes in a specifically formated XLSX file and outputs a
    JSON file ('race_data.json') containing all of the data for races and
    subraces needed by the program in run mode.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments;
            must include 'xlsx_file', the path to the workbook.

    Returns:
        bool: Whether or not the update completed successfully or not
    """
    # Catch Exception (not a bare `except:`) so KeyboardInterrupt and
    # SystemExit still propagate; any load failure means "update failed".
    try:
        workbook = load_workbook(args['xlsx_file'])
    except Exception:
        return False
    # Find the sheet whose name mentions 'Race' and load it raw.
    df = DataFrame()
    for name in workbook.sheetnames:
        if 'Race' in name:
            df = DataFrame(workbook[name].values)
    if df.empty:
        return False
    # Drop the title row.
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # The header row is now first: trim trailing blank columns (first None
    # marks the end), promote the row to column names, then drop it.
    end_col = (df.iloc[0, :].values == None).argmax()
    df.drop(df.iloc[:, end_col:], axis=1, inplace=True)
    df.columns = list(df.iloc[0, :])
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # Trim trailing blank rows the same way (first None in column 0).
    end_row = (df.iloc[:, 0].values == None).argmax()
    df.drop(df[end_row:].index, axis=0, inplace=True)
    # Cells may hold spreadsheet HYPERLINK formulas; pull out the display
    # text (the quoted argument after the comma).
    hyperlink_re = re.compile(r'(?<=,")(.+)(?=")')
    df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    data = {}
    # Matches e.g. "ASI: +1 (x2) (Str | Dex)" -> size, count, restrictions.
    asi_re = re.compile(r'ASI: ([+-]\d) \(x(\d)\)(?:\s{1}\((.+)\))?')
    for index, row in df.iterrows():
        row = dict(row)
        race = row['Race']
        subrace = row['Subrace']
        if subrace:
            # Create the race entry and its 'Subraces' bucket on demand.
            data.setdefault(race, {}).setdefault('Subraces', {})[subrace] = row
        else:
            data[race] = row
        # Check whether this row carries special ASI rules.
        if row['Additional'] is not None:
            matches = asi_re.search(row['Additional'])
            if matches:
                asi = {'size': matches.group(1), 'number': matches.group(2)}
                if matches.group(3):
                    if '-' in matches.group(3):
                        # A '-' names a stat the ASI may NOT go into.
                        asi['not_allowed'] = matches.group(3).split('-')[1]
                    if '|' in matches.group(3):
                        # 'A | B' restricts the ASI to the listed stats.
                        asi['allowed'] = [x.capitalize() for x in
                                          matches.group(3).split(' | ')]
                if subrace:
                    data[race]['Subraces'][subrace]['ASI'] = asi
                else:
                    data[race]['ASI'] = asi
    with open('race_data.json', 'w') as fp:
        json.dump(data, fp, indent=2)
    return True
def run_mode(args):
    """
    Run the program in Run mode: walk through the character simulation.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments
    """
    # Each tuple describes one simulation: (subrace, stat-roll strategy,
    # ASI strategy). HP selection is ROLL_HP for both.
    simulations = [
        (None, classes.StatSelection.ROLL_4D6_DROP_ONE,
         classes.ASISelection.STRICT_FOCUS),
        ('Variant', classes.StatSelection.ROLL_3D6,
         classes.ASISelection.FOCUS_ODD_TO_EVEN),
    ]
    for subrace, stat_choice, asi_choice in simulations:
        # A fresh stat-order list is built per character, matching the
        # original call-by-call behavior.
        character = classes.Character(
            'Human', subrace, ['Str', 'Dex', 'Con', 'Int', 'Wis', 'Cha'],
            stat_choice, classes.HPSelection.ROLL_HP, asi_choice)
        print(character.id)
        print(character.stats)
if __name__ == '__main__':
    # Build the CLI: two subcommands, 'update' (requires an .xlsx path)
    # and 'run' (no extra arguments).
    main_parser = argparse.ArgumentParser(description='Character Simulator')
    subparsers = main_parser.add_subparsers(help='Mode Help')
    update_parser = subparsers.add_parser('update', help='Update Help')
    update_parser.add_argument('xlsx_file', type=str, help=
        'Path to the .xlsx race file')
    run_parser = subparsers.add_parser('run', help='Run Help')
    args = vars(main_parser.parse_args())
    # Dispatch on the presence of 'xlsx_file': only the 'update' subcommand
    # defines it, so any other invocation falls through to run mode.
    if 'xlsx_file' in args:
        update_mode(args)
    else:
        run_mode(args)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import argparse
import csv
import json
import re
import time
from openpyxl import load_workbook
from pandas import DataFrame
from src import classes, util
def import_race_data(file_path):
    """
    Read the race CSV at *file_path* and organize its rows by race and
    subrace.

    Arguments:
        :param file_path: (str) The filepath to the data

    Returns:
        dict: Race rows keyed by race name, with subrace rows nested
        under a per-race 'Subraces' mapping.
    """
    races = {}
    with open(file_path) as handle:
        for record in csv.DictReader(handle):
            race_name = record['Race']
            subrace_name = record['Subrace']
            if not subrace_name:
                races[race_name] = record
                continue
            # Make sure the race entry exists and has a 'Subraces' bucket
            # before filing this subrace row under it.
            entry = races.setdefault(race_name, {'Subraces': {}})
            entry.setdefault('Subraces', {})[subrace_name] = record
    return races
def update_mode(args):
    """
    Run the program in Update mode.

    Update mode takes in a specifically formated XLSX file and outputs a
    JSON file ('race_data.json') containing all of the data for races and
    subraces needed by the program in run mode.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments;
            must include 'xlsx_file', the path to the workbook.

    Returns:
        bool: Whether or not the update completed successfully or not
    """
    # Catch Exception (not a bare `except:`) so KeyboardInterrupt and
    # SystemExit still propagate; any load failure means "update failed".
    try:
        workbook = load_workbook(args['xlsx_file'])
    except Exception:
        return False
    # Find the sheet whose name mentions 'Race' and load it raw.
    df = DataFrame()
    for name in workbook.sheetnames:
        if 'Race' in name:
            df = DataFrame(workbook[name].values)
    if df.empty:
        return False
    # Drop the title row.
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # The header row is now first: trim trailing blank columns (first None
    # marks the end), promote the row to column names, then drop it.
    end_col = (df.iloc[0, :].values == None).argmax()
    df.drop(df.iloc[:, end_col:], axis=1, inplace=True)
    df.columns = list(df.iloc[0, :])
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # Trim trailing blank rows the same way (first None in column 0).
    end_row = (df.iloc[:, 0].values == None).argmax()
    df.drop(df[end_row:].index, axis=0, inplace=True)
    # Cells may hold spreadsheet HYPERLINK formulas; pull out the display
    # text (the quoted argument after the comma).
    hyperlink_re = re.compile(r'(?<=,")(.+)(?=")')
    df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    data = {}
    # Matches e.g. "ASI: +1 (x2) (Str | Dex)" -> size, count, restrictions.
    asi_re = re.compile(r'ASI: ([+-]\d) \(x(\d)\)(?:\s{1}\((.+)\))?')
    for index, row in df.iterrows():
        row = dict(row)
        race = row['Race']
        subrace = row['Subrace']
        if subrace:
            # Create the race entry and its 'Subraces' bucket on demand.
            data.setdefault(race, {}).setdefault('Subraces', {})[subrace] = row
        else:
            data[race] = row
        # Check whether this row carries special ASI rules.
        if row['Additional'] is not None:
            matches = asi_re.search(row['Additional'])
            if matches:
                asi = {'size': matches.group(1), 'number': matches.group(2)}
                if matches.group(3):
                    if '-' in matches.group(3):
                        # A '-' names a stat the ASI may NOT go into.
                        asi['not_allowed'] = matches.group(3).split('-')[1]
                    if '|' in matches.group(3):
                        # 'A | B' restricts the ASI to the listed stats.
                        asi['allowed'] = [x.capitalize() for x in
                                          matches.group(3).split(' | ')]
                if subrace:
                    data[race]['Subraces'][subrace]['ASI'] = asi
                else:
                    data[race]['ASI'] = asi
    with open('race_data.json', 'w') as fp:
        json.dump(data, fp, indent=2)
    return True
def run_mode(args):
    """
    Run the program in Run mode: walk through the character simulation.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments
    """
    # Each tuple describes one simulation: (subrace, stat-roll strategy,
    # ASI strategy). HP selection is ROLL_HP for both.
    simulations = [
        (None, classes.StatSelection.ROLL_4D6_DROP_ONE,
         classes.ASISelection.STRICT_FOCUS),
        ('Variant', classes.StatSelection.ROLL_3D6,
         classes.ASISelection.FOCUS_ODD_TO_EVEN),
    ]
    for subrace, stat_choice, asi_choice in simulations:
        # A fresh stat-order list is built per character, matching the
        # original call-by-call behavior.
        character = classes.Character(
            'Human', subrace, ['Str', 'Dex', 'Con', 'Int', 'Wis', 'Cha'],
            stat_choice, classes.HPSelection.ROLL_HP, asi_choice)
        print(character.id)
        print(character.stats)
if __name__ == '__main__':
    # Build the CLI: two subcommands, 'update' (requires an .xlsx path)
    # and 'run' (no extra arguments).
    main_parser = argparse.ArgumentParser(description='Character Simulator')
    subparsers = main_parser.add_subparsers(help='Mode Help')
    update_parser = subparsers.add_parser('update', help='Update Help')
    update_parser.add_argument('xlsx_file', type=str, help=
        'Path to the .xlsx race file')
    run_parser = subparsers.add_parser('run', help='Run Help')
    args = vars(main_parser.parse_args())
    # Dispatch on the presence of 'xlsx_file': only the 'update' subcommand
    # defines it, so any other invocation falls through to run mode.
    if 'xlsx_file' in args:
        update_mode(args)
    else:
        run_mode(args)
<|reserved_special_token_1|>
'''
This program will simulate leveling a DnD character, showing their ending HP, and stats.
'''
import argparse
import csv
import json
import re
import time
from openpyxl import load_workbook
from pandas import DataFrame
from src import classes, util
def import_race_data(file_path):
    """
    Import race and subrace data from the CSV at *file_path*.

    Arguments:
        :param file_path: (str) The filepath to the CSV data

    Returns:
        dict: Rows keyed by race name; rows that belong to a subrace are
        nested under a per-race 'Subraces' mapping keyed by subrace name.
    """
    retval = {}
    # Open with newline='' as the csv module requires, so quoted fields
    # that contain embedded newlines are parsed correctly.
    with open(file_path, newline='') as csv_file:
        for row in csv.DictReader(csv_file):
            race = row['Race']
            subrace = row['Subrace']
            if subrace:
                # Create the race entry and its 'Subraces' bucket on demand.
                retval.setdefault(race, {}).setdefault('Subraces', {})[subrace] = row
            else:
                retval[race] = row
    return retval
def update_mode(args):
    """
    Run the program in Update mode.

    Update mode takes in a specifically formated XLSX file and outputs a
    JSON file ('race_data.json') containing all of the data for races and
    subraces needed by the program in run mode.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments;
            must include 'xlsx_file', the path to the workbook.

    Returns:
        bool: Whether or not the update completed successfully or not
    """
    # Catch Exception (not a bare `except:`) so KeyboardInterrupt and
    # SystemExit still propagate; any load failure means "update failed".
    try:
        workbook = load_workbook(args['xlsx_file'])
    except Exception:
        return False
    # Find the sheet whose name mentions 'Race' and load it raw.
    df = DataFrame()
    for name in workbook.sheetnames:
        if 'Race' in name:
            df = DataFrame(workbook[name].values)
    if df.empty:
        return False
    # Drop the title row.
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # The header row is now first: trim trailing blank columns (first None
    # marks the end), promote the row to column names, then drop it.
    end_col = (df.iloc[0, :].values == None).argmax()
    df.drop(df.iloc[:, end_col:], axis=1, inplace=True)
    df.columns = list(df.iloc[0, :])
    df.drop(0, axis=0, inplace=True)
    df.reset_index(inplace=True, drop=True)
    # Trim trailing blank rows the same way (first None in column 0).
    end_row = (df.iloc[:, 0].values == None).argmax()
    df.drop(df[end_row:].index, axis=0, inplace=True)
    # Cells may hold spreadsheet HYPERLINK formulas; pull out the display
    # text (the quoted argument after the comma).
    hyperlink_re = re.compile(r'(?<=,")(.+)(?=")')
    df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is
        None else hyperlink_re.search(x).group(1))
    data = {}
    # Matches e.g. "ASI: +1 (x2) (Str | Dex)" -> size, count, restrictions.
    asi_re = re.compile(r'ASI: ([+-]\d) \(x(\d)\)(?:\s{1}\((.+)\))?')
    for index, row in df.iterrows():
        row = dict(row)
        race = row['Race']
        subrace = row['Subrace']
        if subrace:
            # Create the race entry and its 'Subraces' bucket on demand.
            data.setdefault(race, {}).setdefault('Subraces', {})[subrace] = row
        else:
            data[race] = row
        # Check whether this row carries special ASI rules.
        if row['Additional'] is not None:
            matches = asi_re.search(row['Additional'])
            if matches:
                asi = {'size': matches.group(1), 'number': matches.group(2)}
                if matches.group(3):
                    if '-' in matches.group(3):
                        # A '-' names a stat the ASI may NOT go into.
                        asi['not_allowed'] = matches.group(3).split('-')[1]
                    if '|' in matches.group(3):
                        # 'A | B' restricts the ASI to the listed stats.
                        asi['allowed'] = [x.capitalize() for x in
                                          matches.group(3).split(' | ')]
                if subrace:
                    data[race]['Subraces'][subrace]['ASI'] = asi
                else:
                    data[race]['ASI'] = asi
    with open('race_data.json', 'w') as fp:
        json.dump(data, fp, indent=2)
    return True
def run_mode(args):
    """
    Run the program in Run mode: walk through the character simulation.

    Arguments:
        :param args: (dict) A dictionary containing the needed arguments
    """
    # Each tuple describes one simulation: (subrace, stat-roll strategy,
    # ASI strategy). HP selection is ROLL_HP for both.
    simulations = [
        (None, classes.StatSelection.ROLL_4D6_DROP_ONE,
         classes.ASISelection.STRICT_FOCUS),
        ("Variant", classes.StatSelection.ROLL_3D6,
         classes.ASISelection.FOCUS_ODD_TO_EVEN),
    ]
    for subrace, stat_choice, asi_choice in simulations:
        # A fresh stat-order list is built per character, matching the
        # original call-by-call behavior.
        character = classes.Character(
            "Human", subrace, ['Str', 'Dex', 'Con', 'Int', 'Wis', 'Cha'],
            stat_choice, classes.HPSelection.ROLL_HP, asi_choice)
        print(character.id)
        print(character.stats)
if __name__ == "__main__":
    # Setup argument parsers and parse arguments
    # Two subcommands: 'update' (requires an .xlsx path) and 'run'.
    main_parser = argparse.ArgumentParser(description='Character Simulator')
    subparsers = main_parser.add_subparsers(help='Mode Help')
    update_parser = subparsers.add_parser('update', help='Update Help')
    update_parser.add_argument('xlsx_file', type=str, help='Path to the .xlsx race file')
    run_parser = subparsers.add_parser('run', help='Run Help')
    args = vars(main_parser.parse_args())
    # If we are in update mode, update the json file
    # (only the 'update' subcommand defines 'xlsx_file'; everything else
    # falls through to run mode)
    if('xlsx_file' in args):
        update_mode(args)
    else:
        run_mode(args)
|
flexible
|
{
"blob_id": "022c8d6c31ad5494b03bfe93d17396eac25b011e",
"index": 8706,
"step-1": "<mask token>\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if 
matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else 
hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else 
hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\nif __name__ == '__main__':\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = 
main_parser.add_subparsers(help='Mode Help')\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help=\n 'Path to the .xlsx race file')\n run_parser = subparsers.add_parser('run', help='Run Help')\n args = vars(main_parser.parse_args())\n if 'xlsx_file' in args:\n update_mode(args)\n else:\n run_mode(args)\n",
"step-4": "<mask token>\nimport argparse\nimport csv\nimport json\nimport re\nimport time\nfrom openpyxl import load_workbook\nfrom pandas import DataFrame\nfrom src import classes, util\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, 
inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n 
print(char.id)\n print(char.stats)\n\n\nif __name__ == '__main__':\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = main_parser.add_subparsers(help='Mode Help')\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help=\n 'Path to the .xlsx race file')\n run_parser = subparsers.add_parser('run', help='Run Help')\n args = vars(main_parser.parse_args())\n if 'xlsx_file' in args:\n update_mode(args)\n else:\n run_mode(args)\n",
"step-5": "'''\nThis program will simulate leveling a DnD character, showing their ending HP, and stats.\n'''\nimport argparse\nimport csv\nimport json\nimport re\nimport time\nfrom openpyxl import load_workbook\nfrom pandas import DataFrame\nfrom src import classes, util\n\n\ndef import_race_data(file_path):\n '''\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n '''\n retval = {}\n\n # Open csv file and read in all data\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n\n if(subrace):\n if(race in retval):\n if('Subraces' not in retval[race]):\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces':{}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n\n return retval\n\ndef update_mode(args):\n '''\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n '''\n # Lets first open the workbook\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n\n # Now turn the Race sheet into a dataframe\n df = DataFrame()\n for name in workbook.sheetnames:\n if('Race' in name):\n df = DataFrame(workbook[name].values)\n\n # If we find nothing, return failure\n if(df.empty):\n return False\n\n # Lets remove the title row\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n\n # Now lets get the headers, find the last 
column, and remove this row\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n\n # Now lets resize this dataframe to only contain the information we want\n # We first scroll down the rows to find the first blank cell, that is the\n # end of the rows\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n\n # Now let's get the race names and source names\n hyperlink_re = re.compile(r'(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(\n lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)\n )\n df['Source'] = df['Source'].apply(\n lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)\n )\n\n # Now make sure the stat fields are correct integers\n\n # Loop through dataframe so we can assemble the json in the format we want\n data = {}\n asi_re = re.compile(r'ASI: ([+-]\\d) \\(x(\\d)\\)(?:\\s{1}\\((.+)\\))?')\n for index, row in df.iterrows():\n # First lets index this record into the correct spot in the array\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n\n if(subrace):\n if(race in data):\n if('Subraces' not in data[race]):\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces':{}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n\n # Now that we have added this row, check if there are any special ASI rules to note\n if(row['Additional'] is not None):\n matches = asi_re.search(row['Additional'])\n if(matches):\n # We found something\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n\n # Check if we have restrictions\n if(matches.group(3)):\n # We either can put the point into a number of options, or not\n # into one stat\n if('-' in matches.group(3)):\n # We cannot use this stat\n 
asi['not_allowed'] = matches.group(3).split('-')[1]\n\n if('|' in matches.group(3)):\n # We can only use one or the other\n asi['allowed'] = [x.capitalize() for x in matches.group(3).split(' | ')]\n \n # Now add this to the row of data\n if(subrace):\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n\n # Done! Let's dump this file\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n\n return True\n\ndef run_mode(args):\n '''\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n '''\n char = classes.Character(\n \"Human\", None, ['Str','Dex','Con','Int','Wis','Cha'], \n classes.StatSelection.ROLL_4D6_DROP_ONE, classes.HPSelection.ROLL_HP,\n classes.ASISelection.STRICT_FOCUS\n )\n print(char.id)\n print(char.stats)\n char = classes.Character(\n \"Human\", \"Variant\", ['Str','Dex','Con','Int','Wis','Cha'], \n classes.StatSelection.ROLL_3D6, classes.HPSelection.ROLL_HP,\n classes.ASISelection.FOCUS_ODD_TO_EVEN\n )\n print(char.id)\n print(char.stats)\n\n\nif __name__ == \"__main__\":\n # Setup argument parsers and parse arguments\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = main_parser.add_subparsers(help='Mode Help')\n\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help='Path to the .xlsx race file')\n\n run_parser = subparsers.add_parser('run', help='Run Help')\n\n args = vars(main_parser.parse_args())\n\n # If we are in update mode, update the json file\n if('xlsx_file' in args):\n update_mode(args)\n else:\n run_mode(args)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import requests
import json
data = json.load(open("dummy_data/data.json"))
for one in data:
print(one)
r = requests.post("http://localhost:8080/sumari", json=one)
print(r.text)
|
normal
|
{
"blob_id": "8bc40ed4fe1091ecdb40cd55ff9cf53010078823",
"index": 361,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor one in data:\n print(one)\n r = requests.post('http://localhost:8080/sumari', json=one)\n print(r.text)\n",
"step-3": "<mask token>\ndata = json.load(open('dummy_data/data.json'))\nfor one in data:\n print(one)\n r = requests.post('http://localhost:8080/sumari', json=one)\n print(r.text)\n",
"step-4": "import requests\nimport json\ndata = json.load(open('dummy_data/data.json'))\nfor one in data:\n print(one)\n r = requests.post('http://localhost:8080/sumari', json=one)\n print(r.text)\n",
"step-5": "import requests\nimport json\n\ndata = json.load(open(\"dummy_data/data.json\"))\n\nfor one in data:\n print(one)\n r = requests.post(\"http://localhost:8080/sumari\", json=one)\n print(r.text)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
# didnt endup using this
import time
# from django.contrib.gis.utils import LayerMapping
from django.contrib.gis.geos import fromstr
# from models import Harbord
import csv
from pygeocoder import Geocoder
# from django.contrib.gis.geos import (Point, fromstr, fromfile,
# GEOSGeometry, MultiPoint, MultiPolygon, Polygon)
tree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')
#Setup
with open(tree_csv, "rU") as csvinput:
with open("../harbordvillage/outfile.csv","w+") as csvoutput:
writer = csv.writer(csvoutput,quoting=csv.QUOTE_NONNUMERIC)
reader = csv.reader(csvinput)
all = []
row = next(reader)
row.append('Address')
all.append(row)
for row in reader:
add=("%s %s %s %s" % (row[1], row[0], 'Toronto', 'Canada'))
# pygeocode stuff
# time.sleep(1)
results = Geocoder.geocode(add)
row[0] = results.route
# print(isinstance(results, basestring))
ind = results[0].coordinates
lat=ind[0]
lon=ind[1]
ind= str(lat) + ' ' + str(lon)
print(ind)
mypoint = fromstr('POINT('+ ind + ')')
# print(type(mypoint))
try:
row.append(mypoint)
except:
pass
all.append(row)
print(row)
# row.append(results.cooridnates)
# print(row)
writer.writerows(all)
|
normal
|
{
"blob_id": "40b9114e4348bab5d76d68a937b3abe95a90c230",
"index": 4130,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-3": "<mask token>\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-4": "import os\nimport time\nfrom django.contrib.gis.geos import fromstr\nimport csv\nfrom pygeocoder import Geocoder\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\nwith open(tree_csv, 'rU') as csvinput:\n with open('../harbordvillage/outfile.csv', 'w+') as csvoutput:\n writer = csv.writer(csvoutput, quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n for row in reader:\n add = '%s %s %s %s' % (row[1], row[0], 'Toronto', 'Canada')\n results = Geocoder.geocode(add)\n row[0] = results.route\n ind = results[0].coordinates\n lat = ind[0]\n lon = ind[1]\n ind = str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT(' + ind + ')')\n try:\n row.append(mypoint)\n except:\n pass\n all.append(row)\n print(row)\n writer.writerows(all)\n",
"step-5": "import os\n# didnt endup using this\n\nimport time\n# from django.contrib.gis.utils import LayerMapping\nfrom django.contrib.gis.geos import fromstr\n# from models import Harbord\n\nimport csv\nfrom pygeocoder import Geocoder\n# from django.contrib.gis.geos import (Point, fromstr, fromfile, \n# GEOSGeometry, MultiPoint, MultiPolygon, Polygon)\n\n\ntree_csv = os.path.abspath('../harbordvillage/Inventory2009_test.csv')\n\n #Setup\nwith open(tree_csv, \"rU\") as csvinput:\n with open(\"../harbordvillage/outfile.csv\",\"w+\") as csvoutput:\n writer = csv.writer(csvoutput,quoting=csv.QUOTE_NONNUMERIC)\n reader = csv.reader(csvinput)\n\n all = []\n row = next(reader)\n row.append('Address')\n all.append(row)\n\n for row in reader:\n add=(\"%s %s %s %s\" % (row[1], row[0], 'Toronto', 'Canada'))\n\n # pygeocode stuff\n # time.sleep(1)\n results = Geocoder.geocode(add)\n row[0] = results.route\n # print(isinstance(results, basestring))\n ind = results[0].coordinates\n lat=ind[0]\n lon=ind[1]\n ind= str(lat) + ' ' + str(lon)\n print(ind)\n mypoint = fromstr('POINT('+ ind + ')')\n # print(type(mypoint))\n try:\n row.append(mypoint)\n except:\n pass\n\n\n all.append(row)\n print(row)\n # row.append(results.cooridnates)\n # print(row)\n\n writer.writerows(all)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from CategoryReplacer.CategoryReplcaers import CountEncoder
from CategoryReplacer.CategoryReplcaers import CombinCountEncoder
from CategoryReplacer.CategoryReplcaers import FrequencyEncoder
from CategoryReplacer.CategoryReplcaers import NullCounter
from CategoryReplacer.CategoryReplcaers import AutoCalcEncoder
from CategoryReplacer.CategoryReplcaers import extract_obj_cols
__all__ = [
"CountEncoder",
"CombinCountEncoder",
"FrequencyEncoder",
"NullCounter",
"AutoCalcEncoder",
"extract_obj_cols"
]
|
normal
|
{
"blob_id": "d28e517e72c3689e973a5b1255d414648de418fb",
"index": 1658,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['CountEncoder', 'CombinCountEncoder', 'FrequencyEncoder',\n 'NullCounter', 'AutoCalcEncoder', 'extract_obj_cols']\n",
"step-3": "from CategoryReplacer.CategoryReplcaers import CountEncoder\nfrom CategoryReplacer.CategoryReplcaers import CombinCountEncoder\nfrom CategoryReplacer.CategoryReplcaers import FrequencyEncoder\nfrom CategoryReplacer.CategoryReplcaers import NullCounter\nfrom CategoryReplacer.CategoryReplcaers import AutoCalcEncoder\nfrom CategoryReplacer.CategoryReplcaers import extract_obj_cols\n__all__ = ['CountEncoder', 'CombinCountEncoder', 'FrequencyEncoder',\n 'NullCounter', 'AutoCalcEncoder', 'extract_obj_cols']\n",
"step-4": "from CategoryReplacer.CategoryReplcaers import CountEncoder\nfrom CategoryReplacer.CategoryReplcaers import CombinCountEncoder\nfrom CategoryReplacer.CategoryReplcaers import FrequencyEncoder\nfrom CategoryReplacer.CategoryReplcaers import NullCounter\nfrom CategoryReplacer.CategoryReplcaers import AutoCalcEncoder\nfrom CategoryReplacer.CategoryReplcaers import extract_obj_cols\n\n__all__ = [\n \"CountEncoder\",\n \"CombinCountEncoder\",\n \"FrequencyEncoder\",\n \"NullCounter\",\n \"AutoCalcEncoder\",\n \"extract_obj_cols\"\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from numpy import empty
import pickle
from dataset import Dataset
from image import Image
f = open("./digitdata/trainingimages", "r")
reader = f.readlines()
labels = open("./digitdata/traininglabels", "r")
lreader = labels.readlines()
trainImageList = []
j = 0
i = 0
while(j < len(reader)):
image_array = empty([28, 28])
for r in range(0, 28):
row = reader[j]
j += 1
for c in range(0, 28):
if row[c] == '#' or row[c] == '+':
image_array[r][c] = 1
else:
image_array[r][c] = 0
label = lreader[i]
i += 1
image = Image(image_array, label)
trainImageList.append(image)
f = open("./digitdata/testimages", "r")
reader = f.readlines()
labels = open("./digitdata/testlabels", "r")
lreader = labels.readlines()
testImageList = []
j = 0
i = 0
while(j < len(reader)):
image_array = empty([28, 28])
for r in range(0, 28):
row = reader[j]
j += 1
for c in range(0, 28):
if row[c] == '#' or row[c] == '+':
image_array[r][c] = 1
else:
image_array[r][c] = 0
label = lreader[i]
i += 1
image = Image(image_array, label)
testImageList.append(image)
f = open("./digitdata/validationimages", "r")
reader = f.readlines()
labels = open("./digitdata/validationlabels", "r")
lreader = labels.readlines()
valImageList = []
j = 0
i = 0
while(j < len(reader)):
image_array = empty([28, 28])
for r in range(0, 28):
row = reader[j]
j += 1
for c in range(0, 28):
if row[c] == '#' or row[c] == '+':
image_array[r][c] = 1
else:
image_array[r][c] = 0
label = lreader[i]
i += 1
image = Image(image_array, label)
valImageList.append(image)
dataset = Dataset(trainImageList, testImageList, valImageList)
output_file = open('digits_dataset', 'wb')
pickle.dump(dataset, output_file)
|
normal
|
{
"blob_id": "aff439361716c35e5f492680a55e7470b4ee0c42",
"index": 5905,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n trainImageList.append(image)\n<mask token>\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n testImageList.append(image)\n<mask token>\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n valImageList.append(image)\n<mask token>\npickle.dump(dataset, output_file)\n",
"step-3": "<mask token>\nf = open('./digitdata/trainingimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/traininglabels', 'r')\nlreader = labels.readlines()\ntrainImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n trainImageList.append(image)\nf = open('./digitdata/testimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/testlabels', 'r')\nlreader = labels.readlines()\ntestImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n testImageList.append(image)\nf = open('./digitdata/validationimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/validationlabels', 'r')\nlreader = labels.readlines()\nvalImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n valImageList.append(image)\ndataset = Dataset(trainImageList, testImageList, valImageList)\noutput_file = open('digits_dataset', 'wb')\npickle.dump(dataset, output_file)\n",
"step-4": "from numpy import empty\nimport pickle\nfrom dataset import Dataset\nfrom image import Image\nf = open('./digitdata/trainingimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/traininglabels', 'r')\nlreader = labels.readlines()\ntrainImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n trainImageList.append(image)\nf = open('./digitdata/testimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/testlabels', 'r')\nlreader = labels.readlines()\ntestImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n testImageList.append(image)\nf = open('./digitdata/validationimages', 'r')\nreader = f.readlines()\nlabels = open('./digitdata/validationlabels', 'r')\nlreader = labels.readlines()\nvalImageList = []\nj = 0\ni = 0\nwhile j < len(reader):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n valImageList.append(image)\ndataset = Dataset(trainImageList, testImageList, valImageList)\noutput_file = open('digits_dataset', 'wb')\npickle.dump(dataset, output_file)\n",
"step-5": "from numpy import empty\nimport pickle\nfrom dataset import Dataset\nfrom image import Image\n\nf = open(\"./digitdata/trainingimages\", \"r\")\nreader = f.readlines()\n\nlabels = open(\"./digitdata/traininglabels\", \"r\")\nlreader = labels.readlines()\n\ntrainImageList = []\n\nj = 0\ni = 0\nwhile(j < len(reader)):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n trainImageList.append(image)\n\n\nf = open(\"./digitdata/testimages\", \"r\")\nreader = f.readlines()\n\nlabels = open(\"./digitdata/testlabels\", \"r\")\nlreader = labels.readlines()\n\ntestImageList = []\n\nj = 0\ni = 0\nwhile(j < len(reader)):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n testImageList.append(image)\n\nf = open(\"./digitdata/validationimages\", \"r\")\nreader = f.readlines()\n\nlabels = open(\"./digitdata/validationlabels\", \"r\")\nlreader = labels.readlines()\n\nvalImageList = []\n\nj = 0\ni = 0\nwhile(j < len(reader)):\n image_array = empty([28, 28])\n for r in range(0, 28):\n row = reader[j]\n j += 1\n for c in range(0, 28):\n if row[c] == '#' or row[c] == '+':\n image_array[r][c] = 1\n else:\n image_array[r][c] = 0\n label = lreader[i]\n i += 1\n image = Image(image_array, label)\n valImageList.append(image)\n\ndataset = Dataset(trainImageList, testImageList, valImageList)\noutput_file = open('digits_dataset', 'wb')\npickle.dump(dataset, output_file)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
import matplotlib.pyplot as plt
def f(x:float,y:np.ndarray) -> np.ndarray:
"""
Работает с вектором { y , y'}
"""
# return some function result
return np.array([y[1], np.sqrt(abs(-np.exp(y[1])*y[0] + 2.71*y[0]**2/np.log(x)+1/x**2))])
# return np.array([y[1], -y[0]])
def dormand_prince(x_0,Y_0,h,N):
"""
https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера
x_0: точка, где заданы функция и производная
Y_0: {y(x_0), y'(x_0)}
"""
x_n = x_0
Y_n = Y_0.copy()
xes, yes = [],[]
xes.append(x_n)
yes.append(Y_n[0])
for _ in range(int(N)):
k_1 = f(x_n, Y_n)
k_2 = f(x_n+h/5, Y_n+h*k_1/5)
k_3 = f(x_n+3*h/10, Y_n+h*k_1*3/40+h*k_2*9/40)
k_4 = f(x_n+4/5*h, Y_n+44*h*k_1/55 - 56*h*k_2/15 + 32*h*k_3/9)
k_5 = f(x_n+8/9*h, Y_n+19372*h*k_1/6561 - 25360/2187*h*k_2+ 64448/6561*h*k_3 - 212/729*h*k_4)
k_6 = f(x_n+h, Y_n+9017/3168*k_1*h - 355/33*k_2*h + 46732/5247*k_3*h +49/176*k_4*h - 5103/18656*h*k_5)
k_7 = f(x_n+h, Y_n+35/384*k_1*h +0+ 500/1113*k_3*h + 125/192*k_4*h-2187/6784*k_5*h + 11/84*h*k_6)
# print(k_1, k_2, k_3, k_4, k_5, k_6, k_7)
Y_n += h*(35/384*k_1 + 500/1113*k_3 + 125/192*k_4 -2187/6784*k_5 + 11/84*k_6)
x_n += h
xes.append(x_n)
yes.append(Y_n[0])
return np.array(xes), yes
x_0 = 2.71
Y_0 = np.array([2.71, 2.009],dtype = float) # функция и производная в точке х_0
"""
Из-за особенностей заданя, не представляется возмоность увеличить значение производной в начальной точке, поэтому 2
Так же, не стоит менять шаг, иначе все перестает работать ¯\_(ツ)_/¯
"""
L = [2.71, 7.34]
h_1 = 0.03
N_1 = (L[1]-L[0])/h_1
h_2 = 0.0005
N_2 = (L[1]-L[0])/h_2
# N = 100
xes_1 , yes_1 = dormand_prince(x_0,Y_0,h_2,N_2)
plt.scatter(xes_1, yes_1)
"""
Осталось задать значения функции в требуемых точках
"""
x_0 = 2.71
Y_0 = np.array([2.71, 2.009],dtype = float)
L_3 = [0.49, 2.71]
h_3 = -0.005
N_3 = (L_3[0]-L_3[1])/h_3
xes_2, yes_2 = dormand_prince(x_0, Y_0, h_3, N_3)
for i,x in enumerate(xes_2):
if abs(x-0.5)<1e-3:
print(x,yes_2[i])
if abs(x-1)<1e-3:
print(x,yes_2[i])
if abs(x-1.5)<1e-3:
print(x,yes_2[i])
if abs(x-2)<1e-3:
print(x,yes_2[i])
if abs(x-2.5)<1e-3:
print(x,yes_2[i])
plt.scatter(xes_2, yes_2)
plt.show()
|
normal
|
{
"blob_id": "daccc5aafb3e250e7fa7ac9db69a147b7e916736",
"index": 193,
"step-1": "<mask token>\n\n\ndef f(x: float, y: np.ndarray) ->np.ndarray:\n \"\"\"\n Работает с вектором { y , y'}\n \"\"\"\n return np.array([y[1], np.sqrt(abs(-np.exp(y[1]) * y[0] + 2.71 * y[0] **\n 2 / np.log(x) + 1 / x ** 2))])\n\n\ndef dormand_prince(x_0, Y_0, h, N):\n \"\"\"\n https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера\n\n x_0: точка, где заданы функция и производная\n Y_0: {y(x_0), y'(x_0)}\n \"\"\"\n x_n = x_0\n Y_n = Y_0.copy()\n xes, yes = [], []\n xes.append(x_n)\n yes.append(Y_n[0])\n for _ in range(int(N)):\n k_1 = f(x_n, Y_n)\n k_2 = f(x_n + h / 5, Y_n + h * k_1 / 5)\n k_3 = f(x_n + 3 * h / 10, Y_n + h * k_1 * 3 / 40 + h * k_2 * 9 / 40)\n k_4 = f(x_n + 4 / 5 * h, Y_n + 44 * h * k_1 / 55 - 56 * h * k_2 / \n 15 + 32 * h * k_3 / 9)\n k_5 = f(x_n + 8 / 9 * h, Y_n + 19372 * h * k_1 / 6561 - 25360 / \n 2187 * h * k_2 + 64448 / 6561 * h * k_3 - 212 / 729 * h * k_4)\n k_6 = f(x_n + h, Y_n + 9017 / 3168 * k_1 * h - 355 / 33 * k_2 * h +\n 46732 / 5247 * k_3 * h + 49 / 176 * k_4 * h - 5103 / 18656 * h *\n k_5)\n k_7 = f(x_n + h, Y_n + 35 / 384 * k_1 * h + 0 + 500 / 1113 * k_3 *\n h + 125 / 192 * k_4 * h - 2187 / 6784 * k_5 * h + 11 / 84 * h * k_6\n )\n Y_n += h * (35 / 384 * k_1 + 500 / 1113 * k_3 + 125 / 192 * k_4 - \n 2187 / 6784 * k_5 + 11 / 84 * k_6)\n x_n += h\n xes.append(x_n)\n yes.append(Y_n[0])\n return np.array(xes), yes\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef f(x: float, y: np.ndarray) ->np.ndarray:\n \"\"\"\n Работает с вектором { y , y'}\n \"\"\"\n return np.array([y[1], np.sqrt(abs(-np.exp(y[1]) * y[0] + 2.71 * y[0] **\n 2 / np.log(x) + 1 / x ** 2))])\n\n\ndef dormand_prince(x_0, Y_0, h, N):\n \"\"\"\n https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера\n\n x_0: точка, где заданы функция и производная\n Y_0: {y(x_0), y'(x_0)}\n \"\"\"\n x_n = x_0\n Y_n = Y_0.copy()\n xes, yes = [], []\n xes.append(x_n)\n yes.append(Y_n[0])\n for _ in range(int(N)):\n k_1 = f(x_n, Y_n)\n k_2 = f(x_n + h / 5, Y_n + h * k_1 / 5)\n k_3 = f(x_n + 3 * h / 10, Y_n + h * k_1 * 3 / 40 + h * k_2 * 9 / 40)\n k_4 = f(x_n + 4 / 5 * h, Y_n + 44 * h * k_1 / 55 - 56 * h * k_2 / \n 15 + 32 * h * k_3 / 9)\n k_5 = f(x_n + 8 / 9 * h, Y_n + 19372 * h * k_1 / 6561 - 25360 / \n 2187 * h * k_2 + 64448 / 6561 * h * k_3 - 212 / 729 * h * k_4)\n k_6 = f(x_n + h, Y_n + 9017 / 3168 * k_1 * h - 355 / 33 * k_2 * h +\n 46732 / 5247 * k_3 * h + 49 / 176 * k_4 * h - 5103 / 18656 * h *\n k_5)\n k_7 = f(x_n + h, Y_n + 35 / 384 * k_1 * h + 0 + 500 / 1113 * k_3 *\n h + 125 / 192 * k_4 * h - 2187 / 6784 * k_5 * h + 11 / 84 * h * k_6\n )\n Y_n += h * (35 / 384 * k_1 + 500 / 1113 * k_3 + 125 / 192 * k_4 - \n 2187 / 6784 * k_5 + 11 / 84 * k_6)\n x_n += h\n xes.append(x_n)\n yes.append(Y_n[0])\n return np.array(xes), yes\n\n\n<mask token>\nplt.scatter(xes_1, yes_1)\n<mask token>\nfor i, x in enumerate(xes_2):\n if abs(x - 0.5) < 0.001:\n print(x, yes_2[i])\n if abs(x - 1) < 0.001:\n print(x, yes_2[i])\n if abs(x - 1.5) < 0.001:\n print(x, yes_2[i])\n if abs(x - 2) < 0.001:\n print(x, yes_2[i])\n if abs(x - 2.5) < 0.001:\n print(x, yes_2[i])\nplt.scatter(xes_2, yes_2)\nplt.show()\n",
"step-3": "<mask token>\n\n\ndef f(x: float, y: np.ndarray) ->np.ndarray:\n \"\"\"\n Работает с вектором { y , y'}\n \"\"\"\n return np.array([y[1], np.sqrt(abs(-np.exp(y[1]) * y[0] + 2.71 * y[0] **\n 2 / np.log(x) + 1 / x ** 2))])\n\n\ndef dormand_prince(x_0, Y_0, h, N):\n \"\"\"\n https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера\n\n x_0: точка, где заданы функция и производная\n Y_0: {y(x_0), y'(x_0)}\n \"\"\"\n x_n = x_0\n Y_n = Y_0.copy()\n xes, yes = [], []\n xes.append(x_n)\n yes.append(Y_n[0])\n for _ in range(int(N)):\n k_1 = f(x_n, Y_n)\n k_2 = f(x_n + h / 5, Y_n + h * k_1 / 5)\n k_3 = f(x_n + 3 * h / 10, Y_n + h * k_1 * 3 / 40 + h * k_2 * 9 / 40)\n k_4 = f(x_n + 4 / 5 * h, Y_n + 44 * h * k_1 / 55 - 56 * h * k_2 / \n 15 + 32 * h * k_3 / 9)\n k_5 = f(x_n + 8 / 9 * h, Y_n + 19372 * h * k_1 / 6561 - 25360 / \n 2187 * h * k_2 + 64448 / 6561 * h * k_3 - 212 / 729 * h * k_4)\n k_6 = f(x_n + h, Y_n + 9017 / 3168 * k_1 * h - 355 / 33 * k_2 * h +\n 46732 / 5247 * k_3 * h + 49 / 176 * k_4 * h - 5103 / 18656 * h *\n k_5)\n k_7 = f(x_n + h, Y_n + 35 / 384 * k_1 * h + 0 + 500 / 1113 * k_3 *\n h + 125 / 192 * k_4 * h - 2187 / 6784 * k_5 * h + 11 / 84 * h * k_6\n )\n Y_n += h * (35 / 384 * k_1 + 500 / 1113 * k_3 + 125 / 192 * k_4 - \n 2187 / 6784 * k_5 + 11 / 84 * k_6)\n x_n += h\n xes.append(x_n)\n yes.append(Y_n[0])\n return np.array(xes), yes\n\n\nx_0 = 2.71\nY_0 = np.array([2.71, 2.009], dtype=float)\n<mask token>\nL = [2.71, 7.34]\nh_1 = 0.03\nN_1 = (L[1] - L[0]) / h_1\nh_2 = 0.0005\nN_2 = (L[1] - L[0]) / h_2\nxes_1, yes_1 = dormand_prince(x_0, Y_0, h_2, N_2)\nplt.scatter(xes_1, yes_1)\n<mask token>\nx_0 = 2.71\nY_0 = np.array([2.71, 2.009], dtype=float)\nL_3 = [0.49, 2.71]\nh_3 = -0.005\nN_3 = (L_3[0] - L_3[1]) / h_3\nxes_2, yes_2 = dormand_prince(x_0, Y_0, h_3, N_3)\nfor i, x in enumerate(xes_2):\n if abs(x - 0.5) < 0.001:\n print(x, yes_2[i])\n if abs(x - 1) < 0.001:\n print(x, yes_2[i])\n if abs(x - 1.5) < 0.001:\n print(x, 
yes_2[i])\n if abs(x - 2) < 0.001:\n print(x, yes_2[i])\n if abs(x - 2.5) < 0.001:\n print(x, yes_2[i])\nplt.scatter(xes_2, yes_2)\nplt.show()\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef f(x: float, y: np.ndarray) ->np.ndarray:\n \"\"\"\n Работает с вектором { y , y'}\n \"\"\"\n return np.array([y[1], np.sqrt(abs(-np.exp(y[1]) * y[0] + 2.71 * y[0] **\n 2 / np.log(x) + 1 / x ** 2))])\n\n\ndef dormand_prince(x_0, Y_0, h, N):\n \"\"\"\n https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера\n\n x_0: точка, где заданы функция и производная\n Y_0: {y(x_0), y'(x_0)}\n \"\"\"\n x_n = x_0\n Y_n = Y_0.copy()\n xes, yes = [], []\n xes.append(x_n)\n yes.append(Y_n[0])\n for _ in range(int(N)):\n k_1 = f(x_n, Y_n)\n k_2 = f(x_n + h / 5, Y_n + h * k_1 / 5)\n k_3 = f(x_n + 3 * h / 10, Y_n + h * k_1 * 3 / 40 + h * k_2 * 9 / 40)\n k_4 = f(x_n + 4 / 5 * h, Y_n + 44 * h * k_1 / 55 - 56 * h * k_2 / \n 15 + 32 * h * k_3 / 9)\n k_5 = f(x_n + 8 / 9 * h, Y_n + 19372 * h * k_1 / 6561 - 25360 / \n 2187 * h * k_2 + 64448 / 6561 * h * k_3 - 212 / 729 * h * k_4)\n k_6 = f(x_n + h, Y_n + 9017 / 3168 * k_1 * h - 355 / 33 * k_2 * h +\n 46732 / 5247 * k_3 * h + 49 / 176 * k_4 * h - 5103 / 18656 * h *\n k_5)\n k_7 = f(x_n + h, Y_n + 35 / 384 * k_1 * h + 0 + 500 / 1113 * k_3 *\n h + 125 / 192 * k_4 * h - 2187 / 6784 * k_5 * h + 11 / 84 * h * k_6\n )\n Y_n += h * (35 / 384 * k_1 + 500 / 1113 * k_3 + 125 / 192 * k_4 - \n 2187 / 6784 * k_5 + 11 / 84 * k_6)\n x_n += h\n xes.append(x_n)\n yes.append(Y_n[0])\n return np.array(xes), yes\n\n\nx_0 = 2.71\nY_0 = np.array([2.71, 2.009], dtype=float)\n<mask token>\nL = [2.71, 7.34]\nh_1 = 0.03\nN_1 = (L[1] - L[0]) / h_1\nh_2 = 0.0005\nN_2 = (L[1] - L[0]) / h_2\nxes_1, yes_1 = dormand_prince(x_0, Y_0, h_2, N_2)\nplt.scatter(xes_1, yes_1)\n<mask token>\nx_0 = 2.71\nY_0 = np.array([2.71, 2.009], dtype=float)\nL_3 = [0.49, 2.71]\nh_3 = -0.005\nN_3 = (L_3[0] - L_3[1]) / h_3\nxes_2, yes_2 = dormand_prince(x_0, Y_0, h_3, N_3)\nfor i, x in enumerate(xes_2):\n if abs(x - 0.5) < 0.001:\n print(x, yes_2[i])\n if abs(x - 1) < 0.001:\n print(x, yes_2[i])\n if 
abs(x - 1.5) < 0.001:\n print(x, yes_2[i])\n if abs(x - 2) < 0.001:\n print(x, yes_2[i])\n if abs(x - 2.5) < 0.001:\n print(x, yes_2[i])\nplt.scatter(xes_2, yes_2)\nplt.show()\n",
"step-5": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\n\r\ndef f(x:float,y:np.ndarray) -> np.ndarray:\r\n \"\"\"\r\n Работает с вектором { y , y'}\r\n \"\"\"\r\n # return some function result\r\n\r\n return np.array([y[1], np.sqrt(abs(-np.exp(y[1])*y[0] + 2.71*y[0]**2/np.log(x)+1/x**2))])\r\n # return np.array([y[1], -y[0]])\r\ndef dormand_prince(x_0,Y_0,h,N):\r\n \"\"\"\r\n https://en.wikipedia.org/wiki/Dormand%E2%80%93Prince_method <- таблица Бутчера\r\n\r\n x_0: точка, где заданы функция и производная\r\n Y_0: {y(x_0), y'(x_0)}\r\n \"\"\"\r\n x_n = x_0\r\n Y_n = Y_0.copy()\r\n xes, yes = [],[]\r\n xes.append(x_n)\r\n yes.append(Y_n[0])\r\n for _ in range(int(N)):\r\n k_1 = f(x_n, Y_n)\r\n k_2 = f(x_n+h/5, Y_n+h*k_1/5)\r\n k_3 = f(x_n+3*h/10, Y_n+h*k_1*3/40+h*k_2*9/40)\r\n k_4 = f(x_n+4/5*h, Y_n+44*h*k_1/55 - 56*h*k_2/15 + 32*h*k_3/9)\r\n k_5 = f(x_n+8/9*h, Y_n+19372*h*k_1/6561 - 25360/2187*h*k_2+ 64448/6561*h*k_3 - 212/729*h*k_4)\r\n k_6 = f(x_n+h, Y_n+9017/3168*k_1*h - 355/33*k_2*h + 46732/5247*k_3*h +49/176*k_4*h - 5103/18656*h*k_5)\r\n k_7 = f(x_n+h, Y_n+35/384*k_1*h +0+ 500/1113*k_3*h + 125/192*k_4*h-2187/6784*k_5*h + 11/84*h*k_6)\r\n # print(k_1, k_2, k_3, k_4, k_5, k_6, k_7)\r\n Y_n += h*(35/384*k_1 + 500/1113*k_3 + 125/192*k_4 -2187/6784*k_5 + 11/84*k_6)\r\n x_n += h\r\n xes.append(x_n)\r\n yes.append(Y_n[0])\r\n return np.array(xes), yes\r\n\r\nx_0 = 2.71\r\nY_0 = np.array([2.71, 2.009],dtype = float) # функция и производная в точке х_0\r\n\"\"\"\r\nИз-за особенностей заданя, не представляется возмоность увеличить значение производной в начальной точке, поэтому 2\r\nТак же, не стоит менять шаг, иначе все перестает работать ¯\\_(ツ)_/¯\r\n\"\"\"\r\nL = [2.71, 7.34]\r\nh_1 = 0.03\r\nN_1 = (L[1]-L[0])/h_1\r\nh_2 = 0.0005\r\nN_2 = (L[1]-L[0])/h_2\r\n# N = 100\r\n\r\nxes_1 , yes_1 = dormand_prince(x_0,Y_0,h_2,N_2)\r\nplt.scatter(xes_1, yes_1)\r\n\"\"\"\r\nОсталось задать значения функции в требуемых точках\r\n\"\"\"\r\nx_0 = 2.71\r\nY_0 = 
np.array([2.71, 2.009],dtype = float)\r\nL_3 = [0.49, 2.71]\r\nh_3 = -0.005\r\nN_3 = (L_3[0]-L_3[1])/h_3\r\n\r\nxes_2, yes_2 = dormand_prince(x_0, Y_0, h_3, N_3)\r\nfor i,x in enumerate(xes_2):\r\n if abs(x-0.5)<1e-3:\r\n print(x,yes_2[i])\r\n if abs(x-1)<1e-3:\r\n print(x,yes_2[i])\r\n if abs(x-1.5)<1e-3:\r\n print(x,yes_2[i])\r\n if abs(x-2)<1e-3:\r\n print(x,yes_2[i])\r\n if abs(x-2.5)<1e-3:\r\n print(x,yes_2[i])\r\nplt.scatter(xes_2, yes_2)\r\nplt.show()\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import pycmc
# open project, get Crag, CragVolumes, and intensity images
crag = ...
cragVolumes = ...
raw = ...
membrane = ...
nodeFeatures = ...
edgeFeatures = ...
statisticsFeatureProvider = pycmc.StatisticsFeatureProvider(cragVolumes, raw, "raw")
shapeFeatureProvider = pycmc.ShapeFeatureProvider(cragVolumes)
featureProvider = pycmc.CompositeFeatureProvider()
featureProvider.add(shapeFeatureProvider)
featureProvider.add(statisticsFeatureProvider)
featureExtractor = pycmc.FeatureExtractor(crag)
featureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)
|
normal
|
{
"blob_id": "37d817436ce977339594867ef917177e7371a212",
"index": 6847,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfeatureProvider.add(shapeFeatureProvider)\nfeatureProvider.add(statisticsFeatureProvider)\n<mask token>\nfeatureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)\n",
"step-3": "<mask token>\ncrag = ...\ncragVolumes = ...\nraw = ...\nmembrane = ...\nnodeFeatures = ...\nedgeFeatures = ...\nstatisticsFeatureProvider = pycmc.StatisticsFeatureProvider(cragVolumes,\n raw, 'raw')\nshapeFeatureProvider = pycmc.ShapeFeatureProvider(cragVolumes)\nfeatureProvider = pycmc.CompositeFeatureProvider()\nfeatureProvider.add(shapeFeatureProvider)\nfeatureProvider.add(statisticsFeatureProvider)\nfeatureExtractor = pycmc.FeatureExtractor(crag)\nfeatureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)\n",
"step-4": "import pycmc\ncrag = ...\ncragVolumes = ...\nraw = ...\nmembrane = ...\nnodeFeatures = ...\nedgeFeatures = ...\nstatisticsFeatureProvider = pycmc.StatisticsFeatureProvider(cragVolumes,\n raw, 'raw')\nshapeFeatureProvider = pycmc.ShapeFeatureProvider(cragVolumes)\nfeatureProvider = pycmc.CompositeFeatureProvider()\nfeatureProvider.add(shapeFeatureProvider)\nfeatureProvider.add(statisticsFeatureProvider)\nfeatureExtractor = pycmc.FeatureExtractor(crag)\nfeatureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)\n",
"step-5": "import pycmc\n\n# open project, get Crag, CragVolumes, and intensity images\ncrag = ...\ncragVolumes = ...\nraw = ...\nmembrane = ...\nnodeFeatures = ...\nedgeFeatures = ...\n\nstatisticsFeatureProvider = pycmc.StatisticsFeatureProvider(cragVolumes, raw, \"raw\")\nshapeFeatureProvider = pycmc.ShapeFeatureProvider(cragVolumes)\n\nfeatureProvider = pycmc.CompositeFeatureProvider()\nfeatureProvider.add(shapeFeatureProvider)\nfeatureProvider.add(statisticsFeatureProvider)\n\nfeatureExtractor = pycmc.FeatureExtractor(crag)\nfeatureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [path('admin/', admin.site.urls), url('^$', IndexView.as_view
(), name='index'), url('^register/', RegistrationView.as_view(), name=
'register'), url('^home/', HomeView.as_view(), name='home'), url(
'^home_adddog_form/', AddDogFormView.as_view(), name='home_adddog_form'
), url('^booking_delete_view/', DeleteBookingView.as_view(), name=
'booking_delete_view'), url('^dog_delete_view/', DeleteDogView.as_view(
), name='dog_delete_view'), url('^fetch_date_view/', FetchDateView.
as_view()), url('^login/$', auth_views.LoginView.as_view(template_name=
'login.html'), name='login'), url('^logout/$', auth_views.LogoutView.
as_view(), name='logout'), url('^profile/$', UserUpdateView.as_view(),
name='profile'), url('^booking_update_view/', BookingUpdateView.as_view
(), name='bookingmodify'), url('^dog_update_view/', DogUpdateView.
as_view(), name='dogupdateview')]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from django.contrib import admin
from django.conf.urls import url
from django.urls import path
from django.contrib.auth import views as auth_views
from .views import *
urlpatterns = [path('admin/', admin.site.urls), url('^$', IndexView.as_view
(), name='index'), url('^register/', RegistrationView.as_view(), name=
'register'), url('^home/', HomeView.as_view(), name='home'), url(
'^home_adddog_form/', AddDogFormView.as_view(), name='home_adddog_form'
), url('^booking_delete_view/', DeleteBookingView.as_view(), name=
'booking_delete_view'), url('^dog_delete_view/', DeleteDogView.as_view(
), name='dog_delete_view'), url('^fetch_date_view/', FetchDateView.
as_view()), url('^login/$', auth_views.LoginView.as_view(template_name=
'login.html'), name='login'), url('^logout/$', auth_views.LogoutView.
as_view(), name='logout'), url('^profile/$', UserUpdateView.as_view(),
name='profile'), url('^booking_update_view/', BookingUpdateView.as_view
(), name='bookingmodify'), url('^dog_update_view/', DogUpdateView.
as_view(), name='dogupdateview')]
<|reserved_special_token_1|>
"""DogGroom URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url
from django.urls import path
from django.contrib.auth import views as auth_views
from .views import *
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^$', IndexView.as_view(), name = 'index'),
url(r'^register/', RegistrationView.as_view(), name = 'register'),
url(r'^home/', HomeView.as_view(), name = 'home'),
url(r'^home_adddog_form/', AddDogFormView.as_view(), name = 'home_adddog_form'),
url(r'^booking_delete_view/', DeleteBookingView.as_view(), name = 'booking_delete_view'),
url(r'^dog_delete_view/', DeleteDogView.as_view(), name='dog_delete_view'),
url(r'^fetch_date_view/', FetchDateView.as_view()),
url(r'^login/$', auth_views.LoginView.as_view(template_name='login.html'), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url(r'^profile/$', UserUpdateView.as_view(), name='profile'),
url(r'^booking_update_view/', BookingUpdateView.as_view(), name='bookingmodify'),
url(r'^dog_update_view/', DogUpdateView.as_view(), name='dogupdateview'),
]
|
flexible
|
{
"blob_id": "da062dfe494b363c8ef3ec9f19af912736aaf77b",
"index": 9018,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('admin/', admin.site.urls), url('^$', IndexView.as_view\n (), name='index'), url('^register/', RegistrationView.as_view(), name=\n 'register'), url('^home/', HomeView.as_view(), name='home'), url(\n '^home_adddog_form/', AddDogFormView.as_view(), name='home_adddog_form'\n ), url('^booking_delete_view/', DeleteBookingView.as_view(), name=\n 'booking_delete_view'), url('^dog_delete_view/', DeleteDogView.as_view(\n ), name='dog_delete_view'), url('^fetch_date_view/', FetchDateView.\n as_view()), url('^login/$', auth_views.LoginView.as_view(template_name=\n 'login.html'), name='login'), url('^logout/$', auth_views.LogoutView.\n as_view(), name='logout'), url('^profile/$', UserUpdateView.as_view(),\n name='profile'), url('^booking_update_view/', BookingUpdateView.as_view\n (), name='bookingmodify'), url('^dog_update_view/', DogUpdateView.\n as_view(), name='dogupdateview')]\n",
"step-3": "<mask token>\nfrom django.contrib import admin\nfrom django.conf.urls import url\nfrom django.urls import path\nfrom django.contrib.auth import views as auth_views\nfrom .views import *\nurlpatterns = [path('admin/', admin.site.urls), url('^$', IndexView.as_view\n (), name='index'), url('^register/', RegistrationView.as_view(), name=\n 'register'), url('^home/', HomeView.as_view(), name='home'), url(\n '^home_adddog_form/', AddDogFormView.as_view(), name='home_adddog_form'\n ), url('^booking_delete_view/', DeleteBookingView.as_view(), name=\n 'booking_delete_view'), url('^dog_delete_view/', DeleteDogView.as_view(\n ), name='dog_delete_view'), url('^fetch_date_view/', FetchDateView.\n as_view()), url('^login/$', auth_views.LoginView.as_view(template_name=\n 'login.html'), name='login'), url('^logout/$', auth_views.LogoutView.\n as_view(), name='logout'), url('^profile/$', UserUpdateView.as_view(),\n name='profile'), url('^booking_update_view/', BookingUpdateView.as_view\n (), name='bookingmodify'), url('^dog_update_view/', DogUpdateView.\n as_view(), name='dogupdateview')]\n",
"step-4": "\"\"\"DogGroom URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.0/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.conf.urls import url\nfrom django.urls import path\nfrom django.contrib.auth import views as auth_views\nfrom .views import *\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n url(r'^$', IndexView.as_view(), name = 'index'),\n url(r'^register/', RegistrationView.as_view(), name = 'register'),\n url(r'^home/', HomeView.as_view(), name = 'home'),\n url(r'^home_adddog_form/', AddDogFormView.as_view(), name = 'home_adddog_form'),\n url(r'^booking_delete_view/', DeleteBookingView.as_view(), name = 'booking_delete_view'),\n url(r'^dog_delete_view/', DeleteDogView.as_view(), name='dog_delete_view'),\n url(r'^fetch_date_view/', FetchDateView.as_view()),\n url(r'^login/$', auth_views.LoginView.as_view(template_name='login.html'), name='login'),\n url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),\n url(r'^profile/$', UserUpdateView.as_view(), name='profile'),\n url(r'^booking_update_view/', BookingUpdateView.as_view(), name='bookingmodify'),\n url(r'^dog_update_view/', DogUpdateView.as_view(), name='dogupdateview'),\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(1, n + 1, 1):
tempo = int(input('Digite o tempo:'))
if i == 1:
tempo1 = tempo
elif i == n:
f = tempo + 10
<|reserved_special_token_0|>
print(x)
<|reserved_special_token_1|>
pessoas = int(input('Digite o numero de pessoas que passa pela esada rolante:')
)
for i in range(1, n + 1, 1):
tempo = int(input('Digite o tempo:'))
if i == 1:
tempo1 = tempo
elif i == n:
f = tempo + 10
X = f - tempo1
print(x)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
pessoas=int(input('Digite o numero de pessoas que passa pela esada rolante:'))
for i in range(1,n+1,1):
tempo=int(input('Digite o tempo:'))
if i==1:
tempo1=tempo
elif i==n:
f=tempo+10
X=f-tempo1
print(x)
|
flexible
|
{
"blob_id": "f98120d191e9e4b92984a6b59b25b1331b5d8c3a",
"index": 1970,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(1, n + 1, 1):\n tempo = int(input('Digite o tempo:'))\n if i == 1:\n tempo1 = tempo\n elif i == n:\n f = tempo + 10\n<mask token>\nprint(x)\n",
"step-3": "pessoas = int(input('Digite o numero de pessoas que passa pela esada rolante:')\n )\nfor i in range(1, n + 1, 1):\n tempo = int(input('Digite o tempo:'))\n if i == 1:\n tempo1 = tempo\n elif i == n:\n f = tempo + 10\nX = f - tempo1\nprint(x)\n",
"step-4": "# -*- coding: utf-8 -*-\npessoas=int(input('Digite o numero de pessoas que passa pela esada rolante:'))\nfor i in range(1,n+1,1):\n tempo=int(input('Digite o tempo:'))\n if i==1:\n tempo1=tempo\n elif i==n:\n f=tempo+10\nX=f-tempo1\nprint(x)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def test_fails_1():
assert long_repeat('') == 0, 'Empty String'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_fails_1():
assert long_repeat('') == 0, 'Empty String'
def test_fails_2():
assert long_repeat('aa') == 2
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_long_repeat():
assert long_repeat('sdsffffse') == 4, 'First'
assert long_repeat('ddvvrwwwrggg') == 3, 'Second'
def test_fails_1():
assert long_repeat('') == 0, 'Empty String'
def test_fails_2():
assert long_repeat('aa') == 2
<|reserved_special_token_1|>
from checkio.home.long_repeat import long_repeat
def test_long_repeat():
assert long_repeat('sdsffffse') == 4, 'First'
assert long_repeat('ddvvrwwwrggg') == 3, 'Second'
def test_fails_1():
assert long_repeat('') == 0, 'Empty String'
def test_fails_2():
assert long_repeat('aa') == 2
<|reserved_special_token_1|>
from checkio.home.long_repeat import long_repeat
def test_long_repeat():
assert long_repeat("sdsffffse") == 4, "First"
assert long_repeat("ddvvrwwwrggg") == 3, "Second"
def test_fails_1():
assert long_repeat("") == 0, "Empty String"
def test_fails_2():
assert long_repeat("aa") == 2
|
flexible
|
{
"blob_id": "b459919e779063247c176e127368c687c903cf0f",
"index": 7869,
"step-1": "<mask token>\n\n\ndef test_fails_1():\n assert long_repeat('') == 0, 'Empty String'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_fails_1():\n assert long_repeat('') == 0, 'Empty String'\n\n\ndef test_fails_2():\n assert long_repeat('aa') == 2\n",
"step-3": "<mask token>\n\n\ndef test_long_repeat():\n assert long_repeat('sdsffffse') == 4, 'First'\n assert long_repeat('ddvvrwwwrggg') == 3, 'Second'\n\n\ndef test_fails_1():\n assert long_repeat('') == 0, 'Empty String'\n\n\ndef test_fails_2():\n assert long_repeat('aa') == 2\n",
"step-4": "from checkio.home.long_repeat import long_repeat\n\n\ndef test_long_repeat():\n assert long_repeat('sdsffffse') == 4, 'First'\n assert long_repeat('ddvvrwwwrggg') == 3, 'Second'\n\n\ndef test_fails_1():\n assert long_repeat('') == 0, 'Empty String'\n\n\ndef test_fails_2():\n assert long_repeat('aa') == 2\n",
"step-5": "from checkio.home.long_repeat import long_repeat\n\n\ndef test_long_repeat():\n assert long_repeat(\"sdsffffse\") == 4, \"First\"\n assert long_repeat(\"ddvvrwwwrggg\") == 3, \"Second\"\n\n\ndef test_fails_1():\n assert long_repeat(\"\") == 0, \"Empty String\"\n\n\ndef test_fails_2():\n assert long_repeat(\"aa\") == 2\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class ArticleLinkedUrl(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
url = models.CharField(max_length=2000, unique=True)
title = models.CharField(max_length=500)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return self.title
class Meta:
db_table = 'article_linked_urls'
class LinkedArticle(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_articles')
linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_article')
title = models.CharField(max_length=500, null=True, blank=False)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return '{}-{}'.format(self.article.title, self.linked_article.title)
class Meta:
db_table = 'linked_articles'
unique_together = ('article', 'linked_article'),
class MedicineSpecialization(TimeStampedModel):
medicine = models.ForeignKey(Article, on_delete=models.CASCADE)
specialization = models.ForeignKey(PracticeSpecialization, on_delete=
models.CASCADE, null=True, blank=True)
def __str__(self):
return self.medicine.title + ' ' + self.specialization.name
class Meta:
db_table = 'medicine_specialization'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Article(TimeStampedModel, CreatedByModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.title
class Meta:
db_table = 'article'
class ArticleImage(TimeStampedModel, CreatedByModel):
name = models.ImageField(upload_to='article/images')
def image_tag(self):
if self.name:
return mark_safe('<img src="%s" width="150" height="150" />' %
self.name.url)
return ''
def __str__(self):
if self.name:
return self.name.url
return ''
class Meta:
db_table = 'article_image'
class ArticleContentBox(TimeStampedModel):
name = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
rank = models.PositiveSmallIntegerField(default=0, blank=True)
def __str__(self):
return self.name
class Meta:
db_table = 'article_content_box'
class ArticleLinkedUrl(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
url = models.CharField(max_length=2000, unique=True)
title = models.CharField(max_length=500)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return self.title
class Meta:
db_table = 'article_linked_urls'
class LinkedArticle(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_articles')
linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_article')
title = models.CharField(max_length=500, null=True, blank=False)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return '{}-{}'.format(self.article.title, self.linked_article.title)
class Meta:
db_table = 'linked_articles'
unique_together = ('article', 'linked_article'),
class MedicineSpecialization(TimeStampedModel):
medicine = models.ForeignKey(Article, on_delete=models.CASCADE)
specialization = models.ForeignKey(PracticeSpecialization, on_delete=
models.CASCADE, null=True, blank=True)
def __str__(self):
return self.medicine.title + ' ' + self.specialization.name
class Meta:
db_table = 'medicine_specialization'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Article(TimeStampedModel, CreatedByModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_absolute_url(self):
content_type = ContentType.objects.get_for_model(self)
return reverse('admin:%s_%s_change' % (content_type.app_label,
content_type.model), args=[self.id])
def icon_tag(self):
if self.icon:
return mark_safe('<img src="%s" width="150" height="150" />' %
self.icon.url)
return ''
def save(self, *args, **kwargs):
self.published_date = (self.published_date if self.published_date else
datetime.date.today())
if hasattr(self, 'url'):
self.url = self.url.strip('/').lower()
super().save(*args, **kwargs)
def __str__(self):
return self.title
class Meta:
db_table = 'article'
class ArticleImage(TimeStampedModel, CreatedByModel):
name = models.ImageField(upload_to='article/images')
def image_tag(self):
if self.name:
return mark_safe('<img src="%s" width="150" height="150" />' %
self.name.url)
return ''
def __str__(self):
if self.name:
return self.name.url
return ''
class Meta:
db_table = 'article_image'
class ArticleContentBox(TimeStampedModel):
name = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
rank = models.PositiveSmallIntegerField(default=0, blank=True)
def __str__(self):
return self.name
class Meta:
db_table = 'article_content_box'
class ArticleLinkedUrl(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
url = models.CharField(max_length=2000, unique=True)
title = models.CharField(max_length=500)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return self.title
class Meta:
db_table = 'article_linked_urls'
class LinkedArticle(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_articles')
linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_article')
title = models.CharField(max_length=500, null=True, blank=False)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return '{}-{}'.format(self.article.title, self.linked_article.title)
class Meta:
db_table = 'linked_articles'
unique_together = ('article', 'linked_article'),
class MedicineSpecialization(TimeStampedModel):
medicine = models.ForeignKey(Article, on_delete=models.CASCADE)
specialization = models.ForeignKey(PracticeSpecialization, on_delete=
models.CASCADE, null=True, blank=True)
def __str__(self):
return self.medicine.title + ' ' + self.specialization.name
class Meta:
db_table = 'medicine_specialization'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Article(TimeStampedModel, CreatedByModel):
title = models.CharField(blank=False, null=False, max_length=500,
unique=True)
url = models.CharField(blank=False, null=True, max_length=500, unique=True)
heading_title = models.CharField(blank=True, null=False, max_length=500)
body = models.CharField(blank=False, null=False, max_length=200000)
category = models.ForeignKey(ArticleCategory, null=True, related_name=
'articles', on_delete=models.SET_NULL)
header_image = models.ImageField(upload_to='articles/header/images',
null=True, blank=True, default='')
header_image_alt = models.CharField(max_length=512, blank=True, null=
True, default='')
icon = models.ImageField(upload_to='articles/icons', null=True, blank=
True, default='')
is_published = models.BooleanField(default=False, verbose_name='Published')
description = models.CharField(max_length=500, blank=True, null=True)
keywords = models.CharField(max_length=256, blank=True, null=True)
author_name = models.CharField(max_length=256, null=True, blank=True)
author = models.ForeignKey(Doctor, null=True, blank=True, related_name=
'published_articles', on_delete=models.SET_NULL)
published_date = models.DateField(default=datetime.date.today)
linked_articles = models.ManyToManyField('self', symmetrical=False,
through='LinkedArticle', through_fields=('article', 'linked_article'))
pharmeasy_url = models.TextField(blank=True, null=True)
pharmeasy_product_id = models.PositiveIntegerField(null=True, blank=True)
is_widget_available = models.NullBooleanField()
def get_absolute_url(self):
content_type = ContentType.objects.get_for_model(self)
return reverse('admin:%s_%s_change' % (content_type.app_label,
content_type.model), args=[self.id])
def icon_tag(self):
if self.icon:
return mark_safe('<img src="%s" width="150" height="150" />' %
self.icon.url)
return ''
def save(self, *args, **kwargs):
self.published_date = (self.published_date if self.published_date else
datetime.date.today())
if hasattr(self, 'url'):
self.url = self.url.strip('/').lower()
super().save(*args, **kwargs)
def __str__(self):
return self.title
class Meta:
db_table = 'article'
class ArticleImage(TimeStampedModel, CreatedByModel):
name = models.ImageField(upload_to='article/images')
def image_tag(self):
if self.name:
return mark_safe('<img src="%s" width="150" height="150" />' %
self.name.url)
return ''
def __str__(self):
if self.name:
return self.name.url
return ''
class Meta:
db_table = 'article_image'
class ArticleContentBox(TimeStampedModel):
name = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
rank = models.PositiveSmallIntegerField(default=0, blank=True)
def __str__(self):
return self.name
class Meta:
db_table = 'article_content_box'
class ArticleLinkedUrl(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
url = models.CharField(max_length=2000, unique=True)
title = models.CharField(max_length=500)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return self.title
class Meta:
db_table = 'article_linked_urls'
class LinkedArticle(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_articles')
linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,
related_name='related_article')
title = models.CharField(max_length=500, null=True, blank=False)
content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete
=models.SET_NULL)
def __str__(self):
return '{}-{}'.format(self.article.title, self.linked_article.title)
class Meta:
db_table = 'linked_articles'
unique_together = ('article', 'linked_article'),
class MedicineSpecialization(TimeStampedModel):
medicine = models.ForeignKey(Article, on_delete=models.CASCADE)
specialization = models.ForeignKey(PracticeSpecialization, on_delete=
models.CASCADE, null=True, blank=True)
def __str__(self):
return self.medicine.title + ' ' + self.specialization.name
class Meta:
db_table = 'medicine_specialization'
<|reserved_special_token_1|>
from django.db import models
from django.utils.safestring import mark_safe
from ondoc.authentication.models import TimeStampedModel, CreatedByModel, Image
import datetime
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from ondoc.doctor.models import Doctor, PracticeSpecialization
class ArticleCategory(TimeStampedModel):
name = models.CharField(blank=False, null=False, max_length=500)
identifier = models.CharField(max_length=48, blank=False, null=True)
url = models.CharField(blank=False, null=True, max_length=500, unique=True)
title = models.CharField(max_length=500, null=True, blank=True)
description = models.CharField(max_length=200000, null=True, blank=True)
def __str__(self):
return self.name
class Meta:
db_table = "article_categories"
def save(self, *args, **kwargs):
if hasattr(self, 'url'):
self.url = self.url.strip('/').lower()
super(ArticleCategory, self).save(*args, **kwargs)
class Article(TimeStampedModel, CreatedByModel):
title = models.CharField(blank=False, null=False, max_length=500, unique=True)
url = models.CharField(blank=False, null=True, max_length=500, unique=True)
heading_title = models.CharField(blank=True, null=False, max_length=500)
body = models.CharField(blank=False, null=False, max_length=200000)
category = models.ForeignKey(ArticleCategory, null=True, related_name='articles', on_delete=models.SET_NULL)
header_image = models.ImageField(upload_to='articles/header/images', null=True, blank=True, default='')
header_image_alt = models.CharField(max_length=512, blank=True, null=True, default='')
icon = models.ImageField(upload_to='articles/icons', null=True, blank=True, default='')
is_published = models.BooleanField(default=False, verbose_name='Published')
description = models.CharField(max_length=500, blank=True, null=True)
keywords = models.CharField(max_length=256, blank=True, null=True)
author_name = models.CharField(max_length=256, null=True, blank=True)
author = models.ForeignKey(Doctor, null=True, blank=True, related_name='published_articles', on_delete=models.SET_NULL)
published_date = models.DateField(default=datetime.date.today)
linked_articles = models.ManyToManyField('self', symmetrical=False, through='LinkedArticle',
through_fields=('article', 'linked_article'))
pharmeasy_url = models.TextField(blank=True, null=True)
pharmeasy_product_id = models.PositiveIntegerField(null=True, blank=True)
is_widget_available = models.NullBooleanField()
def get_absolute_url(self):
content_type = ContentType.objects.get_for_model(self)
return reverse('admin:%s_%s_change' % (content_type.app_label, content_type.model), args=[self.id])
def icon_tag(self):
if self.icon:
return mark_safe('<img src="%s" width="150" height="150" />' % (self.icon.url))
return ""
def save(self, *args, **kwargs):
self.published_date = self.published_date if self.published_date else datetime.date.today()
if hasattr(self, 'url'):
self.url = self.url.strip('/').lower()
super().save(*args, **kwargs)
def __str__(self):
return self.title
class Meta:
db_table = "article"
class ArticleImage(TimeStampedModel, CreatedByModel):
name = models.ImageField(upload_to='article/images')
def image_tag(self):
if self.name:
return mark_safe('<img src="%s" width="150" height="150" />' % (self.name.url))
return ""
def __str__(self):
if self.name:
return self.name.url
return ""
class Meta:
db_table = "article_image"
class ArticleContentBox(TimeStampedModel):
name = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
rank = models.PositiveSmallIntegerField(default=0, blank=True)
def __str__(self):
return self.name
class Meta:
db_table = 'article_content_box'
class ArticleLinkedUrl(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
url = models.CharField(max_length=2000, unique=True)
title = models.CharField(max_length=500)
content_box = models.ForeignKey(ArticleContentBox,null=True, on_delete=models.SET_NULL)
def __str__(self):
return self.title
class Meta:
db_table = 'article_linked_urls'
class LinkedArticle(TimeStampedModel):
article = models.ForeignKey(Article, on_delete=models.CASCADE, related_name='related_articles')
linked_article = models.ForeignKey(Article, on_delete=models.CASCADE, related_name='related_article')
title = models.CharField(max_length=500, null=True, blank=False)
content_box = models.ForeignKey(ArticleContentBox,null=True, on_delete=models.SET_NULL)
def __str__(self):
return "{}-{}".format(self.article.title, self.linked_article.title)
class Meta:
db_table = 'linked_articles'
unique_together = (('article', 'linked_article'),)
class MedicineSpecialization(TimeStampedModel):
    """Associates a medicine article with a practice specialization."""

    medicine = models.ForeignKey(Article, on_delete=models.CASCADE)
    # Nullable: a medicine may exist without a mapped specialization.
    specialization = models.ForeignKey(PracticeSpecialization, on_delete=models.CASCADE, null=True,
                                       blank=True)

    def __str__(self):
        """Return "<medicine title> <specialization name>".

        The specialization FK is declared ``null=True``, so guard against
        ``None`` — the original unconditional ``self.specialization.name``
        raised AttributeError for rows without a specialization.
        """
        if self.specialization is None:
            return self.medicine.title
        return self.medicine.title + " " + self.specialization.name

    class Meta:
        db_table = "medicine_specialization"
|
flexible
|
{
"blob_id": "9bc15f063adc7d2a5ea81d090736ab6ce66a03d4",
"index": 5028,
"step-1": "<mask token>\n\n\nclass ArticleLinkedUrl(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE)\n url = models.CharField(max_length=2000, unique=True)\n title = models.CharField(max_length=500)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article_linked_urls'\n\n\nclass LinkedArticle(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_articles')\n linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_article')\n title = models.CharField(max_length=500, null=True, blank=False)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return '{}-{}'.format(self.article.title, self.linked_article.title)\n\n\n class Meta:\n db_table = 'linked_articles'\n unique_together = ('article', 'linked_article'),\n\n\nclass MedicineSpecialization(TimeStampedModel):\n medicine = models.ForeignKey(Article, on_delete=models.CASCADE)\n specialization = models.ForeignKey(PracticeSpecialization, on_delete=\n models.CASCADE, null=True, blank=True)\n\n def __str__(self):\n return self.medicine.title + ' ' + self.specialization.name\n\n\n class Meta:\n db_table = 'medicine_specialization'\n",
"step-2": "<mask token>\n\n\nclass Article(TimeStampedModel, CreatedByModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article'\n\n\nclass ArticleImage(TimeStampedModel, CreatedByModel):\n name = models.ImageField(upload_to='article/images')\n\n def image_tag(self):\n if self.name:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' %\n self.name.url)\n return ''\n\n def __str__(self):\n if self.name:\n return self.name.url\n return ''\n\n\n class Meta:\n db_table = 'article_image'\n\n\nclass ArticleContentBox(TimeStampedModel):\n name = models.CharField(max_length=1000)\n title = models.CharField(max_length=1000)\n rank = models.PositiveSmallIntegerField(default=0, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n db_table = 'article_content_box'\n\n\nclass ArticleLinkedUrl(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE)\n url = models.CharField(max_length=2000, unique=True)\n title = models.CharField(max_length=500)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article_linked_urls'\n\n\nclass LinkedArticle(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_articles')\n linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_article')\n title = models.CharField(max_length=500, null=True, blank=False)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return 
'{}-{}'.format(self.article.title, self.linked_article.title)\n\n\n class Meta:\n db_table = 'linked_articles'\n unique_together = ('article', 'linked_article'),\n\n\nclass MedicineSpecialization(TimeStampedModel):\n medicine = models.ForeignKey(Article, on_delete=models.CASCADE)\n specialization = models.ForeignKey(PracticeSpecialization, on_delete=\n models.CASCADE, null=True, blank=True)\n\n def __str__(self):\n return self.medicine.title + ' ' + self.specialization.name\n\n\n class Meta:\n db_table = 'medicine_specialization'\n",
"step-3": "<mask token>\n\n\nclass Article(TimeStampedModel, CreatedByModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_absolute_url(self):\n content_type = ContentType.objects.get_for_model(self)\n return reverse('admin:%s_%s_change' % (content_type.app_label,\n content_type.model), args=[self.id])\n\n def icon_tag(self):\n if self.icon:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' %\n self.icon.url)\n return ''\n\n def save(self, *args, **kwargs):\n self.published_date = (self.published_date if self.published_date else\n datetime.date.today())\n if hasattr(self, 'url'):\n self.url = self.url.strip('/').lower()\n super().save(*args, **kwargs)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article'\n\n\nclass ArticleImage(TimeStampedModel, CreatedByModel):\n name = models.ImageField(upload_to='article/images')\n\n def image_tag(self):\n if self.name:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' %\n self.name.url)\n return ''\n\n def __str__(self):\n if self.name:\n return self.name.url\n return ''\n\n\n class Meta:\n db_table = 'article_image'\n\n\nclass ArticleContentBox(TimeStampedModel):\n name = models.CharField(max_length=1000)\n title = models.CharField(max_length=1000)\n rank = models.PositiveSmallIntegerField(default=0, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n db_table = 'article_content_box'\n\n\nclass ArticleLinkedUrl(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE)\n url = models.CharField(max_length=2000, unique=True)\n title = models.CharField(max_length=500)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n 
return self.title\n\n\n class Meta:\n db_table = 'article_linked_urls'\n\n\nclass LinkedArticle(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_articles')\n linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_article')\n title = models.CharField(max_length=500, null=True, blank=False)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return '{}-{}'.format(self.article.title, self.linked_article.title)\n\n\n class Meta:\n db_table = 'linked_articles'\n unique_together = ('article', 'linked_article'),\n\n\nclass MedicineSpecialization(TimeStampedModel):\n medicine = models.ForeignKey(Article, on_delete=models.CASCADE)\n specialization = models.ForeignKey(PracticeSpecialization, on_delete=\n models.CASCADE, null=True, blank=True)\n\n def __str__(self):\n return self.medicine.title + ' ' + self.specialization.name\n\n\n class Meta:\n db_table = 'medicine_specialization'\n",
"step-4": "<mask token>\n\n\nclass Article(TimeStampedModel, CreatedByModel):\n title = models.CharField(blank=False, null=False, max_length=500,\n unique=True)\n url = models.CharField(blank=False, null=True, max_length=500, unique=True)\n heading_title = models.CharField(blank=True, null=False, max_length=500)\n body = models.CharField(blank=False, null=False, max_length=200000)\n category = models.ForeignKey(ArticleCategory, null=True, related_name=\n 'articles', on_delete=models.SET_NULL)\n header_image = models.ImageField(upload_to='articles/header/images',\n null=True, blank=True, default='')\n header_image_alt = models.CharField(max_length=512, blank=True, null=\n True, default='')\n icon = models.ImageField(upload_to='articles/icons', null=True, blank=\n True, default='')\n is_published = models.BooleanField(default=False, verbose_name='Published')\n description = models.CharField(max_length=500, blank=True, null=True)\n keywords = models.CharField(max_length=256, blank=True, null=True)\n author_name = models.CharField(max_length=256, null=True, blank=True)\n author = models.ForeignKey(Doctor, null=True, blank=True, related_name=\n 'published_articles', on_delete=models.SET_NULL)\n published_date = models.DateField(default=datetime.date.today)\n linked_articles = models.ManyToManyField('self', symmetrical=False,\n through='LinkedArticle', through_fields=('article', 'linked_article'))\n pharmeasy_url = models.TextField(blank=True, null=True)\n pharmeasy_product_id = models.PositiveIntegerField(null=True, blank=True)\n is_widget_available = models.NullBooleanField()\n\n def get_absolute_url(self):\n content_type = ContentType.objects.get_for_model(self)\n return reverse('admin:%s_%s_change' % (content_type.app_label,\n content_type.model), args=[self.id])\n\n def icon_tag(self):\n if self.icon:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' %\n self.icon.url)\n return ''\n\n def save(self, *args, **kwargs):\n self.published_date = 
(self.published_date if self.published_date else\n datetime.date.today())\n if hasattr(self, 'url'):\n self.url = self.url.strip('/').lower()\n super().save(*args, **kwargs)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article'\n\n\nclass ArticleImage(TimeStampedModel, CreatedByModel):\n name = models.ImageField(upload_to='article/images')\n\n def image_tag(self):\n if self.name:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' %\n self.name.url)\n return ''\n\n def __str__(self):\n if self.name:\n return self.name.url\n return ''\n\n\n class Meta:\n db_table = 'article_image'\n\n\nclass ArticleContentBox(TimeStampedModel):\n name = models.CharField(max_length=1000)\n title = models.CharField(max_length=1000)\n rank = models.PositiveSmallIntegerField(default=0, blank=True)\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n db_table = 'article_content_box'\n\n\nclass ArticleLinkedUrl(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE)\n url = models.CharField(max_length=2000, unique=True)\n title = models.CharField(max_length=500)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return self.title\n\n\n class Meta:\n db_table = 'article_linked_urls'\n\n\nclass LinkedArticle(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_articles')\n linked_article = models.ForeignKey(Article, on_delete=models.CASCADE,\n related_name='related_article')\n title = models.CharField(max_length=500, null=True, blank=False)\n content_box = models.ForeignKey(ArticleContentBox, null=True, on_delete\n =models.SET_NULL)\n\n def __str__(self):\n return '{}-{}'.format(self.article.title, self.linked_article.title)\n\n\n class Meta:\n db_table = 'linked_articles'\n unique_together = ('article', 'linked_article'),\n\n\nclass MedicineSpecialization(TimeStampedModel):\n medicine = 
models.ForeignKey(Article, on_delete=models.CASCADE)\n specialization = models.ForeignKey(PracticeSpecialization, on_delete=\n models.CASCADE, null=True, blank=True)\n\n def __str__(self):\n return self.medicine.title + ' ' + self.specialization.name\n\n\n class Meta:\n db_table = 'medicine_specialization'\n",
"step-5": "from django.db import models\nfrom django.utils.safestring import mark_safe\nfrom ondoc.authentication.models import TimeStampedModel, CreatedByModel, Image\nimport datetime\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.urls import reverse\n\nfrom ondoc.doctor.models import Doctor, PracticeSpecialization\n\n\nclass ArticleCategory(TimeStampedModel):\n\n name = models.CharField(blank=False, null=False, max_length=500)\n identifier = models.CharField(max_length=48, blank=False, null=True)\n url = models.CharField(blank=False, null=True, max_length=500, unique=True)\n title = models.CharField(max_length=500, null=True, blank=True)\n description = models.CharField(max_length=200000, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n class Meta:\n db_table = \"article_categories\"\n\n def save(self, *args, **kwargs):\n if hasattr(self, 'url'):\n self.url = self.url.strip('/').lower()\n super(ArticleCategory, self).save(*args, **kwargs)\n\n\nclass Article(TimeStampedModel, CreatedByModel):\n title = models.CharField(blank=False, null=False, max_length=500, unique=True)\n url = models.CharField(blank=False, null=True, max_length=500, unique=True)\n heading_title = models.CharField(blank=True, null=False, max_length=500)\n body = models.CharField(blank=False, null=False, max_length=200000)\n category = models.ForeignKey(ArticleCategory, null=True, related_name='articles', on_delete=models.SET_NULL)\n header_image = models.ImageField(upload_to='articles/header/images', null=True, blank=True, default='')\n header_image_alt = models.CharField(max_length=512, blank=True, null=True, default='')\n icon = models.ImageField(upload_to='articles/icons', null=True, blank=True, default='')\n is_published = models.BooleanField(default=False, verbose_name='Published')\n description = models.CharField(max_length=500, blank=True, null=True)\n keywords = models.CharField(max_length=256, blank=True, null=True)\n author_name = 
models.CharField(max_length=256, null=True, blank=True)\n author = models.ForeignKey(Doctor, null=True, blank=True, related_name='published_articles', on_delete=models.SET_NULL)\n published_date = models.DateField(default=datetime.date.today)\n linked_articles = models.ManyToManyField('self', symmetrical=False, through='LinkedArticle',\n through_fields=('article', 'linked_article'))\n pharmeasy_url = models.TextField(blank=True, null=True)\n pharmeasy_product_id = models.PositiveIntegerField(null=True, blank=True)\n is_widget_available = models.NullBooleanField()\n\n def get_absolute_url(self):\n content_type = ContentType.objects.get_for_model(self)\n return reverse('admin:%s_%s_change' % (content_type.app_label, content_type.model), args=[self.id])\n\n def icon_tag(self):\n if self.icon:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' % (self.icon.url))\n return \"\"\n\n def save(self, *args, **kwargs):\n self.published_date = self.published_date if self.published_date else datetime.date.today()\n if hasattr(self, 'url'):\n self.url = self.url.strip('/').lower()\n super().save(*args, **kwargs)\n\n def __str__(self):\n return self.title\n\n class Meta:\n db_table = \"article\"\n\n\nclass ArticleImage(TimeStampedModel, CreatedByModel):\n name = models.ImageField(upload_to='article/images')\n\n def image_tag(self):\n if self.name:\n return mark_safe('<img src=\"%s\" width=\"150\" height=\"150\" />' % (self.name.url))\n return \"\"\n\n def __str__(self):\n if self.name:\n return self.name.url\n return \"\"\n\n class Meta:\n db_table = \"article_image\"\n\nclass ArticleContentBox(TimeStampedModel):\n name = models.CharField(max_length=1000)\n title = models.CharField(max_length=1000)\n rank = models.PositiveSmallIntegerField(default=0, blank=True)\n\n def __str__(self):\n return self.name\n\n class Meta:\n db_table = 'article_content_box'\n\n\nclass ArticleLinkedUrl(TimeStampedModel):\n article = models.ForeignKey(Article, 
on_delete=models.CASCADE)\n url = models.CharField(max_length=2000, unique=True)\n title = models.CharField(max_length=500)\n content_box = models.ForeignKey(ArticleContentBox,null=True, on_delete=models.SET_NULL)\n\n def __str__(self):\n return self.title\n\n class Meta:\n db_table = 'article_linked_urls'\n\n\nclass LinkedArticle(TimeStampedModel):\n article = models.ForeignKey(Article, on_delete=models.CASCADE, related_name='related_articles')\n linked_article = models.ForeignKey(Article, on_delete=models.CASCADE, related_name='related_article')\n title = models.CharField(max_length=500, null=True, blank=False)\n content_box = models.ForeignKey(ArticleContentBox,null=True, on_delete=models.SET_NULL)\n\n def __str__(self):\n return \"{}-{}\".format(self.article.title, self.linked_article.title)\n\n class Meta:\n db_table = 'linked_articles'\n unique_together = (('article', 'linked_article'),)\n\n\nclass MedicineSpecialization(TimeStampedModel):\n medicine = models.ForeignKey(Article, on_delete=models.CASCADE)\n specialization = models.ForeignKey(PracticeSpecialization, on_delete=models.CASCADE, null=True,\n blank=True)\n\n def __str__(self):\n return self.medicine.title + \" \" + self.specialization.name\n\n class Meta:\n db_table = \"medicine_specialization\"\n\n",
"step-ids": [
9,
18,
21,
22,
28
]
}
|
[
9,
18,
21,
22,
28
] |
<|reserved_special_token_0|>
class TestSets(unittest.TestCase):
def test_is_set(self):
"""Test set validator (Exercise 3a)."""
cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,
2], [0, 1, 2, 2, 2]])
self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))
self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))
self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestSets(unittest.TestCase):
def test_is_set(self):
"""Test set validator (Exercise 3a)."""
cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,
2], [0, 1, 2, 2, 2]])
self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))
self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))
self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))
def test_find_sets(self):
"""Test solver (Exercise 3b)."""
cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,
2], [0, 1, 2, 2, 2]])
set_indices = set_solver.find_sets(cards)
self.assertEqual(len(set_indices), 2)
self.assertTrue((0, 1, 2) in set_indices)
self.assertTrue((2, 3, 4) in set_indices)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestSets(unittest.TestCase):
def test_is_set(self):
"""Test set validator (Exercise 3a)."""
cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,
2], [0, 1, 2, 2, 2]])
self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))
self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))
self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))
def test_find_sets(self):
"""Test solver (Exercise 3b)."""
cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,
2], [0, 1, 2, 2, 2]])
set_indices = set_solver.find_sets(cards)
self.assertEqual(len(set_indices), 2)
self.assertTrue((0, 1, 2) in set_indices)
self.assertTrue((2, 3, 4) in set_indices)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
import numpy
import set_solver
class TestSets(unittest.TestCase):
    """Unit tests for the Set-card solver module."""

    @staticmethod
    def _sample_cards():
        # Shared 4x5 card fixture used by both tests.
        return numpy.array([
            [1, 1, 1, 2, 0],
            [0, 1, 2, 2, 2],
            [0, 1, 2, 2, 2],
            [0, 1, 2, 2, 2],
        ])

    def test_is_set(self):
        """Test set validator (Exercise 3a)."""
        deck = self._sample_cards()
        self.assertTrue(set_solver.is_set(deck, [0, 1, 2]))
        self.assertFalse(set_solver.is_set(deck, [0, 1, 3]))
        self.assertTrue(set_solver.is_set(deck, [2, 3, 4]))

    def test_find_sets(self):
        """Test solver (Exercise 3b)."""
        deck = self._sample_cards()
        found = set_solver.find_sets(deck)
        self.assertEqual(len(found), 2)
        self.assertIn((0, 1, 2), found)
        self.assertIn((2, 3, 4), found)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
flexible
|
{
"blob_id": "6065fae2a11f6b525ef10346e297505ec9d4e9d5",
"index": 8550,
"step-1": "<mask token>\n\n\nclass TestSets(unittest.TestCase):\n\n def test_is_set(self):\n \"\"\"Test set validator (Exercise 3a).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))\n self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))\n self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestSets(unittest.TestCase):\n\n def test_is_set(self):\n \"\"\"Test set validator (Exercise 3a).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))\n self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))\n self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))\n\n def test_find_sets(self):\n \"\"\"Test solver (Exercise 3b).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n set_indices = set_solver.find_sets(cards)\n self.assertEqual(len(set_indices), 2)\n self.assertTrue((0, 1, 2) in set_indices)\n self.assertTrue((2, 3, 4) in set_indices)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestSets(unittest.TestCase):\n\n def test_is_set(self):\n \"\"\"Test set validator (Exercise 3a).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))\n self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))\n self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))\n\n def test_find_sets(self):\n \"\"\"Test solver (Exercise 3b).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n set_indices = set_solver.find_sets(cards)\n self.assertEqual(len(set_indices), 2)\n self.assertTrue((0, 1, 2) in set_indices)\n self.assertTrue((2, 3, 4) in set_indices)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nimport numpy\nimport set_solver\n\n\nclass TestSets(unittest.TestCase):\n\n def test_is_set(self):\n \"\"\"Test set validator (Exercise 3a).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n self.assertTrue(set_solver.is_set(cards, [0, 1, 2]))\n self.assertFalse(set_solver.is_set(cards, [0, 1, 3]))\n self.assertTrue(set_solver.is_set(cards, [2, 3, 4]))\n\n def test_find_sets(self):\n \"\"\"Test solver (Exercise 3b).\"\"\"\n cards = numpy.array([[1, 1, 1, 2, 0], [0, 1, 2, 2, 2], [0, 1, 2, 2,\n 2], [0, 1, 2, 2, 2]])\n set_indices = set_solver.find_sets(cards)\n self.assertEqual(len(set_indices), 2)\n self.assertTrue((0, 1, 2) in set_indices)\n self.assertTrue((2, 3, 4) in set_indices)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
# coding: utf-8
"""
StockX API
PRERELEASE API - Subject to change before release. Provides access to StockX's public services, allowing end users to query for product and order information. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import io_stockx
from io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia # noqa: E501
from io_stockx.rest import ApiException
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
    """Unit test stubs for PortfolioIdDelResponsePortfolioItemProductMedia."""

    def setUp(self):
        """No per-test fixtures are required yet."""
        pass

    def tearDown(self):
        """No per-test cleanup is required yet."""
        pass

    def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
        """Test PortfolioIdDelResponsePortfolioItemProductMedia"""
        # TODO: construct the model with mandatory attribute example values
        # and assert on it, e.g.
        # io_stockx.models.portfolio_id_del_response_portfolio_item_product_media
        #     .PortfolioIdDelResponsePortfolioItemProductMedia()
        pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
normal
|
{
"blob_id": "ae88418ccfdaa4b357a2491f6450dbcda55b1c21",
"index": 2013,
"step-1": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import\nimport unittest\nimport io_stockx\nfrom io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia\nfrom io_stockx.rest import ApiException\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n StockX API\n\n PRERELEASE API - Subject to change before release. Provides access to StockX's public services, allowing end users to query for product and order information. # noqa: E501\n\n OpenAPI spec version: 1.0.0\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\nimport unittest\n\nimport io_stockx\nfrom io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia # noqa: E501\nfrom io_stockx.rest import ApiException\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n # FIXME: construct object with mandatory attributes with example values\n # model = io_stockx.models.portfolio_id_del_response_portfolio_item_product_media.PortfolioIdDelResponsePortfolioItemProductMedia() # noqa: E501\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# Copyright (c) 2016 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.i18n import _
from cinder.volume.drivers.coprhd.helpers import commoncoprhdapi as common
class VirtualArray(common.CoprHDResource):
    """Client-side helper for CoprHD virtual array ('varray') REST queries."""

    # REST endpoints for the 'varrays' service.
    URI_VIRTUALARRAY = '/vdc/varrays'
    URI_VIRTUALARRAY_BY_VDC_ID = '/vdc/varrays?vdc-id={0}'
    URI_VIRTUALARRAY_URI = '/vdc/varrays/{0}'

    def varray_query(self, name):
        """Returns the UID of the varray specified by the name."""
        # A URI is already a UID; nothing to resolve.
        if common.is_uri(name):
            return name
        for candidate_uri in self.varray_list():
            details = self.varray_show(candidate_uri)
            if details and details['name'] == name:
                return details['id']
        raise common.CoprHdError(common.CoprHdError.NOT_FOUND_ERR,
                                 (_("varray %s: not found") % name))

    def varray_list(self, vdcname=None):
        """Returns all the varrays in a vdc.

        :param vdcname: Name of the Virtual Data Center
        :returns: JSON payload of varray list
        """
        if vdcname is None:
            endpoint = VirtualArray.URI_VIRTUALARRAY
        else:
            endpoint = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(vdcname)
        (s, h) = common.service_json_request(self.ipaddr, self.port, "GET",
                                             endpoint, None)
        payload = common.json_decode(s)
        return [entry['id'] for entry in payload['varray']]

    def varray_show(self, label):
        """Makes REST API call to retrieve varray details based on name."""
        uri = self.varray_query(label)
        (s, h) = common.service_json_request(
            self.ipaddr, self.port, "GET",
            VirtualArray.URI_VIRTUALARRAY_URI.format(uri), None)
        details = common.json_decode(s)
        # Inactive varrays are reported as absent.
        if 'inactive' in details and details['inactive'] is True:
            return None
        return details
|
normal
|
{
"blob_id": "2d48a343ca7f0f8ba7de8b520aad71d774d9b4ba",
"index": 9302,
"step-1": "<mask token>\n\n\nclass VirtualArray(common.CoprHDResource):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def varray_list(self, vdcname=None):\n \"\"\"Returns all the varrays in a vdc.\n\n :param vdcname: Name of the Virtual Data Center\n :returns: JSON payload of varray list\n \"\"\"\n vdcrestapi = None\n if vdcname is not None:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(vdcname\n )\n else:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n vdcrestapi, None)\n o = common.json_decode(s)\n returnlst = []\n for item in o['varray']:\n returnlst.append(item['id'])\n return returnlst\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass VirtualArray(common.CoprHDResource):\n <mask token>\n <mask token>\n <mask token>\n\n def varray_query(self, name):\n \"\"\"Returns the UID of the varray specified by the name.\"\"\"\n if common.is_uri(name):\n return name\n uris = self.varray_list()\n for uri in uris:\n varray = self.varray_show(uri)\n if varray and varray['name'] == name:\n return varray['id']\n raise common.CoprHdError(common.CoprHdError.NOT_FOUND_ERR, _(\n 'varray %s: not found') % name)\n\n def varray_list(self, vdcname=None):\n \"\"\"Returns all the varrays in a vdc.\n\n :param vdcname: Name of the Virtual Data Center\n :returns: JSON payload of varray list\n \"\"\"\n vdcrestapi = None\n if vdcname is not None:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(vdcname\n )\n else:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n vdcrestapi, None)\n o = common.json_decode(s)\n returnlst = []\n for item in o['varray']:\n returnlst.append(item['id'])\n return returnlst\n\n def varray_show(self, label):\n \"\"\"Makes REST API call to retrieve varray details based on name.\"\"\"\n uri = self.varray_query(label)\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n VirtualArray.URI_VIRTUALARRAY_URI.format(uri), None)\n o = common.json_decode(s)\n if 'inactive' in o and o['inactive'] is True:\n return None\n else:\n return o\n",
"step-3": "<mask token>\n\n\nclass VirtualArray(common.CoprHDResource):\n URI_VIRTUALARRAY = '/vdc/varrays'\n URI_VIRTUALARRAY_BY_VDC_ID = '/vdc/varrays?vdc-id={0}'\n URI_VIRTUALARRAY_URI = '/vdc/varrays/{0}'\n\n def varray_query(self, name):\n \"\"\"Returns the UID of the varray specified by the name.\"\"\"\n if common.is_uri(name):\n return name\n uris = self.varray_list()\n for uri in uris:\n varray = self.varray_show(uri)\n if varray and varray['name'] == name:\n return varray['id']\n raise common.CoprHdError(common.CoprHdError.NOT_FOUND_ERR, _(\n 'varray %s: not found') % name)\n\n def varray_list(self, vdcname=None):\n \"\"\"Returns all the varrays in a vdc.\n\n :param vdcname: Name of the Virtual Data Center\n :returns: JSON payload of varray list\n \"\"\"\n vdcrestapi = None\n if vdcname is not None:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(vdcname\n )\n else:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n vdcrestapi, None)\n o = common.json_decode(s)\n returnlst = []\n for item in o['varray']:\n returnlst.append(item['id'])\n return returnlst\n\n def varray_show(self, label):\n \"\"\"Makes REST API call to retrieve varray details based on name.\"\"\"\n uri = self.varray_query(label)\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n VirtualArray.URI_VIRTUALARRAY_URI.format(uri), None)\n o = common.json_decode(s)\n if 'inactive' in o and o['inactive'] is True:\n return None\n else:\n return o\n",
"step-4": "from cinder.i18n import _\nfrom cinder.volume.drivers.coprhd.helpers import commoncoprhdapi as common\n\n\nclass VirtualArray(common.CoprHDResource):\n URI_VIRTUALARRAY = '/vdc/varrays'\n URI_VIRTUALARRAY_BY_VDC_ID = '/vdc/varrays?vdc-id={0}'\n URI_VIRTUALARRAY_URI = '/vdc/varrays/{0}'\n\n def varray_query(self, name):\n \"\"\"Returns the UID of the varray specified by the name.\"\"\"\n if common.is_uri(name):\n return name\n uris = self.varray_list()\n for uri in uris:\n varray = self.varray_show(uri)\n if varray and varray['name'] == name:\n return varray['id']\n raise common.CoprHdError(common.CoprHdError.NOT_FOUND_ERR, _(\n 'varray %s: not found') % name)\n\n def varray_list(self, vdcname=None):\n \"\"\"Returns all the varrays in a vdc.\n\n :param vdcname: Name of the Virtual Data Center\n :returns: JSON payload of varray list\n \"\"\"\n vdcrestapi = None\n if vdcname is not None:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(vdcname\n )\n else:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n vdcrestapi, None)\n o = common.json_decode(s)\n returnlst = []\n for item in o['varray']:\n returnlst.append(item['id'])\n return returnlst\n\n def varray_show(self, label):\n \"\"\"Makes REST API call to retrieve varray details based on name.\"\"\"\n uri = self.varray_query(label)\n s, h = common.service_json_request(self.ipaddr, self.port, 'GET',\n VirtualArray.URI_VIRTUALARRAY_URI.format(uri), None)\n o = common.json_decode(s)\n if 'inactive' in o and o['inactive'] is True:\n return None\n else:\n return o\n",
"step-5": "# Copyright (c) 2016 EMC Corporation\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom cinder.i18n import _\nfrom cinder.volume.drivers.coprhd.helpers import commoncoprhdapi as common\n\n\nclass VirtualArray(common.CoprHDResource):\n\n # Commonly used URIs for the 'varrays' module\n URI_VIRTUALARRAY = '/vdc/varrays'\n URI_VIRTUALARRAY_BY_VDC_ID = '/vdc/varrays?vdc-id={0}'\n URI_VIRTUALARRAY_URI = '/vdc/varrays/{0}'\n\n def varray_query(self, name):\n \"\"\"Returns the UID of the varray specified by the name.\"\"\"\n if common.is_uri(name):\n return name\n\n uris = self.varray_list()\n\n for uri in uris:\n varray = self.varray_show(uri)\n if varray and varray['name'] == name:\n return varray['id']\n\n raise common.CoprHdError(common.CoprHdError.NOT_FOUND_ERR,\n (_(\"varray %s: not found\") % name))\n\n def varray_list(self, vdcname=None):\n \"\"\"Returns all the varrays in a vdc.\n\n :param vdcname: Name of the Virtual Data Center\n :returns: JSON payload of varray list\n \"\"\"\n vdcrestapi = None\n if vdcname is not None:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY_BY_VDC_ID.format(\n vdcname)\n else:\n vdcrestapi = VirtualArray.URI_VIRTUALARRAY\n (s, h) = common.service_json_request(\n self.ipaddr, self.port, \"GET\",\n vdcrestapi, None)\n\n o = common.json_decode(s)\n\n returnlst = []\n for item in o['varray']:\n returnlst.append(item['id'])\n\n return returnlst\n\n def varray_show(self, label):\n \"\"\"Makes REST API 
call to retrieve varray details based on name.\"\"\"\n uri = self.varray_query(label)\n\n (s, h) = common.service_json_request(\n self.ipaddr, self.port, \"GET\",\n VirtualArray.URI_VIRTUALARRAY_URI.format(uri),\n None)\n\n o = common.json_decode(s)\n if 'inactive' in o and o['inactive'] is True:\n return None\n else:\n return o\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class Dot(LibFcn):
    """la.dot: matrix-matrix or matrix-vector multiplication.

    Dense operands are arrays (vectors) or arrays-of-arrays (matrices);
    map-based operands are maps or maps-of-maps keyed by row/column labels.
    Error codes: +0 misaligned matrices, +1 too few rows/cols,
    +2 contains non-finite value.
    """
    name = prefix + 'dot'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.
        Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(
        )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.
        Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],
        P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},
        {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # Dispatch on the declared type of the second operand (paramTypes[1]):
        # array vs. map, and within each, nested (matrix) vs. flat (vector).
        if paramTypes[1]['type'] == 'array':
            if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][
                'items']['type'] == 'array':
                # dense matrix x dense matrix
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for
                    row in x) or any(any(math.isnan(z) or math.isinf(z) for
                    z in row) for row in y)
                xmat = arraysToMatrix(x)
                ymat = arraysToMatrix(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                try:
                    if bad:
                        raise PFARuntimeException('contains non-finite value',
                            self.errcodeBase + 2, self.name, pos)
                    return matrixToArrays(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the shapes do not align
                    raise PFARuntimeException('misaligned matrices', self.
                        errcodeBase + 0, self.name, pos)
            else:
                # dense matrix x dense vector
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for
                    row in x) or any(math.isnan(z) or math.isinf(z) for z in y)
                xmat = arraysToMatrix(x)
                ymat = arrayToRowVector(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                try:
                    if bad:
                        raise PFARuntimeException('contains non-finite value',
                            self.errcodeBase + 2, self.name, pos)
                    return rowVectorToArray(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the shapes do not align
                    raise PFARuntimeException('misaligned matrices', self.
                        errcodeBase + 0, self.name, pos)
        elif paramTypes[1]['type'] == 'map':
            if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][
                'values']['type'] == 'map':
                # map-based matrix x map-based matrix: align x's columns with
                # y's rows over the union of their key sets
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(
                    row.values())) for row in list(x.values())) or any(any(
                    math.isnan(z) or math.isinf(z) for z in list(row.values
                    ())) for row in list(y.values()))
                rows = list(rowKeys(x))
                inter = list(colKeys(x).union(rowKeys(y)))
                cols = list(colKeys(y))
                xmat = mapsToMatrix(x, rows, inter)
                ymat = mapsToMatrix(y, inter, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                if bad:
                    raise PFARuntimeException('contains non-finite value',
                        self.errcodeBase + 2, self.name, pos)
                return matrixToMaps(np().dot(xmat, ymat), rows, cols)
            else:
                # map-based matrix x map-based vector
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(
                    row.values())) for row in list(x.values())) or any(math
                    .isnan(z) or math.isinf(z) for z in list(y.values()))
                rows = list(rowKeys(x))
                cols = list(colKeys(x).union(rowKeys(y)))
                xmat = mapsToMatrix(x, rows, cols)
                ymat = mapToRowVector(y, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                if bad:
                    raise PFARuntimeException('contains non-finite value',
                        self.errcodeBase + 2, self.name, pos)
                return rowVectorToMap(np().dot(xmat, ymat), rows)
<|reserved_special_token_0|>
class Transpose(LibFcn):
    """la.transpose: swap the rows and columns of a matrix.

    Error codes: +0 too few rows/cols, +1 ragged columns.
    """
    name = prefix + 'transpose'
    sig = Sigs([
        Sig([{'x': P.Array(P.Array(P.Double()))}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Map(P.Double()))}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        denseForm = isinstance(x, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in x)
        if denseForm:
            numRows = len(x)
            if numRows < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            numCols = len(x[0])
            if numCols < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 1, self.name, pos)
            transposed = []
            for c in range(numCols):
                transposed.append([row[c] for row in x])
            return transposed
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())):
            rowLabels = rowKeys(x)
            colLabels = colKeys(x)
            if len(rowLabels) < 1 or len(colLabels) < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rowLabels))
                        for c in colLabels)
<|reserved_special_token_0|>
class Inverse(LibFcn):
    """la.inverse: inverse of a matrix via numpy.matrix.I (which yields a
    pseudo-inverse when the matrix is not square).

    Error codes: +0 too few rows/cols, +1 ragged columns.
    """
    name = prefix + 'inverse'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(
        P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P
        .Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            rows = list(rowKeys(x))
            cols = list(colKeys(x))
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            xmat = mapsToMatrix(x, rows, cols)
            # row and column labels swap roles in the inverse
            return matrixToMaps(xmat.I, cols, rows)
<|reserved_special_token_0|>
class Trace(LibFcn):
    """la.trace: sum of the diagonal elements of a matrix.

    An empty dense matrix has trace 0.0; for the map form only keys present
    as both a row label and a column label contribute.
    Error code: +0 ragged columns (dense form).
    """
    name = prefix + 'trace'
    sig = Sigs([
        Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()),
        Sig([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(
                isinstance(row, (list, tuple)) for row in x):
            if not x:
                return 0.0
            numCols = len(x[0])
            if raggedArray(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 0, self.name, pos)
            diagLength = min(len(x), numCols)
            return sum(x[i][i] for i in range(diagLength))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())):
            sharedKeys = rowKeys(x).intersection(colKeys(x))
            return sum(x[k][k] for k in sharedKeys)
<|reserved_special_token_0|>
class Det(LibFcn):
    """la.det: determinant of a square matrix.

    Returns NaN if any entry is non-finite.  Error codes:
    +0 too few rows/cols, +1 ragged columns (dense only),
    +2 non-square matrix (dense only; the map form is squared over the
    union of its row and column keys).
    """
    name = prefix + 'det'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig
        ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                return float('nan')
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # square the map-based matrix over the union of row/column keys
            # (missing cells presumably filled by mapsToMatrix — see helper)
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                return float('nan')
            else:
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
<|reserved_special_token_0|>
class Symmetric(LibFcn):
    """la.symmetric: true if a matrix equals its own transpose within tol.

    Error codes: +0 too few rows/cols, +1 ragged columns (dense only),
    +2 non-square matrix (dense only).
    """
    name = prefix + 'symmetric'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(
        )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.
        Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Tolerant scalar comparison: same-signed infinities match, two
        NaNs match, and two finite values match when |x - y| < tol."""
        if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x <
            0.0 and y < 0.0):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y
            ) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(
                cols)) for i in range(rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # compare over the union of row/column keys; absent cells are 0.0
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})
                .get(i, 0.0), tol) for j in keys) for i in keys)
<|reserved_special_token_0|>
class EigenBasis(LibFcn):
    """la.eigenBasis: eigenvector basis of a symmetric matrix, with each
    eigenvector scaled by 1/sqrt(|eigenvalue|) and rows ordered by
    decreasing eigenvalue magnitude.

    The map form returns rows keyed "0", "1", ... and columns keyed by the
    original labels.  Error codes: +0 too few rows/cols, +1 ragged columns,
    +2 non-square matrix, +3 non-finite matrix.
    """
    name = prefix + 'eigenBasis'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(
        P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P
        .Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        # symmetrize first to guard against round-off asymmetry
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        # fix each eigenvector's sign so its first component is non-negative
        evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for
            i in range(size)]
        # scale factor 1/sqrt(|eigenvalue|) per eigenvector
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        # ascending by scale factor == descending by |eigenvalue|
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),
                len(keys)), list(map(str, range(len(keys)))), keys)
<|reserved_special_token_0|>
class Truncate(LibFcn):
    """la.truncate: keep only leading rows of a matrix.

    The dense form keeps the first `keep` rows (negative counts are clamped
    to zero); the map form keeps the rows whose key appears in the `keep`
    list.  Error codes: +0 too few rows/cols, +1 ragged columns.
    """
    name = prefix + 'truncate'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}
        ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()
        ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        # a negative row count behaves like zero
        if isinstance(keep, int) and keep < 0:
            keep = 0
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            rows = rowKeys(x)
            cols = colKeys(x)
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            return dict((k, x[k]) for k in rows if k in keep)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def matrixToMaps(x, rows, cols):
    """Convert a numeric matrix (anything with a .tolist() method) into a
    map-of-maps keyed by the given row and column labels."""
    out = {}
    for rowLabel, rowValues in zip(rows, x.tolist()):
        out[rowLabel] = dict(zip(cols, rowValues))
    return out
<|reserved_special_token_0|>
class MapApply(LibFcn):
    """la.map: apply a one-argument scalar function to every element of a
    dense or map-based matrix."""
    name = prefix + 'map'
    sig = Sigs([
        Sig([{'x': P.Array(P.Array(P.Double()))},
             {'fcn': P.Fcn([P.Double()], P.Double())}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Map(P.Double()))},
             {'fcn': P.Fcn([P.Double()], P.Double())}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        def transform(value):
            # evaluate the user-supplied PFA function on one element
            return callfcn(state, scope, fcn, [value])
        if isinstance(x, (list, tuple)) and all(
                isinstance(row, (list, tuple)) for row in x):
            return [[transform(value) for value in row] for row in x]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())):
            return dict((rowKey, dict((colKey, transform(value))
                                      for colKey, value in list(row.items())))
                        for rowKey, row in list(x.items()))
<|reserved_special_token_0|>
class Scale(LibFcn):
    """la.scale: multiply every element of a vector or matrix by alpha.

    Works on dense (array) and map-based vectors and matrices alike.
    """
    name = prefix + 'scale'
    sig = Sigs([
        Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}],
            P.Array(P.Double())),
        Sig([{'x': P.Array(P.Array(P.Double()))}, {'alpha': P.Double()}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Double())}, {'alpha': P.Double()}],
            P.Map(P.Double())),
        Sig([{'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        if isinstance(x, (list, tuple)):
            if all(isinstance(item, (list, tuple)) for item in x):
                # dense matrix
                return [[cell * alpha for cell in row] for row in x]
            # dense vector
            return [item * alpha for item in x]
        if all(isinstance(x[key], dict) for key in x):
            # map-based matrix
            return dict((rowKey, dict((colKey, cell * alpha)
                                      for colKey, cell in list(row.items())))
                        for rowKey, row in list(x.items()))
        # map-based vector
        return dict((key, item * alpha) for key, item in list(x.items()))
<|reserved_special_token_0|>
class ZipMap(LibFcn):
    """la.zipmap: combine two matrices element-wise with a two-argument
    function.

    Dense operands must have identical shapes (error +0); map-based
    operands are combined over the union of keys with 0.0 for missing
    entries.
    """
    name = prefix + 'zipmap'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.
        Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.
        Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(
        P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.
        Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x) and isinstance(y, (list, tuple)) and all(
            isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip
                (x, y)):
                raise PFARuntimeException('misaligned matrices', self.
                    errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip
                (xi, yi)] for xi, yi in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],
            dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {
                }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for
                i in rows)
<|reserved_special_token_0|>
class Add(LibFcn):
    """la.add: element-wise sum of two vectors or matrices.

    Dense operands must agree in shape (error +0); map-based operands are
    combined over the union of keys, with 0.0 for missing entries.
    """
    name = prefix + 'add'
    sig = Sigs([
        Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}],
            P.Array(P.Double())),
        Sig([{'x': P.Array(P.Array(P.Double()))},
             {'y': P.Array(P.Array(P.Double()))}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}],
            P.Map(P.Double())),
        Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        xNested = isinstance(x, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in x)
        yNested = isinstance(y, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in y)
        if xNested and yNested:
            # dense matrix + dense matrix
            if len(x) != len(y) or any(len(xr) != len(yr)
                                       for xr, yr in zip(x, y)):
                raise PFARuntimeException('misaligned matrices',
                                          self.errcodeBase + 0, self.name, pos)
            return [[xv + yv for xv, yv in zip(xr, yr)]
                    for xr, yr in zip(x, y)]
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            # dense vector + dense vector
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices',
                                          self.errcodeBase + 0, self.name, pos)
            return [xv + yv for xv, yv in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())) and isinstance(y,
                dict) and all(isinstance(y[k], dict) for k in list(y.keys())):
            # map-based matrix + map-based matrix over the union of keys
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return dict((r, dict((c, x.get(r, {}).get(c, 0.0) +
                                  y.get(r, {}).get(c, 0.0))
                                 for c in allCols)) for r in allRows)
        else:
            # map-based vector + map-based vector over the union of keys
            allRows = rowKeys(x).union(rowKeys(y))
            return dict((r, x.get(r, 0.0) + y.get(r, 0.0)) for r in allRows)
<|reserved_special_token_0|>
class Sub(LibFcn):
    """la.sub: element-wise difference of two vectors or matrices.

    Dense operands must agree in shape (error +0); map-based operands are
    combined over the union of keys, with 0.0 for missing entries.
    """
    name = prefix + 'sub'
    sig = Sigs([
        Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}],
            P.Array(P.Double())),
        Sig([{'x': P.Array(P.Array(P.Double()))},
             {'y': P.Array(P.Array(P.Double()))}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}],
            P.Map(P.Double())),
        Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        xNested = isinstance(x, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in x)
        yNested = isinstance(y, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in y)
        if xNested and yNested:
            # dense matrix - dense matrix
            if len(x) != len(y) or any(len(xr) != len(yr)
                                       for xr, yr in zip(x, y)):
                raise PFARuntimeException('misaligned matrices',
                                          self.errcodeBase + 0, self.name, pos)
            return [[xv - yv for xv, yv in zip(xr, yr)]
                    for xr, yr in zip(x, y)]
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            # dense vector - dense vector
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices',
                                          self.errcodeBase + 0, self.name, pos)
            return [xv - yv for xv, yv in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())) and isinstance(y,
                dict) and all(isinstance(y[k], dict) for k in list(y.keys())):
            # map-based matrix - map-based matrix over the union of keys
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return dict((r, dict((c, x.get(r, {}).get(c, 0.0) -
                                  y.get(r, {}).get(c, 0.0))
                                 for c in allCols)) for r in allRows)
        else:
            # map-based vector - map-based vector over the union of keys
            allRows = rowKeys(x).union(rowKeys(y))
            return dict((r, x.get(r, 0.0) - y.get(r, 0.0)) for r in allRows)
<|reserved_special_token_0|>
class Dot(LibFcn):
    """la.dot: matrix-matrix or matrix-vector multiplication.

    Dense operands are arrays (vectors) or arrays-of-arrays (matrices);
    map-based operands are maps or maps-of-maps keyed by row/column labels.
    Error codes: +0 misaligned matrices, +1 too few rows/cols,
    +2 contains non-finite value.
    """
    name = prefix + 'dot'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.
        Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(
        )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.
        Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],
        P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},
        {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # Dispatch on the declared type of the second operand (paramTypes[1]):
        # array vs. map, and within each, nested (matrix) vs. flat (vector).
        if paramTypes[1]['type'] == 'array':
            if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][
                'items']['type'] == 'array':
                # dense matrix x dense matrix
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for
                    row in x) or any(any(math.isnan(z) or math.isinf(z) for
                    z in row) for row in y)
                xmat = arraysToMatrix(x)
                ymat = arraysToMatrix(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                try:
                    if bad:
                        raise PFARuntimeException('contains non-finite value',
                            self.errcodeBase + 2, self.name, pos)
                    return matrixToArrays(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the shapes do not align
                    raise PFARuntimeException('misaligned matrices', self.
                        errcodeBase + 0, self.name, pos)
            else:
                # dense matrix x dense vector
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for
                    row in x) or any(math.isnan(z) or math.isinf(z) for z in y)
                xmat = arraysToMatrix(x)
                ymat = arrayToRowVector(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                try:
                    if bad:
                        raise PFARuntimeException('contains non-finite value',
                            self.errcodeBase + 2, self.name, pos)
                    return rowVectorToArray(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError when the shapes do not align
                    raise PFARuntimeException('misaligned matrices', self.
                        errcodeBase + 0, self.name, pos)
        elif paramTypes[1]['type'] == 'map':
            if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][
                'values']['type'] == 'map':
                # map-based matrix x map-based matrix: align x's columns with
                # y's rows over the union of their key sets
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(
                    row.values())) for row in list(x.values())) or any(any(
                    math.isnan(z) or math.isinf(z) for z in list(row.values
                    ())) for row in list(y.values()))
                rows = list(rowKeys(x))
                inter = list(colKeys(x).union(rowKeys(y)))
                cols = list(colKeys(y))
                xmat = mapsToMatrix(x, rows, inter)
                ymat = mapsToMatrix(y, inter, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                if bad:
                    raise PFARuntimeException('contains non-finite value',
                        self.errcodeBase + 2, self.name, pos)
                return matrixToMaps(np().dot(xmat, ymat), rows, cols)
            else:
                # map-based matrix x map-based vector
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(
                    row.values())) for row in list(x.values())) or any(math
                    .isnan(z) or math.isinf(z) for z in list(y.values()))
                rows = list(rowKeys(x))
                cols = list(colKeys(x).union(rowKeys(y)))
                xmat = mapsToMatrix(x, rows, cols)
                ymat = mapToRowVector(y, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0
                    ] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException('too few rows/cols', self.
                        errcodeBase + 1, self.name, pos)
                if bad:
                    raise PFARuntimeException('contains non-finite value',
                        self.errcodeBase + 2, self.name, pos)
                return rowVectorToMap(np().dot(xmat, ymat), rows)
<|reserved_special_token_0|>
class Transpose(LibFcn):
    """la.transpose: swap the rows and columns of a matrix.

    Error codes: +0 too few rows/cols, +1 ragged columns.
    """
    name = prefix + 'transpose'
    sig = Sigs([
        Sig([{'x': P.Array(P.Array(P.Double()))}],
            P.Array(P.Array(P.Double()))),
        Sig([{'x': P.Map(P.Map(P.Double()))}],
            P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        denseForm = isinstance(x, (list, tuple)) and all(
            isinstance(row, (list, tuple)) for row in x)
        if denseForm:
            numRows = len(x)
            if numRows < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            numCols = len(x[0])
            if numCols < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 1, self.name, pos)
            transposed = []
            for c in range(numCols):
                transposed.append([row[c] for row in x])
            return transposed
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())):
            rowLabels = rowKeys(x)
            colLabels = colKeys(x)
            if len(rowLabels) < 1 or len(colLabels) < 1:
                raise PFARuntimeException('too few rows/cols',
                                          self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rowLabels))
                        for c in colLabels)
<|reserved_special_token_0|>
class Inverse(LibFcn):
    """la.inverse: inverse of a matrix via numpy.matrix.I (which yields a
    pseudo-inverse when the matrix is not square).

    Error codes: +0 too few rows/cols, +1 ragged columns.
    """
    name = prefix + 'inverse'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(
        P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P
        .Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            rows = list(rowKeys(x))
            cols = list(colKeys(x))
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            xmat = mapsToMatrix(x, rows, cols)
            # row and column labels swap roles in the inverse
            return matrixToMaps(xmat.I, cols, rows)
<|reserved_special_token_0|>
class Trace(LibFcn):
    """la.trace: sum of the diagonal elements of a matrix.

    An empty dense matrix has trace 0.0; for the map form only keys present
    as both a row label and a column label contribute.
    Error code: +0 ragged columns (dense form).
    """
    name = prefix + 'trace'
    sig = Sigs([
        Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()),
        Sig([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(
                isinstance(row, (list, tuple)) for row in x):
            if not x:
                return 0.0
            numCols = len(x[0])
            if raggedArray(x):
                raise PFARuntimeException('ragged columns',
                                          self.errcodeBase + 0, self.name, pos)
            diagLength = min(len(x), numCols)
            return sum(x[i][i] for i in range(diagLength))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in
                                         list(x.keys())):
            sharedKeys = rowKeys(x).intersection(colKeys(x))
            return sum(x[k][k] for k in sharedKeys)
<|reserved_special_token_0|>
class Det(LibFcn):
    """la.det: determinant of a square matrix.

    Returns NaN if any entry is non-finite.  Error codes:
    +0 too few rows/cols, +1 ragged columns (dense only),
    +2 non-square matrix (dense only; the map form is squared over the
    union of its row and column keys).
    """
    name = prefix + 'det'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig
        ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                return float('nan')
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # square the map-based matrix over the union of row/column keys
            # (missing cells presumably filled by mapsToMatrix — see helper)
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                return float('nan')
            else:
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
<|reserved_special_token_0|>
class Symmetric(LibFcn):
    """la.symmetric: true if a matrix equals its own transpose within tol.

    Error codes: +0 too few rows/cols, +1 ragged columns (dense only),
    +2 non-square matrix (dense only).
    """
    name = prefix + 'symmetric'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(
        )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.
        Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Tolerant scalar comparison: same-signed infinities match, two
        NaNs match, and two finite values match when |x - y| < tol."""
        if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x <
            0.0 and y < 0.0):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y
            ) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(
                cols)) for i in range(rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # compare over the union of row/column keys; absent cells are 0.0
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})
                .get(i, 0.0), tol) for j in keys) for i in keys)
<|reserved_special_token_0|>
class EigenBasis(LibFcn):
    """PFA la.eigenBasis: compute a transformation basis from a symmetric
    matrix.

    Each row of the result is an eigenvector of the (symmetrized) input
    scaled by 1/sqrt(|eigenvalue|).  Accepts a dense array-of-arrays or a
    map-of-maps matrix.
    """
    name = prefix + 'eigenBasis'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(
        P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P
        .Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Return the size-by-size basis for the numpy matrix ``x``.

        Steps: symmetrize x, eigendecompose, flip each eigenvector's sign so
        its first component is non-negative (eigenvectors are only defined up
        to sign), scale by 1/sqrt(|eigenvalue|) (``div`` is a helper defined
        elsewhere — presumably it guards against division by zero; confirm),
        and order the rows by ascending scale factor.
        """
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for
            i in range(size)]
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        """Validate shape and finiteness, densify map input, then delegate
        to ``calculate``.  Map output rows are keyed "0", "1", ... in basis
        order; columns keep the input's labels."""
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),
                len(keys)), list(map(str, range(len(keys)))), keys)
<|reserved_special_token_0|>
class Truncate(LibFcn):
    """PFA la.truncate: keep only the first ``keep`` rows of a dense matrix,
    or only the rows named in ``keep`` of a map-of-maps matrix."""
    name = prefix + 'truncate'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        if isinstance(keep, int) and keep < 0:
            keep = 0  # a negative row count means "keep nothing"
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            if len(rowKeys(x)) < 1 or len(colKeys(x)) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            # retain only the rows whose labels are listed in `keep`
            return {label: x[label] for label in rowKeys(x) if label in keep}
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def np():
    """Import numpy lazily so this module can be loaded before numpy is
    needed; returns the numpy module object."""
    import numpy as _numpy
    return _numpy
def rowKeys(x):
    """Row labels of a map-of-maps matrix: the set of its outer keys."""
    return {label for label in x}
def colKeys(x):
    """Column labels of a map-of-maps matrix: the union of inner keys across
    all rows (empty set for an empty matrix)."""
    if not x:
        return set()
    labels = set()
    for row in x.values():
        labels |= set(row.keys())
    return labels
<|reserved_special_token_0|>
def arrayToRowVector(x):
    """Convert a flat Python list into an n-by-1 numpy matrix of doubles
    (a column, produced by transposing the 1-by-n matrix)."""
    flat = np().matrix(x, dtype=np().double)
    return flat.T
<|reserved_special_token_0|>
def matrixToMaps(x, rows, cols):
    """Convert a numpy matrix into a map-of-maps keyed by the given row
    labels (outer) and column labels (inner)."""
    out = {}
    for label, values in zip(rows, x.tolist()):
        out[label] = dict(zip(cols, values))
    return out
<|reserved_special_token_0|>
def raggedMap(x):
    """True when the rows of a map-of-maps matrix do not all have the same
    number of entries (an empty matrix also counts as ragged)."""
    distinct_lengths = {len(row) for row in x.values()}
    return len(distinct_lengths) != 1
class MapApply(LibFcn):
    """PFA la.map: apply a scalar function elementwise to every entry of a
    dense (array-of-arrays) or sparse (map-of-maps) matrix."""
    name = prefix + 'map'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            return [[callfcn(state, scope, fcn, [v]) for v in row] for row in x]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            return {r: {c: callfcn(state, scope, fcn, [v]) for c, v in row.items()}
                    for r, row in x.items()}
<|reserved_special_token_0|>
class Scale(LibFcn):
    """PFA la.scale: multiply every element of a vector or matrix (dense or
    map-based) by the scalar ``alpha``."""
    name = prefix + 'scale'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'alpha': P.Double()}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        if isinstance(x, (list, tuple)):
            if all(isinstance(row, (list, tuple)) for row in x):
                return [[v * alpha for v in row] for row in x]   # matrix
            return [v * alpha for v in x]                        # vector
        if isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            return {r: {c: v * alpha for c, v in row.items()} for r, row in x.items()}
        return {k: v * alpha for k, v in x.items()}
<|reserved_special_token_0|>
class ZipMap(LibFcn):
    """PFA la.zipmap: combine two matrices elementwise with a two-argument
    function.  Dense operands must agree in shape; map operands are aligned
    on the union of their labels, with missing entries read as 0.0."""
    name = prefix + 'zipmap'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))},
                     {'fcn': P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))},
                     {'fcn': P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        xDense = isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x)
        yDense = isinstance(y, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in y)
        if xDense and yDense:
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [a, b]) for a, b in zip(ra, rb)]
                    for ra, rb in zip(x, y)]
        xMap = isinstance(x, dict) and all(isinstance(x[k], dict) for k in x)
        yMap = isinstance(y, dict) and all(isinstance(y[k], dict) for k in y)
        if xMap and yMap:
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: callfcn(state, scope, fcn,
                                   [x.get(r, {}).get(c, 0.0), y.get(r, {}).get(c, 0.0)])
                        for c in allCols}
                    for r in allRows}
<|reserved_special_token_0|>
class Add(LibFcn):
    """PFA la.add: elementwise sum of two vectors or matrices.

    Dense operands must agree exactly in shape; map operands are aligned on
    the union of their labels, with missing entries read as 0.0.
    """
    name = prefix + 'add'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        def isDenseMatrix(m):
            return isinstance(m, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in m)
        def isMapMatrix(m):
            return isinstance(m, dict) and all(isinstance(m[k], dict) for k in m)
        if isDenseMatrix(x) and isDenseMatrix(y):
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[a + b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        if isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [a + b for a, b in zip(x, y)]
        if isMapMatrix(x) and isMapMatrix(y):
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: x.get(r, {}).get(c, 0.0) + y.get(r, {}).get(c, 0.0) for c in allCols}
                    for r in allRows}
        allRows = rowKeys(x).union(rowKeys(y))
        return {k: x.get(k, 0.0) + y.get(k, 0.0) for k in allRows}
<|reserved_special_token_0|>
class Sub(LibFcn):
    """PFA la.sub: elementwise difference of two vectors or matrices.

    Dense operands must agree exactly in shape; map operands are aligned on
    the union of their labels, with missing entries read as 0.0.
    """
    name = prefix + 'sub'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        def isDenseMatrix(m):
            return isinstance(m, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in m)
        def isMapMatrix(m):
            return isinstance(m, dict) and all(isinstance(m[k], dict) for k in m)
        if isDenseMatrix(x) and isDenseMatrix(y):
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[a - b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        if isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [a - b for a, b in zip(x, y)]
        if isMapMatrix(x) and isMapMatrix(y):
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: x.get(r, {}).get(c, 0.0) - y.get(r, {}).get(c, 0.0) for c in allCols}
                    for r in allRows}
        allRows = rowKeys(x).union(rowKeys(y))
        return {k: x.get(k, 0.0) - y.get(k, 0.0) for k in allRows}
<|reserved_special_token_0|>
class Dot(LibFcn):
    """PFA la.dot: matrix-matrix or matrix-vector multiplication.

    The declared type of the second argument (paramTypes[1]) selects the
    branch: array-of-arrays vs. flat array, or map-of-maps vs. flat map.
    Empty operands raise errcodeBase + 1, non-finite entries raise
    errcodeBase + 2, and shape mismatches raise errcodeBase + 0.  Map
    operands are densified over their label sets (missing entries read as
    0.0) before multiplying.
    """
    name = prefix + 'dot'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        ytype = paramTypes[1]
        if ytype['type'] == 'array':
            yIsMatrix = isinstance(ytype['items'], dict) and ytype['items']['type'] == 'array'
            # scan for NaN/inf before densifying
            nonfinite = any(any(math.isnan(v) or math.isinf(v) for v in row) for row in x)
            if yIsMatrix:
                nonfinite = nonfinite or any(any(math.isnan(v) or math.isinf(v) for v in row) for row in y)
                left, right = arraysToMatrix(x), arraysToMatrix(y)
            else:
                nonfinite = nonfinite or any(math.isnan(v) or math.isinf(v) for v in y)
                left, right = arraysToMatrix(x), arrayToRowVector(y)
            if left.shape[0] == 0 or left.shape[1] == 0 or right.shape[0] == 0 or right.shape[1] == 0:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 1, self.name, pos)
            try:
                if nonfinite:
                    raise PFARuntimeException('contains non-finite value', self.errcodeBase + 2, self.name, pos)
                if yIsMatrix:
                    return matrixToArrays(np().dot(left, right))
                return rowVectorToArray(np().dot(left, right))
            except ValueError:
                # numpy signals an inner-dimension mismatch with ValueError
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
        elif ytype['type'] == 'map':
            yIsMatrix = isinstance(ytype['values'], dict) and ytype['values']['type'] == 'map'
            nonfinite = any(any(math.isnan(v) or math.isinf(v) for v in list(row.values()))
                            for row in list(x.values()))
            if yIsMatrix:
                nonfinite = nonfinite or any(any(math.isnan(v) or math.isinf(v) for v in list(row.values()))
                                             for row in list(y.values()))
                rlabels = list(rowKeys(x))
                inner = list(colKeys(x).union(rowKeys(y)))
                clabels = list(colKeys(y))
                left = mapsToMatrix(x, rlabels, inner)
                right = mapsToMatrix(y, inner, clabels)
            else:
                nonfinite = nonfinite or any(math.isnan(v) or math.isinf(v) for v in list(y.values()))
                rlabels = list(rowKeys(x))
                inner = list(colKeys(x).union(rowKeys(y)))
                left = mapsToMatrix(x, rlabels, inner)
                right = mapToRowVector(y, inner)
            if left.shape[0] == 0 or left.shape[1] == 0 or right.shape[0] == 0 or right.shape[1] == 0:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 1, self.name, pos)
            if nonfinite:
                raise PFARuntimeException('contains non-finite value', self.errcodeBase + 2, self.name, pos)
            if yIsMatrix:
                return matrixToMaps(np().dot(left, right), rlabels, clabels)
            return rowVectorToMap(np().dot(left, right), rlabels)
<|reserved_special_token_0|>
class Transpose(LibFcn):
    """PFA la.transpose: swap the rows and columns of a dense or
    map-of-maps matrix."""
    name = prefix + 'transpose'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return [[row[c] for row in x] for c in range(ncol)]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            rlabels = rowKeys(x)
            clabels = colKeys(x)
            if len(rlabels) < 1 or len(clabels) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            # NOTE(review): raggedMap only compares row *lengths*, so rows of
            # equal size but different keys pass the check and x[r][c] below
            # raises KeyError — unchanged from the original behavior.
            return {c: {r: x[r][c] for r in rlabels} for c in clabels}
<|reserved_special_token_0|>
class Inverse(LibFcn):
    """PFA la.inverse: matrix inverse computed via numpy; map matrices are
    densified over their row/column label sets first."""
    name = prefix + 'inverse'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            rlabels = list(rowKeys(x))
            clabels = list(colKeys(x))
            if len(rlabels) < 1 or len(clabels) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            dense = mapsToMatrix(x, rlabels, clabels)
            # the inverse's labels are swapped: input columns index its rows
            return matrixToMaps(dense.I, clabels, rlabels)
<|reserved_special_token_0|>
class Trace(LibFcn):
    """PFA la.trace: sum of the main-diagonal elements of a matrix.

    Dense (array-of-arrays) input: the diagonal runs over min(rows, cols);
    an empty matrix has trace 0.0; ragged rows raise errcodeBase + 0.
    Map-of-maps input: the diagonal is taken over labels that occur both as
    a row key and as a column key, with entries absent from a row counting
    as 0.0 (consistent with the sparse semantics of la.add / la.sub).
    """
    name = prefix + 'trace'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig
        ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows == 0:
                return 0.0
            cols = len(x[0])
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 0, self.name, pos)
            return sum(x[i][i] for i in range(min(rows, cols)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            keys = rowKeys(x).intersection(colKeys(x))
            # BUG FIX: a label can appear as a column of some *other* row
            # while being absent from its own row, in which case x[i][i]
            # raised KeyError; read the missing entry as 0.0 instead,
            # matching every other map operation in this module.
            return sum(x[i].get(i, 0.0) for i in keys)
<|reserved_special_token_0|>
class Det(LibFcn):
    """PFA la.det: determinant of a square matrix, computed with numpy.

    Returns NaN when the matrix contains any NaN or infinite entry; shape
    problems raise PFARuntimeException (errcodeBase + 0..2).
    """
    name = prefix + 'det'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig
        ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                return float('nan')
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # Map matrices are densified over the union of row/column labels
            # (missing entries read as 0.0), so they are square by construction.
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                return float('nan')
            else:
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
<|reserved_special_token_0|>
class Symmetric(LibFcn):
    """PFA la.symmetric: test whether a square matrix equals its own
    transpose to within an absolute tolerance ``tol``.

    Accepts a dense array-of-arrays or a sparse map-of-maps matrix and
    returns a boolean.
    """
    name = prefix + 'symmetric'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(
        )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.
        Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Tolerant scalar comparison: same-signed infinities match, NaN
        matches NaN, and two finite values match when ``abs(x - y) < tol``;
        any other combination (e.g. finite vs. infinite) does not match."""
        if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x <
            0.0 and y < 0.0):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y
            ) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        """Validate the matrix shape, then compare x[i][j] with x[j][i]
        for every index pair."""
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            # Dense branch: shape problems raise errcodeBase + 0..2.
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(
                cols)) for i in range(rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            # Sparse branch: iterate over the union of row and column labels;
            # entries absent from the map are read as 0.0.
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})
                .get(i, 0.0), tol) for j in keys) for i in keys)
<|reserved_special_token_0|>
class EigenBasis(LibFcn):
    """PFA la.eigenBasis: compute a transformation basis from a symmetric
    matrix.

    Each row of the result is an eigenvector of the (symmetrized) input
    scaled by 1/sqrt(|eigenvalue|).  Accepts a dense array-of-arrays or a
    map-of-maps matrix.
    """
    name = prefix + 'eigenBasis'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(
        P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P
        .Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Return the size-by-size basis for the numpy matrix ``x``.

        Steps: symmetrize x, eigendecompose, flip each eigenvector's sign so
        its first component is non-negative (eigenvectors are only defined up
        to sign), scale by 1/sqrt(|eigenvalue|) (``div`` is a helper defined
        elsewhere — presumably it guards against division by zero; confirm),
        and order the rows by ascending scale factor.
        """
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for
            i in range(size)]
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        """Validate shape and finiteness, densify map input, then delegate
        to ``calculate``.  Map output rows are keyed "0", "1", ... in basis
        order; columns keep the input's labels."""
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple
            )) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.
                    errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException('non-square matrix', self.
                    errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in
                x):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in
            list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.
                    errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.
                values())) for row in list(x.values())):
                raise PFARuntimeException('non-finite matrix', self.
                    errcodeBase + 3, self.name, pos)
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),
                len(keys)), list(map(str, range(len(keys)))), keys)
<|reserved_special_token_0|>
class Truncate(LibFcn):
    """PFA la.truncate: keep only the first ``keep`` rows of a dense matrix,
    or only the rows named in ``keep`` of a map-of-maps matrix."""
    name = prefix + 'truncate'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        if isinstance(keep, int) and keep < 0:
            keep = 0  # a negative row count means "keep nothing"
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            if len(rowKeys(x)) < 1 or len(colKeys(x)) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            # retain only the rows whose labels are listed in `keep`
            return {label: x[label] for label in rowKeys(x) if label in keep}
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def np():
    """Import numpy lazily so this module can be loaded before numpy is
    needed; returns the numpy module object."""
    import numpy as _numpy
    return _numpy
def rowKeys(x):
    """Row labels of a map-of-maps matrix: the set of its outer keys."""
    return {label for label in x}
def colKeys(x):
    """Column labels of a map-of-maps matrix: the union of inner keys across
    all rows (empty set for an empty matrix)."""
    if not x:
        return set()
    labels = set()
    for row in x.values():
        labels |= set(row.keys())
    return labels
def arraysToMatrix(x):
    """Build a numpy matrix of doubles from a list of row lists."""
    numpy = np()
    return numpy.matrix(x, dtype=numpy.double)
def arrayToRowVector(x):
    """Convert a flat Python list into an n-by-1 numpy matrix of doubles
    (a column, produced by transposing the 1-by-n matrix)."""
    flat = np().matrix(x, dtype=np().double)
    return flat.T
<|reserved_special_token_0|>
def matrixToArrays(x):
    """Convert a numpy matrix back into a plain list of row lists."""
    return [list(row) for row in x.tolist()]
def mapsToMatrix(x, rows, cols):
    """Densify a map-of-maps over the given row/column label orders; labels
    missing from the map are filled with 0.0."""
    dense = []
    for r in rows:
        row = x.get(r, {})
        dense.append([row.get(c, 0.0) for c in cols])
    return np().matrix(dense, dtype=np().double)
def mapToRowVector(x, keys):
    """Densify a map of scalars into a len(keys)-by-1 numpy column matrix,
    reading absent keys as 0.0."""
    values = [x.get(k, 0.0) for k in keys]
    return np().matrix(values, dtype=np().double).T
def rowVectorToMap(x, keys):
    """Turn a numpy column matrix back into a map keyed by the given labels."""
    values = x.T.tolist()[0]
    return dict(zip(keys, values))
def matrixToMaps(x, rows, cols):
    """Convert a numpy matrix into a map-of-maps keyed by the given row
    labels (outer) and column labels (inner)."""
    out = {}
    for label, values in zip(rows, x.tolist()):
        out[label] = dict(zip(cols, values))
    return out
def raggedArray(x):
    """True when the rows of a list-of-lists matrix have differing lengths.
    (Raises ValueError on an empty matrix — callers check emptiness first.)"""
    lengths = [len(row) for row in x]
    return max(lengths) != min(lengths)
def raggedMap(x):
    """True when the rows of a map-of-maps matrix do not all have the same
    number of entries (an empty matrix also counts as ragged)."""
    distinct_lengths = {len(row) for row in x.values()}
    return len(distinct_lengths) != 1
class MapApply(LibFcn):
    """PFA la.map: apply a scalar function elementwise to every entry of a
    dense (array-of-arrays) or sparse (map-of-maps) matrix."""
    name = prefix + 'map'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            return [[callfcn(state, scope, fcn, [v]) for v in row] for row in x]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            return {r: {c: callfcn(state, scope, fcn, [v]) for c, v in row.items()}
                    for r, row in x.items()}
<|reserved_special_token_0|>
class Scale(LibFcn):
    """PFA la.scale: multiply every element of a vector or matrix (dense or
    map-based) by the scalar ``alpha``."""
    name = prefix + 'scale'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'alpha': P.Double()}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        if isinstance(x, (list, tuple)):
            if all(isinstance(row, (list, tuple)) for row in x):
                return [[v * alpha for v in row] for row in x]   # matrix
            return [v * alpha for v in x]                        # vector
        if isinstance(x, dict) and all(isinstance(x[k], dict) for k in x):
            return {r: {c: v * alpha for c, v in row.items()} for r, row in x.items()}
        return {k: v * alpha for k, v in x.items()}
<|reserved_special_token_0|>
class ZipMap(LibFcn):
    """PFA la.zipmap: combine two matrices elementwise with a two-argument
    function.  Dense operands must agree in shape; map operands are aligned
    on the union of their labels, with missing entries read as 0.0."""
    name = prefix + 'zipmap'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))},
                     {'fcn': P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))},
                     {'fcn': P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        xDense = isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x)
        yDense = isinstance(y, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in y)
        if xDense and yDense:
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [a, b]) for a, b in zip(ra, rb)]
                    for ra, rb in zip(x, y)]
        xMap = isinstance(x, dict) and all(isinstance(x[k], dict) for k in x)
        yMap = isinstance(y, dict) and all(isinstance(y[k], dict) for k in y)
        if xMap and yMap:
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: callfcn(state, scope, fcn,
                                   [x.get(r, {}).get(c, 0.0), y.get(r, {}).get(c, 0.0)])
                        for c in allCols}
                    for r in allRows}
<|reserved_special_token_0|>
class Add(LibFcn):
    """PFA la.add: elementwise sum of two vectors or matrices.

    Dense operands must agree exactly in shape; map operands are aligned on
    the union of their labels, with missing entries read as 0.0.
    """
    name = prefix + 'add'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        def isDenseMatrix(m):
            return isinstance(m, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in m)
        def isMapMatrix(m):
            return isinstance(m, dict) and all(isinstance(m[k], dict) for k in m)
        if isDenseMatrix(x) and isDenseMatrix(y):
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[a + b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        if isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [a + b for a, b in zip(x, y)]
        if isMapMatrix(x) and isMapMatrix(y):
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: x.get(r, {}).get(c, 0.0) + y.get(r, {}).get(c, 0.0) for c in allCols}
                    for r in allRows}
        allRows = rowKeys(x).union(rowKeys(y))
        return {k: x.get(k, 0.0) + y.get(k, 0.0) for k in allRows}
<|reserved_special_token_0|>
class Sub(LibFcn):
    """PFA la.sub: elementwise difference of two vectors or matrices.

    Dense operands must agree exactly in shape; map operands are aligned on
    the union of their labels, with missing entries read as 0.0.
    """
    name = prefix + 'sub'
    sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        def isDenseMatrix(m):
            return isinstance(m, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in m)
        def isMapMatrix(m):
            return isinstance(m, dict) and all(isinstance(m[k], dict) for k in m)
        if isDenseMatrix(x) and isDenseMatrix(y):
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [[a - b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        if isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
            return [a - b for a, b in zip(x, y)]
        if isMapMatrix(x) and isMapMatrix(y):
            allRows = rowKeys(x).union(rowKeys(y))
            allCols = colKeys(x).union(colKeys(y))
            return {r: {c: x.get(r, {}).get(c, 0.0) - y.get(r, {}).get(c, 0.0) for c in allCols}
                    for r in allRows}
        allRows = rowKeys(x).union(rowKeys(y))
        return {k: x.get(k, 0.0) - y.get(k, 0.0) for k in allRows}
<|reserved_special_token_0|>
class Dot(LibFcn):
    """PFA la.dot: matrix-matrix or matrix-vector multiplication.

    The declared type of the second argument (paramTypes[1]) selects the
    branch: array-of-arrays vs. flat array, or map-of-maps vs. flat map.
    Empty operands raise errcodeBase + 1, non-finite entries raise
    errcodeBase + 2, and shape mismatches raise errcodeBase + 0.  Map
    operands are densified over their label sets (missing entries read as
    0.0) before multiplying.
    """
    name = prefix + 'dot'
    sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        ytype = paramTypes[1]
        if ytype['type'] == 'array':
            yIsMatrix = isinstance(ytype['items'], dict) and ytype['items']['type'] == 'array'
            # scan for NaN/inf before densifying
            nonfinite = any(any(math.isnan(v) or math.isinf(v) for v in row) for row in x)
            if yIsMatrix:
                nonfinite = nonfinite or any(any(math.isnan(v) or math.isinf(v) for v in row) for row in y)
                left, right = arraysToMatrix(x), arraysToMatrix(y)
            else:
                nonfinite = nonfinite or any(math.isnan(v) or math.isinf(v) for v in y)
                left, right = arraysToMatrix(x), arrayToRowVector(y)
            if left.shape[0] == 0 or left.shape[1] == 0 or right.shape[0] == 0 or right.shape[1] == 0:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 1, self.name, pos)
            try:
                if nonfinite:
                    raise PFARuntimeException('contains non-finite value', self.errcodeBase + 2, self.name, pos)
                if yIsMatrix:
                    return matrixToArrays(np().dot(left, right))
                return rowVectorToArray(np().dot(left, right))
            except ValueError:
                # numpy signals an inner-dimension mismatch with ValueError
                raise PFARuntimeException('misaligned matrices', self.errcodeBase + 0, self.name, pos)
        elif ytype['type'] == 'map':
            yIsMatrix = isinstance(ytype['values'], dict) and ytype['values']['type'] == 'map'
            nonfinite = any(any(math.isnan(v) or math.isinf(v) for v in list(row.values()))
                            for row in list(x.values()))
            if yIsMatrix:
                nonfinite = nonfinite or any(any(math.isnan(v) or math.isinf(v) for v in list(row.values()))
                                             for row in list(y.values()))
                rlabels = list(rowKeys(x))
                inner = list(colKeys(x).union(rowKeys(y)))
                clabels = list(colKeys(y))
                left = mapsToMatrix(x, rlabels, inner)
                right = mapsToMatrix(y, inner, clabels)
            else:
                nonfinite = nonfinite or any(math.isnan(v) or math.isinf(v) for v in list(y.values()))
                rlabels = list(rowKeys(x))
                inner = list(colKeys(x).union(rowKeys(y)))
                left = mapsToMatrix(x, rlabels, inner)
                right = mapToRowVector(y, inner)
            if left.shape[0] == 0 or left.shape[1] == 0 or right.shape[0] == 0 or right.shape[1] == 0:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 1, self.name, pos)
            if nonfinite:
                raise PFARuntimeException('contains non-finite value', self.errcodeBase + 2, self.name, pos)
            if yIsMatrix:
                return matrixToMaps(np().dot(left, right), rlabels, clabels)
            return rowVectorToMap(np().dot(left, right), rlabels)
<|reserved_special_token_0|>
class Transpose(LibFcn):
    """la.transpose -- transpose a matrix given as an array of arrays or a map of maps."""
    name = prefix + "transpose"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060

    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            # swap the row and column indices
            return [[x[r][c] for r in range(nrow)] for c in range(ncol)]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            rkeys = rowKeys(x)
            ckeys = colKeys(x)
            if len(rkeys) < 1 or len(ckeys) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rkeys)) for c in ckeys)
<|reserved_special_token_0|>
class Inverse(LibFcn):
    """la.inverse -- matrix inverse for array-of-arrays or map-of-maps matrices."""
    name = prefix + "inverse"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070

    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if len(x[0]) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            # numpy.matrix.I computes the inverse
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            rkeys = list(rowKeys(x))
            ckeys = list(colKeys(x))
            if len(rkeys) < 1 or len(ckeys) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            # row/col key lists are swapped on the way out to match the inverse's dimensions
            return matrixToMaps(mapsToMatrix(x, rkeys, ckeys).I, ckeys, rkeys)
<|reserved_special_token_0|>
class Trace(LibFcn):
    """la.trace -- sum of the diagonal elements of a matrix."""
    name = prefix + "trace"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080

    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) == 0:
                return 0.0  # the empty matrix has trace zero
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 0, self.name, pos)
            # non-square input sums only the leading min(rows, cols) diagonal entries
            return sum(x[i][i] for i in range(min(len(x), len(x[0]))))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            diagonal = rowKeys(x).intersection(colKeys(x))
            return sum(x[k][k] for k in diagonal)
<|reserved_special_token_0|>
class Det(LibFcn):
    """la.det -- determinant of a square matrix; NaN if any entry is non-finite."""
    name = prefix + "det"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090

    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1 or len(x[0]) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            if len(x) != len(x[0]):
                raise PFARuntimeException('non-square matrix', self.errcodeBase + 2, self.name, pos)
            if any(math.isnan(v) or math.isinf(v) for row in x for v in row):
                return float('nan')
            return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if any(math.isnan(v) or math.isinf(v) for row in x.values() for v in row.values()):
                return float('nan')
            # missing cells are filled with 0.0 by mapsToMatrix, which squares the matrix
            return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
<|reserved_special_token_0|>
class Symmetric(LibFcn):
    """la.symmetric -- test whether a matrix equals its transpose within a tolerance."""
    name = prefix + "symmetric"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"tol": P.Double()}], P.Boolean()),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"tol": P.Double()}], P.Boolean())])
    errcodeBase = 24100

    @staticmethod
    def same(x, y, tol):
        """Tolerant equality: infinities match when signs agree, NaN matches NaN,
        finite values match when their difference is below tol; everything else is False."""
        if math.isinf(x) and math.isinf(y):
            return (x > 0.0) == (y > 0.0)
        if math.isnan(x) and math.isnan(y):
            return True
        if math.isnan(x) or math.isnan(y) or math.isinf(x) or math.isinf(y):
            return False
        return abs(x - y) < tol

    def __call__(self, state, scope, pos, paramTypes, x, tol):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1 or len(x[0]) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            n = len(x)
            if n != len(x[0]):
                raise PFARuntimeException('non-square matrix', self.errcodeBase + 2, self.name, pos)
            return all(self.same(x[i][j], x[j][i], tol) for i in range(n) for j in range(n))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            # absent cells default to 0.0, so sparse maps are compared densely
            return all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol)
                       for i in keys for j in keys)
<|reserved_special_token_0|>
class EigenBasis(LibFcn):
    """la.eigenBasis -- basis of eigenvectors scaled by 1/sqrt(|eigenvalue|)."""
    name = prefix + "eigenBasis"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24110

    def calculate(self, x, size):
        """Symmetrize x, then return rows of eigenvectors scaled by 1/sqrt(|eigenvalue|),
        ordered by ascending scale factor (i.e. descending eigenvalue magnitude)."""
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        # fix each eigenvector's sign so that its first component is non-negative
        evects2 = [evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0) for i in range(size)]
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out

    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1 or len(x[0]) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            if len(x) != len(x[0]):
                raise PFARuntimeException('non-square matrix', self.errcodeBase + 2, self.name, pos)
            if any(math.isnan(v) or math.isinf(v) for row in x for v in row):
                raise PFARuntimeException('non-finite matrix', self.errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), len(x)))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if any(math.isnan(v) or math.isinf(v) for row in x.values() for v in row.values()):
                raise PFARuntimeException('non-finite matrix', self.errcodeBase + 3, self.name, pos)
            # output rows are re-keyed "0", "1", ... because the basis has no natural row names
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)),
                                list(map(str, range(len(keys)))), keys)
<|reserved_special_token_0|>
class Truncate(LibFcn):
    """la.truncate -- keep only the first `keep` rows (array) or the named rows (map)."""
    name = prefix + "truncate"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"keep": P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"keep": P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120

    def __call__(self, state, scope, pos, paramTypes, x, keep):
        if isinstance(keep, int) and keep < 0:
            keep = 0  # negative row counts behave like zero
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if len(x[0]) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException('ragged columns', self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            rkeys = rowKeys(x)
            if len(rkeys) < 1 or len(colKeys(x)) < 1:
                raise PFARuntimeException('too few rows/cols', self.errcodeBase + 0, self.name, pos)
            # keep is a list of row names in the map case; rows keep all their columns
            return dict((k, x[k]) for k in rkeys if k in keep)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from titus.fcn import Fcn
from titus.fcn import LibFcn
from titus.signature import Sig
from titus.signature import Sigs
from titus.datatype import *
from titus.errors import *
from titus.util import callfcn, div
import titus.P as P
from functools import reduce
# Registry of PFA library-function instances exported by this module, keyed by name.
provides = {}
def provide(fcn):
    """Register a library-function instance in the module-level `provides` registry."""
    provides[fcn.name] = fcn
# Namespace prefix shared by every function in this module ("la.dot", "la.det", ...).
prefix = "la."
def np():
    """Import numpy lazily so the module can be imported without numpy installed."""
    import numpy
    return numpy
def rowKeys(x):
    """Return the set of outer (row) keys of a map-of-maps matrix."""
    return set(x)
def colKeys(x):
    """Return the union of inner (column) keys over all rows of a map-of-maps matrix."""
    out = set()
    for row in x.values():
        out |= set(row)
    return out
def arraysToMatrix(x):
    """Convert a list-of-lists into a numpy matrix of doubles."""
    return np().matrix(x, dtype=np().double)
def arrayToRowVector(x):
    """Convert a flat list into a numpy column vector (n x 1); the .T makes it a column despite the name."""
    return np().matrix(x, dtype=np().double).T
def rowVectorToArray(x):
    """Convert a numpy column vector back into a flat Python list."""
    return x.T.tolist()[0]
def matrixToArrays(x):
    """Convert a numpy matrix into a list-of-lists."""
    return x.tolist()
def mapsToMatrix(x, rows, cols):
    """Convert a map-of-maps into a numpy matrix ordered by `rows`/`cols`; missing cells become 0.0."""
    return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows], dtype=np().double)
def mapToRowVector(x, keys):
    """Convert a map into a numpy column vector ordered by `keys`; missing entries become 0.0."""
    return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T
def rowVectorToMap(x, keys):
    """Convert a numpy column vector into a map keyed by `keys` (positional pairing)."""
    return dict(list(zip(keys, x.T.tolist()[0])))
def matrixToMaps(x, rows, cols):
    """Convert a numpy matrix into a map-of-maps keyed by `rows` (outer) and `cols` (inner)."""
    return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x.tolist()))
def raggedArray(x):
    """Return True when the rows of a list-of-lists matrix have differing lengths.

    The original `max(collens) != min(collens)` raised ValueError on an empty
    list; an empty matrix has no length mismatch, so it is reported as not
    ragged.  Callers that forbid empty matrices already raise before reaching
    this check, so the change is backward compatible.
    """
    return len(set(map(len, x))) > 1
def raggedMap(x):
    """Return True unless every row of a map-of-maps has the same number of columns.

    NOTE(review): only row lengths are compared, not the key sets themselves;
    an empty outer map is reported as ragged (zero distinct lengths != 1).
    """
    return len({len(row) for row in x.values()}) != 1
class MapApply(LibFcn):
    """la.map: apply a scalar callback to every element of a matrix, preserving its shape."""
    name = prefix + "map"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        # array-of-arrays representation
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]
        # map-of-maps representation
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
provide(MapApply())
class Scale(LibFcn):
    """la.scale: multiply every element of a vector or matrix by the scalar `alpha`."""
    name = prefix + "scale"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"alpha": P.Double()}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"alpha": P.Double()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"alpha": P.Double()}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"alpha": P.Double()}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        # array matrix
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            return [[xj * alpha for xj in xi] for xi in x]
        # array vector
        elif isinstance(x, (list, tuple)):
            return [xi * alpha for xi in x]
        # map matrix
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):
            return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
        # map vector
        else:
            return dict((i, xi * alpha) for i, xi in list(x.items()))
provide(Scale())
class ZipMap(LibFcn):
    """la.zipmap: combine two matrices elementwise with a two-argument callback."""
    name = prefix + "zipmap"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        # array case: the two shapes must match exactly
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # map case: union of keys; missing entries are treated as 0.0
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)
provide(ZipMap())
class Add(LibFcn):
    """la.add: elementwise sum of two vectors or matrices."""
    name = prefix + "add"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # array matrix + array matrix: shapes must match exactly
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # array vector + array vector
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [xi + yi for xi, yi in zip(x, y)]
        # map matrix + map matrix: union of keys; missing entries treated as 0.0
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
        # map vector + map vector
        else:
            rows = rowKeys(x).union(rowKeys(y))
            return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)
provide(Add())
class Sub(LibFcn):
    """la.sub: elementwise difference (x - y) of two vectors or matrices."""
    name = prefix + "sub"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        # array matrix - array matrix: shapes must match exactly
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # array vector - array vector
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [xi - yi for xi, yi in zip(x, y)]
        # map matrix - map matrix: union of keys; missing entries treated as 0.0
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
        # map vector - map vector
        else:
            rows = rowKeys(x).union(rowKeys(y))
            return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)
provide(Sub())
class Dot(LibFcn):
    """la.dot: matrix-vector or matrix-matrix product.

    The declared type of the second argument (`paramTypes[1]`) selects the case:
    array vs map representation, vector vs matrix.  Non-finite inputs raise
    "contains non-finite value"; array shape mismatches surface as numpy
    ValueError and are re-raised as "misaligned matrices".  Map inputs cannot
    misalign because missing cells are filled with 0.0.
    """
    name = prefix + "dot"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        if paramTypes[1]["type"] == "array":
            if isinstance(paramTypes[1]["items"], dict) and paramTypes[1]["items"]["type"] == "array":
                # array matrix-matrix case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)
                xmat = arraysToMatrix(x)
                ymat = arraysToMatrix(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    # the non-finite error is raised inside the try but is not a ValueError, so it propagates
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return matrixToArrays(np().dot(xmat, ymat))
                except ValueError:
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            else:
                # array matrix-vector case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(math.isnan(z) or math.isinf(z) for z in y)
                xmat = arraysToMatrix(x)
                ymat = arrayToRowVector(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return rowVectorToArray(np().dot(xmat, ymat))
                except ValueError:
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
        elif paramTypes[1]["type"] == "map":
            if isinstance(paramTypes[1]["values"], dict) and paramTypes[1]["values"]["type"] == "map":
                # map matrix-matrix case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))
                rows = list(rowKeys(x))
                # the inner dimension is the union of x's columns and y's rows, so shapes always align
                inter = list(colKeys(x).union(rowKeys(y)))
                cols = list(colKeys(y))
                xmat = mapsToMatrix(x, rows, inter)
                ymat = mapsToMatrix(y, inter, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return matrixToMaps(np().dot(xmat, ymat), rows, cols)
            else:
                # map matrix-vector case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(math.isnan(z) or math.isinf(z) for z in list(y.values()))
                rows = list(rowKeys(x))
                cols = list(colKeys(x).union(rowKeys(y)))
                xmat = mapsToMatrix(x, rows, cols)
                ymat = mapToRowVector(y, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return rowVectorToMap(np().dot(xmat, ymat), rows)
provide(Dot())
class Transpose(LibFcn):
    """la.transpose: transpose an array-of-arrays or map-of-maps matrix."""
    name = prefix + "transpose"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            # swap the row and column indices
            return [[x[r][c] for r in range(rows)] for c in range(cols)]
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = rowKeys(x)
            cols = colKeys(x)
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)
provide(Transpose())
class Inverse(LibFcn):
    """la.inverse: matrix inverse via numpy matrix .I."""
    name = prefix + "inverse"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = list(rowKeys(x))
            cols = list(colKeys(x))
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            xmat = mapsToMatrix(x, rows, cols)
            # row/col key lists are swapped on the way out to match the inverse's dimensions
            return matrixToMaps(xmat.I, cols, rows)
provide(Inverse())
class Trace(LibFcn):
    """la.trace: sum of the diagonal elements of a matrix."""
    name = prefix + "trace"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows == 0:
                return 0.0  # the empty matrix has trace zero
            else:
                cols = len(x[0])
                if raggedArray(x):
                    raise PFARuntimeException("ragged columns", self.errcodeBase + 0, self.name, pos)
                # non-square input sums only the leading min(rows, cols) diagonal entries
                return sum(x[i][i] for i in range(min(rows, cols)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            # diagonal entries are those whose key appears as both a row key and a column key
            keys = rowKeys(x).intersection(colKeys(x))
            return sum(x[i][i] for i in keys)
provide(Trace())
class Det(LibFcn):
    """la.det: determinant of a square matrix; returns NaN if any entry is non-finite."""
    name = prefix + "det"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                return float("nan")
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                return float("nan")
            else:
                # missing cells are filled with 0.0 by mapsToMatrix, which squares the matrix
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
provide(Det())
class Symmetric(LibFcn):
    """la.symmetric: test whether a matrix equals its transpose within a tolerance."""
    name = prefix + "symmetric"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"tol": P.Double()}], P.Boolean()),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"tol": P.Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Tolerant equality: same-signed infinities match, NaN matches NaN,
        finite values match when |x - y| < tol; everything else is False."""
        if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            # absent cells default to 0.0, so sparse maps are compared densely
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)
provide(Symmetric())
class EigenBasis(LibFcn):
    """la.eigenBasis: basis of eigenvectors of the symmetrized input, each scaled
    by 1/sqrt(|eigenvalue|)."""
    name = prefix + "eigenBasis"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Symmetrize x, then return a size x size array whose rows are eigenvectors
        scaled by 1/sqrt(|eigenvalue|), ordered by ascending scale factor."""
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        # fix each eigenvector's sign so that its first component is non-negative
        evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            # output rows are re-keyed "0", "1", ... because the basis has no natural row names
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)
provide(EigenBasis())
class Truncate(LibFcn):
    """PFA library function that keeps only a leading subset of a matrix's
    rows: the first *keep* rows for the array form, or the rows whose labels
    appear in *keep* for the map form."""
    name = prefix + "truncate"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"keep": P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"keep": P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        # A negative row count means "keep nothing".
        if isinstance(keep, int) and keep < 0:
            keep = 0
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            # Array-of-arrays form: validate shape, then slice off the prefix.
            numRows = len(x)
            if numRows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            numCols = len(x[0])
            if numCols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(x[label], dict) for label in list(x.keys())):
            # Map-of-maps form: retain only rows whose label is in *keep*.
            rowLabels = rowKeys(x)
            colLabels = colKeys(x)
            if len(rowLabels) < 1 or len(colLabels) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return dict((label, x[label]) for label in rowLabels if label in keep)
# Register the truncate function with the PFA function library.
provide(Truncate())
|
flexible
|
{
"blob_id": "780dc49c3eaef3fb25ca0aac760326b1c3adc633",
"index": 6002,
"step-1": "<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, 
self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n 
raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass 
Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, 
self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n 
name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, 
self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = 
Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) 
!= len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n 
raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = 
any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, 
self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise 
PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise 
PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) 
for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\n<mask token>\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) 
for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, 
{'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and 
isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 
0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for 
row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), 
Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, 
pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = 
len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols 
< 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, 
self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToArrays(x):\n return x.tolist()\n\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in\n rows], dtype=np().double)\n\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': 
P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = 
rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': 
P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': 
P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = 
list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, 
dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if 
raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': 
P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 
else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], 
P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python\n\n# Copyright (C) 2014 Open Data (\"Open Data\" refers to\n# one or more of the following companies: Open Data Partners LLC,\n# Open Data Research LLC, or Open Data Capital LLC.)\n# \n# This file is part of Hadrian.\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\n\nfrom titus.fcn import Fcn\nfrom titus.fcn import LibFcn\nfrom titus.signature import Sig\nfrom titus.signature import Sigs\nfrom titus.datatype import *\nfrom titus.errors import *\nfrom titus.util import callfcn, div\nimport titus.P as P\nfrom functools import reduce\n\nprovides = {}\ndef provide(fcn):\n provides[fcn.name] = fcn\n\nprefix = \"la.\"\n\ndef np():\n import numpy\n return numpy\n\ndef rowKeys(x):\n return set(x.keys())\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in list(x.values())])\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\ndef rowVectorToArray(x):\n return x.T.tolist()[0]\n\ndef matrixToArrays(x):\n return x.tolist()\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows], dtype=np().double)\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\ndef matrixToMaps(x, rows, 
cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x.tolist()))\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\nclass MapApply(LibFcn):\n name = prefix + \"map\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\nprovide(MapApply())\n\nclass Scale(LibFcn):\n name = prefix + \"scale\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"alpha\": P.Double()}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"alpha\": P.Double()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"alpha\": P.Double()}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"alpha\": P.Double()}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24010\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[xj * alpha for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [xi * alpha for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in 
list(x.items()))\n\nprovide(Scale())\n\nclass ZipMap(LibFcn):\n name = prefix + \"zipmap\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)\n\nprovide(ZipMap())\n\nclass Add(LibFcn):\n name = prefix + \"add\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and 
\\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi + yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\nprovide(Add())\n\nclass Sub(LibFcn):\n name = prefix + \"sub\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif 
isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi - yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\nprovide(Sub())\n\nclass Dot(LibFcn):\n name = prefix + \"dot\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1][\"type\"] == \"array\":\n if isinstance(paramTypes[1][\"items\"], dict) and paramTypes[1][\"items\"][\"type\"] == \"array\":\n # array matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n 
raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n else:\n # array matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n elif paramTypes[1][\"type\"] == \"map\":\n if isinstance(paramTypes[1][\"values\"], dict) and paramTypes[1][\"values\"][\"type\"] == \"map\":\n # map matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n\n else:\n # map matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(math.isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = 
mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\nprovide(Dot())\n \nclass Transpose(LibFcn):\n name = prefix + \"transpose\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24060\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\nprovide(Transpose())\n\nclass Inverse(LibFcn):\n name = prefix + \"inverse\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24070\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi 
in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\nprovide(Inverse())\n\nclass Trace(LibFcn):\n name = prefix + \"trace\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\nprovide(Trace())\n\nclass Det(LibFcn):\n name = prefix + \"det\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols 
= len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n return float(\"nan\")\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n return float(\"nan\")\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\nprovide(Det())\n\nclass Symmetric(LibFcn):\n name = prefix + \"symmetric\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"tol\": P.Double()}], P.Boolean()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"tol\": P.Double()}], P.Boolean())])\n errcodeBase = 24100\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if 
raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)\n\nprovide(Symmetric())\n\nclass EigenBasis(LibFcn):\n name = prefix + \"eigenBasis\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n\n errcodeBase = 24110\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]\n\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)\n\nprovide(EigenBasis())\n\nclass Truncate(LibFcn):\n name = prefix + \"truncate\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"keep\": P.Int()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"keep\": P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return x[:keep]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few 
rows/cols\", self.errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\nprovide(Truncate())\n",
"step-ids": [
26,
42,
47,
53,
59
]
}
|
[
26,
42,
47,
53,
59
] |
from tkinter import *
from tkinter import messagebox

# Build the main application window.
root = Tk()


def hello():
    """Pop up a simple informational greeting dialog."""
    messagebox.showinfo("Say Hello", "Hello World")


# A single button that triggers the greeting when clicked.
greet_button = Button(root, text="Say Hello", command=hello, font='arial 20')
greet_button.pack()

# Hand control to Tk's event loop until the window is closed.
root.mainloop()
|
normal
|
{
"blob_id": "61e38ae6ae2a1ed061f9893742f45b3e44f19a68",
"index": 6110,
"step-1": "<mask token>\n\n\ndef hello():\n messagebox.showinfo('Say Hello', 'Hello World')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hello():\n messagebox.showinfo('Say Hello', 'Hello World')\n\n\n<mask token>\nB1.pack()\nmainloop()\n",
"step-3": "<mask token>\nroot = Tk()\n\n\ndef hello():\n messagebox.showinfo('Say Hello', 'Hello World')\n\n\nB1 = Button(root, text='Say Hello', command=hello, font='arial 20')\nB1.pack()\nmainloop()\n",
"step-4": "from tkinter import *\nfrom tkinter import messagebox\nroot = Tk()\n\n\ndef hello():\n messagebox.showinfo('Say Hello', 'Hello World')\n\n\nB1 = Button(root, text='Say Hello', command=hello, font='arial 20')\nB1.pack()\nmainloop()\n",
"step-5": "from tkinter import *\r\nfrom tkinter import messagebox\r\n\r\nroot = Tk()\r\ndef hello():\r\n messagebox.showinfo(\"Say Hello\", \"Hello World\")\r\n\r\nB1 = Button(root, text = \"Say Hello\", command = hello, font='arial 20')\r\nB1.pack()\r\n\r\nmainloop()\r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.db import models
#TODO: there should be an easier ticker -> ticker_id conversion, or a defined way to work with ticker.name;
#in most cases only the ticker name is available, not the id.
class User(models.Model):
    """A user who can trade stocks; holds a display name and a cash balance."""
    name = models.CharField(max_length=200)
    # Cash balance ("saldo"); 3 decimal places matches the price precision of Order.value.
    saldo = models.DecimalField(max_digits=12, decimal_places=3)
    def __unicode__(self):
        # Legacy Python 2 / old-Django string form, e.g. "User(alice, 100.000)".
        return "User(%s, %.3f)" %(self.name, self.saldo)
class Stock(models.Model):
    """A tradeable asset, identified by a unique ticker symbol."""
    # Short exchange symbol; uniqueness makes it usable as a natural key.
    ticker = models.CharField(max_length=8, unique=True)
    name = models.CharField(max_length=200)
    def __unicode__(self):
        # Legacy Python 2 / old-Django string form, e.g. "Stock(PETR4, Petrobras)".
        return "Stock(%s, %s)" %(self.ticker, self.name)
class Order(models.Model):
    """An order created by a user, placed into a given stock's order book."""
    # Side codes stored in ``tipo``: 'C' = buy ("Compra"), 'V' = sell ("Venda").
    ORDER_BUY = 'C'
    ORDER_SELL = 'V'
    # Life-cycle codes stored in ``status``.
    STATUS_NEW = 'N'
    STATUS_OPEN = 'A'
    STATUS_PARTIAL = 'P'
    STATUS_FINALIZED = 'F'
    STATUS_CANCELLED = 'C'
    # Choice labels are user-visible runtime data (kept in Portuguese on purpose).
    ORDER_TYPES = [
        (ORDER_BUY, "Compra"),
        (ORDER_SELL, "Venda") ]
    ORDER_STATUS = [
        (STATUS_NEW, "Nova"),
        (STATUS_OPEN, "Aberta"),
        (STATUS_PARTIAL, "Parcialmente Executada"),
        (STATUS_FINALIZED, "Finalizada"),
        (STATUS_CANCELLED, "Cancelada") ]
    user = models.ForeignKey(User)
    stock = models.ForeignKey(Stock)
    # 'C' or 'V' -- see ORDER_TYPES above.
    tipo = models.CharField(max_length=1, choices=ORDER_TYPES)
    status = models.CharField(max_length=1, choices=ORDER_STATUS, default=STATUS_NEW)
    # Quantity requested at creation vs. current (remaining) quantity;
    # presumably ``qty`` shrinks as fills happen -- TODO confirm in matching code.
    original_qty = models.IntegerField()
    qty = models.IntegerField()
    # Order price.  NOTE(review): max_digits=6 with 3 decimal places caps
    # prices below 1000 -- confirm that range is intended.
    value = models.DecimalField(max_digits=6, decimal_places=3)
    # Set once, when the order row is first saved.
    included = models.DateTimeField(auto_now_add = True)
    cancel_reason = models.CharField(max_length=255)
    def __unicode__(self):
        return "Order(%c, %d, %s, %s, %s | %s)" %(self.tipo, self.qty, self.stock.ticker, self.value, self.user.name, self.status)
class PortfolioItem(models.Model):
    """One holding (stock + quantity) inside a user's portfolio ("custódia")."""
    user = models.ForeignKey(User)
    stock = models.ForeignKey(Stock)
    qty = models.IntegerField()
    def __unicode__(self):
        # e.g. "PortfolioItem(alice, PETR4, 100)".
        return "PortfolioItem(%s, %s, %d)" %(self.user.name, self.stock.ticker, self.qty)
class Historical(models.Model):
    """Record of one executed trade between a buying and a selling user."""
    stock = models.ForeignKey(Stock)
    qty = models.IntegerField()
    value = models.DecimalField(max_digits=6, decimal_places=3)
    # Two FKs point at User, so each needs a distinct related_name to avoid
    # a reverse-accessor clash.
    user_buy = models.ForeignKey(User, related_name='buy_historical')
    user_sell = models.ForeignKey(User, related_name='sell_historical')
    # Set once, when the trade record is created.
    timestamp = models.DateTimeField(auto_now_add = True)
    def __unicode__(self):
        return "Historical(%s, %d, %s, %s, %s)" %\
            (self.stock.ticker, self.qty, self.value, self.user_buy.name, self.user_sell.name)
|
normal
|
{
"blob_id": "13e7484a80e4e45ee911f15837b9d82a1ef4d0b1",
"index": 7259,
"step-1": "from django.db import models\r\n\r\n#Precisa existir uma conversao ticker -> ticker_id mais facil, ou definir como trabalhar com o ticker.name,\r\n#na maioria dos casos só tenho o nome do ticker, nao o id.\r\n\r\nclass User(models.Model):\r\n \"\"\" Usuario que pode operar ativos \"\"\"\r\n name = models.CharField(max_length=200)\r\n saldo = models.DecimalField(max_digits=12, decimal_places=3)\r\n \r\n def __unicode__(self):\r\n return \"User(%s, %.3f)\" %(self.name, self.saldo)\r\n\r\nclass Stock(models.Model):\r\n \"\"\" Representa um ativo \"\"\"\r\n ticker = models.CharField(max_length=8, unique=True)\r\n name = models.CharField(max_length=200)\r\n \r\n def __unicode__(self):\r\n return \"Stock(%s, %s)\" %(self.ticker, self.name)\r\n\r\nclass Order(models.Model):\r\n \"\"\" Ordem criada por um usuario, que vai para o book de um dado ativo \"\"\"\r\n ORDER_BUY = 'C'\r\n ORDER_SELL = 'V'\r\n \r\n STATUS_NEW = 'N'\r\n STATUS_OPEN = 'A'\r\n STATUS_PARTIAL = 'P'\r\n STATUS_FINALIZED = 'F'\r\n STATUS_CANCELLED = 'C'\r\n \r\n ORDER_TYPES = [ \r\n (ORDER_BUY, \"Compra\"),\r\n (ORDER_SELL, \"Venda\") ]\r\n ORDER_STATUS = [\r\n (STATUS_NEW, \"Nova\"),\r\n (STATUS_OPEN, \"Aberta\"),\r\n (STATUS_PARTIAL, \"Parcialmente Executada\"),\r\n (STATUS_FINALIZED, \"Finalizada\"),\r\n (STATUS_CANCELLED, \"Cancelada\") ]\r\n \r\n user = models.ForeignKey(User)\r\n stock = models.ForeignKey(Stock)\r\n \r\n tipo = models.CharField(max_length=1, choices=ORDER_TYPES) \r\n status = models.CharField(max_length=1, choices=ORDER_STATUS, default=STATUS_NEW) \r\n original_qty = models.IntegerField()\r\n qty = models.IntegerField()\r\n value = models.DecimalField(max_digits=6, decimal_places=3)\r\n included = models.DateTimeField(auto_now_add = True)\r\n \r\n cancel_reason = models.CharField(max_length=255)\r\n \r\n def __unicode__(self):\r\n return \"Order(%c, %d, %s, %s, %s | %s)\" %(self.tipo, self.qty, self.stock.ticker, self.value, self.user.name, self.status)\r\n\r\nclass 
PortfolioItem(models.Model):\r\n \"\"\" Representa um ativo em uma custódia \"\"\"\r\n user = models.ForeignKey(User)\r\n stock = models.ForeignKey(Stock)\r\n qty = models.IntegerField()\r\n \r\n def __unicode__(self):\r\n return \"PortfolioItem(%s, %s, %d)\" %(self.user.name, self.stock.ticker, self.qty)\r\n \r\nclass Historical(models.Model):\r\n \"\"\" Registra uma negociacao efetuada \"\"\"\r\n stock = models.ForeignKey(Stock)\r\n qty = models.IntegerField()\r\n value = models.DecimalField(max_digits=6, decimal_places=3)\r\n user_buy = models.ForeignKey(User, related_name='buy_historical')\r\n user_sell = models.ForeignKey(User, related_name='sell_historical')\r\n timestamp = models.DateTimeField(auto_now_add = True)\r\n \r\n def __unicode__(self):\r\n return \"Historical(%s, %d, %s, %s, %s)\" %\\\r\n (self.stock.ticker, self.qty, self.value, self.user_buy.name, self.user_sell.name)\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
argparser.add_argument('ch', nargs='?', help='channel', type=int)
<|reserved_special_token_0|>
if args.ch is None:
for channel in range(0, 8):
print(f'== CHANNEL {channel} ==')
TCA9548A.write8(0, 1 << channel)
os.system('i2cdetect -y 1')
else:
TCA9548A.write8(0, 1 << args.ch)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
argparser = argparse.ArgumentParser(formatter_class=argparse.
ArgumentDefaultsHelpFormatter, description=
'Select I2C channel multiplexed by TCA9548A')
argparser.add_argument('ch', nargs='?', help='channel', type=int)
args = argparser.parse_args()
TCA9548A = I2C.get_i2c_device(112)
if args.ch is None:
for channel in range(0, 8):
print(f'== CHANNEL {channel} ==')
TCA9548A.write8(0, 1 << channel)
os.system('i2cdetect -y 1')
else:
TCA9548A.write8(0, 1 << args.ch)
<|reserved_special_token_1|>
import Adafruit_GPIO
import Adafruit_GPIO.I2C as I2C
import time
import sys
import argparse
import os
argparser = argparse.ArgumentParser(formatter_class=argparse.
ArgumentDefaultsHelpFormatter, description=
'Select I2C channel multiplexed by TCA9548A')
argparser.add_argument('ch', nargs='?', help='channel', type=int)
args = argparser.parse_args()
TCA9548A = I2C.get_i2c_device(112)
if args.ch is None:
for channel in range(0, 8):
print(f'== CHANNEL {channel} ==')
TCA9548A.write8(0, 1 << channel)
os.system('i2cdetect -y 1')
else:
TCA9548A.write8(0, 1 << args.ch)
<|reserved_special_token_1|>
#!/usr/bin/env python3.7
"""Select (or scan) an I2C channel behind a TCA9548A multiplexer.

With no argument: iterate over all 8 downstream channels, selecting each
in turn and running ``i2cdetect`` so attached devices can be located.
With a channel number: select just that channel and exit.
"""
import Adafruit_GPIO
import Adafruit_GPIO.I2C as I2C
import time
import sys
import argparse
import os

argparser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    description="Select I2C channel multiplexed by TCA9548A")
# choices=range(8): the TCA9548A has exactly 8 channels, one bit each in its
# 8-bit control register.  Without this check, ch >= 8 made ``1 << ch``
# overflow the register (silently selecting nothing) and ch < 0 raised a
# ValueError deep inside the shift -- fail fast with a clear usage error.
argparser.add_argument('ch', nargs='?', help="channel", type=int,
                       choices=range(8))
args = argparser.parse_args()

# The multiplexer is addressed at 0x70 on the host I2C bus.
TCA9548A = I2C.get_i2c_device(0x70)

if args.ch is None:
    # Scan mode: select each channel and probe bus 1 for devices.
    # Note the last channel (7) remains selected after the scan.
    for channel in range(0, 8):
        print(f"== CHANNEL {channel} ==")
        TCA9548A.write8(0, 1 << channel)
        os.system("i2cdetect -y 1")
else:
    # Writing a one-hot bitmask to register 0 enables that single channel.
    TCA9548A.write8(0, 1 << args.ch)
|
flexible
|
{
"blob_id": "46aa795bb72db0fcd588b1747e3559b8828be17c",
"index": 6927,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nargparser.add_argument('ch', nargs='?', help='channel', type=int)\n<mask token>\nif args.ch is None:\n for channel in range(0, 8):\n print(f'== CHANNEL {channel} ==')\n TCA9548A.write8(0, 1 << channel)\n os.system('i2cdetect -y 1')\nelse:\n TCA9548A.write8(0, 1 << args.ch)\n",
"step-3": "<mask token>\nargparser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, description=\n 'Select I2C channel multiplexed by TCA9548A')\nargparser.add_argument('ch', nargs='?', help='channel', type=int)\nargs = argparser.parse_args()\nTCA9548A = I2C.get_i2c_device(112)\nif args.ch is None:\n for channel in range(0, 8):\n print(f'== CHANNEL {channel} ==')\n TCA9548A.write8(0, 1 << channel)\n os.system('i2cdetect -y 1')\nelse:\n TCA9548A.write8(0, 1 << args.ch)\n",
"step-4": "import Adafruit_GPIO\nimport Adafruit_GPIO.I2C as I2C\nimport time\nimport sys\nimport argparse\nimport os\nargparser = argparse.ArgumentParser(formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, description=\n 'Select I2C channel multiplexed by TCA9548A')\nargparser.add_argument('ch', nargs='?', help='channel', type=int)\nargs = argparser.parse_args()\nTCA9548A = I2C.get_i2c_device(112)\nif args.ch is None:\n for channel in range(0, 8):\n print(f'== CHANNEL {channel} ==')\n TCA9548A.write8(0, 1 << channel)\n os.system('i2cdetect -y 1')\nelse:\n TCA9548A.write8(0, 1 << args.ch)\n",
"step-5": "#!/usr/bin/env python3.7\r\nimport Adafruit_GPIO\r\nimport Adafruit_GPIO.I2C as I2C\r\nimport time\r\nimport sys\r\nimport argparse\r\nimport os\r\n\r\nargparser = argparse.ArgumentParser(\r\n formatter_class=argparse.ArgumentDefaultsHelpFormatter,\r\n description=\"Select I2C channel multiplexed by TCA9548A\")\r\nargparser.add_argument('ch', nargs='?', help=\"channel\", type=int)\r\nargs = argparser.parse_args()\r\n\r\nTCA9548A = I2C.get_i2c_device(0x70)\r\n\r\nif args.ch is None:\r\n for channel in range(0,8):\r\n print(f\"== CHANNEL {channel} ==\")\r\n TCA9548A.write8(0, 1<<channel)\r\n os.system(\"i2cdetect -y 1\")\r\nelse:\r\n TCA9548A.write8(0, 1<<args.ch)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from nose.tools import *
from packt_offer import *
from bs4 import BeautifulSoup
class TestPacktOffer:
    """Unit tests for the packt_offer scraping and mail-building helpers.

    ``proper_soup`` models a complete Packtpub deal-of-the-day page;
    ``improper_soup`` models a page missing the image/title/description nodes.
    """
    def setUp(self):
        # Representative fragment of a real deal-of-the-day page.  The fixture
        # string deliberately begins with a stray '"' right after the opening
        # triple quote -- it is part of the text handed to BeautifulSoup.
        self.proper_soup = BeautifulSoup(
            """"
            <div id="deal-of-the-day" class="cf">
                <div class="dotd-main-book cf">
                    <div class="section-inner">
                        <div class="dotd-main-book-image float-left">
                            <a href="/application-development/github-essentials">
                            <noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url"/>
                            </noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" data-original="//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url" style="opacity: 1;">
                            </a>
                        </div>
                        <div class="dotd-main-book-summary float-left">
                            <div class="dotd-title">
                                <h2>Example title</h2>
                            </div>
                            <br>
                            <div>
                                An example description of book offered by Packtpub.
                                <ul>
                                    <li>First reason why you should read this book.</li>
                                    <li>Second reason why you should read this book.</li>
                                </ul>
                            </div>
                            <div class="dotd-main-book-form cf">
                                <div class="dots-main-book-price float-left"></div>
                                <div class="float-left free-ebook"></div>
                            </div>
                        </div>
                    </div>
                </div>
            </div>""", "html.parser")
        # Strip <br> tags so the extracters see contiguous text.
        for linebreak in self.proper_soup.find_all('br'):
            linebreak.extract()
        # Degenerate page: summary block present but no title/image/description.
        self.improper_soup = BeautifulSoup("""
            <div id="deal-of-the-day" class="cf">
                <div class="dotd-main-book cf">
                    <div class="section-inner">
                        <div class="dotd-main-book-summary float-left">
                            <div class="dotd-title">
                            </div>
                            <br>
                        </div>
                    </div>
                </div>
            </div>""", "html.parser")
        for linebreak in self.improper_soup.find_all('br'):
            linebreak.extract()
    def test_offer_image_url_extracter_proper(self):
        # Protocol-relative '//serv...' URL is expected back with 'http:' prefixed.
        result = offer_image_url_extracter(self.proper_soup)
        assert_equals(result,
                      'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')
    def test_offer_image_url_extracter_no_content(self):
        """Case when <div> with a given image class is not present in a given page."""
        result = offer_image_url_extracter(self.improper_soup)
        assert_equals(result, '')
    def test_offer_title_extracter_proper(self):
        # Title comes from the <h2> inside the dotd-title div.
        result = offer_title_extracter(self.proper_soup)
        assert_equals(result, 'Example title')
    def test_offer_title_extracter_no_content(self):
        # Empty dotd-title div yields an empty string, not an error.
        result = offer_title_extracter(self.improper_soup)
        assert_equals(result, '')
    def test_offer_description_extracter_proper(self):
        # The expected value preserves the fixture's exact whitespace; the
        # extracter evidently returns the serialized <div> markup verbatim.
        result = offer_description_extracter(self.proper_soup)
        assert_equals(result, """<div>
            An example description of book offered by Packtpub.
            <ul>
<li>First reason why you should read this book.</li>
<li>Second reason why you should read this book.</li>
</ul>
</div>
""")
    def test_offer_description_extracter_no_content(self):
        result = offer_description_extracter(self.improper_soup)
        assert_equals(result, '')
    def test_message_creator_all_proper(self):
        """Checks the three MIME parts of a fully-populated offer email."""
        msg = message_creator(b'000000', 'www.image.com/image.jpg', 'Offer title', 'Offer description',
                              'sender@mail.com', ['receiver@mail.com'])
        # 1) top-level MIME envelope headers.
        assert_in(
            """\
MIME-Version: 1.0
Subject: Packt offer: Offer title
From: sender@mail.com
To: receiver@mail.com

This is a multi-part message in MIME format.""", msg)
        # 2) HTML body referencing the inline image by Content-ID.
        assert_in(
            """\
            <div><h2>New Packtpub offer:</h2></div>
            </br>
            <div>
            <img src="cid:image1">
            </div>
            <div><h2>Offer title</h2></div>
            </br>
            <div>Offer description</div>
            </br>
            <a href="https://www.packtpub.com/packt/offers/free-learning">Get it!</a>""", msg)
        # 3) the attached image part headers.
        assert_in(
            """\
Content-Type: image/jpeg
MIME-Version: 1.0
Content-Transfer-Encoding: base64
Content-ID: <image1>
Content-Disposition: inline; filename="www.image.com/image.jpg"\
""", msg)
    @raises(AttributeError)
    def test_message_creator_wrong_image_url(self):
        # URL without a path component -- presumably the filename split fails
        # with AttributeError inside message_creator; confirm against its code.
        msg = message_creator(b'000000', 'www.image.com', 'Offer title', 'Offer description',
                              'sender@mail.com', ['receiver@mail.com'])
|
normal
|
{
"blob_id": "a29f89750ef3a55116959b217b8c9100b294c66c",
"index": 3766,
"step-1": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n <mask token>\n <mask token>\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n <mask token>\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n <mask token>\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', 'sender@mail.com', [\n 'receiver@mail.com'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: sender@mail.com\nTo: receiver@mail.com\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\\\"\"\"\"\n , msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', 'sender@mail.com', 
['receiver@mail.com'])\n",
"step-3": "<mask token>\n\n\nclass TestPacktOffer:\n <mask token>\n\n def test_offer_image_url_extracter_proper(self):\n result = offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n <mask token>\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', 'sender@mail.com', [\n 'receiver@mail.com'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: sender@mail.com\nTo: receiver@mail.com\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; 
filename=\"www.image.com/image.jpg\\\"\"\"\"\n , msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', 'sender@mail.com', ['receiver@mail.com'])\n",
"step-4": "from nose.tools import *\nfrom packt_offer import *\nfrom bs4 import BeautifulSoup\n\n\nclass TestPacktOffer:\n\n def setUp(self):\n self.proper_soup = BeautifulSoup(\n \"\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-image float-left\">\n <a href=\"/application-development/github-essentials\">\n <noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\"/>\n </noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" data-original=\"//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\" style=\"opacity: 1;\">\t\t\t\t\t\t\n </a>\n </div>\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n <h2>Example title</h2>\n </div>\n <br>\n <div>\n An example description of book offered by Packtpub.\n <ul>\n <li>First reason why you should read this book.</li>\n <li>Second reason why you should read this book.</li>\n </ul>\n </div>\n <div class=\"dotd-main-book-form cf\">\n <div class=\"dots-main-book-price float-left\"></div>\n <div class=\"float-left free-ebook\"></div>\n </div>\n </div>\n \n </div>\n </div>\n </div>\"\"\"\n , 'html.parser')\n for linebreak in self.proper_soup.find_all('br'):\n linebreak.extract()\n self.improper_soup = BeautifulSoup(\n \"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n </div>\n <br>\n </div>\n\n </div>\n </div>\n </div>\"\"\"\n , 'html.parser')\n for linebreak in self.improper_soup.find_all('br'):\n linebreak.extract()\n\n def test_offer_image_url_extracter_proper(self):\n result = 
offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n\n def test_offer_title_extracter_no_content(self):\n result = offer_title_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result,\n \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\"\n )\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg',\n 'Offer title', 'Offer description', 'sender@mail.com', [\n 'receiver@mail.com'])\n assert_in(\n \"\"\"MIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: sender@mail.com\nTo: receiver@mail.com\n\nThis is a multi-part message in MIME format.\"\"\"\n , msg)\n assert_in(\n \"\"\" <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\"\n , msg)\n assert_in(\n \"\"\"Content-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\\\"\"\"\"\n , 
msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title',\n 'Offer description', 'sender@mail.com', ['receiver@mail.com'])\n",
"step-5": "from nose.tools import *\nfrom packt_offer import *\nfrom bs4 import BeautifulSoup\n\n\nclass TestPacktOffer:\n def setUp(self):\n self.proper_soup = BeautifulSoup(\n \"\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-image float-left\">\n <a href=\"/application-development/github-essentials\">\n <noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\"/>\n </noscript><img src=\"//serv.cloudfront.net/sites/imagecache/9781783553716.png\" data-original=\"//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png\" class=\"bookimage imagecache imagecache-dotd_main_image\" itemprop=\"url\" style=\"opacity: 1;\">\t\t\t\t\t\t\n </a>\n </div>\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n <h2>Example title</h2>\n </div>\n <br>\n <div>\n An example description of book offered by Packtpub.\n <ul>\n <li>First reason why you should read this book.</li>\n <li>Second reason why you should read this book.</li>\n </ul>\n </div>\n <div class=\"dotd-main-book-form cf\">\n <div class=\"dots-main-book-price float-left\"></div>\n <div class=\"float-left free-ebook\"></div>\n </div>\n </div>\n \n </div>\n </div>\n </div>\"\"\", \"html.parser\")\n for linebreak in self.proper_soup.find_all('br'):\n linebreak.extract()\n\n self.improper_soup = BeautifulSoup(\"\"\"\n <div id=\"deal-of-the-day\" class=\"cf\">\n <div class=\"dotd-main-book cf\">\n <div class=\"section-inner\">\n <div class=\"dotd-main-book-summary float-left\">\n <div class=\"dotd-title\">\n </div>\n <br>\n </div>\n\n </div>\n </div>\n </div>\"\"\", \"html.parser\")\n\n for linebreak in self.improper_soup.find_all('br'):\n linebreak.extract()\n\n def test_offer_image_url_extracter_proper(self):\n result = 
offer_image_url_extracter(self.proper_soup)\n assert_equals(result,\n 'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')\n\n def test_offer_image_url_extracter_no_content(self):\n \"\"\"Case when <div> with a given image class is not present in a given page.\"\"\"\n result = offer_image_url_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_title_extracter_proper(self):\n result = offer_title_extracter(self.proper_soup)\n assert_equals(result, 'Example title')\n\n def test_offer_title_extracter_no_content(self):\n result = offer_title_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_offer_description_extracter_proper(self):\n result = offer_description_extracter(self.proper_soup)\n assert_equals(result, \"\"\"<div>\n An example description of book offered by Packtpub.\n <ul>\n<li>First reason why you should read this book.</li>\n<li>Second reason why you should read this book.</li>\n</ul>\n</div>\n\"\"\")\n\n def test_offer_description_extracter_no_content(self):\n result = offer_description_extracter(self.improper_soup)\n assert_equals(result, '')\n\n def test_message_creator_all_proper(self):\n msg = message_creator(b'000000', 'www.image.com/image.jpg', 'Offer title', 'Offer description',\n 'sender@mail.com', ['receiver@mail.com'])\n assert_in(\n \"\"\"\\\nMIME-Version: 1.0\nSubject: Packt offer: Offer title\nFrom: sender@mail.com\nTo: receiver@mail.com\n\nThis is a multi-part message in MIME format.\"\"\", msg)\n\n assert_in(\n \"\"\"\\\n <div><h2>New Packtpub offer:</h2></div>\n </br>\n <div>\n <img src=\"cid:image1\">\n </div>\n <div><h2>Offer title</h2></div>\n </br>\n <div>Offer description</div>\n </br>\n <a href=\"https://www.packtpub.com/packt/offers/free-learning\">Get it!</a>\"\"\", msg)\n\n assert_in(\n \"\"\"\\\nContent-Type: image/jpeg\nMIME-Version: 1.0\nContent-Transfer-Encoding: base64\nContent-ID: <image1>\nContent-Disposition: inline; filename=\"www.image.com/image.jpg\"\\\n\"\"\", 
msg)\n\n @raises(AttributeError)\n def test_message_creator_wrong_image_url(self):\n msg = message_creator(b'000000', 'www.image.com', 'Offer title', 'Offer description',\n 'sender@mail.com', ['receiver@mail.com'])\n",
"step-ids": [
3,
7,
8,
11,
12
]
}
|
[
3,
7,
8,
11,
12
] |
"""product_ingredient unique constraint
Revision ID: a07768b0d4c0
Revises: a80cd9a35e58
Create Date: 2017-05-18 11:39:52.258266
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a07768b0d4c0'        # this migration's id
down_revision = 'a80cd9a35e58'   # migration this one applies on top of
branch_labels = None             # not part of a named branch
depends_on = None                # no cross-branch dependency
def upgrade():
    """Add unique constraints and rename the misspelled
    product_ingredient.stength column to strength."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Ingredients are unique per (name, unit) pair.
    op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name', 'unit'])
    # NOTE(review): a None constraint name relies on a configured
    # naming_convention; downgrade() also drops it by None -- confirm the
    # convention is set, otherwise that drop cannot locate the constraint.
    op.create_unique_constraint(None, 'product', ['nappi_code'])
    # "Rename" stength -> strength via add + drop; data in the old column
    # is NOT migrated by these operations.
    op.add_column('product_ingredient', sa.Column('strength', sa.String(), nullable=True))
    op.create_unique_constraint('_unique_product_ingredient_strength', 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])
    op.drop_column('product_ingredient', 'stength')
    # ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('product_ingredient', sa.Column('stength', sa.VARCHAR(), autoincrement=False, nullable=True))
op.drop_constraint('_unique_product_ingredient_strength', 'product_ingredient', type_='unique')
op.drop_column('product_ingredient', 'strength')
op.drop_constraint(None, 'product', type_='unique')
op.drop_constraint('_unique_name_unit', 'ingredient', type_='unique')
# ### end Alembic commands ###
|
normal
|
{
"blob_id": "d0a73385db0dd6f729d267095ef83b9fec72e40c",
"index": 1464,
"step-1": "<mask token>\n\n\ndef upgrade():\n op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name',\n 'unit'])\n op.create_unique_constraint(None, 'product', ['nappi_code'])\n op.add_column('product_ingredient', sa.Column('strength', sa.String(),\n nullable=True))\n op.create_unique_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])\n op.drop_column('product_ingredient', 'stength')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef upgrade():\n op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name',\n 'unit'])\n op.create_unique_constraint(None, 'product', ['nappi_code'])\n op.add_column('product_ingredient', sa.Column('strength', sa.String(),\n nullable=True))\n op.create_unique_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])\n op.drop_column('product_ingredient', 'stength')\n\n\ndef downgrade():\n op.add_column('product_ingredient', sa.Column('stength', sa.VARCHAR(),\n autoincrement=False, nullable=True))\n op.drop_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', type_='unique')\n op.drop_column('product_ingredient', 'strength')\n op.drop_constraint(None, 'product', type_='unique')\n op.drop_constraint('_unique_name_unit', 'ingredient', type_='unique')\n",
"step-3": "<mask token>\nrevision = 'a07768b0d4c0'\ndown_revision = 'a80cd9a35e58'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name',\n 'unit'])\n op.create_unique_constraint(None, 'product', ['nappi_code'])\n op.add_column('product_ingredient', sa.Column('strength', sa.String(),\n nullable=True))\n op.create_unique_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])\n op.drop_column('product_ingredient', 'stength')\n\n\ndef downgrade():\n op.add_column('product_ingredient', sa.Column('stength', sa.VARCHAR(),\n autoincrement=False, nullable=True))\n op.drop_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', type_='unique')\n op.drop_column('product_ingredient', 'strength')\n op.drop_constraint(None, 'product', type_='unique')\n op.drop_constraint('_unique_name_unit', 'ingredient', type_='unique')\n",
"step-4": "<mask token>\nfrom alembic import op\nimport sqlalchemy as sa\nrevision = 'a07768b0d4c0'\ndown_revision = 'a80cd9a35e58'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name',\n 'unit'])\n op.create_unique_constraint(None, 'product', ['nappi_code'])\n op.add_column('product_ingredient', sa.Column('strength', sa.String(),\n nullable=True))\n op.create_unique_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])\n op.drop_column('product_ingredient', 'stength')\n\n\ndef downgrade():\n op.add_column('product_ingredient', sa.Column('stength', sa.VARCHAR(),\n autoincrement=False, nullable=True))\n op.drop_constraint('_unique_product_ingredient_strength',\n 'product_ingredient', type_='unique')\n op.drop_column('product_ingredient', 'strength')\n op.drop_constraint(None, 'product', type_='unique')\n op.drop_constraint('_unique_name_unit', 'ingredient', type_='unique')\n",
"step-5": "\"\"\"product_ingredient unique constraint\n\nRevision ID: a07768b0d4c0\nRevises: a80cd9a35e58\nCreate Date: 2017-05-18 11:39:52.258266\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'a07768b0d4c0'\ndown_revision = 'a80cd9a35e58'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.create_unique_constraint('_unique_name_unit', 'ingredient', ['name', 'unit'])\n op.create_unique_constraint(None, 'product', ['nappi_code'])\n op.add_column('product_ingredient', sa.Column('strength', sa.String(), nullable=True))\n op.create_unique_constraint('_unique_product_ingredient_strength', 'product_ingredient', ['product_id', 'ingredient_id', 'strength'])\n op.drop_column('product_ingredient', 'stength')\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.add_column('product_ingredient', sa.Column('stength', sa.VARCHAR(), autoincrement=False, nullable=True))\n op.drop_constraint('_unique_product_ingredient_strength', 'product_ingredient', type_='unique')\n op.drop_column('product_ingredient', 'strength')\n op.drop_constraint(None, 'product', type_='unique')\n op.drop_constraint('_unique_name_unit', 'ingredient', type_='unique')\n # ### end Alembic commands ###\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
user_id = event.source.user_id
if not users.objects.filter(uid=user_id).exists():
unit = users.objects.create(uid=user_id)
unit.save()
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext == '@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6:
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2:
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
user_id = event.source.user_id
if not users.objects.filter(uid=user_id).exists():
unit = users.objects.create(uid=user_id)
unit.save()
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext == '@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6:
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2:
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
def listall(request):
user = users.objects.all().order_by('name')
return render(request, 'listall.html', locals())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
line_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)
parser = WebhookParser(settings.LINE_CHANNEL_SECRET)
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
user_id = event.source.user_id
if not users.objects.filter(uid=user_id).exists():
unit = users.objects.create(uid=user_id)
unit.save()
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext == '@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6:
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2:
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
def listall(request):
user = users.objects.all().order_by('name')
return render(request, 'listall.html', locals())
<|reserved_special_token_1|>
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from linebot import LineBotApi, WebhookParser
from linebot.exceptions import InvalidSignatureError, LineBotApiError
from linebot.models import MessageEvent, TextMessage
from module import func
from urllib.parse import parse_qsl
from func5api.models import users
from django.shortcuts import render
line_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)
parser = WebhookParser(settings.LINE_CHANNEL_SECRET)
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
user_id = event.source.user_id
if not users.objects.filter(uid=user_id).exists():
unit = users.objects.create(uid=user_id)
unit.save()
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext == '@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6:
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2:
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
def listall(request):
user = users.objects.all().order_by('name')
return render(request, 'listall.html', locals())
<|reserved_special_token_1|>
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from linebot import LineBotApi, WebhookParser
from linebot.exceptions import InvalidSignatureError, LineBotApiError
from linebot.models import MessageEvent, TextMessage
from module import func
from urllib.parse import parse_qsl
from func5api.models import users
from django.shortcuts import render
line_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)
parser = WebhookParser(settings.LINE_CHANNEL_SECRET)
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
user_id = event.source.user_id #取得user_id
if not(users.objects.filter(uid = user_id).exists()): #將user_id存入資料庫中
unit = users.objects.create(uid = user_id)
unit.save() #將user_id上傳至資料庫
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext =='@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6: #all
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2: #specify
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
def listall(request):
user = users.objects.all().order_by('name')
return render(request, "listall.html", locals())
|
flexible
|
{
"blob_id": "19f202c32e1cf9f7ab2663827f1f98080f70b83e",
"index": 8313,
"step-1": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-3": "<mask token>\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-4": "from django.conf import settings\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\nfrom django.views.decorators.csrf import csrf_exempt\nfrom linebot import LineBotApi, WebhookParser\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\nfrom linebot.models import MessageEvent, TextMessage\nfrom module import func\nfrom urllib.parse import parse_qsl\nfrom func5api.models import users\nfrom django.shortcuts import render\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-5": "from django.conf import settings\r\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\nfrom linebot import LineBotApi, WebhookParser\r\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\r\nfrom linebot.models import MessageEvent, TextMessage\r\nfrom module import func\r\nfrom urllib.parse import parse_qsl\r\nfrom func5api.models import users\r\nfrom django.shortcuts import render\r\n\r\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\r\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\r\n\r\n@csrf_exempt\r\ndef callback(request):\r\n if request.method == 'POST':\r\n signature = request.META['HTTP_X_LINE_SIGNATURE']\r\n body = request.body.decode('utf-8')\r\n try:\r\n events = parser.parse(body, signature)\r\n except InvalidSignatureError:\r\n return HttpResponseForbidden()\r\n except LineBotApiError:\r\n return HttpResponseBadRequest()\r\n\r\n for event in events:\r\n if isinstance(event, MessageEvent):\r\n user_id = event.source.user_id #取得user_id\r\n if not(users.objects.filter(uid = user_id).exists()): #將user_id存入資料庫中\r\n unit = users.objects.create(uid = user_id)\r\n unit.save() #將user_id上傳至資料庫\r\n if isinstance(event.message, TextMessage):\r\n mtext = event.message.text\r\n if mtext == '@修繕申請':\r\n func.sendFix(event, user_id)\r\n elif mtext =='@修繕查詢':\r\n func.fix_inquire(event, user_id)\r\n elif mtext == 'admin_mode':\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:6] == '123456' and len(mtext) > 6: #all\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '++' and len(mtext) > 2: #specify\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '##' and len(mtext) > 2:\r\n func.manageForm(event, mtext, user_id)\r\n elif mtext[:3] == '!!!' 
and len(mtext) > 3:\r\n func.personData(event, mtext, user_id)\r\n \r\n return HttpResponse()\r\n\r\n else:\r\n return HttpResponseBadRequest()\r\n \r\ndef listall(request):\r\n user = users.objects.all().order_by('name')\r\n return render(request, \"listall.html\", locals())\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from . import colorbar_artist
from . import subplot_artist
from . import surface_3d_with_shadows
from .colorbar_artist import *
from .subplot_artist import *
from .surface_3d_with_shadows import *
__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']
__all__.extend(colorbar_artist.__all__)
__all__.extend(subplot_artist.__all__)
__all__.extend(surface_3d_with_shadows.__all__)
|
normal
|
{
"blob_id": "16c4dbd472f9d32e5fa48a28dff4a40914f7d29e",
"index": 8231,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-3": "<mask token>\n__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-4": "from . import colorbar_artist\nfrom . import subplot_artist\nfrom . import surface_3d_with_shadows\nfrom .colorbar_artist import *\nfrom .subplot_artist import *\nfrom .surface_3d_with_shadows import *\n__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
x = 'From marquard@uct.ac.za'
print(x[8])
x = 'From marquard@uct.ac.za'
print(x[14:17])
greet = 'Hello Bob'
xa = "aaa"
print(greet.upper())
print(len('banana')*7)
data = 'From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008'
pos = data.find('.')
print(data[pos:pos+3])
stuff = dict()
print(stuff.get('candy',-1))
|
normal
|
{
"blob_id": "e26f673dfae38148a56927ce82d5ea7ea2545e12",
"index": 8540,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(x[8])\n<mask token>\nprint(x[14:17])\n<mask token>\nprint(greet.upper())\nprint(len('banana') * 7)\n<mask token>\nprint(data[pos:pos + 3])\n<mask token>\nprint(stuff.get('candy', -1))\n",
"step-3": "x = 'From marquard@uct.ac.za'\nprint(x[8])\nx = 'From marquard@uct.ac.za'\nprint(x[14:17])\ngreet = 'Hello Bob'\nxa = 'aaa'\nprint(greet.upper())\nprint(len('banana') * 7)\ndata = 'From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008'\npos = data.find('.')\nprint(data[pos:pos + 3])\nstuff = dict()\nprint(stuff.get('candy', -1))\n",
"step-4": "x = 'From marquard@uct.ac.za'\nprint(x[8])\n\nx = 'From marquard@uct.ac.za'\nprint(x[14:17])\n\ngreet = 'Hello Bob'\nxa = \"aaa\"\nprint(greet.upper())\n\n\nprint(len('banana')*7)\n\ndata = 'From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008'\npos = data.find('.')\nprint(data[pos:pos+3])\n\nstuff = dict()\nprint(stuff.get('candy',-1))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class PacketSender:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def await_acks(conn):
print('awaiting acks')
while not PacketSender.acked_all_packets:
data, sender = conn.recvfrom(1024)
threading.Thread(target=PacketSender.handle_ack, args=(data,)
).start()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def send_as_packets(data, conn, destination, peer_ip, peer_port):
global sent_packets
global acked_packets
global next_seq_num
global acked_all_packets
global seq_num
PacketSender.reset()
max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN
curr = [0, 0]
def nbytes(n):
curr[0], curr[1] = curr[1], curr[1] + n
return data[curr[0]:curr[1]]
remaining_data = len(data)
if remaining_data > 0:
threading.Thread(target=PacketSender.await_acks, args=(conn,)
).start()
while remaining_data > 0:
while (sent_packets < PacketConstructor.window_size and
remaining_data > 0):
print('sending packet %d' % seq_num)
if remaining_data > max_payload_length:
p = Packet(packet_type=PacketConstructor.data_type,
seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
peer_port, is_last_packet=False, payload=nbytes(
max_payload_length))
conn.sendto(p.to_bytes(), destination)
sent_packets += 1
remaining_data -= max_payload_length
seq_num += 1
PacketSender.spawn_resend_thread(conn, p, destination)
print('not last packet')
else:
p = Packet(packet_type=PacketConstructor.data_type,
seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
peer_port, is_last_packet=True, payload=nbytes(
remaining_data))
conn.sendto(p.to_bytes(), destination)
sent_packets += 1
remaining_data -= remaining_data
seq_num += 1
print('remaining data ' + str(remaining_data))
print('is last packet')
PacketSender.spawn_resend_thread(conn, p, destination)
while next_seq_num in acked_packets:
next_seq_num += 1
sent_packets -= 1
print('Waiting for acks')
while not acked_all_packets:
pass
print('RECEIVED ALL ACKS')
PacketSender.was_reset = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PacketSender:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def reset(self):
global seq_num
global sent_packets
global next_seq_num
global acked_packets
global acked_all_packets
global acked_packets_lock
seq_num = 0
sent_packets = 0
next_seq_num = 0
acked_packets = []
acked_all_packets = False
acked_packets_lock = threading.Lock()
def handle_ack(data):
global acked_packets
global seq_num
global acked_all_packets
global acked_packets_lock
p = Packet.from_bytes(data)
if not p.packet_type == PacketConstructor.ack_type:
return
print('received ack ' + str(p.seq_num))
acked_packets_lock.acquire()
if p.seq_num not in acked_packets:
print("it's a new ack")
acked_packets.append(p.seq_num)
if len(acked_packets) == seq_num:
print('got all acks')
acked_all_packets = True
else:
print('len: ' + str(len(acked_packets)))
print('seq_num: ' + str(seq_num))
acked_packets_lock.release()
def await_acks(conn):
print('awaiting acks')
while not PacketSender.acked_all_packets:
data, sender = conn.recvfrom(1024)
threading.Thread(target=PacketSender.handle_ack, args=(data,)
).start()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def send_as_packets(data, conn, destination, peer_ip, peer_port):
global sent_packets
global acked_packets
global next_seq_num
global acked_all_packets
global seq_num
PacketSender.reset()
max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN
curr = [0, 0]
def nbytes(n):
curr[0], curr[1] = curr[1], curr[1] + n
return data[curr[0]:curr[1]]
remaining_data = len(data)
if remaining_data > 0:
threading.Thread(target=PacketSender.await_acks, args=(conn,)
).start()
while remaining_data > 0:
while (sent_packets < PacketConstructor.window_size and
remaining_data > 0):
print('sending packet %d' % seq_num)
if remaining_data > max_payload_length:
p = Packet(packet_type=PacketConstructor.data_type,
seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
peer_port, is_last_packet=False, payload=nbytes(
max_payload_length))
conn.sendto(p.to_bytes(), destination)
sent_packets += 1
remaining_data -= max_payload_length
seq_num += 1
PacketSender.spawn_resend_thread(conn, p, destination)
print('not last packet')
else:
p = Packet(packet_type=PacketConstructor.data_type,
seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
peer_port, is_last_packet=True, payload=nbytes(
remaining_data))
conn.sendto(p.to_bytes(), destination)
sent_packets += 1
remaining_data -= remaining_data
seq_num += 1
print('remaining data ' + str(remaining_data))
print('is last packet')
PacketSender.spawn_resend_thread(conn, p, destination)
while next_seq_num in acked_packets:
next_seq_num += 1
sent_packets -= 1
print('Waiting for acks')
while not acked_all_packets:
pass
print('RECEIVED ALL ACKS')
PacketSender.was_reset = True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PacketSender:
"""
Packet represents a simulated UDP packet.
"""
seq_num = 0
next_seq_num = 0
sent_packets = 0
acked_packets = []
acked_all_packets = False
acked_packets_lock = threading.Lock()
was_reset = False
def reset(self):
global seq_num
global sent_packets
global next_seq_num
global acked_packets
global acked_all_packets
global acked_packets_lock
seq_num = 0
sent_packets = 0
next_seq_num = 0
acked_packets = []
acked_all_packets = False
acked_packets_lock = threading.Lock()
def handle_ack(data):
global acked_packets
global seq_num
global acked_all_packets
global acked_packets_lock
p = Packet.from_bytes(data)
if not p.packet_type == PacketConstructor.ack_type:
return
print('received ack ' + str(p.seq_num))
acked_packets_lock.acquire()
if p.seq_num not in acked_packets:
print("it's a new ack")
acked_packets.append(p.seq_num)
if len(acked_packets) == seq_num:
print('got all acks')
acked_all_packets = True
else:
print('len: ' + str(len(acked_packets)))
print('seq_num: ' + str(seq_num))
acked_packets_lock.release()
def await_acks(conn):
print('awaiting acks')
while not PacketSender.acked_all_packets:
data, sender = conn.recvfrom(1024)
threading.Thread(target=PacketSender.handle_ack, args=(data,)
).start()
def resend_packet_if_needed(conn, packet, destination):
while (not packet.seq_num in PacketSender.acked_packets and not
PacketSender.was_reset):
print('starting resend loop')
time.sleep(0.5)
acked_packets_lock.acquire()
if (not packet.seq_num in PacketSender.acked_packets and not
PacketSender.was_reset):
print('resending packet ' + str(packet.seq_num))
conn.sendto(packet.to_bytes(), destination)
acked_packets_lock.release()
def spawn_resend_thread(conn, packet, destination):
threading.Thread(target=PacketSender.resend_packet_if_needed, args=
(conn, packet, destination)).start()
@staticmethod
def send_as_packets(data, conn, destination, peer_ip, peer_port):
    """Send ``data`` to ``destination`` over ``conn`` as a windowed
    sequence of packets, blocking until every packet is acknowledged.

    NOTE(review): the counters mutated here are module globals, while
    the helper methods read PacketSender class attributes of the same
    names; PacketSender.reset() is also called without an instance even
    though reset takes ``self`` -- verify against the class definition.
    """
    global sent_packets
    global acked_packets
    global next_seq_num
    global acked_all_packets
    global seq_num
    PacketSender.reset()
    # Largest payload a single packet can carry.
    max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN
    # Sliding [start, end) cursor over data, advanced by nbytes().
    curr = [0, 0]

    def nbytes(n):
        # Consume and return the next n bytes of data.
        curr[0], curr[1] = curr[1], curr[1] + n
        return data[curr[0]:curr[1]]
    remaining_data = len(data)
    if remaining_data > 0:
        # Background receiver collecting ACKs while we send.
        threading.Thread(target=PacketSender.await_acks, args=(conn,)
            ).start()
    while remaining_data > 0:
        # Fill the window: keep sending while there is room and data left.
        while (sent_packets < PacketConstructor.window_size and
                remaining_data > 0):
            print('sending packet %d' % seq_num)
            if remaining_data > max_payload_length:
                # Full-size, non-final packet.
                p = Packet(packet_type=PacketConstructor.data_type,
                    seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
                    peer_port, is_last_packet=False, payload=nbytes(
                    max_payload_length))
                conn.sendto(p.to_bytes(), destination)
                sent_packets += 1
                remaining_data -= max_payload_length
                seq_num += 1
                PacketSender.spawn_resend_thread(conn, p, destination)
                print('not last packet')
            else:
                # Final (possibly short) packet of the transfer.
                p = Packet(packet_type=PacketConstructor.data_type,
                    seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=
                    peer_port, is_last_packet=True, payload=nbytes(
                    remaining_data))
                conn.sendto(p.to_bytes(), destination)
                sent_packets += 1
                remaining_data -= remaining_data
                seq_num += 1
                print('remaining data ' + str(remaining_data))
                print('is last packet')
                PacketSender.spawn_resend_thread(conn, p, destination)
        # Slide the window past consecutively acknowledged packets.
        while next_seq_num in acked_packets:
            next_seq_num += 1
            sent_packets -= 1
    print('Waiting for acks')
    while not acked_all_packets:
        # Busy-wait until handle_ack flips the flag.
        pass
    print('RECEIVED ALL ACKS')
    PacketSender.was_reset = True
<|reserved_special_token_1|>
from packet import Packet
from packetConstructor import PacketConstructor
import threading
import time
class PacketSender:
    """Sends a byte payload over a simulated-UDP socket as a windowed
    sequence of packets, retransmitting each packet until it is acked.

    All transfer state lives in class attributes so the sender loop, the
    ACK-receiver thread and the per-packet resend threads observe the
    same values.

    Fixed: the original mixed module-level globals (written by reset()
    and handle_ack()) with class attributes of the same names (read by
    await_acks() and resend_packet_if_needed()), so the completion flag
    was never observed and the bare lock name raised NameError; it also
    invoked the instance method reset() as PacketSender.reset(), a
    TypeError. State access is now uniformly via class attributes and
    the helpers are static.
    """

    # Next sequence number to assign to an outgoing packet.
    seq_num = 0
    # Next in-order sequence number whose ACK we are waiting for.
    next_seq_num = 0
    # Number of packets currently in flight (sent but not yet acked).
    sent_packets = 0
    # Sequence numbers acknowledged so far.
    acked_packets = []
    # True once every sent packet has been acknowledged.
    acked_all_packets = False
    # Guards acked_packets / acked_all_packets across threads.
    acked_packets_lock = threading.Lock()
    # Set after a transfer completes; stops lingering resend threads.
    was_reset = False

    @staticmethod
    def reset():
        """Reset the shared transfer state before starting a new send.

        Note: was_reset is intentionally left untouched, matching the
        original behavior (it is only set at the end of a transfer).
        """
        PacketSender.seq_num = 0
        PacketSender.sent_packets = 0
        PacketSender.next_seq_num = 0
        PacketSender.acked_packets = []
        PacketSender.acked_all_packets = False
        PacketSender.acked_packets_lock = threading.Lock()

    @staticmethod
    def handle_ack(data):
        """Record one incoming ACK datagram.

        Non-ACK packets are ignored. Under the lock, a newly seen
        sequence number is appended to acked_packets; once every sent
        packet (seq_num of them) has been acked, acked_all_packets is
        set so the waiting loops can finish.
        """
        p = Packet.from_bytes(data)
        if not p.packet_type == PacketConstructor.ack_type:
            # TODO: handle NAKs here
            return
        print('received ack ' + str(p.seq_num))
        PacketSender.acked_packets_lock.acquire()
        try:
            if p.seq_num not in PacketSender.acked_packets:
                print("it's a new ack")
                PacketSender.acked_packets.append(p.seq_num)
                if len(PacketSender.acked_packets) == PacketSender.seq_num:
                    print('got all acks')
                    PacketSender.acked_all_packets = True
                else:
                    print('len: ' + str(len(PacketSender.acked_packets)))
                    print('seq_num: ' + str(PacketSender.seq_num))
        finally:
            PacketSender.acked_packets_lock.release()

    @staticmethod
    def await_acks(conn):
        """Receive datagrams from ``conn`` and dispatch each to
        handle_ack on its own thread until the transfer is fully acked."""
        print('awaiting acks')
        while not PacketSender.acked_all_packets:
            data, sender = conn.recvfrom(1024)
            threading.Thread(target=PacketSender.handle_ack,
                             args=(data,)).start()

    @staticmethod
    def resend_packet_if_needed(conn, packet, destination):
        """Retransmit ``packet`` every 0.5 s until it is acknowledged or
        the sender finishes (was_reset)."""
        while (packet.seq_num not in PacketSender.acked_packets
                and not PacketSender.was_reset):
            print('starting resend loop')
            time.sleep(0.5)
            # Re-check under the lock so an ack that raced in does not
            # trigger a spurious retransmission.
            PacketSender.acked_packets_lock.acquire()
            try:
                if (packet.seq_num not in PacketSender.acked_packets
                        and not PacketSender.was_reset):
                    print('resending packet ' + str(packet.seq_num))
                    conn.sendto(packet.to_bytes(), destination)
            finally:
                PacketSender.acked_packets_lock.release()

    @staticmethod
    def spawn_resend_thread(conn, packet, destination):
        """Start a background thread that keeps retransmitting
        ``packet`` until it is acknowledged."""
        threading.Thread(target=PacketSender.resend_packet_if_needed,
                         args=(conn, packet, destination)).start()

    @staticmethod
    def send_as_packets(data, conn, destination, peer_ip, peer_port):
        """Split ``data`` into packets and send them to ``destination``
        over ``conn`` with a sliding window of
        PacketConstructor.window_size, blocking until every packet has
        been acknowledged.
        """
        PacketSender.reset()
        # Largest payload a single packet can carry.
        max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN
        # Sliding [start, end) cursor over data, advanced by nbytes().
        curr = [0, 0]

        def nbytes(n):
            # Consume and return the next n bytes of data.
            curr[0], curr[1] = curr[1], curr[1] + n
            return data[curr[0]:curr[1]]

        remaining_data = len(data)
        if remaining_data > 0:
            # Background receiver collecting ACKs while we send.
            threading.Thread(target=PacketSender.await_acks,
                             args=(conn,)).start()
        # While there's still data to be sent
        while remaining_data > 0:
            # While there are fewer packets in transit than the window size
            while (PacketSender.sent_packets < PacketConstructor.window_size
                    and remaining_data > 0):
                print('sending packet %d' % PacketSender.seq_num)
                if remaining_data > max_payload_length:
                    # Full-size, non-final packet.
                    p = Packet(packet_type=PacketConstructor.data_type,
                               seq_num=PacketSender.seq_num,
                               peer_ip_addr=peer_ip,
                               peer_port=peer_port,
                               is_last_packet=False,
                               payload=nbytes(max_payload_length))
                    conn.sendto(p.to_bytes(), destination)
                    PacketSender.sent_packets += 1
                    remaining_data -= max_payload_length
                    PacketSender.seq_num += 1
                    PacketSender.spawn_resend_thread(conn, p, destination)
                    print('not last packet')
                else:
                    # Final (possibly short) packet of the transfer.
                    p = Packet(packet_type=PacketConstructor.data_type,
                               seq_num=PacketSender.seq_num,
                               peer_ip_addr=peer_ip,
                               peer_port=peer_port,
                               is_last_packet=True,
                               payload=nbytes(remaining_data))
                    conn.sendto(p.to_bytes(), destination)
                    PacketSender.sent_packets += 1
                    remaining_data -= remaining_data
                    PacketSender.seq_num += 1
                    print('remaining data ' + str(remaining_data))
                    print('is last packet')
                    PacketSender.spawn_resend_thread(conn, p, destination)
            # Slide the window past consecutively acknowledged packets.
            while PacketSender.next_seq_num in PacketSender.acked_packets:
                PacketSender.next_seq_num += 1
                PacketSender.sent_packets -= 1
        print('Waiting for acks')
        while not PacketSender.acked_all_packets:
            # Busy-wait until handle_ack flips the flag.
            pass
        print('RECEIVED ALL ACKS')
        PacketSender.was_reset = True
<|reserved_special_token_1|>
from packet import Packet
from packetConstructor import PacketConstructor
import threading
import time
class PacketSender:
    """Sends a byte payload over a simulated-UDP socket as a windowed
    sequence of packets, retransmitting each packet until it is acked.

    All transfer state lives in class attributes so the sender loop, the
    ACK-receiver thread and the per-packet resend threads observe the
    same values.

    Fixed: the original mixed module-level globals (written by reset()
    and handle_ack()) with class attributes of the same names (read by
    await_acks() and resend_packet_if_needed()), so the completion flag
    was never observed and the bare lock name raised NameError; it also
    invoked the instance method reset() as PacketSender.reset(), a
    TypeError. State access is now uniformly via class attributes and
    the helpers are static.
    """

    # Next sequence number to assign to an outgoing packet.
    seq_num = 0
    # Next in-order sequence number whose ACK we are waiting for.
    next_seq_num = 0
    # Number of packets currently in flight (sent but not yet acked).
    sent_packets = 0
    # Sequence numbers acknowledged so far.
    acked_packets = []
    # True once every sent packet has been acknowledged.
    acked_all_packets = False
    # Guards acked_packets / acked_all_packets across threads.
    acked_packets_lock = threading.Lock()
    # Set after a transfer completes; stops lingering resend threads.
    was_reset = False

    @staticmethod
    def reset():
        """Reset the shared transfer state before starting a new send.

        Note: was_reset is intentionally left untouched, matching the
        original behavior (it is only set at the end of a transfer).
        """
        PacketSender.seq_num = 0
        PacketSender.sent_packets = 0
        PacketSender.next_seq_num = 0
        PacketSender.acked_packets = []
        PacketSender.acked_all_packets = False
        PacketSender.acked_packets_lock = threading.Lock()

    @staticmethod
    def handle_ack(data):
        """Record one incoming ACK datagram.

        Non-ACK packets are ignored. Under the lock, a newly seen
        sequence number is appended to acked_packets; once every sent
        packet (seq_num of them) has been acked, acked_all_packets is
        set so the waiting loops can finish.
        """
        p = Packet.from_bytes(data)
        if not p.packet_type == PacketConstructor.ack_type:
            # TODO: handle NAKs here
            return
        print('received ack ' + str(p.seq_num))
        PacketSender.acked_packets_lock.acquire()
        try:
            if p.seq_num not in PacketSender.acked_packets:
                print("it's a new ack")
                PacketSender.acked_packets.append(p.seq_num)
                if len(PacketSender.acked_packets) == PacketSender.seq_num:
                    print('got all acks')
                    PacketSender.acked_all_packets = True
                else:
                    print('len: ' + str(len(PacketSender.acked_packets)))
                    print('seq_num: ' + str(PacketSender.seq_num))
        finally:
            PacketSender.acked_packets_lock.release()

    @staticmethod
    def await_acks(conn):
        """Receive datagrams from ``conn`` and dispatch each to
        handle_ack on its own thread until the transfer is fully acked."""
        print('awaiting acks')
        while not PacketSender.acked_all_packets:
            data, sender = conn.recvfrom(1024)
            threading.Thread(target=PacketSender.handle_ack,
                             args=(data,)).start()

    @staticmethod
    def resend_packet_if_needed(conn, packet, destination):
        """Retransmit ``packet`` every 0.5 s until it is acknowledged or
        the sender finishes (was_reset)."""
        while (packet.seq_num not in PacketSender.acked_packets
                and not PacketSender.was_reset):
            print('starting resend loop')
            time.sleep(0.5)
            # Re-check under the lock so an ack that raced in does not
            # trigger a spurious retransmission.
            PacketSender.acked_packets_lock.acquire()
            try:
                if (packet.seq_num not in PacketSender.acked_packets
                        and not PacketSender.was_reset):
                    print('resending packet ' + str(packet.seq_num))
                    conn.sendto(packet.to_bytes(), destination)
            finally:
                PacketSender.acked_packets_lock.release()

    @staticmethod
    def spawn_resend_thread(conn, packet, destination):
        """Start a background thread that keeps retransmitting
        ``packet`` until it is acknowledged."""
        threading.Thread(target=PacketSender.resend_packet_if_needed,
                         args=(conn, packet, destination)).start()

    @staticmethod
    def send_as_packets(data, conn, destination, peer_ip, peer_port):
        """Split ``data`` into packets and send them to ``destination``
        over ``conn`` with a sliding window of
        PacketConstructor.window_size, blocking until every packet has
        been acknowledged.
        """
        PacketSender.reset()
        # Largest payload a single packet can carry.
        max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN
        # Sliding [start, end) cursor over data, advanced by nbytes().
        curr = [0, 0]

        def nbytes(n):
            # Consume and return the next n bytes of data.
            curr[0], curr[1] = curr[1], curr[1] + n
            return data[curr[0]:curr[1]]

        remaining_data = len(data)
        if remaining_data > 0:
            # Background receiver collecting ACKs while we send.
            threading.Thread(target=PacketSender.await_acks,
                             args=(conn,)).start()
        # While there's still data to be sent
        while remaining_data > 0:
            # While there are fewer packets in transit than the window size
            while (PacketSender.sent_packets < PacketConstructor.window_size
                    and remaining_data > 0):
                print('sending packet %d' % PacketSender.seq_num)
                if remaining_data > max_payload_length:
                    # Full-size, non-final packet.
                    p = Packet(packet_type=PacketConstructor.data_type,
                               seq_num=PacketSender.seq_num,
                               peer_ip_addr=peer_ip,
                               peer_port=peer_port,
                               is_last_packet=False,
                               payload=nbytes(max_payload_length))
                    conn.sendto(p.to_bytes(), destination)
                    PacketSender.sent_packets += 1
                    remaining_data -= max_payload_length
                    PacketSender.seq_num += 1
                    PacketSender.spawn_resend_thread(conn, p, destination)
                    print('not last packet')
                else:
                    # Final (possibly short) packet of the transfer.
                    p = Packet(packet_type=PacketConstructor.data_type,
                               seq_num=PacketSender.seq_num,
                               peer_ip_addr=peer_ip,
                               peer_port=peer_port,
                               is_last_packet=True,
                               payload=nbytes(remaining_data))
                    conn.sendto(p.to_bytes(), destination)
                    PacketSender.sent_packets += 1
                    remaining_data -= remaining_data
                    PacketSender.seq_num += 1
                    print('remaining data ' + str(remaining_data))
                    print('is last packet')
                    PacketSender.spawn_resend_thread(conn, p, destination)
            # Slide the window past consecutively acknowledged packets.
            while PacketSender.next_seq_num in PacketSender.acked_packets:
                PacketSender.next_seq_num += 1
                PacketSender.sent_packets -= 1
        print('Waiting for acks')
        while not PacketSender.acked_all_packets:
            # Busy-wait until handle_ack flips the flag.
            pass
        print('RECEIVED ALL ACKS')
        PacketSender.was_reset = True
|
flexible
|
{
"blob_id": "47c1ad4bd1ceffa38eef467ea8eb59dbd2fc2ebb",
"index": 262,
"step-1": "<mask token>\n\n\nclass PacketSender:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def await_acks(conn):\n print('awaiting acks')\n while not PacketSender.acked_all_packets:\n data, sender = conn.recvfrom(1024)\n threading.Thread(target=PacketSender.handle_ack, args=(data,)\n ).start()\n <mask token>\n <mask token>\n\n @staticmethod\n def send_as_packets(data, conn, destination, peer_ip, peer_port):\n global sent_packets\n global acked_packets\n global next_seq_num\n global acked_all_packets\n global seq_num\n PacketSender.reset()\n max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN\n curr = [0, 0]\n\n def nbytes(n):\n curr[0], curr[1] = curr[1], curr[1] + n\n return data[curr[0]:curr[1]]\n remaining_data = len(data)\n if remaining_data > 0:\n threading.Thread(target=PacketSender.await_acks, args=(conn,)\n ).start()\n while remaining_data > 0:\n while (sent_packets < PacketConstructor.window_size and \n remaining_data > 0):\n print('sending packet %d' % seq_num)\n if remaining_data > max_payload_length:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=False, payload=nbytes(\n max_payload_length))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= max_payload_length\n seq_num += 1\n PacketSender.spawn_resend_thread(conn, p, destination)\n print('not last packet')\n else:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=True, payload=nbytes(\n remaining_data))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= remaining_data\n seq_num += 1\n print('remaining data ' + str(remaining_data))\n print('is last packet')\n PacketSender.spawn_resend_thread(conn, p, destination)\n while next_seq_num in acked_packets:\n next_seq_num 
+= 1\n sent_packets -= 1\n print('Waiting for acks')\n while not acked_all_packets:\n pass\n print('RECEIVED ALL ACKS')\n PacketSender.was_reset = True\n",
"step-2": "<mask token>\n\n\nclass PacketSender:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def reset(self):\n global seq_num\n global sent_packets\n global next_seq_num\n global acked_packets\n global acked_all_packets\n global acked_packets_lock\n seq_num = 0\n sent_packets = 0\n next_seq_num = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n\n def handle_ack(data):\n global acked_packets\n global seq_num\n global acked_all_packets\n global acked_packets_lock\n p = Packet.from_bytes(data)\n if not p.packet_type == PacketConstructor.ack_type:\n return\n print('received ack ' + str(p.seq_num))\n acked_packets_lock.acquire()\n if p.seq_num not in acked_packets:\n print(\"it's a new ack\")\n acked_packets.append(p.seq_num)\n if len(acked_packets) == seq_num:\n print('got all acks')\n acked_all_packets = True\n else:\n print('len: ' + str(len(acked_packets)))\n print('seq_num: ' + str(seq_num))\n acked_packets_lock.release()\n\n def await_acks(conn):\n print('awaiting acks')\n while not PacketSender.acked_all_packets:\n data, sender = conn.recvfrom(1024)\n threading.Thread(target=PacketSender.handle_ack, args=(data,)\n ).start()\n <mask token>\n <mask token>\n\n @staticmethod\n def send_as_packets(data, conn, destination, peer_ip, peer_port):\n global sent_packets\n global acked_packets\n global next_seq_num\n global acked_all_packets\n global seq_num\n PacketSender.reset()\n max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN\n curr = [0, 0]\n\n def nbytes(n):\n curr[0], curr[1] = curr[1], curr[1] + n\n return data[curr[0]:curr[1]]\n remaining_data = len(data)\n if remaining_data > 0:\n threading.Thread(target=PacketSender.await_acks, args=(conn,)\n ).start()\n while remaining_data > 0:\n while (sent_packets < PacketConstructor.window_size and \n remaining_data > 0):\n print('sending packet %d' % seq_num)\n if remaining_data > 
max_payload_length:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=False, payload=nbytes(\n max_payload_length))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= max_payload_length\n seq_num += 1\n PacketSender.spawn_resend_thread(conn, p, destination)\n print('not last packet')\n else:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=True, payload=nbytes(\n remaining_data))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= remaining_data\n seq_num += 1\n print('remaining data ' + str(remaining_data))\n print('is last packet')\n PacketSender.spawn_resend_thread(conn, p, destination)\n while next_seq_num in acked_packets:\n next_seq_num += 1\n sent_packets -= 1\n print('Waiting for acks')\n while not acked_all_packets:\n pass\n print('RECEIVED ALL ACKS')\n PacketSender.was_reset = True\n",
"step-3": "<mask token>\n\n\nclass PacketSender:\n \"\"\"\n Packet represents a simulated UDP packet.\n \"\"\"\n seq_num = 0\n next_seq_num = 0\n sent_packets = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n was_reset = False\n\n def reset(self):\n global seq_num\n global sent_packets\n global next_seq_num\n global acked_packets\n global acked_all_packets\n global acked_packets_lock\n seq_num = 0\n sent_packets = 0\n next_seq_num = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n\n def handle_ack(data):\n global acked_packets\n global seq_num\n global acked_all_packets\n global acked_packets_lock\n p = Packet.from_bytes(data)\n if not p.packet_type == PacketConstructor.ack_type:\n return\n print('received ack ' + str(p.seq_num))\n acked_packets_lock.acquire()\n if p.seq_num not in acked_packets:\n print(\"it's a new ack\")\n acked_packets.append(p.seq_num)\n if len(acked_packets) == seq_num:\n print('got all acks')\n acked_all_packets = True\n else:\n print('len: ' + str(len(acked_packets)))\n print('seq_num: ' + str(seq_num))\n acked_packets_lock.release()\n\n def await_acks(conn):\n print('awaiting acks')\n while not PacketSender.acked_all_packets:\n data, sender = conn.recvfrom(1024)\n threading.Thread(target=PacketSender.handle_ack, args=(data,)\n ).start()\n\n def resend_packet_if_needed(conn, packet, destination):\n while (not packet.seq_num in PacketSender.acked_packets and not\n PacketSender.was_reset):\n print('starting resend loop')\n time.sleep(0.5)\n acked_packets_lock.acquire()\n if (not packet.seq_num in PacketSender.acked_packets and not\n PacketSender.was_reset):\n print('resending packet ' + str(packet.seq_num))\n conn.sendto(packet.to_bytes(), destination)\n acked_packets_lock.release()\n\n def spawn_resend_thread(conn, packet, destination):\n threading.Thread(target=PacketSender.resend_packet_if_needed, args=\n (conn, packet, destination)).start()\n\n 
@staticmethod\n def send_as_packets(data, conn, destination, peer_ip, peer_port):\n global sent_packets\n global acked_packets\n global next_seq_num\n global acked_all_packets\n global seq_num\n PacketSender.reset()\n max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN\n curr = [0, 0]\n\n def nbytes(n):\n curr[0], curr[1] = curr[1], curr[1] + n\n return data[curr[0]:curr[1]]\n remaining_data = len(data)\n if remaining_data > 0:\n threading.Thread(target=PacketSender.await_acks, args=(conn,)\n ).start()\n while remaining_data > 0:\n while (sent_packets < PacketConstructor.window_size and \n remaining_data > 0):\n print('sending packet %d' % seq_num)\n if remaining_data > max_payload_length:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=False, payload=nbytes(\n max_payload_length))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= max_payload_length\n seq_num += 1\n PacketSender.spawn_resend_thread(conn, p, destination)\n print('not last packet')\n else:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=True, payload=nbytes(\n remaining_data))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= remaining_data\n seq_num += 1\n print('remaining data ' + str(remaining_data))\n print('is last packet')\n PacketSender.spawn_resend_thread(conn, p, destination)\n while next_seq_num in acked_packets:\n next_seq_num += 1\n sent_packets -= 1\n print('Waiting for acks')\n while not acked_all_packets:\n pass\n print('RECEIVED ALL ACKS')\n PacketSender.was_reset = True\n",
"step-4": "from packet import Packet\nfrom packetConstructor import PacketConstructor\nimport threading\nimport time\n\n\nclass PacketSender:\n \"\"\"\n Packet represents a simulated UDP packet.\n \"\"\"\n seq_num = 0\n next_seq_num = 0\n sent_packets = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n was_reset = False\n\n def reset(self):\n global seq_num\n global sent_packets\n global next_seq_num\n global acked_packets\n global acked_all_packets\n global acked_packets_lock\n seq_num = 0\n sent_packets = 0\n next_seq_num = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n\n def handle_ack(data):\n global acked_packets\n global seq_num\n global acked_all_packets\n global acked_packets_lock\n p = Packet.from_bytes(data)\n if not p.packet_type == PacketConstructor.ack_type:\n return\n print('received ack ' + str(p.seq_num))\n acked_packets_lock.acquire()\n if p.seq_num not in acked_packets:\n print(\"it's a new ack\")\n acked_packets.append(p.seq_num)\n if len(acked_packets) == seq_num:\n print('got all acks')\n acked_all_packets = True\n else:\n print('len: ' + str(len(acked_packets)))\n print('seq_num: ' + str(seq_num))\n acked_packets_lock.release()\n\n def await_acks(conn):\n print('awaiting acks')\n while not PacketSender.acked_all_packets:\n data, sender = conn.recvfrom(1024)\n threading.Thread(target=PacketSender.handle_ack, args=(data,)\n ).start()\n\n def resend_packet_if_needed(conn, packet, destination):\n while (not packet.seq_num in PacketSender.acked_packets and not\n PacketSender.was_reset):\n print('starting resend loop')\n time.sleep(0.5)\n acked_packets_lock.acquire()\n if (not packet.seq_num in PacketSender.acked_packets and not\n PacketSender.was_reset):\n print('resending packet ' + str(packet.seq_num))\n conn.sendto(packet.to_bytes(), destination)\n acked_packets_lock.release()\n\n def spawn_resend_thread(conn, packet, destination):\n 
threading.Thread(target=PacketSender.resend_packet_if_needed, args=\n (conn, packet, destination)).start()\n\n @staticmethod\n def send_as_packets(data, conn, destination, peer_ip, peer_port):\n global sent_packets\n global acked_packets\n global next_seq_num\n global acked_all_packets\n global seq_num\n PacketSender.reset()\n max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN\n curr = [0, 0]\n\n def nbytes(n):\n curr[0], curr[1] = curr[1], curr[1] + n\n return data[curr[0]:curr[1]]\n remaining_data = len(data)\n if remaining_data > 0:\n threading.Thread(target=PacketSender.await_acks, args=(conn,)\n ).start()\n while remaining_data > 0:\n while (sent_packets < PacketConstructor.window_size and \n remaining_data > 0):\n print('sending packet %d' % seq_num)\n if remaining_data > max_payload_length:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=False, payload=nbytes(\n max_payload_length))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= max_payload_length\n seq_num += 1\n PacketSender.spawn_resend_thread(conn, p, destination)\n print('not last packet')\n else:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num, peer_ip_addr=peer_ip, peer_port=\n peer_port, is_last_packet=True, payload=nbytes(\n remaining_data))\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= remaining_data\n seq_num += 1\n print('remaining data ' + str(remaining_data))\n print('is last packet')\n PacketSender.spawn_resend_thread(conn, p, destination)\n while next_seq_num in acked_packets:\n next_seq_num += 1\n sent_packets -= 1\n print('Waiting for acks')\n while not acked_all_packets:\n pass\n print('RECEIVED ALL ACKS')\n PacketSender.was_reset = True\n",
"step-5": "from packet import Packet\nfrom packetConstructor import PacketConstructor\nimport threading\nimport time\n\n\nclass PacketSender:\n \"\"\"\n Packet represents a simulated UDP packet.\n \"\"\"\n # The next seq num for sent packets\n seq_num = 0\n # The next seq num for acks that we're waiting for\n next_seq_num = 0\n sent_packets = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n was_reset = False\n\n def reset(self):\n global seq_num\n global sent_packets\n global next_seq_num\n global acked_packets\n global acked_all_packets\n global acked_packets_lock\n seq_num = 0\n sent_packets = 0\n next_seq_num = 0\n acked_packets = []\n acked_all_packets = False\n acked_packets_lock = threading.Lock()\n\n def handle_ack(data):\n global acked_packets\n global seq_num\n global acked_all_packets\n global acked_packets_lock\n p = Packet.from_bytes(data)\n if not p.packet_type == PacketConstructor.ack_type:\n # TODO: handle NAKs here\n return\n print(\"received ack \" + str(p.seq_num))\n acked_packets_lock.acquire()\n if p.seq_num not in acked_packets:\n print(\"it's a new ack\")\n acked_packets.append(p.seq_num)\n if len(acked_packets) == seq_num:\n print(\"got all acks\")\n acked_all_packets = True\n else:\n print(\"len: \" + str(len(acked_packets)))\n print(\"seq_num: \" + str(seq_num))\n acked_packets_lock.release()\n\n def await_acks(conn):\n print(\"awaiting acks\")\n while not PacketSender.acked_all_packets:\n data, sender = conn.recvfrom(1024)\n threading.Thread(target=PacketSender.handle_ack, args=(data,)).start()\n\n def resend_packet_if_needed(conn, packet, destination):\n while not packet.seq_num in PacketSender.acked_packets and not PacketSender.was_reset:\n print(\"starting resend loop\")\n time.sleep(0.5)\n acked_packets_lock.acquire()\n if not packet.seq_num in PacketSender.acked_packets and not PacketSender.was_reset:\n print(\"resending packet \" + str(packet.seq_num))\n conn.sendto(packet.to_bytes(), 
destination)\n acked_packets_lock.release()\n\n def spawn_resend_thread(conn, packet, destination):\n threading.Thread(target=PacketSender.resend_packet_if_needed, args=(conn, packet, destination)).start()\n\n @staticmethod\n def send_as_packets(data, conn, destination, peer_ip, peer_port):\n global sent_packets\n global acked_packets\n global next_seq_num\n global acked_all_packets\n global seq_num\n PacketSender.reset()\n max_payload_length = Packet.MAX_LEN - Packet.MIN_LEN\n\n curr = [0, 0]\n\n def nbytes(n):\n curr[0], curr[1] = curr[1], curr[1] + n\n return data[curr[0]: curr[1]]\n\n remaining_data = len(data)\n if remaining_data > 0:\n threading.Thread(target=PacketSender.await_acks, args=(conn,)).start()\n # While there's still data to be sent\n while remaining_data > 0:\n # While there are less packets in transit than the window size\n while (sent_packets < PacketConstructor.window_size and remaining_data > 0):\n print(\"sending packet %d\" % seq_num)\n if remaining_data > max_payload_length:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num,\n peer_ip_addr=peer_ip,\n peer_port=peer_port,\n is_last_packet=False,\n payload=nbytes(max_payload_length))\n\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= max_payload_length\n seq_num += 1\n PacketSender.spawn_resend_thread(conn, p, destination)\n print(\"not last packet\")\n else:\n p = Packet(packet_type=PacketConstructor.data_type,\n seq_num=seq_num,\n peer_ip_addr=peer_ip,\n peer_port=peer_port,\n is_last_packet=True,\n payload=nbytes(remaining_data))\n\n conn.sendto(p.to_bytes(), destination)\n sent_packets += 1\n remaining_data -= remaining_data\n seq_num += 1\n print(\"remaining data \" + str(remaining_data))\n print(\"is last packet\")\n PacketSender.spawn_resend_thread(conn, p, destination)\n # Update the number of packets still in transit\n while next_seq_num in acked_packets:\n next_seq_num += 1\n sent_packets -= 1\n print(\"Waiting for acks\")\n 
while not acked_all_packets:\n # Wait here until all packets have been acked\n pass\n print(\"RECEIVED ALL ACKS\")\n PacketSender.was_reset = True\n",
"step-ids": [
3,
5,
9,
10,
11
]
}
|
[
3,
5,
9,
10,
11
] |
from ctypes import *
class GF_IPMPX_Data(Structure):
    """ctypes layout for a GF_IPMPX_Data record: three single-byte
    fields (tag, Version, dataID)."""

    _fields_ = [
        ("tag", c_char),
        ("Version", c_char),
        ("dataID", c_char),
    ]
|
normal
|
{
"blob_id": "b3f4815495c781fe6cc15f77b4ee601680117419",
"index": 8592,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass GF_IPMPX_Data(Structure):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass GF_IPMPX_Data(Structure):\n _fields_ = [('tag', c_char), ('Version', c_char), ('dataID', c_char)]\n",
"step-4": "from ctypes import *\n\n\nclass GF_IPMPX_Data(Structure):\n _fields_ = [('tag', c_char), ('Version', c_char), ('dataID', c_char)]\n",
"step-5": "from ctypes import *\n\n\nclass GF_IPMPX_Data(Structure):\n _fields_=[\n (\"tag\", c_char),\n (\"Version\", c_char),\n (\"dataID\", c_char)\n ]",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Planet Class
import math
from turtle import *
class Planet:
    """A planet drawn as a filled dot on a shared turtle canvas."""

    def __init__(self, x, y, radius):
        """Create a planet of the given radius centred at (x, y) and set
        up an 800x800 drawing window."""
        self.radius = radius
        self.x = x
        self.y = y
        # NOTE(review): Screen() is re-created and setup() re-applied for
        # every planet; turtle's screen is effectively a singleton, so
        # this repeats the same window configuration each time.
        canvas = Screen()
        canvas.setup(800, 800)
        self.turtle = Turtle()

    def circumference(self):
        """Return the circumference 2*pi*r.

        Fixed: uses math.pi instead of the truncated constant 3.1415,
        which understated the result by ~0.003%.
        """
        return 2 * math.pi * self.radius

    def scaleSize(self, scale):
        """Multiply the planet's radius by ``scale`` in place."""
        self.radius = self.radius * scale

    def draw(self, colour):
        """Move this planet's turtle to (x, y) and stamp a dot of
        diameter ``self.radius`` in the given colour."""
        self.turtle.goto(self.x, self.y)
        self.turtle.color(colour)
        self.turtle.dot(self.radius)
#====instance of the class===
# Demo: draw two planets and show a circumference calculation.
planet1 = Planet(-200, -100, 200)
planet1.draw('red')
# circumference() reflects the radius as constructed (200) here.
print('Circumference *check the maths!* is:', planet1.circumference())
planet1.scaleSize(0.5)  # halve the radius before redrawing
planet1.draw('yellow')
planet2 = Planet(300, 200, 100)
planet2.draw('black')
|
normal
|
{
"blob_id": "668b63d1f1bd035226e3e12bc6816abc897affc3",
"index": 9975,
"step-1": "<mask token>\n\n\nclass Planet:\n\n def __init__(self, x, y, radius):\n self.radius = radius\n self.x = x\n self.y = y\n canvas = Screen()\n canvas.setup(800, 800)\n self.turtle = Turtle()\n <mask token>\n\n def scaleSize(self, scale):\n self.radius = self.radius * scale\n\n def draw(self, colour):\n self.turtle.goto(self.x, self.y)\n self.turtle.color(colour)\n self.turtle.dot(self.radius)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Planet:\n\n def __init__(self, x, y, radius):\n self.radius = radius\n self.x = x\n self.y = y\n canvas = Screen()\n canvas.setup(800, 800)\n self.turtle = Turtle()\n\n def circumference(self):\n return 2 * 3.1415 * self.radius\n\n def scaleSize(self, scale):\n self.radius = self.radius * scale\n\n def draw(self, colour):\n self.turtle.goto(self.x, self.y)\n self.turtle.color(colour)\n self.turtle.dot(self.radius)\n\n\n<mask token>\nplanet1.draw('red')\nprint('Circumference *check the maths!* is:', planet1.circumference())\nplanet1.scaleSize(0.5)\nplanet1.draw('yellow')\n<mask token>\nplanet2.draw('black')\n",
"step-3": "<mask token>\n\n\nclass Planet:\n\n def __init__(self, x, y, radius):\n self.radius = radius\n self.x = x\n self.y = y\n canvas = Screen()\n canvas.setup(800, 800)\n self.turtle = Turtle()\n\n def circumference(self):\n return 2 * 3.1415 * self.radius\n\n def scaleSize(self, scale):\n self.radius = self.radius * scale\n\n def draw(self, colour):\n self.turtle.goto(self.x, self.y)\n self.turtle.color(colour)\n self.turtle.dot(self.radius)\n\n\nplanet1 = Planet(-200, -100, 200)\nplanet1.draw('red')\nprint('Circumference *check the maths!* is:', planet1.circumference())\nplanet1.scaleSize(0.5)\nplanet1.draw('yellow')\nplanet2 = Planet(300, 200, 100)\nplanet2.draw('black')\n",
"step-4": "from turtle import *\n\n\nclass Planet:\n\n def __init__(self, x, y, radius):\n self.radius = radius\n self.x = x\n self.y = y\n canvas = Screen()\n canvas.setup(800, 800)\n self.turtle = Turtle()\n\n def circumference(self):\n return 2 * 3.1415 * self.radius\n\n def scaleSize(self, scale):\n self.radius = self.radius * scale\n\n def draw(self, colour):\n self.turtle.goto(self.x, self.y)\n self.turtle.color(colour)\n self.turtle.dot(self.radius)\n\n\nplanet1 = Planet(-200, -100, 200)\nplanet1.draw('red')\nprint('Circumference *check the maths!* is:', planet1.circumference())\nplanet1.scaleSize(0.5)\nplanet1.draw('yellow')\nplanet2 = Planet(300, 200, 100)\nplanet2.draw('black')\n",
"step-5": "# Planet Class\r\nfrom turtle import *\r\nclass Planet:\r\n def __init__(self, x, y, radius):\r\n self.radius = radius\r\n self.x = x\r\n self.y = y\r\n canvas = Screen()\r\n canvas.setup(800, 800)\r\n self.turtle = Turtle()\r\n\r\n def circumference(self):\r\n return 2*3.1415*self.radius\r\n\r\n def scaleSize(self, scale):\r\n self.radius = self.radius*scale\r\n\r\n def draw(self, colour):\r\n self.turtle.goto(self.x, self.y)\r\n self.turtle.color(colour)\r\n self.turtle.dot(self.radius)\r\n\r\n\r\n\r\n#====instance of the class===\r\nplanet1 = Planet(-200, -100, 200)\r\nplanet1.draw('red')\r\nprint('Circumference *check the maths!* is:', planet1.circumference())\r\nplanet1.scaleSize(0.5)\r\nplanet1.draw('yellow')\r\nplanet2 = Planet(300, 200, 100)\r\nplanet2.draw('black')\r\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
# -*- coding: utf-8 -*-
import os
import logging
import subprocess
import json
import sys
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT_PATH)
from src.datafactory.common import json_util
from src.datafactory.config import constant
class SegmentProcess(object):
    """
    function for segment

    Calls a remote NLP word-segmentation service via curl and parses
    its JSON response into a list of words.
    """

    def do_nlp_seg(self, sentence):
        """
        connect nlp wordseg

        Posts ``sentence`` to the segmentation endpoint configured in
        ``constant`` and returns the raw response bytes. On any failure
        the request is retried exactly once (best-effort).
        """
        # SECURITY(review): the sentence is interpolated directly into a
        # shell command executed with shell=True -- a sentence containing
        # shell metacharacters can inject arbitrary commands. Consider an
        # argv-list subprocess call or an HTTP client library instead.
        cmd = "curl -d '{\"lang_id\":1,\"lang_para\":0,\"query\":\"%s\"}" \
            "' %s?username=%s\&app=%s\&encoding=utf8" % (
            sentence,
            constant.SEGMENT_URL,
            constant.SEGMENT_USERNAME,
            constant.SEGMENT_APP
        )
        try:
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            retn = p.communicate()[0]
        except Exception as e:
            # Log and retry once before giving up.
            logging.critical("segment(%s) failed and try again:%s" % (sentence, e))
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            retn = p.communicate()[0]
        return retn

    def deparser(self, segment_result_str):
        """Parse the raw JSON segmentation response into a list of words.

        Returns an empty list when the response is not valid JSON or
        lacks the expected scw_out.wordsepbuf field; failures are logged.
        """
        segment_result = []
        try:
            segment_result_dict = json.loads(segment_result_str,
                object_hook=json_util._decode_dict)
            if "scw_out" in segment_result_dict and "wordsepbuf" in segment_result_dict["scw_out"]:
                # wordsepbuf is a tab-separated word list, e.g. "a\tb\tc\t".
                wordsepbuf = segment_result_dict["scw_out"]["wordsepbuf"]
                wordsepbuf_split = wordsepbuf.strip("\t").split("\t")
                for word in wordsepbuf_split:
                    segment_result.append(word)
            else:
                logging.critical("segment result(%s) error without wordsepbuf"
                    % segment_result_str)
        except ValueError as e:
            logging.critical("deparser segment result(%s) failed: %s" % (segment_result_str, e))
        return segment_result
def get_segment(ori_data):
    """Segment *ori_data* via the remote wordseg service.

    Convenience wrapper: sends the text, parses the response, and returns
    the resulting list of words.
    """
    processor = SegmentProcess()
    raw_response = processor.do_nlp_seg(ori_data)
    return processor.deparser(raw_response)
if __name__ == "__main__":
    # print() call form is valid under both Python 2 and Python 3; the old
    # `print expr` statement was a SyntaxError on Python 3.
    print(get_segment("同意 写论文的时候 用百度查一个庭园方面的术语\\\"ll"))
|
normal
|
{
"blob_id": "96ea9b2b4d892ac88f7fac9594a6d2ad5d69a7c7",
"index": 7479,
"step-1": "# -*- coding: utf-8 -*-\n\nimport os\nimport logging\nimport subprocess\nimport json\nimport sys\n\nROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\nsys.path.append(ROOT_PATH)\n\nfrom src.datafactory.common import json_util\nfrom src.datafactory.config import constant\n\n\nclass SegmentProcess(object):\n \"\"\"\n function for segment\n \"\"\"\n\n def do_nlp_seg(self, sentence):\n \"\"\"\n connect nlp wordseg\n \"\"\"\n cmd = \"curl -d '{\\\"lang_id\\\":1,\\\"lang_para\\\":0,\\\"query\\\":\\\"%s\\\"}\" \\\n \"' %s?username=%s\\&app=%s\\&encoding=utf8\" % (\n sentence,\n constant.SEGMENT_URL,\n constant.SEGMENT_USERNAME,\n constant.SEGMENT_APP\n )\n try:\n p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n retn = p.communicate()[0]\n except Exception as e:\n logging.critical(\"segment(%s) failed and try again:%s\" % (sentence, e))\n p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)\n retn = p.communicate()[0]\n\n return retn\n\n def deparser(self, segment_result_str):\n segment_result = []\n try:\n segment_result_dict = json.loads(segment_result_str,\n object_hook=json_util._decode_dict)\n if \"scw_out\" in segment_result_dict and \"wordsepbuf\" in segment_result_dict[\"scw_out\"]:\n wordsepbuf = segment_result_dict[\"scw_out\"][\"wordsepbuf\"]\n wordsepbuf_split = wordsepbuf.strip(\"\\t\").split(\"\\t\")\n for word in wordsepbuf_split:\n\n segment_result.append(word)\n else:\n logging.critical(\"segment result(%s) error without wordsepbuf\"\n % segment_result_str)\n except ValueError as e:\n logging.critical(\"deparser segment result(%s) failed: %s\" % (segment_result_str, e))\n return segment_result\n\n\ndef get_segment(ori_data):\n seg = SegmentProcess()\n result = seg.do_nlp_seg(ori_data)\n segment_result = seg.deparser(result)\n return segment_result\n\n\nif __name__ == \"__main__\":\n print get_segment(\"同意 写论文的时候 
用百度查一个庭园方面的术语\\\\\\\"ll\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class TestFactorMult(ParTestBase):
def __init__(self):
super().__init__()
<|reserved_special_token_0|>
def par_test_1(self):
"""
f(X, Y), scalar
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(
self.scalarf), self.scalarf.mult(self.XY_factor)]
par_res = [self.XY_par_factor.mult(self.scalar), self.
XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult
(self.XY_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
<|reserved_special_token_0|>
def par_test_3(self):
"""
f(X), f(X)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.X_factor)
par_res = self.X_par_factor.mult(self.X_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_4(self):
"""
f(X), f(Y)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
self.Y_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.Y_factor)
par_res = self.X_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_0|>
def par_test_6(self):
"""
f(X, Y) f(Y)
"""
for i in range(4):
self.Y_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Y_factor)
par_res = self.XY_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_7(self):
"""
f(X, Y) f(Z)
"""
for i in range(4):
self.Z_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Z_factor)
par_res = self.XY_par_factor.mult(self.Z_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_8(self):
"""
f(X, Y) f(X, Y)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XY_factor)
par_res = self.XY_par_factor.mult(self.XY_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_9(self):
"""
f(X, Y) F(X, Z)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XZ_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XZ_factor)
par_res = self.XY_par_factor.mult(self.XZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_10(self):
"""
f(X, Y) f(Z, W)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.ZW_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.ZW_factor)
par_res = self.XY_par_factor.mult(self.ZW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_11(self):
"""
f(X, Y, Z) f(X, Y, Z)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYZ_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYZ_factor)
par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_0|>
def par_test_13(self):
"""
f(X, Y, Z) f(X, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XKW_factor)
par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestFactorMult(ParTestBase):
def __init__(self):
super().__init__()
<|reserved_special_token_0|>
def par_test_1(self):
"""
f(X, Y), scalar
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(
self.scalarf), self.scalarf.mult(self.XY_factor)]
par_res = [self.XY_par_factor.mult(self.scalar), self.
XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult
(self.XY_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
<|reserved_special_token_0|>
def par_test_3(self):
"""
f(X), f(X)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.X_factor)
par_res = self.X_par_factor.mult(self.X_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_4(self):
"""
f(X), f(Y)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
self.Y_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.Y_factor)
par_res = self.X_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_0|>
def par_test_6(self):
"""
f(X, Y) f(Y)
"""
for i in range(4):
self.Y_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Y_factor)
par_res = self.XY_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_7(self):
"""
f(X, Y) f(Z)
"""
for i in range(4):
self.Z_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Z_factor)
par_res = self.XY_par_factor.mult(self.Z_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_8(self):
"""
f(X, Y) f(X, Y)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XY_factor)
par_res = self.XY_par_factor.mult(self.XY_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_9(self):
"""
f(X, Y) F(X, Z)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XZ_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XZ_factor)
par_res = self.XY_par_factor.mult(self.XZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_10(self):
"""
f(X, Y) f(Z, W)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.ZW_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.ZW_factor)
par_res = self.XY_par_factor.mult(self.ZW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_11(self):
"""
f(X, Y, Z) f(X, Y, Z)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYZ_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYZ_factor)
par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_12(self):
"""
f(X, Y, Z) f(X, Y, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYW_factor)
par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_13(self):
"""
f(X, Y, Z) f(X, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XKW_factor)
par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_14(self):
"""
f(X, Y, Z) f(T, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.TKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.TKW_factor)
par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestFactorMult(ParTestBase):
def __init__(self):
super().__init__()
def par_test_0(self):
"""
f(X), scalar
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self
.scalarf), self.scalarf.mult(self.X_factor)]
par_res = [self.X_par_factor.mult(self.scalar), self.
X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(
self.X_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
def par_test_1(self):
"""
f(X, Y), scalar
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(
self.scalarf), self.scalarf.mult(self.XY_factor)]
par_res = [self.XY_par_factor.mult(self.scalar), self.
XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult
(self.XY_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
<|reserved_special_token_0|>
def par_test_3(self):
"""
f(X), f(X)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.X_factor)
par_res = self.X_par_factor.mult(self.X_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_4(self):
"""
f(X), f(Y)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
self.Y_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.Y_factor)
par_res = self.X_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_0|>
def par_test_6(self):
"""
f(X, Y) f(Y)
"""
for i in range(4):
self.Y_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Y_factor)
par_res = self.XY_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_7(self):
"""
f(X, Y) f(Z)
"""
for i in range(4):
self.Z_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Z_factor)
par_res = self.XY_par_factor.mult(self.Z_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_8(self):
"""
f(X, Y) f(X, Y)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XY_factor)
par_res = self.XY_par_factor.mult(self.XY_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_9(self):
"""
f(X, Y) F(X, Z)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XZ_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XZ_factor)
par_res = self.XY_par_factor.mult(self.XZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_10(self):
"""
f(X, Y) f(Z, W)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.ZW_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.ZW_factor)
par_res = self.XY_par_factor.mult(self.ZW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_11(self):
"""
f(X, Y, Z) f(X, Y, Z)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYZ_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYZ_factor)
par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_12(self):
"""
f(X, Y, Z) f(X, Y, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYW_factor)
par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_13(self):
"""
f(X, Y, Z) f(X, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XKW_factor)
par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_14(self):
"""
f(X, Y, Z) f(T, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.TKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.TKW_factor)
par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestFactorMult(ParTestBase):
def __init__(self):
super().__init__()
def par_test_0(self):
"""
f(X), scalar
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self
.scalarf), self.scalarf.mult(self.X_factor)]
par_res = [self.X_par_factor.mult(self.scalar), self.
X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(
self.X_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
def par_test_1(self):
"""
f(X, Y), scalar
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(
self.scalarf), self.scalarf.mult(self.XY_factor)]
par_res = [self.XY_par_factor.mult(self.scalar), self.
XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult
(self.XY_par_factor)]
for i, ele in enumerate(res):
assert ele.rand_vars == par_res[i
].rand_vars and ele.values == par_res[i].values
<|reserved_special_token_0|>
def par_test_3(self):
"""
f(X), f(X)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.X_factor)
par_res = self.X_par_factor.mult(self.X_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_4(self):
"""
f(X), f(Y)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
self.Y_par_factor.setMaxDepth(i)
res = self.X_factor.mult(self.Y_factor)
par_res = self.X_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_5(self):
"""
f(X, Y) f(X)
"""
for i in range(4):
self.X_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.X_factor)
par_res = self.XY_par_factor.mult(self.X_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_6(self):
"""
f(X, Y) f(Y)
"""
for i in range(4):
self.Y_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Y_factor)
par_res = self.XY_par_factor.mult(self.Y_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_7(self):
"""
f(X, Y) f(Z)
"""
for i in range(4):
self.Z_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.Z_factor)
par_res = self.XY_par_factor.mult(self.Z_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_8(self):
"""
f(X, Y) f(X, Y)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XY_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XY_factor)
par_res = self.XY_par_factor.mult(self.XY_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_9(self):
"""
f(X, Y) F(X, Z)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.XZ_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.XZ_factor)
par_res = self.XY_par_factor.mult(self.XZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_10(self):
"""
f(X, Y) f(Z, W)
"""
for i in range(4):
self.XY_par_factor.setMaxDepth(i)
self.ZW_par_factor.setMaxDepth(i)
res = self.XY_factor.mult(self.ZW_factor)
par_res = self.XY_par_factor.mult(self.ZW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_11(self):
"""
f(X, Y, Z) f(X, Y, Z)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYZ_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYZ_factor)
par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_12(self):
"""
f(X, Y, Z) f(X, Y, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XYW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XYW_factor)
par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_13(self):
"""
f(X, Y, Z) f(X, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.XKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.XKW_factor)
par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def par_test_14(self):
"""
f(X, Y, Z) f(T, K, W)
"""
for i in range(4):
self.XYZ_par_factor.setMaxDepth(i)
self.TKW_par_factor.setMaxDepth(i)
res = self.XYZ_factor.mult(self.TKW_factor)
par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
<|reserved_special_token_1|>
from nose.tools import with_setup, nottest
from tests.par_test_base import ParTestBase
from ProbPy import RandVar, Factor, ParFactor
class TestFactorMult(ParTestBase):
    """Checks that ParFactor.mult agrees with the serial Factor.mult.

    Every test performs the same multiplication on a serial Factor and on
    its parallel ParFactor counterpart — at every max depth 0..3 — and
    asserts that both produce identical random variables and values.
    The Factor/ParFactor fixtures (self.X_factor, self.XY_par_factor, ...)
    are provided by ParTestBase.
    """

    def __init__(self):
        super().__init__()

    def _assert_same(self, res, par_res):
        """Serial and parallel results must carry identical vars and values."""
        assert res.rand_vars == par_res.rand_vars
        assert res.values == par_res.values

    def _check_mult(self, f1, f2, pf1, pf2):
        """Compare f1*f2 (serial) against pf1*pf2 (parallel)."""
        self._assert_same(f1.mult(f2), pf1.mult(pf2))

    def _check_scalar_mult(self, factor, par_factor):
        """Compare the three scalar-multiplication forms serially/parallel:
        factor*number, factor*scalar-factor, scalar-factor*factor."""
        res = [factor.mult(self.scalar),
               factor.mult(self.scalarf),
               self.scalarf.mult(factor)]
        par_res = [par_factor.mult(self.scalar),
                   par_factor.mult(self.par_scalarf),
                   self.par_scalarf.mult(par_factor)]
        for ele, par_ele in zip(res, par_res):
            self._assert_same(ele, par_ele)

    def par_test_0(self):
        """f(X), scalar"""
        for depth in range(4):
            self.X_par_factor.setMaxDepth(depth)
            self._check_scalar_mult(self.X_factor, self.X_par_factor)

    def par_test_1(self):
        """f(X, Y), scalar"""
        for depth in range(4):
            # Original set the same factor's depth twice; once is enough.
            self.XY_par_factor.setMaxDepth(depth)
            self._check_scalar_mult(self.XY_factor, self.XY_par_factor)

    def par_test_2(self):
        """f(X, Y, Z), scalar"""
        for depth in range(4):
            self.XYZ_par_factor.setMaxDepth(depth)
            self._check_scalar_mult(self.XYZ_factor, self.XYZ_par_factor)

    def par_test_3(self):
        """f(X), f(X)"""
        for depth in range(4):
            self.X_par_factor.setMaxDepth(depth)
            self._check_mult(self.X_factor, self.X_factor,
                             self.X_par_factor, self.X_par_factor)

    def par_test_4(self):
        """f(X), f(Y)"""
        for depth in range(4):
            self.X_par_factor.setMaxDepth(depth)
            self.Y_par_factor.setMaxDepth(depth)
            self._check_mult(self.X_factor, self.Y_factor,
                             self.X_par_factor, self.Y_par_factor)

    def par_test_5(self):
        """f(X, Y) f(X)"""
        for depth in range(4):
            self.X_par_factor.setMaxDepth(depth)
            self.XY_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.X_factor,
                             self.XY_par_factor, self.X_par_factor)

    def par_test_6(self):
        """f(X, Y) f(Y)"""
        for depth in range(4):
            self.Y_par_factor.setMaxDepth(depth)
            self.XY_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.Y_factor,
                             self.XY_par_factor, self.Y_par_factor)

    def par_test_7(self):
        """f(X, Y) f(Z)"""
        for depth in range(4):
            self.Z_par_factor.setMaxDepth(depth)
            self.XY_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.Z_factor,
                             self.XY_par_factor, self.Z_par_factor)

    def par_test_8(self):
        """f(X, Y) f(X, Y)"""
        for depth in range(4):
            self.XY_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.XY_factor,
                             self.XY_par_factor, self.XY_par_factor)

    def par_test_9(self):
        """f(X, Y) f(X, Z)"""
        for depth in range(4):
            self.XY_par_factor.setMaxDepth(depth)
            self.XZ_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.XZ_factor,
                             self.XY_par_factor, self.XZ_par_factor)

    def par_test_10(self):
        """f(X, Y) f(Z, W)"""
        for depth in range(4):
            self.XY_par_factor.setMaxDepth(depth)
            self.ZW_par_factor.setMaxDepth(depth)
            self._check_mult(self.XY_factor, self.ZW_factor,
                             self.XY_par_factor, self.ZW_par_factor)

    def par_test_11(self):
        """f(X, Y, Z) f(X, Y, Z)"""
        for depth in range(4):
            self.XYZ_par_factor.setMaxDepth(depth)
            self._check_mult(self.XYZ_factor, self.XYZ_factor,
                             self.XYZ_par_factor, self.XYZ_par_factor)

    def par_test_12(self):
        """f(X, Y, Z) f(X, Y, W)"""
        for depth in range(4):
            self.XYZ_par_factor.setMaxDepth(depth)
            self.XYW_par_factor.setMaxDepth(depth)
            self._check_mult(self.XYZ_factor, self.XYW_factor,
                             self.XYZ_par_factor, self.XYW_par_factor)

    def par_test_13(self):
        """f(X, Y, Z) f(X, K, W)"""
        for depth in range(4):
            self.XYZ_par_factor.setMaxDepth(depth)
            self.XKW_par_factor.setMaxDepth(depth)
            self._check_mult(self.XYZ_factor, self.XKW_factor,
                             self.XYZ_par_factor, self.XKW_par_factor)

    def par_test_14(self):
        """f(X, Y, Z) f(T, K, W)"""
        for depth in range(4):
            self.XYZ_par_factor.setMaxDepth(depth)
            self.TKW_par_factor.setMaxDepth(depth)
            self._check_mult(self.XYZ_factor, self.TKW_factor,
                             self.XYZ_par_factor, self.TKW_par_factor)
|
flexible
|
{
"blob_id": "0aad96de65cc125e5c026dfd72a9cc9f4ebd3dd2",
"index": 6486,
"step-1": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n <mask token>\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n 
\"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n <mask token>\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n 
\"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def 
par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-3": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self\n .scalarf), self.scalarf.mult(self.X_factor)]\n par_res = [self.X_par_factor.mult(self.scalar), self.\n X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(\n self.X_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n <mask token>\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = 
self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n 
par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-4": "<mask token>\n\n\nclass TestFactorMult(ParTestBase):\n\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = [self.X_factor.mult(self.scalar), self.X_factor.mult(self\n .scalarf), self.scalarf.mult(self.X_factor)]\n par_res = [self.X_par_factor.mult(self.scalar), self.\n X_par_factor.mult(self.par_scalarf), self.par_scalarf.mult(\n self.X_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = [self.XY_factor.mult(self.scalar), self.XY_factor.mult(\n self.scalarf), self.scalarf.mult(self.XY_factor)]\n par_res = [self.XY_par_factor.mult(self.scalar), self.\n XY_par_factor.mult(self.par_scalarf), self.par_scalarf.mult\n (self.XY_par_factor)]\n for i, ele in enumerate(res):\n assert ele.rand_vars == par_res[i\n ].rand_vars and ele.values == par_res[i].values\n <mask token>\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_5(self):\n \"\"\"\n f(X, Y) f(X)\n \"\"\"\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.X_factor)\n par_res = self.XY_par_factor.mult(self.X_par_factor)\n 
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n 
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-5": "from nose.tools import with_setup, nottest\n\nfrom tests.par_test_base import ParTestBase\nfrom ProbPy import RandVar, Factor, ParFactor\n\n\nclass TestFactorMult(ParTestBase):\n def __init__(self):\n super().__init__()\n\n def par_test_0(self):\n \"\"\"\n f(X), scalar\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n\n res = [\n self.X_factor.mult(self.scalar),\n self.X_factor.mult(self.scalarf),\n self.scalarf.mult(self.X_factor),\n ]\n\n par_res = [\n self.X_par_factor.mult(self.scalar),\n self.X_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.X_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_1(self):\n \"\"\"\n f(X, Y), scalar\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = [\n self.XY_factor.mult(self.scalar),\n self.XY_factor.mult(self.scalarf),\n self.scalarf.mult(self.XY_factor),\n ]\n\n par_res = [\n self.XY_par_factor.mult(self.scalar),\n self.XY_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.XY_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_2(self):\n \"\"\"\n f(X, Y, Z), scalar\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n\n res = [\n self.XYZ_factor.mult(self.scalar),\n self.XYZ_factor.mult(self.scalarf),\n self.scalarf.mult(self.XYZ_factor),\n ]\n\n par_res = [\n self.XYZ_par_factor.mult(self.scalar),\n self.XYZ_par_factor.mult(self.par_scalarf),\n self.par_scalarf.mult(self.XYZ_par_factor),\n ]\n\n for i, ele in enumerate(res):\n assert (\n ele.rand_vars == par_res[i].rand_vars\n and ele.values == par_res[i].values\n )\n\n def par_test_3(self):\n \"\"\"\n f(X), f(X)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n\n res 
= self.X_factor.mult(self.X_factor)\n par_res = self.X_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_4(self):\n \"\"\"\n f(X), f(Y)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.Y_par_factor.setMaxDepth(i)\n\n res = self.X_factor.mult(self.Y_factor)\n par_res = self.X_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_5(self):\n \"\"\"\n f(X, Y) f(X)\n \"\"\"\n\n for i in range(4):\n self.X_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.X_factor)\n par_res = self.XY_par_factor.mult(self.X_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_6(self):\n \"\"\"\n f(X, Y) f(Y)\n \"\"\"\n\n for i in range(4):\n self.Y_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.Y_factor)\n par_res = self.XY_par_factor.mult(self.Y_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_7(self):\n \"\"\"\n f(X, Y) f(Z)\n \"\"\"\n\n for i in range(4):\n self.Z_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.Z_factor)\n par_res = self.XY_par_factor.mult(self.Z_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_8(self):\n \"\"\"\n f(X, Y) f(X, Y)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XY_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.XY_factor)\n par_res = self.XY_par_factor.mult(self.XY_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_9(self):\n \"\"\"\n f(X, Y) F(X, Z)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.XZ_par_factor.setMaxDepth(i)\n\n res = 
self.XY_factor.mult(self.XZ_factor)\n par_res = self.XY_par_factor.mult(self.XZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_10(self):\n \"\"\"\n f(X, Y) f(Z, W)\n \"\"\"\n\n for i in range(4):\n self.XY_par_factor.setMaxDepth(i)\n self.ZW_par_factor.setMaxDepth(i)\n\n res = self.XY_factor.mult(self.ZW_factor)\n par_res = self.XY_par_factor.mult(self.ZW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_11(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, Z)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYZ_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XYZ_factor)\n par_res = self.XYZ_par_factor.mult(self.XYZ_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_12(self):\n \"\"\"\n f(X, Y, Z) f(X, Y, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XYW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XYW_factor)\n par_res = self.XYZ_par_factor.mult(self.XYW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_13(self):\n \"\"\"\n f(X, Y, Z) f(X, K, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.XKW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.XKW_factor)\n par_res = self.XYZ_par_factor.mult(self.XKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n\n def par_test_14(self):\n \"\"\"\n f(X, Y, Z) f(T, K, W)\n \"\"\"\n\n for i in range(4):\n self.XYZ_par_factor.setMaxDepth(i)\n self.TKW_par_factor.setMaxDepth(i)\n\n res = self.XYZ_factor.mult(self.TKW_factor)\n par_res = self.XYZ_par_factor.mult(self.TKW_par_factor)\n assert res.rand_vars == par_res.rand_vars and res.values == par_res.values\n",
"step-ids": [
12,
14,
15,
16,
19
]
}
|
[
12,
14,
15,
16,
19
] |
def sieve(limit):
    """Return the list of primes strictly below ``limit``.

    Matches the original's contract exactly: 2 is always prepended (even
    for ``limit <= 2``) and only odd candidates in [3, limit) are tested.

    The original removed composites with ``list.remove`` inside a nested
    loop over every multiplier up to ``limit`` — O(limit^2) removals; this
    is a standard boolean sieve of Eratosthenes over the odd numbers.
    """
    candidates = list(range(3, limit, 2))  # candidates[k] == 3 + 2*k
    is_prime = [True] * len(candidates)
    for idx, p in enumerate(candidates):
        if not is_prime[idx]:
            continue
        # First useful multiple is p*p (smaller ones already crossed off);
        # stepping by p in index space skips by 2*p in value space, so
        # even multiples are never visited.
        for j in range((p * p - 3) // 2, len(candidates), p):
            is_prime[j] = False
    return [2] + [p for idx, p in enumerate(candidates) if is_prime[idx]]
|
normal
|
{
"blob_id": "ec7ca03f627eaa635aac56e302b9c40bf0a3da38",
"index": 1796,
"step-1": "<mask token>\n",
"step-2": "def sieve(limit):\n numbers = list(range(3, limit, 2))\n for prime in numbers:\n for multiplier in reversed(range(2, limit)):\n try:\n numbers.remove(prime * multiplier)\n except ValueError:\n pass\n return [2] + numbers\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def isVPS(phrase):
    """Return 'YES' if phrase is a balanced parenthesis string, else 'NO'.

    Every character other than '(' is treated as a closing parenthesis.
    """
    depth = 0
    for ch in phrase:
        depth += 1 if ch == '(' else -1
        if depth < 0:
            # A closer arrived with nothing open to match it.
            return 'NO'
    return 'YES' if depth == 0 else 'NO'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def isVPS(phrase):
    """Return 'YES' for a balanced parenthesis string, otherwise 'NO'.

    Any character that is not '(' counts as a closing parenthesis.
    """
    depth = 0
    for ch in phrase:
        depth += 1 if ch == '(' else -1
        if depth < 0:
            # More closers than openers so far -- cannot be balanced.
            return 'NO'
    return 'YES' if depth == 0 else 'NO'
# Emit one YES/NO verdict per candidate string (strings defined above).
for string in strings:
    print(isVPS(string))
<|reserved_special_token_1|>
# Candidate parenthesis strings to classify as valid (YES) or invalid (NO).
strings = ['(())())', '(((()())()', '(()())((()))', '((()()(()))(((())))()',
    '()()()()(()()())()', '(()((())()(']
<|reserved_special_token_0|>
def isVPS(phrase):
    """Classify phrase as a valid parenthesis string: 'YES' or 'NO'.

    Non-'(' characters are all treated as closing parentheses.
    """
    open_count = 0
    for ch in phrase:
        if ch == '(':
            open_count += 1
        else:
            open_count -= 1
            if open_count < 0:
                # An unmatched closer ends the check immediately.
                return 'NO'
    return 'YES' if open_count == 0 else 'NO'
# Emit one YES/NO verdict per candidate string (strings defined above).
for string in strings:
    print(isVPS(string))
<|reserved_special_token_1|>
# Candidate parenthesis strings to classify as valid (YES) or invalid (NO).
strings = ['(())())', '(((()())()', '(()())((()))', '((()()(()))(((())))()', '()()()()(()()())()', '(()((())()(']


def isVPS(phrase):
    """Return 'YES' if phrase is a valid parenthesis string, else 'NO'.

    A counter replaces the original's stack of '(' characters: any
    character other than '(' is treated as a closing parenthesis, exactly
    as before.  (The original also carried commented-out scaffolding and an
    executed-but-unused triple-quoted string; both removed.)
    """
    open_count = 0
    for char in phrase:
        if char == '(':
            open_count += 1
        elif open_count == 0:
            # A closer with nothing open can never be matched.
            return 'NO'
        else:
            open_count -= 1
    return 'YES' if open_count == 0 else 'NO'


# Print one verdict per input string.
for string in strings:
    print(isVPS(string))
|
flexible
|
{
"blob_id": "d9f055301f050eea4281ce418974546c1245ac7e",
"index": 4621,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef isVPS(phrase):\n testlist = []\n for char in phrase:\n if char == '(':\n testlist.append(char)\n elif len(testlist) == 0:\n return 'NO'\n else:\n testlist.pop()\n if len(testlist) == 0:\n return 'YES'\n else:\n return 'NO'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef isVPS(phrase):\n testlist = []\n for char in phrase:\n if char == '(':\n testlist.append(char)\n elif len(testlist) == 0:\n return 'NO'\n else:\n testlist.pop()\n if len(testlist) == 0:\n return 'YES'\n else:\n return 'NO'\n\n\nfor string in strings:\n print(isVPS(string))\n",
"step-4": "strings = ['(())())', '(((()())()', '(()())((()))', '((()()(()))(((())))()',\n '()()()()(()()())()', '(()((())()(']\n<mask token>\n\n\ndef isVPS(phrase):\n testlist = []\n for char in phrase:\n if char == '(':\n testlist.append(char)\n elif len(testlist) == 0:\n return 'NO'\n else:\n testlist.pop()\n if len(testlist) == 0:\n return 'YES'\n else:\n return 'NO'\n\n\nfor string in strings:\n print(isVPS(string))\n",
"step-5": "strings = ['(())())', '(((()())()', '(()())((()))', '((()()(()))(((())))()', '()()()()(()()())()', '(()((())()(']\n\n#print(string[0])\n'''\nfor i in string:\n testlist = []\n for j in string[i]:\n if j == ')':\n if \n'''\n\ndef isVPS(phrase):\n testlist = []\n for char in phrase:\n if char == '(':\n testlist.append(char)\n else:\n if len(testlist) == 0:\n #return False\n return 'NO'\n else:\n testlist.pop()\n if len(testlist) == 0:\n #return True\n return 'YES'\n else:\n #return False\n return 'NO'\n\nfor string in strings:\n print(isVPS(string))\n#print(isVPS(string[0]))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MilkingListView(ListView):
    """List the milkings recorded for a single animal.

    The animal is addressed by genus and breed slugs plus either its own
    slug or a fragment of its UUID.
    """
    # Restored: the duplicate definition of this view later in the file
    # declares ``model = Animal`` here.
    model = Animal

    def get_queryset(self, *args, **kwargs):
        """Return the Milking queryset for the animal named in the URL.

        Fixes the original, whose ``Animal.objects.get(<positional str>)``
        always raised (``get`` takes field lookups, not a bare string), so
        the slug branch never ran and bare ``except:`` clauses hid the
        failure.  Here the slug lookup works and the UUID-fragment lookup
        is the fallback.  Missing URL kwargs now raise KeyError instead of
        silently yielding ``None``.
        """
        base = Milking.objects.filter(
            animal__primary_breed__genus__slug=self.kwargs['genus_slug'],
            animal__primary_breed__slug=self.kwargs['breed_slug'],
        )
        slug_or_uuid = self.kwargs.get('slug')
        qs = base.filter(animal__slug=slug_or_uuid)
        if not qs.exists():
            # No slug match -- interpret the URL fragment as a UUID prefix.
            qs = base.filter(animal__uuid__contains=slug_or_uuid)
        return qs
class AnimalDetailView(DetailView):
    """Detail page for an Animal, resolved by pk, UUID fragment, or slug."""
    model = Animal

    def get_queryset(self, *args, **kwargs):
        """Restrict animals to the genus and breed named in the URL."""
        return Animal.objects.filter(
            primary_breed__genus__slug=self.kwargs['genus_slug'],
            primary_breed__slug=self.kwargs['breed_slug'])

    def get_object(self, queryset=None):
        """Look up the animal from URL kwargs.

        Resolution order: explicit ``pk``; then ``slug`` treated as a UUID
        fragment; then ``slug`` as an actual slug.  Raises Http404 when
        nothing matches and AttributeError when the URL provides neither a
        pk nor a slug (same contract as Django's SingleObjectMixin).
        """
        if queryset is None:
            queryset = self.get_queryset()
        pk = self.kwargs.get('pk', None)
        slug_or_uuid = self.kwargs.get('slug', None)
        if pk is not None:
            try:
                # Fix: the original only filtered the queryset here and
                # never assigned ``obj``, so ``return obj`` raised
                # UnboundLocalError on every pk lookup.
                obj = queryset.get(pk=pk)
            except ObjectDoesNotExist:
                raise Http404(_(
                    u'No %(verbose_name)s found matching the query') %
                    {'verbose_name': queryset.model._meta.verbose_name})
        elif slug_or_uuid is not None:
            try:
                # filter(...)[0] raises IndexError when no animal's UUID
                # contains the fragment; narrowed from a bare ``except:``.
                obj = queryset.filter(uuid__contains=slug_or_uuid)[0]
            except IndexError:
                try:
                    obj = queryset.get(slug=slug_or_uuid)
                except ObjectDoesNotExist:
                    raise Http404(_(
                        u'No %(verbose_name)s found matching the query') %
                        {'verbose_name': queryset.model._meta.verbose_name})
        else:
            raise AttributeError(
                u'Generic detail view %s must be called with either an object pk or a slug.'
                % self.__class__.__name__)
        return obj

    def get_context_data(self, **kwargs):
        """Expose an empty BriefNoteForm to the template as ``note_form``."""
        context = super(AnimalDetailView, self).get_context_data(**kwargs)
        context['note_form'] = BriefNoteForm()
        return context
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProductDetailView(DetailView):
    """Detail page for a Product, scoped by the product-type slug in the URL."""
    # Restored: the duplicate definition of this view later in the file
    # declares ``model = Product`` here.
    model = Product

    def get_queryset(self, *args, **kwargs):
        """Limit products to the type named in the URL."""
        return Product.objects.filter(type__slug=self.kwargs['type_slug'])
class MilkingListView(ListView):
    """List the milkings recorded for a single animal.

    The animal is addressed by genus and breed slugs plus either its own
    slug or a fragment of its UUID.
    """
    model = Animal

    def get_queryset(self, *args, **kwargs):
        """Return the Milking queryset for the animal named in the URL.

        Fixes the original, whose ``Animal.objects.get(<positional str>)``
        always raised (``get`` takes field lookups, not a bare string), so
        the slug branch never ran and bare ``except:`` clauses hid the
        failure.  Here the slug lookup works and the UUID-fragment lookup
        is the fallback.  Missing URL kwargs now raise KeyError instead of
        silently yielding ``None``.
        """
        base = Milking.objects.filter(
            animal__primary_breed__genus__slug=self.kwargs['genus_slug'],
            animal__primary_breed__slug=self.kwargs['breed_slug'],
        )
        slug_or_uuid = self.kwargs.get('slug')
        qs = base.filter(animal__slug=slug_or_uuid)
        if not qs.exists():
            # No slug match -- interpret the URL fragment as a UUID prefix.
            qs = base.filter(animal__uuid__contains=slug_or_uuid)
        return qs
class AnimalDetailView(DetailView):
    """Detail page for an Animal, resolved by pk, UUID fragment, or slug."""
    model = Animal

    def get_queryset(self, *args, **kwargs):
        """Restrict animals to the genus and breed named in the URL."""
        return Animal.objects.filter(
            primary_breed__genus__slug=self.kwargs['genus_slug'],
            primary_breed__slug=self.kwargs['breed_slug'])

    def get_object(self, queryset=None):
        """Look up the animal from URL kwargs.

        Resolution order: explicit ``pk``; then ``slug`` treated as a UUID
        fragment; then ``slug`` as an actual slug.  Raises Http404 when
        nothing matches and AttributeError when the URL provides neither a
        pk nor a slug (same contract as Django's SingleObjectMixin).
        """
        if queryset is None:
            queryset = self.get_queryset()
        pk = self.kwargs.get('pk', None)
        slug_or_uuid = self.kwargs.get('slug', None)
        if pk is not None:
            try:
                # Fix: the original only filtered the queryset here and
                # never assigned ``obj``, so ``return obj`` raised
                # UnboundLocalError on every pk lookup.
                obj = queryset.get(pk=pk)
            except ObjectDoesNotExist:
                raise Http404(_(
                    u'No %(verbose_name)s found matching the query') %
                    {'verbose_name': queryset.model._meta.verbose_name})
        elif slug_or_uuid is not None:
            try:
                # filter(...)[0] raises IndexError when no animal's UUID
                # contains the fragment; narrowed from a bare ``except:``.
                obj = queryset.filter(uuid__contains=slug_or_uuid)[0]
            except IndexError:
                try:
                    obj = queryset.get(slug=slug_or_uuid)
                except ObjectDoesNotExist:
                    raise Http404(_(
                        u'No %(verbose_name)s found matching the query') %
                        {'verbose_name': queryset.model._meta.verbose_name})
        else:
            raise AttributeError(
                u'Generic detail view %s must be called with either an object pk or a slug.'
                % self.__class__.__name__)
        return obj

    def get_context_data(self, **kwargs):
        """Expose an empty BriefNoteForm to the template as ``note_form``."""
        context = super(AnimalDetailView, self).get_context_data(**kwargs)
        context['note_form'] = BriefNoteForm()
        return context
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BreedDetailView(DetailView):
    """Detail page for a Breed, scoped by the genus slug in the URL."""
    # Restored to match the sibling views (ProductDetailView declares
    # ``model = Product``); the attribute here was lost from the source.
    model = Breed

    def get_queryset(self, *args, **kwargs):
        """Limit breeds to the genus named in the URL."""
        return Breed.objects.filter(genus__slug=self.kwargs['genus_slug'])
class ProductDetailView(DetailView):
model = Product
def get_queryset(self, *args, **kwargs):
return Product.objects.filter(type__slug=self.kwargs['type_slug'])
class MilkingListView(ListView):
model = Animal
def get_queryset(self, *args, **kwargs):
try:
animal = Animal.objects.get(self.kwargs.get('slug', None))
qs = Milking.objects.filter(animal__primary_breed__genus__slug=
self.kwargs['genus_slug'], animal__primary_breed__slug=self
.kwargs['breed_slug'], animal__slug=self.kwargs['slug'])
except:
try:
qs = Milking.objects.filter(animal__primary_breed__genus__slug
=self.kwargs['genus_slug'], animal__primary_breed__slug
=self.kwargs['breed_slug'], animal__uuid__contains=self
.kwargs['slug'])
except:
qs = None
return qs
class AnimalDetailView(DetailView):
model = Animal
def get_queryset(self, *args, **kwargs):
return Animal.objects.filter(primary_breed__genus__slug=self.kwargs
['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
pk = self.kwargs.get('pk', None)
slug_or_uuid = self.kwargs.get('slug', None)
if pk is not None:
queryset = queryset.filter(pk=pk)
elif slug_or_uuid is not None:
try:
obj = queryset.filter(uuid__contains=slug_or_uuid)[0]
except:
try:
obj = queryset.get(slug=slug_or_uuid)
except ObjectDoesNotExist:
raise Http404(_(
u'No %(verbose_name)s found matching the query') %
{'verbose_name': queryset.model._meta.verbose_name})
else:
raise AttributeError(
u'Generic detail view %s must be called with either an object pk or a slug.'
% self.__class__.__name__)
return obj
def get_context_data(self, **kwargs):
context = super(AnimalDetailView, self).get_context_data(**kwargs)
context['note_form'] = BriefNoteForm()
return context
<|reserved_special_token_1|>
import datetime
from django.views.generic import DetailView, ListView
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_list_or_404, render_to_response, get_object_or_404
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.conf import settings
from farm.models import Animal, Breed, Product, Milking
from notes.forms import BriefNoteForm
class BreedDetailView(DetailView):
model = Breed
def get_queryset(self, *args, **kwargs):
return Breed.objects.filter(genus__slug=self.kwargs['genus_slug'])
class ProductDetailView(DetailView):
model = Product
def get_queryset(self, *args, **kwargs):
return Product.objects.filter(type__slug=self.kwargs['type_slug'])
class MilkingListView(ListView):
model = Animal
def get_queryset(self, *args, **kwargs):
try:
animal = Animal.objects.get(self.kwargs.get('slug', None))
qs = Milking.objects.filter(animal__primary_breed__genus__slug=
self.kwargs['genus_slug'], animal__primary_breed__slug=self
.kwargs['breed_slug'], animal__slug=self.kwargs['slug'])
except:
try:
qs = Milking.objects.filter(animal__primary_breed__genus__slug
=self.kwargs['genus_slug'], animal__primary_breed__slug
=self.kwargs['breed_slug'], animal__uuid__contains=self
.kwargs['slug'])
except:
qs = None
return qs
class AnimalDetailView(DetailView):
model = Animal
def get_queryset(self, *args, **kwargs):
return Animal.objects.filter(primary_breed__genus__slug=self.kwargs
['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
pk = self.kwargs.get('pk', None)
slug_or_uuid = self.kwargs.get('slug', None)
if pk is not None:
queryset = queryset.filter(pk=pk)
elif slug_or_uuid is not None:
try:
obj = queryset.filter(uuid__contains=slug_or_uuid)[0]
except:
try:
obj = queryset.get(slug=slug_or_uuid)
except ObjectDoesNotExist:
raise Http404(_(
u'No %(verbose_name)s found matching the query') %
{'verbose_name': queryset.model._meta.verbose_name})
else:
raise AttributeError(
u'Generic detail view %s must be called with either an object pk or a slug.'
% self.__class__.__name__)
return obj
def get_context_data(self, **kwargs):
context = super(AnimalDetailView, self).get_context_data(**kwargs)
context['note_form'] = BriefNoteForm()
return context
<|reserved_special_token_1|>
import datetime

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_list_or_404, render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.translation import ugettext as _
from django.views.generic import DetailView, ListView

from farm.models import Animal, Breed, Product, Milking
from notes.forms import BriefNoteForm
class BreedDetailView(DetailView):
    """Detail page for a single Breed, scoped by the genus slug in the URL."""
    model = Breed

    def get_queryset(self, *args, **kwargs):
        # Only breeds belonging to the genus named in the URL are visible.
        genus_slug = self.kwargs['genus_slug']
        return Breed.objects.filter(genus__slug=genus_slug)
class ProductDetailView(DetailView):
    """Detail page for a single Product, scoped by the type slug in the URL."""
    model = Product

    def get_queryset(self, *args, **kwargs):
        # Only products of the type named in the URL are visible.
        type_slug = self.kwargs['type_slug']
        return Product.objects.filter(type__slug=type_slug)
class MilkingListView(ListView):
    """List the milkings of one animal, found by slug or by UUID fragment.

    The animal is scoped by the genus and breed slugs from the URL.  An
    exact slug match is tried first; if nothing matches, the slug is retried
    as a (partial) UUID.

    Fixes over the previous version: ``Animal.objects.get(<positional str>)``
    always raised TypeError (``get`` takes keyword lookups), so the slug
    branch was dead code and every request fell through to the UUID branch;
    the bare ``except:`` clauses that hid this are gone.
    """
    model = Animal

    def get_queryset(self, *args, **kwargs):
        # URL-derived filters shared by both lookup strategies.
        scope = {
            'animal__primary_breed__genus__slug': self.kwargs['genus_slug'],
            'animal__primary_breed__slug': self.kwargs['breed_slug'],
        }
        slug = self.kwargs.get('slug')
        # Prefer an exact slug match; fall back to a UUID-fragment match.
        qs = Milking.objects.filter(animal__slug=slug, **scope)
        if not qs.exists():
            qs = Milking.objects.filter(animal__uuid__contains=slug, **scope)
        return qs
class AnimalDetailView(DetailView):
    """Detail page for an Animal, addressable by pk, slug, or UUID fragment."""
    model = Animal

    def get_queryset(self, *args, **kwargs):
        # Scope animals to the genus/breed identified in the URL.
        return Animal.objects.filter(
            primary_breed__genus__slug=self.kwargs['genus_slug'],
            primary_breed__slug=self.kwargs['breed_slug'])

    def get_object(self, queryset=None):
        """Return the animal selected by the URL's pk or slug/UUID fragment.

        Raises Http404 when nothing matches and AttributeError when the URL
        supplied neither a pk nor a slug.

        Fixes over the previous version: the pk branch filtered the queryset
        but never assigned ``obj``, so ``return obj`` raised NameError; the
        bare ``except:`` around the UUID lookup is narrowed to IndexError.
        """
        if queryset is None:
            queryset = self.get_queryset()
        pk = self.kwargs.get('pk', None)
        slug_or_uuid = self.kwargs.get('slug', None)
        if pk is not None:
            try:
                obj = queryset.get(pk=pk)
            except ObjectDoesNotExist:
                raise Http404(_(
                    u'No %(verbose_name)s found matching the query') %
                    {'verbose_name': queryset.model._meta.verbose_name})
        elif slug_or_uuid is not None:
            try:
                # Prefer a UUID-fragment match; [0] raises IndexError if empty.
                obj = queryset.filter(uuid__contains=slug_or_uuid)[0]
            except IndexError:
                try:
                    obj = queryset.get(slug=slug_or_uuid)
                except ObjectDoesNotExist:
                    raise Http404(_(
                        u'No %(verbose_name)s found matching the query') %
                        {'verbose_name': queryset.model._meta.verbose_name})
        else:
            raise AttributeError(
                u'Generic detail view %s must be called with either an object pk or a slug.'
                % self.__class__.__name__)
        return obj

    def get_context_data(self, **kwargs):
        # Attach an empty quick-note form so the template can render it inline.
        context = super(AnimalDetailView, self).get_context_data(**kwargs)
        context['note_form'] = BriefNoteForm()
        return context
|
flexible
|
{
"blob_id": "3ecc9ce82d9c902958a4da51ce7ee3c39b064b2b",
"index": 3591,
"step-1": "<mask token>\n\n\nclass MilkingListView(ListView):\n <mask token>\n\n def get_queryset(self, *args, **kwargs):\n try:\n animal = Animal.objects.get(self.kwargs.get('slug', None))\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=\n self.kwargs['genus_slug'], animal__primary_breed__slug=self\n .kwargs['breed_slug'], animal__slug=self.kwargs['slug'])\n except:\n try:\n qs = Milking.objects.filter(animal__primary_breed__genus__slug\n =self.kwargs['genus_slug'], animal__primary_breed__slug\n =self.kwargs['breed_slug'], animal__uuid__contains=self\n .kwargs['slug'])\n except:\n qs = None\n return qs\n\n\nclass AnimalDetailView(DetailView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n return Animal.objects.filter(primary_breed__genus__slug=self.kwargs\n ['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n pk = self.kwargs.get('pk', None)\n slug_or_uuid = self.kwargs.get('slug', None)\n if pk is not None:\n queryset = queryset.filter(pk=pk)\n elif slug_or_uuid is not None:\n try:\n obj = queryset.filter(uuid__contains=slug_or_uuid)[0]\n except:\n try:\n obj = queryset.get(slug=slug_or_uuid)\n except ObjectDoesNotExist:\n raise Http404(_(\n u'No %(verbose_name)s found matching the query') %\n {'verbose_name': queryset.model._meta.verbose_name})\n else:\n raise AttributeError(\n u'Generic detail view %s must be called with either an object pk or a slug.'\n % self.__class__.__name__)\n return obj\n\n def get_context_data(self, **kwargs):\n context = super(AnimalDetailView, self).get_context_data(**kwargs)\n context['note_form'] = BriefNoteForm()\n return context\n",
"step-2": "<mask token>\n\n\nclass ProductDetailView(DetailView):\n <mask token>\n\n def get_queryset(self, *args, **kwargs):\n return Product.objects.filter(type__slug=self.kwargs['type_slug'])\n\n\nclass MilkingListView(ListView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n try:\n animal = Animal.objects.get(self.kwargs.get('slug', None))\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=\n self.kwargs['genus_slug'], animal__primary_breed__slug=self\n .kwargs['breed_slug'], animal__slug=self.kwargs['slug'])\n except:\n try:\n qs = Milking.objects.filter(animal__primary_breed__genus__slug\n =self.kwargs['genus_slug'], animal__primary_breed__slug\n =self.kwargs['breed_slug'], animal__uuid__contains=self\n .kwargs['slug'])\n except:\n qs = None\n return qs\n\n\nclass AnimalDetailView(DetailView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n return Animal.objects.filter(primary_breed__genus__slug=self.kwargs\n ['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n pk = self.kwargs.get('pk', None)\n slug_or_uuid = self.kwargs.get('slug', None)\n if pk is not None:\n queryset = queryset.filter(pk=pk)\n elif slug_or_uuid is not None:\n try:\n obj = queryset.filter(uuid__contains=slug_or_uuid)[0]\n except:\n try:\n obj = queryset.get(slug=slug_or_uuid)\n except ObjectDoesNotExist:\n raise Http404(_(\n u'No %(verbose_name)s found matching the query') %\n {'verbose_name': queryset.model._meta.verbose_name})\n else:\n raise AttributeError(\n u'Generic detail view %s must be called with either an object pk or a slug.'\n % self.__class__.__name__)\n return obj\n\n def get_context_data(self, **kwargs):\n context = super(AnimalDetailView, self).get_context_data(**kwargs)\n context['note_form'] = BriefNoteForm()\n return context\n",
"step-3": "<mask token>\n\n\nclass BreedDetailView(DetailView):\n <mask token>\n\n def get_queryset(self, *args, **kwargs):\n return Breed.objects.filter(genus__slug=self.kwargs['genus_slug'])\n\n\nclass ProductDetailView(DetailView):\n model = Product\n\n def get_queryset(self, *args, **kwargs):\n return Product.objects.filter(type__slug=self.kwargs['type_slug'])\n\n\nclass MilkingListView(ListView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n try:\n animal = Animal.objects.get(self.kwargs.get('slug', None))\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=\n self.kwargs['genus_slug'], animal__primary_breed__slug=self\n .kwargs['breed_slug'], animal__slug=self.kwargs['slug'])\n except:\n try:\n qs = Milking.objects.filter(animal__primary_breed__genus__slug\n =self.kwargs['genus_slug'], animal__primary_breed__slug\n =self.kwargs['breed_slug'], animal__uuid__contains=self\n .kwargs['slug'])\n except:\n qs = None\n return qs\n\n\nclass AnimalDetailView(DetailView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n return Animal.objects.filter(primary_breed__genus__slug=self.kwargs\n ['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n pk = self.kwargs.get('pk', None)\n slug_or_uuid = self.kwargs.get('slug', None)\n if pk is not None:\n queryset = queryset.filter(pk=pk)\n elif slug_or_uuid is not None:\n try:\n obj = queryset.filter(uuid__contains=slug_or_uuid)[0]\n except:\n try:\n obj = queryset.get(slug=slug_or_uuid)\n except ObjectDoesNotExist:\n raise Http404(_(\n u'No %(verbose_name)s found matching the query') %\n {'verbose_name': queryset.model._meta.verbose_name})\n else:\n raise AttributeError(\n u'Generic detail view %s must be called with either an object pk or a slug.'\n % self.__class__.__name__)\n return obj\n\n def get_context_data(self, **kwargs):\n context = super(AnimalDetailView, 
self).get_context_data(**kwargs)\n context['note_form'] = BriefNoteForm()\n return context\n",
"step-4": "import datetime\nfrom django.views.generic import DetailView, ListView\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.http import HttpResponseRedirect, Http404\nfrom django.shortcuts import get_list_or_404, render_to_response, get_object_or_404\nfrom django.template import RequestContext\nfrom django.core.urlresolvers import reverse\nfrom django.conf import settings\nfrom farm.models import Animal, Breed, Product, Milking\nfrom notes.forms import BriefNoteForm\n\n\nclass BreedDetailView(DetailView):\n model = Breed\n\n def get_queryset(self, *args, **kwargs):\n return Breed.objects.filter(genus__slug=self.kwargs['genus_slug'])\n\n\nclass ProductDetailView(DetailView):\n model = Product\n\n def get_queryset(self, *args, **kwargs):\n return Product.objects.filter(type__slug=self.kwargs['type_slug'])\n\n\nclass MilkingListView(ListView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n try:\n animal = Animal.objects.get(self.kwargs.get('slug', None))\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=\n self.kwargs['genus_slug'], animal__primary_breed__slug=self\n .kwargs['breed_slug'], animal__slug=self.kwargs['slug'])\n except:\n try:\n qs = Milking.objects.filter(animal__primary_breed__genus__slug\n =self.kwargs['genus_slug'], animal__primary_breed__slug\n =self.kwargs['breed_slug'], animal__uuid__contains=self\n .kwargs['slug'])\n except:\n qs = None\n return qs\n\n\nclass AnimalDetailView(DetailView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n return Animal.objects.filter(primary_breed__genus__slug=self.kwargs\n ['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n pk = self.kwargs.get('pk', None)\n slug_or_uuid = self.kwargs.get('slug', None)\n if pk is not None:\n queryset = queryset.filter(pk=pk)\n elif slug_or_uuid is not None:\n try:\n obj = 
queryset.filter(uuid__contains=slug_or_uuid)[0]\n except:\n try:\n obj = queryset.get(slug=slug_or_uuid)\n except ObjectDoesNotExist:\n raise Http404(_(\n u'No %(verbose_name)s found matching the query') %\n {'verbose_name': queryset.model._meta.verbose_name})\n else:\n raise AttributeError(\n u'Generic detail view %s must be called with either an object pk or a slug.'\n % self.__class__.__name__)\n return obj\n\n def get_context_data(self, **kwargs):\n context = super(AnimalDetailView, self).get_context_data(**kwargs)\n context['note_form'] = BriefNoteForm()\n return context\n",
"step-5": "import datetime\nfrom django.views.generic import DetailView, ListView\n\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.http import HttpResponseRedirect, Http404\nfrom django.shortcuts import get_list_or_404, render_to_response, get_object_or_404\nfrom django.template import RequestContext\nfrom django.core.urlresolvers import reverse\nfrom django.conf import settings\n\nfrom farm.models import Animal, Breed, Product, Milking\nfrom notes.forms import BriefNoteForm\n\nclass BreedDetailView(DetailView):\n model = Breed\n\n def get_queryset(self, *args, **kwargs):\n return Breed.objects.filter(genus__slug=self.kwargs['genus_slug'])\n\nclass ProductDetailView(DetailView):\n model = Product\n\n def get_queryset(self, *args, **kwargs):\n return Product.objects.filter(type__slug=self.kwargs['type_slug'])\n\nclass MilkingListView(ListView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n try:\n animal = Animal.objects.get(self.kwargs.get('slug', None))\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=self.kwargs['genus_slug'], animal__primary_breed__slug=self.kwargs['breed_slug'], animal__slug=self.kwargs['slug'])\n except:\n try:\n qs = Milking.objects.filter(animal__primary_breed__genus__slug=self.kwargs['genus_slug'], animal__primary_breed__slug=self.kwargs['breed_slug'], animal__uuid__contains=self.kwargs['slug'])\n except:\n qs = None\n return qs\n\nclass AnimalDetailView(DetailView):\n model = Animal\n\n def get_queryset(self, *args, **kwargs):\n return Animal.objects.filter(primary_breed__genus__slug=self.kwargs['genus_slug'], primary_breed__slug=self.kwargs['breed_slug'])\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n\n pk = self.kwargs.get('pk', None)\n slug_or_uuid = self.kwargs.get('slug', None)\n if pk is not None:\n queryset = queryset.filter(pk=pk)\n\n # Next, try looking up by slug.\n elif slug_or_uuid is not None:\n try:\n obj = 
queryset.filter(uuid__contains=slug_or_uuid)[0]\n except:\n try:\n obj = queryset.get(slug=slug_or_uuid)\n except ObjectDoesNotExist:\n raise Http404(_(u\"No %(verbose_name)s found matching the query\") %\n {'verbose_name': queryset.model._meta.verbose_name})\n\n # If none of those are defined, it's an error.\n else:\n raise AttributeError(u\"Generic detail view %s must be called with \"\n u\"either an object pk or a slug.\"\n % self.__class__.__name__)\n\n return obj\n\n def get_context_data(self, **kwargs):\n context = super(AnimalDetailView, self).get_context_data(**kwargs)\n context['note_form'] = BriefNoteForm()\n return context\n",
"step-ids": [
7,
10,
13,
15,
16
]
}
|
[
7,
10,
13,
15,
16
] |
<|reserved_special_token_0|>
def refreshCam03():
try:
tmp_photo = URL2PhotoImage(cameraURL03)
image03_label.configure(image=tmp_photo)
image03_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam03).start()
<|reserved_special_token_0|>
def refreshCam05():
try:
tmp_photo = URL2PhotoImage(cameraURL05)
image05_label.configure(image=tmp_photo)
image05_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam05).start()
<|reserved_special_token_0|>
def close(event=None):
rootWindow.quit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
rootWindow.bind('<Escape>', close)
Timer(0.1, refreshCam01).start()
Timer(0.1, refreshCam02).start()
Timer(0.1, refreshCam03).start()
Timer(0.1, refreshCam04).start()
Timer(0.1, refreshCam05).start()
Timer(0.1, refreshCam06).start()
Timer(0.1, refreshCam07).start()
Timer(0.1, refreshCam08).start()
Timer(0.1, refreshCam09).start()
def URL2PhotoImage(URL):
return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=
4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.
ANTIALIAS))
<|reserved_special_token_0|>
def refreshCam02():
try:
tmp_photo = URL2PhotoImage(cameraURL02)
image02_label.configure(image=tmp_photo)
image02_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam02).start()
def refreshCam03():
try:
tmp_photo = URL2PhotoImage(cameraURL03)
image03_label.configure(image=tmp_photo)
image03_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam03).start()
def refreshCam04():
try:
tmp_photo = URL2PhotoImage(cameraURL04)
image04_label.configure(image=tmp_photo)
image04_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam04).start()
def refreshCam05():
try:
tmp_photo = URL2PhotoImage(cameraURL05)
image05_label.configure(image=tmp_photo)
image05_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam05).start()
<|reserved_special_token_0|>
def refreshCam07():
try:
tmp_photo = URL2PhotoImage(cameraURL07)
image07_label.configure(image=tmp_photo)
image07_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam07).start()
def refreshCam08():
try:
tmp_photo = URL2PhotoImage(cameraURL08)
image08_label.configure(image=tmp_photo)
image08_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam08).start()
def refreshCam09():
try:
tmp_photo = URL2PhotoImage(cameraURL09)
image09_label.configure(image=tmp_photo)
image09_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam09).start()
def close(event=None):
rootWindow.quit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
rootWindow.bind('<Escape>', close)
Timer(0.1, refreshCam01).start()
Timer(0.1, refreshCam02).start()
Timer(0.1, refreshCam03).start()
Timer(0.1, refreshCam04).start()
Timer(0.1, refreshCam05).start()
Timer(0.1, refreshCam06).start()
Timer(0.1, refreshCam07).start()
Timer(0.1, refreshCam08).start()
Timer(0.1, refreshCam09).start()
def URL2PhotoImage(URL):
return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=
4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.
ANTIALIAS))
def refreshCam01():
try:
tmp_photo = URL2PhotoImage(cameraURL01)
image01_label.configure(image=tmp_photo)
image01_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam01).start()
def refreshCam02():
try:
tmp_photo = URL2PhotoImage(cameraURL02)
image02_label.configure(image=tmp_photo)
image02_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam02).start()
def refreshCam03():
try:
tmp_photo = URL2PhotoImage(cameraURL03)
image03_label.configure(image=tmp_photo)
image03_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam03).start()
def refreshCam04():
try:
tmp_photo = URL2PhotoImage(cameraURL04)
image04_label.configure(image=tmp_photo)
image04_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam04).start()
def refreshCam05():
try:
tmp_photo = URL2PhotoImage(cameraURL05)
image05_label.configure(image=tmp_photo)
image05_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam05).start()
<|reserved_special_token_0|>
def refreshCam07():
try:
tmp_photo = URL2PhotoImage(cameraURL07)
image07_label.configure(image=tmp_photo)
image07_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam07).start()
def refreshCam08():
try:
tmp_photo = URL2PhotoImage(cameraURL08)
image08_label.configure(image=tmp_photo)
image08_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam08).start()
def refreshCam09():
try:
tmp_photo = URL2PhotoImage(cameraURL09)
image09_label.configure(image=tmp_photo)
image09_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam09).start()
def close(event=None):
rootWindow.quit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
rootWindow = tkinter.Tk()
RWidth = rootWindow.winfo_screenwidth()
RHeight = rootWindow.winfo_screenheight()
rootWindow.overrideredirect(True)
rootWindow.geometry('%dx%d' % (RWidth, RHeight))
cameraURL01 = (
'http://209.251.247.251:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507301122'
)
cameraURL02 = (
'http://108.209.209.13/webcapture.jpg?command=snap&channel=1?1507300788'
)
cameraURL03 = (
'http://72.81.132.14:60001/SnapshotJPEG?Resolution=640x480&amp;Quality=Clarity&amp;1507300872'
)
cameraURL04 = 'http://24.98.52.12:8082/cgi-bin/viewer/video.jpg?r=1507300889'
cameraURL05 = (
'http://80.24.185.230:86/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078226'
)
cameraURL06 = (
'http://24.23.232.13:50001/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507300932'
)
cameraURL07 = (
'http://80.24.185.230:81/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078327'
)
cameraURL08 = (
'http://80.24.185.230:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078336'
)
cameraURL09 = (
'http://63.172.41.245/webcapture.jpg?command=snap&channel=1?1508162812'
)
image01_label = tkinter.Label()
image02_label = tkinter.Label()
image03_label = tkinter.Label()
image04_label = tkinter.Label()
image05_label = tkinter.Label()
image06_label = tkinter.Label()
image07_label = tkinter.Label()
image08_label = tkinter.Label()
image09_label = tkinter.Label()
image01_label.grid(row=0, column=0)
image02_label.grid(row=0, column=1)
image03_label.grid(row=0, column=2)
image04_label.grid(row=1, column=0)
image05_label.grid(row=1, column=1)
image06_label.grid(row=1, column=2)
image07_label.grid(row=2, column=0)
image08_label.grid(row=2, column=1)
image09_label.grid(row=2, column=2)
def main():
rootWindow.bind('<Escape>', close)
Timer(0.1, refreshCam01).start()
Timer(0.1, refreshCam02).start()
Timer(0.1, refreshCam03).start()
Timer(0.1, refreshCam04).start()
Timer(0.1, refreshCam05).start()
Timer(0.1, refreshCam06).start()
Timer(0.1, refreshCam07).start()
Timer(0.1, refreshCam08).start()
Timer(0.1, refreshCam09).start()
def URL2PhotoImage(URL):
return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=
4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.
ANTIALIAS))
def refreshCam01():
try:
tmp_photo = URL2PhotoImage(cameraURL01)
image01_label.configure(image=tmp_photo)
image01_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam01).start()
def refreshCam02():
try:
tmp_photo = URL2PhotoImage(cameraURL02)
image02_label.configure(image=tmp_photo)
image02_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam02).start()
def refreshCam03():
try:
tmp_photo = URL2PhotoImage(cameraURL03)
image03_label.configure(image=tmp_photo)
image03_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam03).start()
def refreshCam04():
try:
tmp_photo = URL2PhotoImage(cameraURL04)
image04_label.configure(image=tmp_photo)
image04_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam04).start()
def refreshCam05():
try:
tmp_photo = URL2PhotoImage(cameraURL05)
image05_label.configure(image=tmp_photo)
image05_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam05).start()
def refreshCam06():
try:
tmp_photo = URL2PhotoImage(cameraURL06)
image06_label.configure(image=tmp_photo)
image06_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam06).start()
def refreshCam07():
try:
tmp_photo = URL2PhotoImage(cameraURL07)
image07_label.configure(image=tmp_photo)
image07_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam07).start()
def refreshCam08():
try:
tmp_photo = URL2PhotoImage(cameraURL08)
image08_label.configure(image=tmp_photo)
image08_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam08).start()
def refreshCam09():
try:
tmp_photo = URL2PhotoImage(cameraURL09)
image09_label.configure(image=tmp_photo)
image09_label.image = tmp_photo
except:
pass
if rootWindow.state() == 'normal':
Timer(0.05, refreshCam09).start()
def close(event=None):
rootWindow.quit()
if __name__ == '__main__':
main()
rootWindow.mainloop()
<|reserved_special_token_1|>
#!/usr/bin/python3
import tkinter
from PIL import Image, ImageTk
import requests
from io import BytesIO
from threading import Timer
# Full-screen, borderless tkinter window showing a 3x3 grid of webcam feeds.
rootWindow = tkinter.Tk()
# the following makes the program full-screen
RWidth = rootWindow.winfo_screenwidth()
RHeight = rootWindow.winfo_screenheight()
#
rootWindow.overrideredirect(True) # without a close option
rootWindow.geometry(("%dx%d")%(RWidth,RHeight))
# Snapshot URLs of the nine public webcams shown in the grid.
cameraURL01="http://209.251.247.251:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507301122"
cameraURL02="http://108.209.209.13/webcapture.jpg?command=snap&channel=1?1507300788"
cameraURL03="http://72.81.132.14:60001/SnapshotJPEG?Resolution=640x480&amp;Quality=Clarity&amp;1507300872"
cameraURL04="http://24.98.52.12:8082/cgi-bin/viewer/video.jpg?r=1507300889"
cameraURL05="http://80.24.185.230:86/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078226"
cameraURL06="http://24.23.232.13:50001/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507300932"
cameraURL07="http://80.24.185.230:81/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078327"
cameraURL08="http://80.24.185.230:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078336"
cameraURL09="http://63.172.41.245/webcapture.jpg?command=snap&channel=1?1508162812"
# One Label per grid cell; the refresh loops swap new frames into them.
image01_label = tkinter.Label()
image02_label = tkinter.Label()
image03_label = tkinter.Label()
image04_label = tkinter.Label()
image05_label = tkinter.Label()
image06_label = tkinter.Label()
image07_label = tkinter.Label()
image08_label = tkinter.Label()
image09_label = tkinter.Label()
# Lay the labels out as a 3x3 grid (row-major order).
image01_label.grid(row=0, column=0)
image02_label.grid(row=0, column=1)
image03_label.grid(row=0, column=2)
image04_label.grid(row=1, column=0)
image05_label.grid(row=1, column=1)
image06_label.grid(row=1, column=2)
image07_label.grid(row=2, column=0)
image08_label.grid(row=2, column=1)
image09_label.grid(row=2, column=2)
def main():
    """Wire the Escape shortcut and start one refresh loop per camera."""
    rootWindow.bind('<Escape>', close)
    # Each refresher reschedules itself, so one initial Timer per camera
    # is enough to keep all nine feeds updating.
    for refresher in (refreshCam01, refreshCam02, refreshCam03,
                      refreshCam04, refreshCam05, refreshCam06,
                      refreshCam07, refreshCam08, refreshCam09):
        Timer(0.1, refresher).start()
def URL2PhotoImage(URL):
    """Fetch a snapshot from *URL* and return it as a grid-cell-sized PhotoImage."""
    response = requests.get(URL, timeout=4)
    image = Image.open(BytesIO(response.content))
    # Each feed occupies one ninth of the screen (3x3 grid).
    cell_size = (int(RWidth / 3), int(RHeight / 3))
    return ImageTk.PhotoImage(image.resize(cell_size, Image.ANTIALIAS))
def refreshCam01():
try:
tmp_photo = URL2PhotoImage(cameraURL01)
image01_label.configure(image=tmp_photo)
image01_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam01).start()
def refreshCam02():
try:
tmp_photo = URL2PhotoImage(cameraURL02)
image02_label.configure(image=tmp_photo)
image02_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam02).start()
def refreshCam03():
try:
tmp_photo = URL2PhotoImage(cameraURL03)
image03_label.configure(image=tmp_photo)
image03_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam03).start()
def refreshCam04():
try:
tmp_photo = URL2PhotoImage(cameraURL04)
image04_label.configure(image=tmp_photo)
image04_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam04).start()
def refreshCam05():
try:
tmp_photo = URL2PhotoImage(cameraURL05)
image05_label.configure(image=tmp_photo)
image05_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam05).start()
def refreshCam06():
try:
tmp_photo = URL2PhotoImage(cameraURL06)
image06_label.configure(image=tmp_photo)
image06_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam06).start()
def refreshCam07():
try:
tmp_photo = URL2PhotoImage(cameraURL07)
image07_label.configure(image=tmp_photo)
image07_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam07).start()
def refreshCam08():
try:
tmp_photo = URL2PhotoImage(cameraURL08)
image08_label.configure(image=tmp_photo)
image08_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection
except:
pass
if rootWindow.state() == 'normal': Timer(0.05, refreshCam08).start()
def refreshCam09():
    """Fetch a fresh frame from camera 9, display it, and reschedule itself."""
    try:
        tmp_photo = URL2PhotoImage(cameraURL09)
        image09_label.configure(image=tmp_photo)
        # keep a reference on the label to prevent tkinter garbage collection
        image09_label.image = tmp_photo
    except Exception:
        # best-effort refresh: skip this frame on network/decode errors, but
        # do not swallow SystemExit/KeyboardInterrupt like the bare except did
        pass
    if rootWindow.state() == 'normal':
        Timer(0.05, refreshCam09).start()
def close(event=None):
    """Exit the tkinter main loop; bound to the Escape key in main()."""
    rootWindow.quit()
# schedule the camera refresh timers, then enter the tkinter event loop
if __name__ == '__main__':
    main()
    rootWindow.mainloop()
|
flexible
|
{
"blob_id": "be63e8e6e98c9afed66cae033a7f41f1be1561a8",
"index": 8077,
"step-1": "<mask token>\n\n\ndef refreshCam03():\n try:\n tmp_photo = URL2PhotoImage(cameraURL03)\n image03_label.configure(image=tmp_photo)\n image03_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam03).start()\n\n\n<mask token>\n\n\ndef refreshCam05():\n try:\n tmp_photo = URL2PhotoImage(cameraURL05)\n image05_label.configure(image=tmp_photo)\n image05_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam05).start()\n\n\n<mask token>\n\n\ndef close(event=None):\n rootWindow.quit()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n rootWindow.bind('<Escape>', close)\n Timer(0.1, refreshCam01).start()\n Timer(0.1, refreshCam02).start()\n Timer(0.1, refreshCam03).start()\n Timer(0.1, refreshCam04).start()\n Timer(0.1, refreshCam05).start()\n Timer(0.1, refreshCam06).start()\n Timer(0.1, refreshCam07).start()\n Timer(0.1, refreshCam08).start()\n Timer(0.1, refreshCam09).start()\n\n\ndef URL2PhotoImage(URL):\n return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=\n 4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.\n ANTIALIAS))\n\n\n<mask token>\n\n\ndef refreshCam02():\n try:\n tmp_photo = URL2PhotoImage(cameraURL02)\n image02_label.configure(image=tmp_photo)\n image02_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam02).start()\n\n\ndef refreshCam03():\n try:\n tmp_photo = URL2PhotoImage(cameraURL03)\n image03_label.configure(image=tmp_photo)\n image03_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam03).start()\n\n\ndef refreshCam04():\n try:\n tmp_photo = URL2PhotoImage(cameraURL04)\n image04_label.configure(image=tmp_photo)\n image04_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam04).start()\n\n\ndef refreshCam05():\n try:\n tmp_photo = URL2PhotoImage(cameraURL05)\n image05_label.configure(image=tmp_photo)\n image05_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam05).start()\n\n\n<mask token>\n\n\ndef refreshCam07():\n try:\n tmp_photo = URL2PhotoImage(cameraURL07)\n image07_label.configure(image=tmp_photo)\n image07_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam07).start()\n\n\ndef refreshCam08():\n try:\n tmp_photo = URL2PhotoImage(cameraURL08)\n image08_label.configure(image=tmp_photo)\n image08_label.image = tmp_photo\n except:\n pass\n if 
rootWindow.state() == 'normal':\n Timer(0.05, refreshCam08).start()\n\n\ndef refreshCam09():\n try:\n tmp_photo = URL2PhotoImage(cameraURL09)\n image09_label.configure(image=tmp_photo)\n image09_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam09).start()\n\n\ndef close(event=None):\n rootWindow.quit()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n rootWindow.bind('<Escape>', close)\n Timer(0.1, refreshCam01).start()\n Timer(0.1, refreshCam02).start()\n Timer(0.1, refreshCam03).start()\n Timer(0.1, refreshCam04).start()\n Timer(0.1, refreshCam05).start()\n Timer(0.1, refreshCam06).start()\n Timer(0.1, refreshCam07).start()\n Timer(0.1, refreshCam08).start()\n Timer(0.1, refreshCam09).start()\n\n\ndef URL2PhotoImage(URL):\n return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=\n 4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.\n ANTIALIAS))\n\n\ndef refreshCam01():\n try:\n tmp_photo = URL2PhotoImage(cameraURL01)\n image01_label.configure(image=tmp_photo)\n image01_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam01).start()\n\n\ndef refreshCam02():\n try:\n tmp_photo = URL2PhotoImage(cameraURL02)\n image02_label.configure(image=tmp_photo)\n image02_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam02).start()\n\n\ndef refreshCam03():\n try:\n tmp_photo = URL2PhotoImage(cameraURL03)\n image03_label.configure(image=tmp_photo)\n image03_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam03).start()\n\n\ndef refreshCam04():\n try:\n tmp_photo = URL2PhotoImage(cameraURL04)\n image04_label.configure(image=tmp_photo)\n image04_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam04).start()\n\n\ndef refreshCam05():\n try:\n tmp_photo = URL2PhotoImage(cameraURL05)\n image05_label.configure(image=tmp_photo)\n image05_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam05).start()\n\n\n<mask token>\n\n\ndef refreshCam07():\n try:\n tmp_photo = URL2PhotoImage(cameraURL07)\n image07_label.configure(image=tmp_photo)\n image07_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 
'normal':\n Timer(0.05, refreshCam07).start()\n\n\ndef refreshCam08():\n try:\n tmp_photo = URL2PhotoImage(cameraURL08)\n image08_label.configure(image=tmp_photo)\n image08_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam08).start()\n\n\ndef refreshCam09():\n try:\n tmp_photo = URL2PhotoImage(cameraURL09)\n image09_label.configure(image=tmp_photo)\n image09_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam09).start()\n\n\ndef close(event=None):\n rootWindow.quit()\n\n\n<mask token>\n",
"step-4": "<mask token>\nrootWindow = tkinter.Tk()\nRWidth = rootWindow.winfo_screenwidth()\nRHeight = rootWindow.winfo_screenheight()\nrootWindow.overrideredirect(True)\nrootWindow.geometry('%dx%d' % (RWidth, RHeight))\ncameraURL01 = (\n 'http://209.251.247.251:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507301122'\n )\ncameraURL02 = (\n 'http://108.209.209.13/webcapture.jpg?command=snap&channel=1?1507300788'\n )\ncameraURL03 = (\n 'http://72.81.132.14:60001/SnapshotJPEG?Resolution=640x480&amp;Quality=Clarity&amp;1507300872'\n )\ncameraURL04 = 'http://24.98.52.12:8082/cgi-bin/viewer/video.jpg?r=1507300889'\ncameraURL05 = (\n 'http://80.24.185.230:86/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078226'\n )\ncameraURL06 = (\n 'http://24.23.232.13:50001/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507300932'\n )\ncameraURL07 = (\n 'http://80.24.185.230:81/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078327'\n )\ncameraURL08 = (\n 'http://80.24.185.230:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078336'\n )\ncameraURL09 = (\n 'http://63.172.41.245/webcapture.jpg?command=snap&channel=1?1508162812'\n )\nimage01_label = tkinter.Label()\nimage02_label = tkinter.Label()\nimage03_label = tkinter.Label()\nimage04_label = tkinter.Label()\nimage05_label = tkinter.Label()\nimage06_label = tkinter.Label()\nimage07_label = tkinter.Label()\nimage08_label = tkinter.Label()\nimage09_label = tkinter.Label()\nimage01_label.grid(row=0, column=0)\nimage02_label.grid(row=0, column=1)\nimage03_label.grid(row=0, column=2)\nimage04_label.grid(row=1, column=0)\nimage05_label.grid(row=1, column=1)\nimage06_label.grid(row=1, column=2)\nimage07_label.grid(row=2, column=0)\nimage08_label.grid(row=2, column=1)\nimage09_label.grid(row=2, column=2)\n\n\ndef main():\n rootWindow.bind('<Escape>', close)\n Timer(0.1, refreshCam01).start()\n Timer(0.1, refreshCam02).start()\n Timer(0.1, 
refreshCam03).start()\n Timer(0.1, refreshCam04).start()\n Timer(0.1, refreshCam05).start()\n Timer(0.1, refreshCam06).start()\n Timer(0.1, refreshCam07).start()\n Timer(0.1, refreshCam08).start()\n Timer(0.1, refreshCam09).start()\n\n\ndef URL2PhotoImage(URL):\n return ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=\n 4).content)).resize((int(RWidth / 3), int(RHeight / 3)), Image.\n ANTIALIAS))\n\n\ndef refreshCam01():\n try:\n tmp_photo = URL2PhotoImage(cameraURL01)\n image01_label.configure(image=tmp_photo)\n image01_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam01).start()\n\n\ndef refreshCam02():\n try:\n tmp_photo = URL2PhotoImage(cameraURL02)\n image02_label.configure(image=tmp_photo)\n image02_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam02).start()\n\n\ndef refreshCam03():\n try:\n tmp_photo = URL2PhotoImage(cameraURL03)\n image03_label.configure(image=tmp_photo)\n image03_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam03).start()\n\n\ndef refreshCam04():\n try:\n tmp_photo = URL2PhotoImage(cameraURL04)\n image04_label.configure(image=tmp_photo)\n image04_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam04).start()\n\n\ndef refreshCam05():\n try:\n tmp_photo = URL2PhotoImage(cameraURL05)\n image05_label.configure(image=tmp_photo)\n image05_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam05).start()\n\n\ndef refreshCam06():\n try:\n tmp_photo = URL2PhotoImage(cameraURL06)\n image06_label.configure(image=tmp_photo)\n image06_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam06).start()\n\n\ndef refreshCam07():\n try:\n tmp_photo = URL2PhotoImage(cameraURL07)\n image07_label.configure(image=tmp_photo)\n 
image07_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam07).start()\n\n\ndef refreshCam08():\n try:\n tmp_photo = URL2PhotoImage(cameraURL08)\n image08_label.configure(image=tmp_photo)\n image08_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam08).start()\n\n\ndef refreshCam09():\n try:\n tmp_photo = URL2PhotoImage(cameraURL09)\n image09_label.configure(image=tmp_photo)\n image09_label.image = tmp_photo\n except:\n pass\n if rootWindow.state() == 'normal':\n Timer(0.05, refreshCam09).start()\n\n\ndef close(event=None):\n rootWindow.quit()\n\n\nif __name__ == '__main__':\n main()\n rootWindow.mainloop()\n",
"step-5": "#!/usr/bin/python3\n\nimport tkinter\nfrom PIL import Image, ImageTk\nimport requests\nfrom io import BytesIO\nfrom threading import Timer\n\n\nrootWindow = tkinter.Tk()\n\n# the following makes the program full-screen\nRWidth = rootWindow.winfo_screenwidth()\nRHeight = rootWindow.winfo_screenheight()\n#\nrootWindow.overrideredirect(True)\t# without a close option\nrootWindow.geometry((\"%dx%d\")%(RWidth,RHeight))\n\ncameraURL01=\"http://209.251.247.251:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507301122\"\ncameraURL02=\"http://108.209.209.13/webcapture.jpg?command=snap&channel=1?1507300788\"\ncameraURL03=\"http://72.81.132.14:60001/SnapshotJPEG?Resolution=640x480&amp;Quality=Clarity&amp;1507300872\"\ncameraURL04=\"http://24.98.52.12:8082/cgi-bin/viewer/video.jpg?r=1507300889\"\ncameraURL05=\"http://80.24.185.230:86/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078226\"\ncameraURL06=\"http://24.23.232.13:50001/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1507300932\"\ncameraURL07=\"http://80.24.185.230:81/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078327\"\ncameraURL08=\"http://80.24.185.230:82/cgi-bin/camera?resolution=640&amp;quality=1&amp;Language=0&amp;1515078336\"\ncameraURL09=\"http://63.172.41.245/webcapture.jpg?command=snap&channel=1?1508162812\"\n\n\nimage01_label = tkinter.Label()\nimage02_label = tkinter.Label()\nimage03_label = tkinter.Label()\nimage04_label = tkinter.Label()\nimage05_label = tkinter.Label()\nimage06_label = tkinter.Label()\nimage07_label = tkinter.Label()\nimage08_label = tkinter.Label()\nimage09_label = tkinter.Label()\nimage01_label.grid(row=0, column=0)\nimage02_label.grid(row=0, column=1)\nimage03_label.grid(row=0, column=2)\nimage04_label.grid(row=1, column=0)\nimage05_label.grid(row=1, column=1)\nimage06_label.grid(row=1, column=2)\nimage07_label.grid(row=2, column=0)\nimage08_label.grid(row=2, column=1)\nimage09_label.grid(row=2, 
column=2)\n\n\t\ndef main():\n\trootWindow.bind('<Escape>', close)\n\tTimer(0.1, refreshCam01).start()\n\tTimer(0.1, refreshCam02).start()\n\tTimer(0.1, refreshCam03).start()\n\tTimer(0.1, refreshCam04).start()\n\tTimer(0.1, refreshCam05).start()\n\tTimer(0.1, refreshCam06).start()\n\tTimer(0.1, refreshCam07).start()\n\tTimer(0.1, refreshCam08).start()\n\tTimer(0.1, refreshCam09).start()\n\n\ndef URL2PhotoImage(URL):\n\treturn ImageTk.PhotoImage(Image.open(BytesIO(requests.get(URL, timeout=4).content)).resize((int(RWidth/3),int(RHeight/3)), Image.ANTIALIAS))\n\t\ndef refreshCam01():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL01)\n\t\timage01_label.configure(image=tmp_photo)\n\t\timage01_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam01).start()\n\ndef refreshCam02():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL02)\n\t\timage02_label.configure(image=tmp_photo)\n\t\timage02_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam02).start()\n\t\ndef refreshCam03():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL03)\n\t\timage03_label.configure(image=tmp_photo)\n\t\timage03_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam03).start()\n\t\ndef refreshCam04():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL04)\n\t\timage04_label.configure(image=tmp_photo)\n\t\timage04_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam04).start()\n\t\ndef refreshCam05():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL05)\n\t\timage05_label.configure(image=tmp_photo)\n\t\timage05_label.image = tmp_photo # keep a reference to prevent 
tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam05).start()\n\t\ndef refreshCam06():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL06)\n\t\timage06_label.configure(image=tmp_photo)\n\t\timage06_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam06).start()\n\t\ndef refreshCam07():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL07)\n\t\timage07_label.configure(image=tmp_photo)\n\t\timage07_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam07).start()\n\t\ndef refreshCam08():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL08)\n\t\timage08_label.configure(image=tmp_photo)\n\t\timage08_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam08).start()\n\t\ndef refreshCam09():\n\ttry:\n\t\ttmp_photo = URL2PhotoImage(cameraURL09)\n\t\timage09_label.configure(image=tmp_photo)\n\t\timage09_label.image = tmp_photo # keep a reference to prevent tkinter garbage collection\n\texcept:\n\t\tpass\n\tif rootWindow.state() == 'normal': Timer(0.05, refreshCam09).start()\n\ndef close(event=None):\n\trootWindow.quit()\n\n# start the subprocess, main loop, and gui\nif __name__ == '__main__':\n\tmain()\n\trootWindow.mainloop()\n\t\n",
"step-ids": [
3,
10,
11,
14,
16
]
}
|
[
3,
10,
11,
14,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_msg_body(msg):
type = msg.get_content_maintype()
if type == 'multipart':
for part in msg.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif type == 'text':
return msg.get_payload()
def lambda_handler(event, context):
s3_bucket = event['Records'][0]['s3']['bucket']['name']
s3_key = event['Records'][0]['s3']['object']['key']
client = boto3.client('s3')
data = client.get_object(Bucket=s3_bucket, Key=s3_key)
contents = data['Body'].read()
msg = Parser(policy=default).parsestr(contents.decode('ascii'))
frm = msg['from']
to = msg['to']
time = msg['date']
subject = msg['subject']
body = get_msg_body(msg)
body = ' '.join(body.split()).strip()
print(time)
r = requests.post(endpoint, data={'data': body}, headers={
'Content-Type': 'application/x-www-form-urlencoded'})
r = json.loads(r.text)
print(r)
label = int(float(r['predicted_label']))
if label == 1:
label = 'SPAM'
else:
label = 'HAM'
p = float(r['predicted_probability'])
print(label, p)
if len(body) > 250:
body = body[0:250]
return_msg = ('We received your email sent at ' + time +
"with the subject '" + subject +
"""'.
Here is a 240 character sample of the email body:
""" +
body + """
The email was categorized as """ + label + ' with a ' +
str(p) + ' % confidence.')
client = boto3.client('ses')
status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',
Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':
'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})
print(status)
return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
endpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'
def get_msg_body(msg):
type = msg.get_content_maintype()
if type == 'multipart':
for part in msg.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif type == 'text':
return msg.get_payload()
def lambda_handler(event, context):
s3_bucket = event['Records'][0]['s3']['bucket']['name']
s3_key = event['Records'][0]['s3']['object']['key']
client = boto3.client('s3')
data = client.get_object(Bucket=s3_bucket, Key=s3_key)
contents = data['Body'].read()
msg = Parser(policy=default).parsestr(contents.decode('ascii'))
frm = msg['from']
to = msg['to']
time = msg['date']
subject = msg['subject']
body = get_msg_body(msg)
body = ' '.join(body.split()).strip()
print(time)
r = requests.post(endpoint, data={'data': body}, headers={
'Content-Type': 'application/x-www-form-urlencoded'})
r = json.loads(r.text)
print(r)
label = int(float(r['predicted_label']))
if label == 1:
label = 'SPAM'
else:
label = 'HAM'
p = float(r['predicted_probability'])
print(label, p)
if len(body) > 250:
body = body[0:250]
return_msg = ('We received your email sent at ' + time +
"with the subject '" + subject +
"""'.
Here is a 240 character sample of the email body:
""" +
body + """
The email was categorized as """ + label + ' with a ' +
str(p) + ' % confidence.')
client = boto3.client('ses')
status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',
Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':
'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})
print(status)
return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}
<|reserved_special_token_1|>
import json
import requests
import random
import boto3
from email.parser import BytesParser, Parser
from email.policy import default
endpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'
def get_msg_body(msg):
type = msg.get_content_maintype()
if type == 'multipart':
for part in msg.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif type == 'text':
return msg.get_payload()
def lambda_handler(event, context):
s3_bucket = event['Records'][0]['s3']['bucket']['name']
s3_key = event['Records'][0]['s3']['object']['key']
client = boto3.client('s3')
data = client.get_object(Bucket=s3_bucket, Key=s3_key)
contents = data['Body'].read()
msg = Parser(policy=default).parsestr(contents.decode('ascii'))
frm = msg['from']
to = msg['to']
time = msg['date']
subject = msg['subject']
body = get_msg_body(msg)
body = ' '.join(body.split()).strip()
print(time)
r = requests.post(endpoint, data={'data': body}, headers={
'Content-Type': 'application/x-www-form-urlencoded'})
r = json.loads(r.text)
print(r)
label = int(float(r['predicted_label']))
if label == 1:
label = 'SPAM'
else:
label = 'HAM'
p = float(r['predicted_probability'])
print(label, p)
if len(body) > 250:
body = body[0:250]
return_msg = ('We received your email sent at ' + time +
"with the subject '" + subject +
"""'.
Here is a 240 character sample of the email body:
""" +
body + """
The email was categorized as """ + label + ' with a ' +
str(p) + ' % confidence.')
client = boto3.client('ses')
status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',
Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':
'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})
print(status)
return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}
<|reserved_special_token_1|>
import json
import requests
import random
import boto3
from email.parser import BytesParser, Parser
from email.policy import default
##################################
endpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'
##################################
def get_msg_body(msg):
    """Return the text payload of a parsed email message.

    For multipart messages, returns the payload of the first part whose
    main content type is 'text'; for plain text messages, returns the
    payload itself. Returns None when no text content is found.
    """
    maintype = msg.get_content_maintype()  # renamed: 'type' shadowed the builtin
    if maintype == 'multipart':
        for part in msg.get_payload():
            if part.get_content_maintype() == 'text':
                return part.get_payload()
        return None  # explicit: multipart message with no text part
    elif maintype == 'text':
        return msg.get_payload()
def lambda_handler(event, context):
    """Classify an email dropped in S3 as HAM/SPAM and reply to the sender.

    Triggered by an S3 ObjectCreated event for a raw email stored by SES.
    Downloads the message, extracts and normalizes its text body, posts it
    to the prediction endpoint, and sends the sender an SES reply with the
    predicted label and confidence.

    Returns a dict with 'statusCode' and a JSON 'body' for the Lambda runtime.
    """
    # Location of the raw email object that triggered this invocation.
    s3_bucket = event['Records'][0]['s3']['bucket']['name']
    s3_key = event['Records'][0]['s3']['object']['key']

    client = boto3.client('s3')
    data = client.get_object(Bucket=s3_bucket, Key=s3_key)
    contents = data['Body'].read()

    # NOTE(review): assumes the stored email is pure ASCII -- confirm for
    # internationalized mail before relying on this decode.
    msg = Parser(policy=default).parsestr(contents.decode('ascii'))
    frm = msg['from']
    time = msg['date']
    subject = msg['subject']

    # Collapse all runs of whitespace in the body to single spaces.
    body = get_msg_body(msg)
    body = ' '.join(body.split()).strip()
    print(time)

    # Query the prediction API (single urlencoded 'data' field).
    r = requests.post(endpoint, data={'data': body}, headers={
        'Content-Type': 'application/x-www-form-urlencoded'})
    r = json.loads(r.text)
    print(r)

    label = 'SPAM' if int(float(r['predicted_label'])) == 1 else 'HAM'
    p = float(r['predicted_probability'])
    print(label, p)

    # Reply with at most a 250-character sample of the body.
    if len(body) > 250:
        body = body[0:250]
    # Fixed: added the missing space before "with the subject" and corrected
    # the advertised sample length (message said 240; the slice is 250).
    return_msg = ('We received your email sent at ' + time +
                  " with the subject '" + subject +
                  "'.\n\nHere is a 250 character sample of the email body:\n\n" +
                  body + '\n\nThe email was categorized as ' + label +
                  ' with a ' + str(p) + ' % confidence.')

    client = boto3.client('ses')
    status = client.send_email(
        Source='hamspamreply@hw3tiz2102.xyz',
        Destination={'ToAddresses': [frm]},
        Message={'Subject': {'Data': 'Ham/Spam Analysis'},
                 'Body': {'Text': {'Data': return_msg}}})
    print(status)

    return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}
|
flexible
|
{
"blob_id": "cc99811321083147540a00e8029b792c8afc2ada",
"index": 3233,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_msg_body(msg):\n type = msg.get_content_maintype()\n if type == 'multipart':\n for part in msg.get_payload():\n if part.get_content_maintype() == 'text':\n return part.get_payload()\n elif type == 'text':\n return msg.get_payload()\n\n\ndef lambda_handler(event, context):\n s3_bucket = event['Records'][0]['s3']['bucket']['name']\n s3_key = event['Records'][0]['s3']['object']['key']\n client = boto3.client('s3')\n data = client.get_object(Bucket=s3_bucket, Key=s3_key)\n contents = data['Body'].read()\n msg = Parser(policy=default).parsestr(contents.decode('ascii'))\n frm = msg['from']\n to = msg['to']\n time = msg['date']\n subject = msg['subject']\n body = get_msg_body(msg)\n body = ' '.join(body.split()).strip()\n print(time)\n r = requests.post(endpoint, data={'data': body}, headers={\n 'Content-Type': 'application/x-www-form-urlencoded'})\n r = json.loads(r.text)\n print(r)\n label = int(float(r['predicted_label']))\n if label == 1:\n label = 'SPAM'\n else:\n label = 'HAM'\n p = float(r['predicted_probability'])\n print(label, p)\n if len(body) > 250:\n body = body[0:250]\n return_msg = ('We received your email sent at ' + time +\n \"with the subject '\" + subject +\n \"\"\"'.\n\nHere is a 240 character sample of the email body:\n\n\"\"\" +\n body + \"\"\"\n\nThe email was categorized as \"\"\" + label + ' with a ' +\n str(p) + ' % confidence.')\n client = boto3.client('ses')\n status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',\n Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':\n 'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})\n print(status)\n return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}\n",
"step-3": "<mask token>\nendpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'\n\n\ndef get_msg_body(msg):\n type = msg.get_content_maintype()\n if type == 'multipart':\n for part in msg.get_payload():\n if part.get_content_maintype() == 'text':\n return part.get_payload()\n elif type == 'text':\n return msg.get_payload()\n\n\ndef lambda_handler(event, context):\n s3_bucket = event['Records'][0]['s3']['bucket']['name']\n s3_key = event['Records'][0]['s3']['object']['key']\n client = boto3.client('s3')\n data = client.get_object(Bucket=s3_bucket, Key=s3_key)\n contents = data['Body'].read()\n msg = Parser(policy=default).parsestr(contents.decode('ascii'))\n frm = msg['from']\n to = msg['to']\n time = msg['date']\n subject = msg['subject']\n body = get_msg_body(msg)\n body = ' '.join(body.split()).strip()\n print(time)\n r = requests.post(endpoint, data={'data': body}, headers={\n 'Content-Type': 'application/x-www-form-urlencoded'})\n r = json.loads(r.text)\n print(r)\n label = int(float(r['predicted_label']))\n if label == 1:\n label = 'SPAM'\n else:\n label = 'HAM'\n p = float(r['predicted_probability'])\n print(label, p)\n if len(body) > 250:\n body = body[0:250]\n return_msg = ('We received your email sent at ' + time +\n \"with the subject '\" + subject +\n \"\"\"'.\n\nHere is a 240 character sample of the email body:\n\n\"\"\" +\n body + \"\"\"\n\nThe email was categorized as \"\"\" + label + ' with a ' +\n str(p) + ' % confidence.')\n client = boto3.client('ses')\n status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',\n Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':\n 'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})\n print(status)\n return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}\n",
"step-4": "import json\nimport requests\nimport random\nimport boto3\nfrom email.parser import BytesParser, Parser\nfrom email.policy import default\nendpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'\n\n\ndef get_msg_body(msg):\n type = msg.get_content_maintype()\n if type == 'multipart':\n for part in msg.get_payload():\n if part.get_content_maintype() == 'text':\n return part.get_payload()\n elif type == 'text':\n return msg.get_payload()\n\n\ndef lambda_handler(event, context):\n s3_bucket = event['Records'][0]['s3']['bucket']['name']\n s3_key = event['Records'][0]['s3']['object']['key']\n client = boto3.client('s3')\n data = client.get_object(Bucket=s3_bucket, Key=s3_key)\n contents = data['Body'].read()\n msg = Parser(policy=default).parsestr(contents.decode('ascii'))\n frm = msg['from']\n to = msg['to']\n time = msg['date']\n subject = msg['subject']\n body = get_msg_body(msg)\n body = ' '.join(body.split()).strip()\n print(time)\n r = requests.post(endpoint, data={'data': body}, headers={\n 'Content-Type': 'application/x-www-form-urlencoded'})\n r = json.loads(r.text)\n print(r)\n label = int(float(r['predicted_label']))\n if label == 1:\n label = 'SPAM'\n else:\n label = 'HAM'\n p = float(r['predicted_probability'])\n print(label, p)\n if len(body) > 250:\n body = body[0:250]\n return_msg = ('We received your email sent at ' + time +\n \"with the subject '\" + subject +\n \"\"\"'.\n\nHere is a 240 character sample of the email body:\n\n\"\"\" +\n body + \"\"\"\n\nThe email was categorized as \"\"\" + label + ' with a ' +\n str(p) + ' % confidence.')\n client = boto3.client('ses')\n status = client.send_email(Source='hamspamreply@hw3tiz2102.xyz',\n Destination={'ToAddresses': [frm]}, Message={'Subject': {'Data':\n 'Ham/Spam Analysis'}, 'Body': {'Text': {'Data': return_msg}}})\n print(status)\n return {'statusCode': 200, 'body': json.dumps('LF2 successfull!')}\n",
"step-5": "import json\nimport requests\nimport random\nimport boto3\nfrom email.parser import BytesParser, Parser\nfrom email.policy import default\n\n##################################\nendpoint = 'https://5295t8jcs0.execute-api.us-east-1.amazonaws.com/Prod'\n##################################\n\ndef get_msg_body(msg):\n type = msg.get_content_maintype()\n\n if type == 'multipart':\n for part in msg.get_payload():\n if part.get_content_maintype() == 'text':\n return part.get_payload()\n elif type == 'text':\n return msg.get_payload()\n\ndef lambda_handler(event, context):\n \n s3_bucket = event['Records'][0]['s3']['bucket']['name']\n s3_key = event['Records'][0]['s3']['object']['key']\n \n# s3_bucket = 'hw3-storemails'\n# s3_key = '097caauj2ee2puftdrlohllf5748p70e1seovc81'\n \n client = boto3.client('s3')\n data = client.get_object(Bucket=s3_bucket, Key=s3_key)\n contents = data['Body'].read()\n \n msg = Parser(policy=default).parsestr(contents.decode('ascii'))\n frm = msg['from']\n to = msg['to']\n time = msg['date']\n subject = msg['subject']\n \n body = get_msg_body(msg)\n body = \" \".join(body.split()).strip()\n \n print(time)\n \n r = requests.post(endpoint, data = {'data':body}, headers = {'Content-Type': 'application/x-www-form-urlencoded'})\n r = json.loads(r.text)\n \n print(r)\n\n label = int(float(r['predicted_label']))\n if label == 1:\n label = 'SPAM'\n else: label = 'HAM'\n p = float(r['predicted_probability'])\n \n print(label, p)\n \n if len(body)>250: body = body[0:250]\n \n return_msg = 'We received your email sent at ' +\\\n time + 'with the subject \\'' + subject +\\\n '\\'.\\n\\nHere is a 240 character sample of the email body:\\n\\n' +\\\n body + '\\n\\nThe email was categorized as ' + label +\\\n ' with a ' + str(p) + ' % confidence.'\n\n client = boto3.client('ses')\n\n status = client.send_email(\n Source='hamspamreply@hw3tiz2102.xyz',\n Destination={\n 'ToAddresses': [\n frm,\n ],\n },\n Message={\n 'Subject': {\n 'Data': 'Ham/Spam 
Analysis'\n \n },\n 'Body': {\n 'Text': {\n 'Data': return_msg,\n }\n }\n },\n )\n \n print(status)\n \n return {\n 'statusCode': 200,\n 'body': json.dumps('LF2 successfull!')\n }\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ==============================================================================
# Created By : Karl Thompson
# Created Date: Mon March 25 17:34:00 CDT 2019
# ==============================================================================
"""nasdaq_itch_vwap - Generate a table of running volume-weighted average price
[VWAP] for NASDAQ stocks at trading hours based on Nasdaq TotalView-ITCH 5 data.
Data available at: ftp://emi.nasdaq.com/ITCH/01302019.NASDAQ_ITCH50.gz
If you use this code in your work, please cite the following:
Karl H. Thompson, NASDAQ-ITCH-VWAP, (2019), GitHub repository,
https://github.com/karlhthompson/nasdaq-itch-vwap"""
# ==============================================================================
# Imports
# ==============================================================================
import pandas as pd
import struct
import gzip
import csv
# function to parse select messages in ITCH data:
def parse_itch_data(itch_data):
    """Stream-parse a NASDAQ TotalView-ITCH 5.0 binary stream and dump
    selected message types to four CSV files in the working directory.

    Message types handled (by their 1-byte header):
      b'A'/b'F'  Add Order / Add Order with MPID -> add_order_data.csv
      b'E'       Order Executed                  -> ord_exec_data.csv
      b'C'       Order Executed With Price       -> ord_exec_pr_data.csv
      b'P'       Trade (non-cross)               -> trade_data.csv

    Each message is unpacked, then re-packed with the 6-byte (48-bit)
    timestamp zero-padded to 8 bytes so it can be re-read as a uint64
    ('Q'); prices are ITCH fixed-point (4 implied decimals), converted
    to float by dividing by 1e4.

    Args:
        itch_data: a binary file-like object positioned at the start of
            the ITCH data (e.g. a gzip file handle opened in 'rb' mode).

    Side effects: creates/overwrites the four CSV files listed above.
    """
    # read the first byte of each message in the data file:
    msg_header = itch_data.read(1)
    # initialize the csv file that will store parsed Add Order and Add Order
    # with MPID messages:
    add_order_data = open('add_order_data.csv','w')
    add_order_wrtr = csv.writer(add_order_data)
    # initialize the csv file that will store parsed Order Executed messages:
    ord_exec_data = open('ord_exec_data.csv','w')
    ord_exec_wrtr = csv.writer(ord_exec_data)
    # initialize the csv file that will store parsed Order Executed With Price
    # messages:
    ord_exec_pr_data = open('ord_exec_pr_data.csv','w')
    ord_exec_pr_wrtr = csv.writer(ord_exec_pr_data)
    # initialize the csv file that will store parsed Trade messages:
    trade_data = open('trade_data.csv','w')
    trade_wrtr = csv.writer(trade_data)
    # iterate over all messages in the data file (loop ends at EOF, when
    # read(1) returns b''):
    while msg_header:
        # process Add Order and Add Order with MPID messages:
        if msg_header == b'A' or msg_header == b'F':
            message = itch_data.read(35)
            if len(message) < 35: break
            un_pkd = struct.unpack('>4s6sQcI8cI',message)
            # re-pack with b'\x00\x00' prepended to the 6-byte timestamp so
            # the '2s6s' pair re-reads as one big-endian uint64 ('Q'); the
            # 8 single chars of the stock field are joined into one '8s':
            re_pkd = struct.pack('>s4s2s6sQsI8sI',msg_header,un_pkd[0],
                b'\x00\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],
                b''.join(list(un_pkd[5:13])),un_pkd[13])
            parsed_ao = list(struct.unpack('>sHHQQsI8sI',re_pkd))
            # filter for data with valid Buy/Sell Indicators:
            if parsed_ao[5] == b'B' or parsed_ao[5] == b'S':
                # further filter for data with plausible field values:
                if (parsed_ao[4] < 1e14 and parsed_ao[6] < 1e8):
                    # write the parsed message to the csv file:
                    # NOTE(review): bare except — any decode failure (not
                    # just UnicodeDecodeError) maps the stock to '0'
                    try:
                        sto = parsed_ao[7].decode() # stock
                    except:
                        sto = '0' # Write 0 if stock byte decode fails
                    tim = parsed_ao[3] # timestamp (ns since midnight)
                    ref = parsed_ao[4] # order reference number
                    sha = parsed_ao[6] # shares
                    pri = float(parsed_ao[8])/1e4 # price (4 implied decimals)
                    add_order_wrtr.writerow([sto, tim, ref, sha, pri])
        # process Order Executed messages:
        if msg_header == b'E':
            message = itch_data.read(30)
            if len(message) < 30: break
            un_pkd = struct.unpack('>4s6sQIQ',message)
            # same timestamp zero-padding trick as above:
            re_pkd = struct.pack('>s4s2s6sQIQ',msg_header,un_pkd[0],b'\x00\x00',
                un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4])
            parsed_oe = list(struct.unpack('>sHHQQIQ',re_pkd))
            # filter for data with plausible field values:
            if (parsed_oe[4] < 1e14 and parsed_oe[5] < 1e8 and parsed_oe[6] < 1e11):
                # write the parsed message to the csv file; price is looked
                # up later by joining on the order reference number:
                ref = parsed_oe[4] # order reference number
                sha = parsed_oe[5] # shares
                ord_exec_wrtr.writerow([ref, sha])
        # process Order Executed With Price messages:
        if msg_header == b'C':
            message = itch_data.read(35)
            if len(message) < 35: break
            un_pkd = struct.unpack('>4s6sQIQcI',message)
            re_pkd = struct.pack('>s4s2s6sQIQsI',msg_header,un_pkd[0],
                b'\x00\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],un_pkd[5],
                un_pkd[6])
            parsed_oewp = list(struct.unpack('>sHHQQIQsI',re_pkd))
            # filter for plausible field values; b'Y' keeps only executions
            # marked printable:
            if (parsed_oewp[4] < 1e14 and parsed_oewp[5] < 1e6 and
                parsed_oewp[6] < 1e10 and parsed_oewp[7] == b'Y'):
                # write the parsed message to the csv file:
                ref = parsed_oewp[4] # order reference number
                sha = parsed_oewp[5] # shares
                pri = float(parsed_oewp[8])/1e4 # new price
                ord_exec_pr_wrtr.writerow([ref, sha, pri])
        # process Trade messages:
        if msg_header == b'P':
            message = itch_data.read(43)
            if len(message) < 43: break
            un_pkd = struct.unpack('>4s6sQcI8cIQ',message)
            re_pkd = struct.pack('>s4s2s6sQsI8sIQ',msg_header,un_pkd[0],
                b'\x00\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],
                b''.join(list(un_pkd[5:13])),un_pkd[13],un_pkd[14])
            parsed_t = list(struct.unpack('>sHHQQsI8sIQ',re_pkd))
            # filter for data with valid Order Reference Numbers
            # and Buy/Sell Indicators:
            if parsed_t[4] == 0 and parsed_t[5] == b'B':
                # write the parsed message to the csv file:
                sto = parsed_t[7].decode() # stock
                tim = parsed_t[3] # timestamp (ns since midnight)
                sha = parsed_t[6] # shares
                pri = float(parsed_t[8])/1e4 # price
                pro = parsed_t[6]*float(parsed_t[8])/1e4 # product (shares*price)
                trade_wrtr.writerow([sto, tim, sha, pri, pro])
        # advance the file position to the next message:
        msg_header = itch_data.read(1)
    # close the csv files:
    add_order_data.close()
    ord_exec_data.close()
    ord_exec_pr_data.close()
    trade_data.close()
# function to calculate the hourly VWAP based on parsed ITCH data:
def calculate_vwap():
    """Compute a running volume-weighted average price (VWAP) per stock at
    each trading hour from the CSV files produced by parse_itch_data(), and
    save the table to NASDAQ_VWAP_01_30_2019.xlsx.

    Trades come from three sources that are concatenated: Trade ('P')
    messages directly, and Order Executed ('E') / Order Executed With Price
    ('C') messages joined back to their Add Order rows via the order
    reference number. VWAP at hour h = sum(price*shares) / sum(shares)
    over all trades with timestamp <= h (computed via per-group means,
    which cancel the group counts in the ratio).

    Reads:  add_order_data.csv, ord_exec_data.csv, ord_exec_pr_data.csv,
            trade_data.csv (working directory).
    Writes: NASDAQ_VWAP_01_30_2019.xlsx.
    """
    # import the parsed Add Order data into a Pandas dataframe:
    add_order_df = pd.read_csv('add_order_data.csv', index_col = None,
        names = ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])
    # import the parsed Order Executed data into a Pandas dataframe:
    ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col = None,
        names = ['Reference', 'Shares'])
    # import the parsed Order Executed With Price data into a Pandas dataframe:
    ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col = None,
        names = ['Reference', 'Shares', 'Price'])
    # import the parsed Trade data into a Pandas dataframe (indexed by Stock):
    trade_1_df = pd.read_csv('trade_data.csv', index_col = 0,
        names=['Stock', 'Timestamp', 'Shares', 'Price', 'Product'])
    # merge the Order Executed data with the Add Order data to extract
    # the executed trades data within; Shares_x is the executed share count
    # from the execution message, Price comes from the original add order:
    trade_2_df = ord_exec_df.merge(add_order_df,on=['Reference'],how='inner')
    trade_2_df = trade_2_df[trade_2_df['Stock']!='0']
    trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']].set_index('Stock')
    trade_2_df = trade_2_df.rename(columns={"Shares_x": "Shares"})
    trade_2_df['Product'] = trade_2_df['Price']*trade_2_df['Shares']
    # merge the Order Executed With Price data with the Add Order data
    # to extract the executed trades data within; here Price_x is the
    # execution price, which overrides the add-order price:
    trade_3_df = ord_exec_pr_df.merge(add_order_df,on=['Reference'],how='inner')
    trade_3_df = trade_3_df[trade_3_df['Stock']!='0']
    trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']].set_index('Stock')
    trade_3_df = trade_3_df.rename(columns={"Shares_x": "Shares", "Price_x": "Price"})
    trade_3_df['Product'] = trade_3_df['Price']*trade_3_df['Shares']
    # concatenate all three trade dataframes (trades from Trade messages,
    # trades from Executed Order messages, and trades from Executed Order
    # With Price messages) into a comprehensive dataframe:
    trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])
    # create a dataframe for hourly running VWAP values: the .all()/.drop()
    # pair is used only to obtain an empty frame indexed by the unique
    # stock symbols; the aggregated values themselves are discarded:
    vwap_df = trade_df.groupby(['Stock']).all().drop(
        columns=['Timestamp', 'Shares', 'Price', 'Product'])
    # create a list of trading hours in nanoseconds (timestamps are ns
    # since midnight; 3.6e12 ns = 1 hour):
    hour_list = [3.6e12 * i for i in [9.5, 10, 11, 12, 13, 14, 15, 16]]
    # iterate over the trading hours list:
    for hour in hour_list:
        # extract data for trades that occurred before the specified hour:
        trade_df_copy = trade_df[trade_df.Timestamp <= hour]
        # group the trade dataframe by stock:
        trade_df_groups = trade_df_copy.groupby(['Stock'])
        # calculate the mean for all trade data:
        # NOTE(review): numeric_only=False is deprecated/raises on
        # non-numeric columns in recent pandas — verify pandas version
        trade_df_mean = trade_df_groups.mean(numeric_only=False)
        # calculate the VWAP for all stocks (mean(Product)/mean(Shares)
        # equals sum(Product)/sum(Shares) since the counts cancel):
        trade_df_mean['VWAP'] = trade_df_mean['Product']/trade_df_mean['Shares']
        # merge the calculated VWAP fields into the VWAP dataframe; 'left'
        # keeps stocks with no trades yet (their VWAP stays NaN):
        vwap_df = pd.merge(vwap_df,trade_df_mean['VWAP'],on=['Stock'],how='left')
    # adjust the column names in the VWAP dataframe (one per entry in
    # hour_list, in the same order):
    vwap_df.columns = ['VWAP at 09:30AM','VWAP at 10:00AM','VWAP at 11:00AM',
                       'VWAP at 12:00PM','VWAP at 01:00PM','VWAP at 02:00PM',
                       'VWAP at 03:00PM', 'VWAP at 04:00PM']
    # save the hourly VWAP table in Excel format:
    vwap_df.to_excel("NASDAQ_VWAP_01_30_2019.xlsx")
if __name__ == '__main__':
    # Open the gzipped ITCH data file with a context manager so the handle
    # is closed even if parsing raises (the original closed it manually,
    # which leaked the handle on an exception mid-parse):
    with gzip.open('01302019.NASDAQ_ITCH50.gz', 'rb') as itch_data:
        # parse the data into the intermediate CSV files:
        parse_itch_data(itch_data)
    # calculate the hourly VWAP for all stocks and write the Excel table:
    calculate_vwap()
|
normal
|
{
"blob_id": "806124926008078e592141d80d08ccfbb3046dbf",
"index": 7092,
"step-1": "<mask token>\n\n\ndef calculate_vwap():\n add_order_df = pd.read_csv('add_order_data.csv', index_col=None, names=\n ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])\n ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col=None, names=[\n 'Reference', 'Shares'])\n ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col=None,\n names=['Reference', 'Shares', 'Price'])\n trade_1_df = pd.read_csv('trade_data.csv', index_col=0, names=['Stock',\n 'Timestamp', 'Shares', 'Price', 'Product'])\n trade_2_df = ord_exec_df.merge(add_order_df, on=['Reference'], how='inner')\n trade_2_df = trade_2_df[trade_2_df['Stock'] != '0']\n trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']\n ].set_index('Stock')\n trade_2_df = trade_2_df.rename(columns={'Shares_x': 'Shares'})\n trade_2_df['Product'] = trade_2_df['Price'] * trade_2_df['Shares']\n trade_3_df = ord_exec_pr_df.merge(add_order_df, on=['Reference'], how=\n 'inner')\n trade_3_df = trade_3_df[trade_3_df['Stock'] != '0']\n trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']\n ].set_index('Stock')\n trade_3_df = trade_3_df.rename(columns={'Shares_x': 'Shares', 'Price_x':\n 'Price'})\n trade_3_df['Product'] = trade_3_df['Price'] * trade_3_df['Shares']\n trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])\n vwap_df = trade_df.groupby(['Stock']).all().drop(columns=['Timestamp',\n 'Shares', 'Price', 'Product'])\n hour_list = [(3600000000000.0 * i) for i in [9.5, 10, 11, 12, 13, 14, \n 15, 16]]\n for hour in hour_list:\n trade_df_copy = trade_df[trade_df.Timestamp <= hour]\n trade_df_groups = trade_df_copy.groupby(['Stock'])\n trade_df_mean = trade_df_groups.mean(numeric_only=False)\n trade_df_mean['VWAP'] = trade_df_mean['Product'] / trade_df_mean[\n 'Shares']\n vwap_df = pd.merge(vwap_df, trade_df_mean['VWAP'], on=['Stock'],\n how='left')\n vwap_df.columns = ['VWAP at 09:30AM', 'VWAP at 10:00AM',\n 'VWAP at 11:00AM', 'VWAP at 12:00PM', 'VWAP at 01:00PM',\n 'VWAP at 
02:00PM', 'VWAP at 03:00PM', 'VWAP at 04:00PM']\n vwap_df.to_excel('NASDAQ_VWAP_01_30_2019.xlsx')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_itch_data(itch_data):\n msg_header = itch_data.read(1)\n add_order_data = open('add_order_data.csv', 'w')\n add_order_wrtr = csv.writer(add_order_data)\n ord_exec_data = open('ord_exec_data.csv', 'w')\n ord_exec_wrtr = csv.writer(ord_exec_data)\n ord_exec_pr_data = open('ord_exec_pr_data.csv', 'w')\n ord_exec_pr_wrtr = csv.writer(ord_exec_pr_data)\n trade_data = open('trade_data.csv', 'w')\n trade_wrtr = csv.writer(trade_data)\n while msg_header:\n if msg_header == b'A' or msg_header == b'F':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQcI8cI', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13])\n parsed_ao = list(struct.unpack('>sHHQQsI8sI', re_pkd))\n if parsed_ao[5] == b'B' or parsed_ao[5] == b'S':\n if parsed_ao[4] < 100000000000000.0 and parsed_ao[6\n ] < 100000000.0:\n try:\n sto = parsed_ao[7].decode()\n except:\n sto = '0'\n tim = parsed_ao[3]\n ref = parsed_ao[4]\n sha = parsed_ao[6]\n pri = float(parsed_ao[8]) / 10000.0\n add_order_wrtr.writerow([sto, tim, ref, sha, pri])\n if msg_header == b'E':\n message = itch_data.read(30)\n if len(message) < 30:\n break\n un_pkd = struct.unpack('>4s6sQIQ', message)\n re_pkd = struct.pack('>s4s2s6sQIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4])\n parsed_oe = list(struct.unpack('>sHHQQIQ', re_pkd))\n if parsed_oe[4] < 100000000000000.0 and parsed_oe[5\n ] < 100000000.0 and parsed_oe[6] < 100000000000.0:\n ref = parsed_oe[4]\n sha = parsed_oe[5]\n ord_exec_wrtr.writerow([ref, sha])\n if msg_header == b'C':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQIQcI', message)\n re_pkd = struct.pack('>s4s2s6sQIQsI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n un_pkd[5], un_pkd[6])\n 
parsed_oewp = list(struct.unpack('>sHHQQIQsI', re_pkd))\n if parsed_oewp[4] < 100000000000000.0 and parsed_oewp[5\n ] < 1000000.0 and parsed_oewp[6\n ] < 10000000000.0 and parsed_oewp[7] == b'Y':\n ref = parsed_oewp[4]\n sha = parsed_oewp[5]\n pri = float(parsed_oewp[8]) / 10000.0\n ord_exec_pr_wrtr.writerow([ref, sha, pri])\n if msg_header == b'P':\n message = itch_data.read(43)\n if len(message) < 43:\n break\n un_pkd = struct.unpack('>4s6sQcI8cIQ', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13], un_pkd[14])\n parsed_t = list(struct.unpack('>sHHQQsI8sIQ', re_pkd))\n if parsed_t[4] == 0 and parsed_t[5] == b'B':\n sto = parsed_t[7].decode()\n tim = parsed_t[3]\n sha = parsed_t[6]\n pri = float(parsed_t[8]) / 10000.0\n pro = parsed_t[6] * float(parsed_t[8]) / 10000.0\n trade_wrtr.writerow([sto, tim, sha, pri, pro])\n msg_header = itch_data.read(1)\n add_order_data.close()\n ord_exec_data.close()\n ord_exec_pr_data.close()\n trade_data.close()\n\n\ndef calculate_vwap():\n add_order_df = pd.read_csv('add_order_data.csv', index_col=None, names=\n ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])\n ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col=None, names=[\n 'Reference', 'Shares'])\n ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col=None,\n names=['Reference', 'Shares', 'Price'])\n trade_1_df = pd.read_csv('trade_data.csv', index_col=0, names=['Stock',\n 'Timestamp', 'Shares', 'Price', 'Product'])\n trade_2_df = ord_exec_df.merge(add_order_df, on=['Reference'], how='inner')\n trade_2_df = trade_2_df[trade_2_df['Stock'] != '0']\n trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']\n ].set_index('Stock')\n trade_2_df = trade_2_df.rename(columns={'Shares_x': 'Shares'})\n trade_2_df['Product'] = trade_2_df['Price'] * trade_2_df['Shares']\n trade_3_df = ord_exec_pr_df.merge(add_order_df, 
on=['Reference'], how=\n 'inner')\n trade_3_df = trade_3_df[trade_3_df['Stock'] != '0']\n trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']\n ].set_index('Stock')\n trade_3_df = trade_3_df.rename(columns={'Shares_x': 'Shares', 'Price_x':\n 'Price'})\n trade_3_df['Product'] = trade_3_df['Price'] * trade_3_df['Shares']\n trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])\n vwap_df = trade_df.groupby(['Stock']).all().drop(columns=['Timestamp',\n 'Shares', 'Price', 'Product'])\n hour_list = [(3600000000000.0 * i) for i in [9.5, 10, 11, 12, 13, 14, \n 15, 16]]\n for hour in hour_list:\n trade_df_copy = trade_df[trade_df.Timestamp <= hour]\n trade_df_groups = trade_df_copy.groupby(['Stock'])\n trade_df_mean = trade_df_groups.mean(numeric_only=False)\n trade_df_mean['VWAP'] = trade_df_mean['Product'] / trade_df_mean[\n 'Shares']\n vwap_df = pd.merge(vwap_df, trade_df_mean['VWAP'], on=['Stock'],\n how='left')\n vwap_df.columns = ['VWAP at 09:30AM', 'VWAP at 10:00AM',\n 'VWAP at 11:00AM', 'VWAP at 12:00PM', 'VWAP at 01:00PM',\n 'VWAP at 02:00PM', 'VWAP at 03:00PM', 'VWAP at 04:00PM']\n vwap_df.to_excel('NASDAQ_VWAP_01_30_2019.xlsx')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef parse_itch_data(itch_data):\n msg_header = itch_data.read(1)\n add_order_data = open('add_order_data.csv', 'w')\n add_order_wrtr = csv.writer(add_order_data)\n ord_exec_data = open('ord_exec_data.csv', 'w')\n ord_exec_wrtr = csv.writer(ord_exec_data)\n ord_exec_pr_data = open('ord_exec_pr_data.csv', 'w')\n ord_exec_pr_wrtr = csv.writer(ord_exec_pr_data)\n trade_data = open('trade_data.csv', 'w')\n trade_wrtr = csv.writer(trade_data)\n while msg_header:\n if msg_header == b'A' or msg_header == b'F':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQcI8cI', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13])\n parsed_ao = list(struct.unpack('>sHHQQsI8sI', re_pkd))\n if parsed_ao[5] == b'B' or parsed_ao[5] == b'S':\n if parsed_ao[4] < 100000000000000.0 and parsed_ao[6\n ] < 100000000.0:\n try:\n sto = parsed_ao[7].decode()\n except:\n sto = '0'\n tim = parsed_ao[3]\n ref = parsed_ao[4]\n sha = parsed_ao[6]\n pri = float(parsed_ao[8]) / 10000.0\n add_order_wrtr.writerow([sto, tim, ref, sha, pri])\n if msg_header == b'E':\n message = itch_data.read(30)\n if len(message) < 30:\n break\n un_pkd = struct.unpack('>4s6sQIQ', message)\n re_pkd = struct.pack('>s4s2s6sQIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4])\n parsed_oe = list(struct.unpack('>sHHQQIQ', re_pkd))\n if parsed_oe[4] < 100000000000000.0 and parsed_oe[5\n ] < 100000000.0 and parsed_oe[6] < 100000000000.0:\n ref = parsed_oe[4]\n sha = parsed_oe[5]\n ord_exec_wrtr.writerow([ref, sha])\n if msg_header == b'C':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQIQcI', message)\n re_pkd = struct.pack('>s4s2s6sQIQsI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n un_pkd[5], un_pkd[6])\n 
parsed_oewp = list(struct.unpack('>sHHQQIQsI', re_pkd))\n if parsed_oewp[4] < 100000000000000.0 and parsed_oewp[5\n ] < 1000000.0 and parsed_oewp[6\n ] < 10000000000.0 and parsed_oewp[7] == b'Y':\n ref = parsed_oewp[4]\n sha = parsed_oewp[5]\n pri = float(parsed_oewp[8]) / 10000.0\n ord_exec_pr_wrtr.writerow([ref, sha, pri])\n if msg_header == b'P':\n message = itch_data.read(43)\n if len(message) < 43:\n break\n un_pkd = struct.unpack('>4s6sQcI8cIQ', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13], un_pkd[14])\n parsed_t = list(struct.unpack('>sHHQQsI8sIQ', re_pkd))\n if parsed_t[4] == 0 and parsed_t[5] == b'B':\n sto = parsed_t[7].decode()\n tim = parsed_t[3]\n sha = parsed_t[6]\n pri = float(parsed_t[8]) / 10000.0\n pro = parsed_t[6] * float(parsed_t[8]) / 10000.0\n trade_wrtr.writerow([sto, tim, sha, pri, pro])\n msg_header = itch_data.read(1)\n add_order_data.close()\n ord_exec_data.close()\n ord_exec_pr_data.close()\n trade_data.close()\n\n\ndef calculate_vwap():\n add_order_df = pd.read_csv('add_order_data.csv', index_col=None, names=\n ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])\n ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col=None, names=[\n 'Reference', 'Shares'])\n ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col=None,\n names=['Reference', 'Shares', 'Price'])\n trade_1_df = pd.read_csv('trade_data.csv', index_col=0, names=['Stock',\n 'Timestamp', 'Shares', 'Price', 'Product'])\n trade_2_df = ord_exec_df.merge(add_order_df, on=['Reference'], how='inner')\n trade_2_df = trade_2_df[trade_2_df['Stock'] != '0']\n trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']\n ].set_index('Stock')\n trade_2_df = trade_2_df.rename(columns={'Shares_x': 'Shares'})\n trade_2_df['Product'] = trade_2_df['Price'] * trade_2_df['Shares']\n trade_3_df = ord_exec_pr_df.merge(add_order_df, 
on=['Reference'], how=\n 'inner')\n trade_3_df = trade_3_df[trade_3_df['Stock'] != '0']\n trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']\n ].set_index('Stock')\n trade_3_df = trade_3_df.rename(columns={'Shares_x': 'Shares', 'Price_x':\n 'Price'})\n trade_3_df['Product'] = trade_3_df['Price'] * trade_3_df['Shares']\n trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])\n vwap_df = trade_df.groupby(['Stock']).all().drop(columns=['Timestamp',\n 'Shares', 'Price', 'Product'])\n hour_list = [(3600000000000.0 * i) for i in [9.5, 10, 11, 12, 13, 14, \n 15, 16]]\n for hour in hour_list:\n trade_df_copy = trade_df[trade_df.Timestamp <= hour]\n trade_df_groups = trade_df_copy.groupby(['Stock'])\n trade_df_mean = trade_df_groups.mean(numeric_only=False)\n trade_df_mean['VWAP'] = trade_df_mean['Product'] / trade_df_mean[\n 'Shares']\n vwap_df = pd.merge(vwap_df, trade_df_mean['VWAP'], on=['Stock'],\n how='left')\n vwap_df.columns = ['VWAP at 09:30AM', 'VWAP at 10:00AM',\n 'VWAP at 11:00AM', 'VWAP at 12:00PM', 'VWAP at 01:00PM',\n 'VWAP at 02:00PM', 'VWAP at 03:00PM', 'VWAP at 04:00PM']\n vwap_df.to_excel('NASDAQ_VWAP_01_30_2019.xlsx')\n\n\nif __name__ == '__main__':\n itch_data = gzip.open('01302019.NASDAQ_ITCH50.gz', 'rb')\n parse_itch_data(itch_data)\n itch_data.close()\n calculate_vwap()\n",
"step-4": "<mask token>\nimport pandas as pd\nimport struct\nimport gzip\nimport csv\n\n\ndef parse_itch_data(itch_data):\n msg_header = itch_data.read(1)\n add_order_data = open('add_order_data.csv', 'w')\n add_order_wrtr = csv.writer(add_order_data)\n ord_exec_data = open('ord_exec_data.csv', 'w')\n ord_exec_wrtr = csv.writer(ord_exec_data)\n ord_exec_pr_data = open('ord_exec_pr_data.csv', 'w')\n ord_exec_pr_wrtr = csv.writer(ord_exec_pr_data)\n trade_data = open('trade_data.csv', 'w')\n trade_wrtr = csv.writer(trade_data)\n while msg_header:\n if msg_header == b'A' or msg_header == b'F':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQcI8cI', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13])\n parsed_ao = list(struct.unpack('>sHHQQsI8sI', re_pkd))\n if parsed_ao[5] == b'B' or parsed_ao[5] == b'S':\n if parsed_ao[4] < 100000000000000.0 and parsed_ao[6\n ] < 100000000.0:\n try:\n sto = parsed_ao[7].decode()\n except:\n sto = '0'\n tim = parsed_ao[3]\n ref = parsed_ao[4]\n sha = parsed_ao[6]\n pri = float(parsed_ao[8]) / 10000.0\n add_order_wrtr.writerow([sto, tim, ref, sha, pri])\n if msg_header == b'E':\n message = itch_data.read(30)\n if len(message) < 30:\n break\n un_pkd = struct.unpack('>4s6sQIQ', message)\n re_pkd = struct.pack('>s4s2s6sQIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4])\n parsed_oe = list(struct.unpack('>sHHQQIQ', re_pkd))\n if parsed_oe[4] < 100000000000000.0 and parsed_oe[5\n ] < 100000000.0 and parsed_oe[6] < 100000000000.0:\n ref = parsed_oe[4]\n sha = parsed_oe[5]\n ord_exec_wrtr.writerow([ref, sha])\n if msg_header == b'C':\n message = itch_data.read(35)\n if len(message) < 35:\n break\n un_pkd = struct.unpack('>4s6sQIQcI', message)\n re_pkd = struct.pack('>s4s2s6sQIQsI', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], 
un_pkd[2], un_pkd[3], un_pkd[4],\n un_pkd[5], un_pkd[6])\n parsed_oewp = list(struct.unpack('>sHHQQIQsI', re_pkd))\n if parsed_oewp[4] < 100000000000000.0 and parsed_oewp[5\n ] < 1000000.0 and parsed_oewp[6\n ] < 10000000000.0 and parsed_oewp[7] == b'Y':\n ref = parsed_oewp[4]\n sha = parsed_oewp[5]\n pri = float(parsed_oewp[8]) / 10000.0\n ord_exec_pr_wrtr.writerow([ref, sha, pri])\n if msg_header == b'P':\n message = itch_data.read(43)\n if len(message) < 43:\n break\n un_pkd = struct.unpack('>4s6sQcI8cIQ', message)\n re_pkd = struct.pack('>s4s2s6sQsI8sIQ', msg_header, un_pkd[0],\n b'\\x00\\x00', un_pkd[1], un_pkd[2], un_pkd[3], un_pkd[4],\n b''.join(list(un_pkd[5:13])), un_pkd[13], un_pkd[14])\n parsed_t = list(struct.unpack('>sHHQQsI8sIQ', re_pkd))\n if parsed_t[4] == 0 and parsed_t[5] == b'B':\n sto = parsed_t[7].decode()\n tim = parsed_t[3]\n sha = parsed_t[6]\n pri = float(parsed_t[8]) / 10000.0\n pro = parsed_t[6] * float(parsed_t[8]) / 10000.0\n trade_wrtr.writerow([sto, tim, sha, pri, pro])\n msg_header = itch_data.read(1)\n add_order_data.close()\n ord_exec_data.close()\n ord_exec_pr_data.close()\n trade_data.close()\n\n\ndef calculate_vwap():\n add_order_df = pd.read_csv('add_order_data.csv', index_col=None, names=\n ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])\n ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col=None, names=[\n 'Reference', 'Shares'])\n ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col=None,\n names=['Reference', 'Shares', 'Price'])\n trade_1_df = pd.read_csv('trade_data.csv', index_col=0, names=['Stock',\n 'Timestamp', 'Shares', 'Price', 'Product'])\n trade_2_df = ord_exec_df.merge(add_order_df, on=['Reference'], how='inner')\n trade_2_df = trade_2_df[trade_2_df['Stock'] != '0']\n trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']\n ].set_index('Stock')\n trade_2_df = trade_2_df.rename(columns={'Shares_x': 'Shares'})\n trade_2_df['Product'] = trade_2_df['Price'] * trade_2_df['Shares']\n 
trade_3_df = ord_exec_pr_df.merge(add_order_df, on=['Reference'], how=\n 'inner')\n trade_3_df = trade_3_df[trade_3_df['Stock'] != '0']\n trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']\n ].set_index('Stock')\n trade_3_df = trade_3_df.rename(columns={'Shares_x': 'Shares', 'Price_x':\n 'Price'})\n trade_3_df['Product'] = trade_3_df['Price'] * trade_3_df['Shares']\n trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])\n vwap_df = trade_df.groupby(['Stock']).all().drop(columns=['Timestamp',\n 'Shares', 'Price', 'Product'])\n hour_list = [(3600000000000.0 * i) for i in [9.5, 10, 11, 12, 13, 14, \n 15, 16]]\n for hour in hour_list:\n trade_df_copy = trade_df[trade_df.Timestamp <= hour]\n trade_df_groups = trade_df_copy.groupby(['Stock'])\n trade_df_mean = trade_df_groups.mean(numeric_only=False)\n trade_df_mean['VWAP'] = trade_df_mean['Product'] / trade_df_mean[\n 'Shares']\n vwap_df = pd.merge(vwap_df, trade_df_mean['VWAP'], on=['Stock'],\n how='left')\n vwap_df.columns = ['VWAP at 09:30AM', 'VWAP at 10:00AM',\n 'VWAP at 11:00AM', 'VWAP at 12:00PM', 'VWAP at 01:00PM',\n 'VWAP at 02:00PM', 'VWAP at 03:00PM', 'VWAP at 04:00PM']\n vwap_df.to_excel('NASDAQ_VWAP_01_30_2019.xlsx')\n\n\nif __name__ == '__main__':\n itch_data = gzip.open('01302019.NASDAQ_ITCH50.gz', 'rb')\n parse_itch_data(itch_data)\n itch_data.close()\n calculate_vwap()\n",
"step-5": "#!/usr/bin/env python3\r\n# -*- coding: utf-8 -*-\r\n# ==============================================================================\r\n# Created By : Karl Thompson\r\n# Created Date: Mon March 25 17:34:00 CDT 2019\r\n# ==============================================================================\r\n\"\"\"nasdaq_itch_vwap - Generate a table of running volume-weighted average price\r\n[VWAP] for NASDAQ stocks at trading hours based on Nasdaq TotalView-ITCH 5 data.\r\nData available at: ftp://emi.nasdaq.com/ITCH/01302019.NASDAQ_ITCH50.gz\r\nIf you use this code in your work, please cite the following:\r\nKarl H. Thompson, NASDAQ-ITCH-VWAP, (2019), GitHub repository, \r\nhttps://github.com/karlhthompson/nasdaq-itch-vwap\"\"\"\r\n# ==============================================================================\r\n# Imports\r\n# ==============================================================================\r\nimport pandas as pd\r\nimport struct\r\nimport gzip\r\nimport csv\r\n\r\n# function to parse select messages in ITCH data:\r\ndef parse_itch_data(itch_data):\r\n\r\n # read the first byte of each message in the data file:\r\n msg_header = itch_data.read(1)\r\n\r\n # initialize the csv file that will store parsed Add Order and Add Order \r\n # with MPID messages:\r\n add_order_data = open('add_order_data.csv','w')\r\n add_order_wrtr = csv.writer(add_order_data)\r\n\r\n # initialize the csv file that will store parsed Order Executed messages:\r\n ord_exec_data = open('ord_exec_data.csv','w')\r\n ord_exec_wrtr = csv.writer(ord_exec_data)\r\n\r\n # initialize the csv file that will store parsed Order Executed With Price \r\n # messages:\r\n ord_exec_pr_data = open('ord_exec_pr_data.csv','w')\r\n ord_exec_pr_wrtr = csv.writer(ord_exec_pr_data)\r\n\r\n # initialize the csv file that will store parsed Trade messages:\r\n trade_data = open('trade_data.csv','w')\r\n trade_wrtr = csv.writer(trade_data)\r\n\r\n # iterate over all messages in the data file:\r\n 
while msg_header:\r\n\r\n # process Add Order and Add Order with MPID messages:\r\n if msg_header == b'A' or msg_header == b'F':\r\n message = itch_data.read(35)\r\n if len(message) < 35: break\r\n un_pkd = struct.unpack('>4s6sQcI8cI',message)\r\n re_pkd = struct.pack('>s4s2s6sQsI8sI',msg_header,un_pkd[0],\r\n b'\\x00\\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],\r\n b''.join(list(un_pkd[5:13])),un_pkd[13])\r\n parsed_ao = list(struct.unpack('>sHHQQsI8sI',re_pkd))\r\n # filter for data with valid Buy/Sell Indicators:\r\n if parsed_ao[5] == b'B' or parsed_ao[5] == b'S':\r\n # further filter for data with plausible field values:\r\n if (parsed_ao[4] < 1e14 and parsed_ao[6] < 1e8):\r\n # write the parsed message to the csv file:\r\n try:\r\n sto = parsed_ao[7].decode() # stock\r\n except:\r\n sto = '0' # Write 0 if stock byte decode fails\r\n tim = parsed_ao[3] # timestamp\r\n ref = parsed_ao[4] # order reference number\r\n sha = parsed_ao[6] # shares\r\n pri = float(parsed_ao[8])/1e4 # price\r\n add_order_wrtr.writerow([sto, tim, ref, sha, pri])\r\n\r\n # process Order Executed messages:\r\n if msg_header == b'E':\r\n message = itch_data.read(30)\r\n if len(message) < 30: break\r\n un_pkd = struct.unpack('>4s6sQIQ',message)\r\n re_pkd = struct.pack('>s4s2s6sQIQ',msg_header,un_pkd[0],b'\\x00\\x00',\r\n un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4])\r\n parsed_oe = list(struct.unpack('>sHHQQIQ',re_pkd))\r\n # filter for data with plausible field values:\r\n if (parsed_oe[4] < 1e14 and parsed_oe[5] < 1e8 and parsed_oe[6] < 1e11):\r\n # write the parsed message to the csv file:\r\n ref = parsed_oe[4] # order reference number\r\n sha = parsed_oe[5] # shares\r\n ord_exec_wrtr.writerow([ref, sha])\r\n\r\n # process Order Executed With Price messages:\r\n if msg_header == b'C':\r\n message = itch_data.read(35)\r\n if len(message) < 35: break\r\n un_pkd = struct.unpack('>4s6sQIQcI',message)\r\n re_pkd = struct.pack('>s4s2s6sQIQsI',msg_header,un_pkd[0],\r\n 
b'\\x00\\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],un_pkd[5],\r\n un_pkd[6])\r\n parsed_oewp = list(struct.unpack('>sHHQQIQsI',re_pkd))\r\n # filter for data with plausible field values:\r\n if (parsed_oewp[4] < 1e14 and parsed_oewp[5] < 1e6 and\r\n parsed_oewp[6] < 1e10 and parsed_oewp[7] == b'Y'):\r\n # write the parsed message to the csv file:\r\n ref = parsed_oewp[4] # order reference number\r\n sha = parsed_oewp[5] # shares\r\n pri = float(parsed_oewp[8])/1e4 # new price\r\n ord_exec_pr_wrtr.writerow([ref, sha, pri])\r\n\r\n # process Trade messages:\r\n if msg_header == b'P':\r\n message = itch_data.read(43)\r\n if len(message) < 43: break\r\n un_pkd = struct.unpack('>4s6sQcI8cIQ',message)\r\n re_pkd = struct.pack('>s4s2s6sQsI8sIQ',msg_header,un_pkd[0],\r\n b'\\x00\\x00',un_pkd[1],un_pkd[2],un_pkd[3],un_pkd[4],\r\n b''.join(list(un_pkd[5:13])),un_pkd[13],un_pkd[14])\r\n parsed_t = list(struct.unpack('>sHHQQsI8sIQ',re_pkd))\r\n # filter for data with valid Order Reference Numbers\r\n # and Buy/Sell Indicators:\r\n if parsed_t[4] == 0 and parsed_t[5] == b'B':\r\n # write the parsed message to the csv file:\r\n sto = parsed_t[7].decode() # stock\r\n tim = parsed_t[3] # timestamp\r\n sha = parsed_t[6] # shares\r\n pri = float(parsed_t[8])/1e4 # price\r\n pro = parsed_t[6]*float(parsed_t[8])/1e4 # product\r\n trade_wrtr.writerow([sto, tim, sha, pri, pro])\r\n\r\n # advance the file position to the next message:\r\n msg_header = itch_data.read(1)\r\n\r\n # close the csv files:\r\n add_order_data.close()\r\n ord_exec_data.close()\r\n ord_exec_pr_data.close()\r\n trade_data.close()\r\n\r\n\r\n# function to calculate the hourly VWAP based on parsed ITCH data:\r\ndef calculate_vwap():\r\n\r\n # import the parsed Add Order data into a Pandas dataframe:\r\n add_order_df = pd.read_csv('add_order_data.csv', index_col = None, \r\n names = ['Stock', 'Timestamp', 'Reference', 'Shares', 'Price'])\r\n\r\n # import the parsed Order Executed data into a Pandas dataframe:\r\n 
ord_exec_df = pd.read_csv('ord_exec_data.csv', index_col = None, \r\n names = ['Reference', 'Shares'])\r\n\r\n # import the parsed Order Executed With Price data into a Pandas dataframe:\r\n ord_exec_pr_df = pd.read_csv('ord_exec_pr_data.csv', index_col = None,\r\n names = ['Reference', 'Shares', 'Price'])\r\n\r\n # import the parsed Trade data into a Pandas dataframe:\r\n trade_1_df = pd.read_csv('trade_data.csv', index_col = 0,\r\n names=['Stock', 'Timestamp', 'Shares', 'Price', 'Product'])\r\n \r\n # merge the Order Executed data with the Add Order data to extract\r\n # the executed trades data within:\r\n trade_2_df = ord_exec_df.merge(add_order_df,on=['Reference'],how='inner')\r\n trade_2_df = trade_2_df[trade_2_df['Stock']!='0']\r\n trade_2_df = trade_2_df[['Stock', 'Timestamp', 'Shares_x', 'Price']].set_index('Stock')\r\n trade_2_df = trade_2_df.rename(columns={\"Shares_x\": \"Shares\"})\r\n trade_2_df['Product'] = trade_2_df['Price']*trade_2_df['Shares']\r\n\r\n # merge the Order Executed With Price data with the Add Order data\r\n # to extract the executed trades data within:\r\n trade_3_df = ord_exec_pr_df.merge(add_order_df,on=['Reference'],how='inner')\r\n trade_3_df = trade_3_df[trade_3_df['Stock']!='0']\r\n trade_3_df = trade_3_df[['Stock', 'Timestamp', 'Shares_x', 'Price_x']].set_index('Stock')\r\n trade_3_df = trade_3_df.rename(columns={\"Shares_x\": \"Shares\", \"Price_x\": \"Price\"})\r\n trade_3_df['Product'] = trade_3_df['Price']*trade_3_df['Shares']\r\n\r\n # concatenate all three trade dataframes (trades from Trade messages,\r\n # trades from Executed Order messages, and trades from Executed Order\r\n # With Price messages) into a comprehensive dataframe:\r\n trade_df = pd.concat([trade_1_df, trade_2_df, trade_3_df])\r\n\r\n # create a dataframe for hourly running VWAP values:\r\n vwap_df = trade_df.groupby(['Stock']).all().drop(\r\n columns=['Timestamp', 'Shares', 'Price', 'Product'])\r\n\r\n # create a list of trading hours in 
nanoseconds:\r\n hour_list = [3.6e12 * i for i in [9.5, 10, 11, 12, 13, 14, 15, 16]]\r\n\r\n # iterate over the trading hours list:\r\n for hour in hour_list:\r\n # extract data for trades that occurred before the specified hour:\r\n trade_df_copy = trade_df[trade_df.Timestamp <= hour]\r\n # group the trade dataframe by stock:\r\n trade_df_groups = trade_df_copy.groupby(['Stock'])\r\n # calculate the mean for all trade data:\r\n trade_df_mean = trade_df_groups.mean(numeric_only=False)\r\n # calculate the VWAP for all stocks:\r\n trade_df_mean['VWAP'] = trade_df_mean['Product']/trade_df_mean['Shares']\r\n # merge the calculated VWAP fields into the VWAP dataframe:\r\n vwap_df = pd.merge(vwap_df,trade_df_mean['VWAP'],on=['Stock'],how='left')\r\n\r\n # adjust the column names in the VWAP dataframe:\r\n vwap_df.columns = ['VWAP at 09:30AM','VWAP at 10:00AM','VWAP at 11:00AM',\r\n 'VWAP at 12:00PM','VWAP at 01:00PM','VWAP at 02:00PM',\r\n 'VWAP at 03:00PM', 'VWAP at 04:00PM']\r\n\r\n # save the hourly VWAP table in Excel format:\r\n vwap_df.to_excel(\"NASDAQ_VWAP_01_30_2019.xlsx\")\r\n\r\nif __name__ == '__main__':\r\n \r\n # open the ITCH data file:\r\n itch_data = gzip.open('01302019.NASDAQ_ITCH50.gz','rb')\r\n\r\n # parse the data:\r\n parse_itch_data(itch_data)\r\n\r\n # close the ITCH data file:\r\n itch_data.close()\r\n\r\n # calculate the hourly VWAP for all stocks:\r\n calculate_vwap()\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
with open(filename) as f:
numbers = json.load(f)
print(numbers)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
filename = 'numbers.json'
with open(filename) as f:
numbers = json.load(f)
print(numbers)
<|reserved_special_token_1|>
import json
filename = 'numbers.json'
with open(filename) as f:
numbers = json.load(f)
print(numbers)
<|reserved_special_token_1|>
import json
# numbers=[2,3,5,7,11,13]
filename='numbers.json'
with open(filename) as f:
numbers=json.load(f)
print(numbers)
|
flexible
|
{
"blob_id": "8da775bd87bfeab5e30956e62bcdba6c04e26b27",
"index": 6720,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(filename) as f:\n numbers = json.load(f)\nprint(numbers)\n",
"step-3": "<mask token>\nfilename = 'numbers.json'\nwith open(filename) as f:\n numbers = json.load(f)\nprint(numbers)\n",
"step-4": "import json\nfilename = 'numbers.json'\nwith open(filename) as f:\n numbers = json.load(f)\nprint(numbers)\n",
"step-5": "import json\n\n# numbers=[2,3,5,7,11,13]\n\nfilename='numbers.json'\n\nwith open(filename) as f:\n numbers=json.load(f)\n\nprint(numbers)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pytest
import sys
sys.path.insert(0, '..')
from task_05 import task5
def test_mults():
assert task5.mults(3, 5, 10) == 23
assert task5.mults(5, 3, 10) == 23
assert task5.mults(3, 2, 10) == 32
assert task5.mults(7, 8, 50) == 364
|
normal
|
{
"blob_id": "1c8622167240243da05a241e3630f79cdf36d7a8",
"index": 4776,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_mults():\n assert task5.mults(3, 5, 10) == 23\n assert task5.mults(5, 3, 10) == 23\n assert task5.mults(3, 2, 10) == 32\n assert task5.mults(7, 8, 50) == 364\n",
"step-3": "<mask token>\nsys.path.insert(0, '..')\n<mask token>\n\n\ndef test_mults():\n assert task5.mults(3, 5, 10) == 23\n assert task5.mults(5, 3, 10) == 23\n assert task5.mults(3, 2, 10) == 32\n assert task5.mults(7, 8, 50) == 364\n",
"step-4": "import pytest\nimport sys\nsys.path.insert(0, '..')\nfrom task_05 import task5\n\n\ndef test_mults():\n assert task5.mults(3, 5, 10) == 23\n assert task5.mults(5, 3, 10) == 23\n assert task5.mults(3, 2, 10) == 32\n assert task5.mults(7, 8, 50) == 364\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
/home/lidija/anaconda3/lib/python3.6/sre_constants.py
|
normal
|
{
"blob_id": "700b0b12c75fa502da984319016f6f44bc0d52cc",
"index": 5126,
"step-1": "/home/lidija/anaconda3/lib/python3.6/sre_constants.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@singlevideos.route('/')
def index():
return render_template('singlevideos/single.html')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
singlevideos = Blueprint('singlevideos', __name__, template_folder='templates')
@singlevideos.route('/')
def index():
return render_template('singlevideos/single.html')
<|reserved_special_token_1|>
from flask import Blueprint, render_template
from bashtube import cache
singlevideos = Blueprint('singlevideos', __name__, template_folder='templates')
@singlevideos.route('/')
def index():
return render_template('singlevideos/single.html')
|
flexible
|
{
"blob_id": "ee10bca1126b20378c4e9cea4d2dc7ed6a2044ab",
"index": 9187,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-3": "<mask token>\nsinglevideos = Blueprint('singlevideos', __name__, template_folder='templates')\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-4": "from flask import Blueprint, render_template\nfrom bashtube import cache\nsinglevideos = Blueprint('singlevideos', __name__, template_folder='templates')\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def sum_string(string):
list_chars = [zerone for zerone in string if zerone in ["0", "1"]]
return list_chars
def check_triads(trio, final_str):
list_occur_zero = [i for i in range(len(final_str)) if final_str.startswith(trio + '0', i)]
list_occur_one = [i for i in range(len(final_str)) if final_str.startswith(trio + '1', i)]
return [len(list_occur_zero), len(list_occur_one)]
number_str = ""
list_str = []
list_triads = ['000', '001', '010', '011', '100', '101', '110', '111']
while len(list_str) < 100:
print('Print a random string containing 0 or 1:')
number_str = input()
list_str.extend(sum_string(number_str))
if len(list_str) < 100:
print(f'Current data length is {len(list_str)}, {(100 - len(list_str))} symbols left')
print("\nFinal data string:")
final_st = ''.join(list_str)
print(f"{final_st}\n")
for tri in list_triads:
values = check_triads(tri, final_st)
print(f"{tri}: {values[0]},{values[1]}")
|
normal
|
{
"blob_id": "29304bdbf93b0b1308025db1d35a92346c6dcbe0",
"index": 3799,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_triads(trio, final_str):\n list_occur_zero = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '0', i)]\n list_occur_one = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '1', i)]\n return [len(list_occur_zero), len(list_occur_one)]\n\n\n<mask token>\n",
"step-3": "def sum_string(string):\n list_chars = [zerone for zerone in string if zerone in ['0', '1']]\n return list_chars\n\n\ndef check_triads(trio, final_str):\n list_occur_zero = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '0', i)]\n list_occur_one = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '1', i)]\n return [len(list_occur_zero), len(list_occur_one)]\n\n\n<mask token>\n",
"step-4": "def sum_string(string):\n list_chars = [zerone for zerone in string if zerone in ['0', '1']]\n return list_chars\n\n\ndef check_triads(trio, final_str):\n list_occur_zero = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '0', i)]\n list_occur_one = [i for i in range(len(final_str)) if final_str.\n startswith(trio + '1', i)]\n return [len(list_occur_zero), len(list_occur_one)]\n\n\n<mask token>\nwhile len(list_str) < 100:\n print('Print a random string containing 0 or 1:')\n number_str = input()\n list_str.extend(sum_string(number_str))\n if len(list_str) < 100:\n print(\n f'Current data length is {len(list_str)}, {100 - len(list_str)} symbols left'\n )\nprint(\"\"\"\nFinal data string:\"\"\")\n<mask token>\nprint(f'{final_st}\\n')\nfor tri in list_triads:\n values = check_triads(tri, final_st)\n print(f'{tri}: {values[0]},{values[1]}')\n",
"step-5": "def sum_string(string):\n list_chars = [zerone for zerone in string if zerone in [\"0\", \"1\"]]\n return list_chars\n\n\ndef check_triads(trio, final_str):\n list_occur_zero = [i for i in range(len(final_str)) if final_str.startswith(trio + '0', i)]\n list_occur_one = [i for i in range(len(final_str)) if final_str.startswith(trio + '1', i)]\n\n return [len(list_occur_zero), len(list_occur_one)]\n\n\nnumber_str = \"\"\nlist_str = []\nlist_triads = ['000', '001', '010', '011', '100', '101', '110', '111']\n\nwhile len(list_str) < 100:\n print('Print a random string containing 0 or 1:')\n number_str = input()\n list_str.extend(sum_string(number_str))\n\n if len(list_str) < 100:\n print(f'Current data length is {len(list_str)}, {(100 - len(list_str))} symbols left')\n\nprint(\"\\nFinal data string:\")\nfinal_st = ''.join(list_str)\nprint(f\"{final_st}\\n\")\n\nfor tri in list_triads:\n values = check_triads(tri, final_st)\n print(f\"{tri}: {values[0]},{values[1]}\")\n\n\n",
"step-ids": [
0,
1,
2,
3,
5
]
}
|
[
0,
1,
2,
3,
5
] |
# Accepted
def bubble_sort(a_list, n):
num_reverse = 0
for i in range(n):
for j in range(n - i - 1):
# With a for roop (reversed order),
# index starts -1, -2 ,...,
# NOT -0, -1, ...
if a_list[-j - 2] > a_list[-j - 1]:
tmp_elem = a_list[-j - 1]
a_list[-j - 1] = a_list[-j - 2]
a_list[-j - 2] = tmp_elem
num_reverse += 1
return a_list, num_reverse
def main():
# Input
n = int(input())
a_list = list(map(int, input().split()))
# Sort
a_list_reversed, num_reverse = bubble_sort(a_list, n)
# Output
print(" ".join(map(str, a_list_reversed)))
print(num_reverse)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "fef1273552350bfaf075d90279c9f10a965cae25",
"index": 2939,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n n = int(input())\n a_list = list(map(int, input().split()))\n a_list_reversed, num_reverse = bubble_sort(a_list, n)\n print(' '.join(map(str, a_list_reversed)))\n print(num_reverse)\n\n\n<mask token>\n",
"step-3": "def bubble_sort(a_list, n):\n num_reverse = 0\n for i in range(n):\n for j in range(n - i - 1):\n if a_list[-j - 2] > a_list[-j - 1]:\n tmp_elem = a_list[-j - 1]\n a_list[-j - 1] = a_list[-j - 2]\n a_list[-j - 2] = tmp_elem\n num_reverse += 1\n return a_list, num_reverse\n\n\ndef main():\n n = int(input())\n a_list = list(map(int, input().split()))\n a_list_reversed, num_reverse = bubble_sort(a_list, n)\n print(' '.join(map(str, a_list_reversed)))\n print(num_reverse)\n\n\n<mask token>\n",
"step-4": "def bubble_sort(a_list, n):\n num_reverse = 0\n for i in range(n):\n for j in range(n - i - 1):\n if a_list[-j - 2] > a_list[-j - 1]:\n tmp_elem = a_list[-j - 1]\n a_list[-j - 1] = a_list[-j - 2]\n a_list[-j - 2] = tmp_elem\n num_reverse += 1\n return a_list, num_reverse\n\n\ndef main():\n n = int(input())\n a_list = list(map(int, input().split()))\n a_list_reversed, num_reverse = bubble_sort(a_list, n)\n print(' '.join(map(str, a_list_reversed)))\n print(num_reverse)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# Accepted\ndef bubble_sort(a_list, n):\n num_reverse = 0\n for i in range(n):\n for j in range(n - i - 1):\n # With a for roop (reversed order), \n # index starts -1, -2 ,...,\n # NOT -0, -1, ...\n if a_list[-j - 2] > a_list[-j - 1]:\n tmp_elem = a_list[-j - 1]\n a_list[-j - 1] = a_list[-j - 2]\n a_list[-j - 2] = tmp_elem\n num_reverse += 1\n return a_list, num_reverse\n\n\ndef main():\n # Input\n n = int(input())\n a_list = list(map(int, input().split()))\n # Sort\n a_list_reversed, num_reverse = bubble_sort(a_list, n)\n # Output\n print(\" \".join(map(str, a_list_reversed)))\n print(num_reverse)\n\n\nif __name__ == '__main__':\n main()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('D0')
print(D0)
print('D1')
print(D1)
print('D2')
print(D2)
print('D3')
print(D3)
<|reserved_special_token_0|>
print('D2')
print(D2)
<|reserved_special_token_0|>
print('D3')
print(D3)
<|reserved_special_token_0|>
print('*** the final sum result is: ')
print(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
D0 = np.array(2)
D1 = np.array([3, 4, 5, 6, 7])
D2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])
D3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8,
2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])
print('D0')
print(D0)
print('D1')
print(D1)
print('D2')
print(D2)
print('D3')
print(D3)
<|reserved_special_token_0|>
D2[0, 0] = 100
print('D2')
print(D2)
D3[1, 0, 1] = 100
D3[1, 2, 0] = 100
print('D3')
print(D3)
<|reserved_special_token_0|>
print('*** the final sum result is: ')
print(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])
<|reserved_special_token_1|>
import numpy as np
<|reserved_special_token_0|>
D0 = np.array(2)
D1 = np.array([3, 4, 5, 6, 7])
D2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])
D3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8,
2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])
print('D0')
print(D0)
print('D1')
print(D1)
print('D2')
print(D2)
print('D3')
print(D3)
<|reserved_special_token_0|>
D2[0, 0] = 100
print('D2')
print(D2)
D3[1, 0, 1] = 100
D3[1, 2, 0] = 100
print('D3')
print(D3)
<|reserved_special_token_0|>
print('*** the final sum result is: ')
print(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])
<|reserved_special_token_1|>
import numpy as np
'''
1. Create 0-D array, 1-D array, 2-D array, 3-D array with following value
0-D: [2]
1-D: [3, 4, 5, 6, 7]
2-D: [[8, 1, 3], [2, 3, 4], [6, 2, 5]]
3-D: [[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8, 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]]
print them
'''
D0 = np.array(2)
D1 = np.array([3, 4, 5, 6, 7])
D2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])
D3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8, 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])
print('D0')
print(D0)
print('D1')
print(D1)
print('D2')
print(D2)
print('D3')
print(D3)
'''
2. Use index to change all value 8 to 100 in 4 arrays
array[index1, index2] = newValue
for example: 2-D array should be changed as : [[100, 1, 3], [2, 3, 4], [6, 2, 5]]
print them
'''
D2[0, 0] = 100
print('D2')
print(D2)
D3[1, 0, 1] = 100
D3[1, 2, 0] = 100
print('D3')
print(D3)
'''
3. Print the sum of all following values
a. the value of 0-D array
b. the middle of 1-D array
c. the center of 2-D array
d. the center of 3-D array ( the center of middle 2-D array )
* The value should be 11
'''
print('*** the final sum result is: ')
print(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])
|
flexible
|
{
"blob_id": "a868ecb6ea6a5c7a186ddd8fa4fb76d96efeb21d",
"index": 4140,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('D0')\nprint(D0)\nprint('D1')\nprint(D1)\nprint('D2')\nprint(D2)\nprint('D3')\nprint(D3)\n<mask token>\nprint('D2')\nprint(D2)\n<mask token>\nprint('D3')\nprint(D3)\n<mask token>\nprint('*** the final sum result is: ')\nprint(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])\n",
"step-3": "<mask token>\nD0 = np.array(2)\nD1 = np.array([3, 4, 5, 6, 7])\nD2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])\nD3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8,\n 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])\nprint('D0')\nprint(D0)\nprint('D1')\nprint(D1)\nprint('D2')\nprint(D2)\nprint('D3')\nprint(D3)\n<mask token>\nD2[0, 0] = 100\nprint('D2')\nprint(D2)\nD3[1, 0, 1] = 100\nD3[1, 2, 0] = 100\nprint('D3')\nprint(D3)\n<mask token>\nprint('*** the final sum result is: ')\nprint(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])\n",
"step-4": "import numpy as np\n<mask token>\nD0 = np.array(2)\nD1 = np.array([3, 4, 5, 6, 7])\nD2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])\nD3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8,\n 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])\nprint('D0')\nprint(D0)\nprint('D1')\nprint(D1)\nprint('D2')\nprint(D2)\nprint('D3')\nprint(D3)\n<mask token>\nD2[0, 0] = 100\nprint('D2')\nprint(D2)\nD3[1, 0, 1] = 100\nD3[1, 2, 0] = 100\nprint('D3')\nprint(D3)\n<mask token>\nprint('*** the final sum result is: ')\nprint(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])\n",
"step-5": "import numpy as np\r\n\r\n'''\r\n1. Create 0-D array, 1-D array, 2-D array, 3-D array with following value\r\n\r\n\t0-D: [2]\r\n\t1-D: [3, 4, 5, 6, 7]\r\n\t2-D: [[8, 1, 3], [2, 3, 4], [6, 2, 5]]\r\n\t3-D: [[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8, 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]]\r\n\r\n\tprint them\r\n'''\r\nD0 = np.array(2)\r\nD1 = np.array([3, 4, 5, 6, 7])\r\nD2 = np.array([[8, 1, 3], [2, 3, 4], [6, 2, 5]])\r\nD3 = np.array([[[1, 2, 4], [3, 3, 2], [1, 9, 1]], [[6, 8, 7], [9, 1, 0], [8, 2, 3]], [[5, 4, 1], [5, 7, 2], [3, 5, 9]]])\r\nprint('D0')\r\nprint(D0)\r\nprint('D1')\r\nprint(D1)\r\nprint('D2')\r\nprint(D2)\r\nprint('D3')\r\nprint(D3)\r\n\r\n'''\r\n2. Use index to change all value 8 to 100 in 4 arrays\r\n\r\n\tarray[index1, index2] = newValue\r\n for example: 2-D array should be changed as : [[100, 1, 3], [2, 3, 4], [6, 2, 5]]\r\n\r\n\tprint them\r\n'''\r\nD2[0, 0] = 100\r\nprint('D2')\r\nprint(D2)\r\nD3[1, 0, 1] = 100\r\nD3[1, 2, 0] = 100\r\nprint('D3')\r\nprint(D3)\r\n'''\r\n3. Print the sum of all following values\r\n\r\n\ta. the value of 0-D array\r\n\tb. the middle of 1-D array\r\n\tc. the center of 2-D array\r\n\td. the center of 3-D array ( the center of middle 2-D array )\r\n\r\n\t* The value should be 11\r\n'''\r\nprint('*** the final sum result is: ')\r\nprint(D0 + D1[2] + D2[1, 1] + D3[1, 1, 1])",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SaleAdvancePaymentInv(models.TransientModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = 'sale.advance.payment.inv'
date_start_invoice_timesheet = fields.Date(string='Start Date', help=
'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'
, required=True)
date_end_invoice_timesheet = fields.Date(string='End Date', help=
'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'
, required=True)
<|reserved_special_token_1|>
from odoo import models, fields, api, _
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = 'sale.advance.payment.inv'
date_start_invoice_timesheet = fields.Date(string='Start Date', help=
'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'
, required=True)
date_end_invoice_timesheet = fields.Date(string='End Date', help=
'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'
, required=True)
<|reserved_special_token_1|>
from odoo import models, fields, api, _
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
date_start_invoice_timesheet = fields.Date(
string='Start Date',
help="Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. "
"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will "
"be invoiced without distinction.", required=True)
date_end_invoice_timesheet = fields.Date(
string='End Date',
help="Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. "
"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will "
"be invoiced without distinction.", required=True)
|
flexible
|
{
"blob_id": "75b1674066958a8fa28e74121a35d688bcc473d9",
"index": 9743,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = 'sale.advance.payment.inv'\n date_start_invoice_timesheet = fields.Date(string='Start Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n date_end_invoice_timesheet = fields.Date(string='End Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n",
"step-4": "from odoo import models, fields, api, _\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = 'sale.advance.payment.inv'\n date_start_invoice_timesheet = fields.Date(string='Start Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n date_end_invoice_timesheet = fields.Date(string='End Date', help=\n 'Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will be invoiced without distinction.'\n , required=True)\n",
"step-5": "from odoo import models, fields, api, _\n\n\nclass SaleAdvancePaymentInv(models.TransientModel):\n _inherit = \"sale.advance.payment.inv\"\n\n date_start_invoice_timesheet = fields.Date(\n string='Start Date',\n help=\"Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. \"\n \"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will \"\n \"be invoiced without distinction.\", required=True)\n date_end_invoice_timesheet = fields.Date(\n string='End Date',\n help=\"Only timesheets not yet invoiced (and validated, if applicable) from this period will be invoiced. \"\n \"If the period is not indicated, all timesheets not yet invoiced (and validated, if applicable) will \"\n \"be invoiced without distinction.\", required=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Solution:
def levelOrder(self, root):
if root is None:
return []
currentList = [root]
nextList = []
solution = []
while currentList:
thisLevel = [node.val for node in currentList]
solution.append(thisLevel)
for node in currentList:
if node.left is not None:
nextList.append(node.left)
if node.right is not None:
nextList.append(node.right)
currentList, nextList = nextList, currentList
del nextList[:]
return solution
|
flexible
|
{
"blob_id": "d9f176262dcaf055414fbc43b476117250249b63",
"index": 4696,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def levelOrder(self, root):\n if root is None:\n return []\n currentList = [root]\n nextList = []\n solution = []\n while currentList:\n thisLevel = [node.val for node in currentList]\n solution.append(thisLevel)\n for node in currentList:\n if node.left is not None:\n nextList.append(node.left)\n if node.right is not None:\n nextList.append(node.right)\n currentList, nextList = nextList, currentList\n del nextList[:]\n return solution\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import typ
@typ.typ(items=[int])
def gnome_sort(items):
"""
>>> gnome_sort([])
[]
>>> gnome_sort([1])
[1]
>>> gnome_sort([2,1])
[1, 2]
>>> gnome_sort([1,2])
[1, 2]
>>> gnome_sort([1,2,2])
[1, 2, 2]
"""
i = 0
n = len(items)
while i < n:
if i and items[i] < items[i - 1]:
items[i], items[i - 1] = items[i - 1], items[i]
i -= 1
else:
i += 1
return items
|
normal
|
{
"blob_id": "70aba6c94b7050113adf7ae48bd4e13aa9a34587",
"index": 1023,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@typ.typ(items=[int])\ndef gnome_sort(items):\n \"\"\"\n >>> gnome_sort([])\n []\n >>> gnome_sort([1])\n [1]\n >>> gnome_sort([2,1])\n [1, 2]\n >>> gnome_sort([1,2])\n [1, 2]\n >>> gnome_sort([1,2,2])\n [1, 2, 2]\n \"\"\"\n i = 0\n n = len(items)\n while i < n:\n if i and items[i] < items[i - 1]:\n items[i], items[i - 1] = items[i - 1], items[i]\n i -= 1\n else:\n i += 1\n return items\n",
"step-3": "import typ\n\n\n@typ.typ(items=[int])\ndef gnome_sort(items):\n \"\"\"\n >>> gnome_sort([])\n []\n >>> gnome_sort([1])\n [1]\n >>> gnome_sort([2,1])\n [1, 2]\n >>> gnome_sort([1,2])\n [1, 2]\n >>> gnome_sort([1,2,2])\n [1, 2, 2]\n \"\"\"\n i = 0\n n = len(items)\n while i < n:\n if i and items[i] < items[i - 1]:\n items[i], items[i - 1] = items[i - 1], items[i]\n i -= 1\n else:\n i += 1\n return items\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def main():
a, b = map(int, input().split())
diff = abs(max(b, a) - min(a, b))
if diff % 2 != 0:
print('IMPOSSIBLE')
else:
bigger = max(a, b)
ans = bigger - diff // 2
print(ans)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def main():
a, b = map(int, input().split())
diff = abs(max(b, a) - min(a, b))
if diff % 2 != 0:
print('IMPOSSIBLE')
else:
bigger = max(a, b)
ans = bigger - diff // 2
print(ans)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
def main():
a, b = map(int, input().split())
diff = abs(max(b, a) - min(a, b))
if diff % 2 != 0:
print("IMPOSSIBLE")
else:
bigger = max(a, b)
ans = bigger - (diff//2)
print(ans)
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "f73cbc25152a63bb6552e2cd8272c67a1f4277ba",
"index": 9044,
"step-1": "<mask token>\n",
"step-2": "def main():\n a, b = map(int, input().split())\n diff = abs(max(b, a) - min(a, b))\n if diff % 2 != 0:\n print('IMPOSSIBLE')\n else:\n bigger = max(a, b)\n ans = bigger - diff // 2\n print(ans)\n\n\n<mask token>\n",
"step-3": "def main():\n a, b = map(int, input().split())\n diff = abs(max(b, a) - min(a, b))\n if diff % 2 != 0:\n print('IMPOSSIBLE')\n else:\n bigger = max(a, b)\n ans = bigger - diff // 2\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "def main():\n a, b = map(int, input().split())\n diff = abs(max(b, a) - min(a, b))\n if diff % 2 != 0:\n print(\"IMPOSSIBLE\")\n else:\n bigger = max(a, b)\n ans = bigger - (diff//2)\n print(ans)\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class RedshiftClusterSubnetGroup(resource.BaseResource):
<|reserved_special_token_0|>
def __init__(self, cmd_prefix):
super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)
self.cmd_prefix = cmd_prefix
self.name = 'pkb-' + FLAGS.run_uri
self.subnet_id = ''
def _Create(self):
cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',
'--cluster-subnet-group-name', self.name, '--description',
'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),
'--subnet-ids', self.subnet_id]
vm_util.IssueCommand(cmd)
def _Delete(self):
"""Delete a redshift cluster subnet group."""
cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',
'--cluster-subnet-group-name', self.name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class RedshiftClusterSubnetGroup(resource.BaseResource):
"""Cluster Subnet Group associated with a Redshift cluster launched in a vpc.
A cluster subnet group allows you to specify a set of subnets in your VPC.
Attributes:
name: A string name of the cluster subnet group.
subnet_id: A string name of the subnet id associated with the group.
"""
def __init__(self, cmd_prefix):
super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)
self.cmd_prefix = cmd_prefix
self.name = 'pkb-' + FLAGS.run_uri
self.subnet_id = ''
def _Create(self):
cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',
'--cluster-subnet-group-name', self.name, '--description',
'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),
'--subnet-ids', self.subnet_id]
vm_util.IssueCommand(cmd)
def _Delete(self):
"""Delete a redshift cluster subnet group."""
cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',
'--cluster-subnet-group-name', self.name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
FLAGS = flags.FLAGS
class RedshiftClusterSubnetGroup(resource.BaseResource):
"""Cluster Subnet Group associated with a Redshift cluster launched in a vpc.
A cluster subnet group allows you to specify a set of subnets in your VPC.
Attributes:
name: A string name of the cluster subnet group.
subnet_id: A string name of the subnet id associated with the group.
"""
def __init__(self, cmd_prefix):
super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)
self.cmd_prefix = cmd_prefix
self.name = 'pkb-' + FLAGS.run_uri
self.subnet_id = ''
def _Create(self):
cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',
'--cluster-subnet-group-name', self.name, '--description',
'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),
'--subnet-ids', self.subnet_id]
vm_util.IssueCommand(cmd)
def _Delete(self):
"""Delete a redshift cluster subnet group."""
cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',
'--cluster-subnet-group-name', self.name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from absl import flags
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
FLAGS = flags.FLAGS
class RedshiftClusterSubnetGroup(resource.BaseResource):
"""Cluster Subnet Group associated with a Redshift cluster launched in a vpc.
A cluster subnet group allows you to specify a set of subnets in your VPC.
Attributes:
name: A string name of the cluster subnet group.
subnet_id: A string name of the subnet id associated with the group.
"""
def __init__(self, cmd_prefix):
super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)
self.cmd_prefix = cmd_prefix
self.name = 'pkb-' + FLAGS.run_uri
self.subnet_id = ''
def _Create(self):
cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',
'--cluster-subnet-group-name', self.name, '--description',
'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),
'--subnet-ids', self.subnet_id]
vm_util.IssueCommand(cmd)
def _Delete(self):
"""Delete a redshift cluster subnet group."""
cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',
'--cluster-subnet-group-name', self.name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
<|reserved_special_token_1|>
# Copyright 2019 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing class for AWS's Redshift Cluster Subnet Group."""
from absl import flags
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
FLAGS = flags.FLAGS
class RedshiftClusterSubnetGroup(resource.BaseResource):
  """Cluster Subnet Group associated with a Redshift cluster launched in a vpc.

  A cluster subnet group allows you to specify a set of subnets in your VPC.

  Attributes:
    name: A string name of the cluster subnet group.
    subnet_id: A string name of the subnet id associated with the group.
  """

  def __init__(self, cmd_prefix):
    # Python 3 zero-argument super(); the file already targets Python 3
    # (absl flags, str.format). user_managed=False: PKB owns the lifecycle.
    super().__init__(user_managed=False)
    self.cmd_prefix = cmd_prefix
    self.name = 'pkb-' + FLAGS.run_uri
    # Populated by the caller before _Create is invoked.
    self.subnet_id = ''

  def _Create(self):
    """Create the cluster subnet group via the AWS CLI."""
    cmd = self.cmd_prefix + [
        'redshift', 'create-cluster-subnet-group',
        '--cluster-subnet-group-name', self.name, '--description',
        'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),
        '--subnet-ids', self.subnet_id
    ]
    vm_util.IssueCommand(cmd)

  def _Delete(self):
    """Delete a redshift cluster subnet group."""
    cmd = self.cmd_prefix + [
        'redshift', 'delete-cluster-subnet-group',
        '--cluster-subnet-group-name', self.name
    ]
    # Best effort: the group may already have been removed.
    vm_util.IssueCommand(cmd, raise_on_failure=False)
|
flexible
|
{
"blob_id": "9cebce7f97a1848885883692cd0f494cce6bae7f",
"index": 5263,
"step-1": "<mask token>\n\n\nclass RedshiftClusterSubnetGroup(resource.BaseResource):\n <mask token>\n\n def __init__(self, cmd_prefix):\n super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)\n self.cmd_prefix = cmd_prefix\n self.name = 'pkb-' + FLAGS.run_uri\n self.subnet_id = ''\n\n def _Create(self):\n cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name, '--description',\n 'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),\n '--subnet-ids', self.subnet_id]\n vm_util.IssueCommand(cmd)\n\n def _Delete(self):\n \"\"\"Delete a redshift cluster subnet group.\"\"\"\n cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n",
"step-2": "<mask token>\n\n\nclass RedshiftClusterSubnetGroup(resource.BaseResource):\n \"\"\"Cluster Subnet Group associated with a Redshift cluster launched in a vpc.\n\n A cluster subnet group allows you to specify a set of subnets in your VPC.\n\n\n Attributes:\n name: A string name of the cluster subnet group.\n subnet_id: A string name of the subnet id associated with the group.\n \"\"\"\n\n def __init__(self, cmd_prefix):\n super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)\n self.cmd_prefix = cmd_prefix\n self.name = 'pkb-' + FLAGS.run_uri\n self.subnet_id = ''\n\n def _Create(self):\n cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name, '--description',\n 'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),\n '--subnet-ids', self.subnet_id]\n vm_util.IssueCommand(cmd)\n\n def _Delete(self):\n \"\"\"Delete a redshift cluster subnet group.\"\"\"\n cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n",
"step-3": "<mask token>\nFLAGS = flags.FLAGS\n\n\nclass RedshiftClusterSubnetGroup(resource.BaseResource):\n \"\"\"Cluster Subnet Group associated with a Redshift cluster launched in a vpc.\n\n A cluster subnet group allows you to specify a set of subnets in your VPC.\n\n\n Attributes:\n name: A string name of the cluster subnet group.\n subnet_id: A string name of the subnet id associated with the group.\n \"\"\"\n\n def __init__(self, cmd_prefix):\n super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)\n self.cmd_prefix = cmd_prefix\n self.name = 'pkb-' + FLAGS.run_uri\n self.subnet_id = ''\n\n def _Create(self):\n cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name, '--description',\n 'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),\n '--subnet-ids', self.subnet_id]\n vm_util.IssueCommand(cmd)\n\n def _Delete(self):\n \"\"\"Delete a redshift cluster subnet group.\"\"\"\n cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n",
"step-4": "<mask token>\nfrom absl import flags\nfrom perfkitbenchmarker import resource\nfrom perfkitbenchmarker import vm_util\nFLAGS = flags.FLAGS\n\n\nclass RedshiftClusterSubnetGroup(resource.BaseResource):\n \"\"\"Cluster Subnet Group associated with a Redshift cluster launched in a vpc.\n\n A cluster subnet group allows you to specify a set of subnets in your VPC.\n\n\n Attributes:\n name: A string name of the cluster subnet group.\n subnet_id: A string name of the subnet id associated with the group.\n \"\"\"\n\n def __init__(self, cmd_prefix):\n super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)\n self.cmd_prefix = cmd_prefix\n self.name = 'pkb-' + FLAGS.run_uri\n self.subnet_id = ''\n\n def _Create(self):\n cmd = self.cmd_prefix + ['redshift', 'create-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name, '--description',\n 'Cluster Subnet Group for run uri {}'.format(FLAGS.run_uri),\n '--subnet-ids', self.subnet_id]\n vm_util.IssueCommand(cmd)\n\n def _Delete(self):\n \"\"\"Delete a redshift cluster subnet group.\"\"\"\n cmd = self.cmd_prefix + ['redshift', 'delete-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n",
"step-5": "# Copyright 2019 PerfKitBenchmarker Authors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Module containing class for AWS's Redshift Cluster Subnet Group.\"\"\"\n\nfrom absl import flags\nfrom perfkitbenchmarker import resource\nfrom perfkitbenchmarker import vm_util\n\nFLAGS = flags.FLAGS\n\n\nclass RedshiftClusterSubnetGroup(resource.BaseResource):\n \"\"\"Cluster Subnet Group associated with a Redshift cluster launched in a vpc.\n\n A cluster subnet group allows you to specify a set of subnets in your VPC.\n\n\n Attributes:\n name: A string name of the cluster subnet group.\n subnet_id: A string name of the subnet id associated with the group.\n \"\"\"\n\n def __init__(self, cmd_prefix):\n super(RedshiftClusterSubnetGroup, self).__init__(user_managed=False)\n self.cmd_prefix = cmd_prefix\n self.name = 'pkb-' + FLAGS.run_uri\n self.subnet_id = ''\n\n def _Create(self):\n cmd = self.cmd_prefix + [\n 'redshift', 'create-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name, '--description',\n 'Cluster Subnet Group for run uri {}'.format(\n FLAGS.run_uri), '--subnet-ids', self.subnet_id\n ]\n vm_util.IssueCommand(cmd)\n\n def _Delete(self):\n \"\"\"Delete a redshift cluster subnet group.\"\"\"\n cmd = self.cmd_prefix + [\n 'redshift', 'delete-cluster-subnet-group',\n '--cluster-subnet-group-name', self.name\n ]\n vm_util.IssueCommand(cmd, raise_on_failure=False)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import pandas as pd
import json
import spacy
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import NMF

# spaCy model kept for parity with the original pipeline; the topic model
# below relies only on TF-IDF features.
nlp = spacy.load('en_core_web_sm')
list_data = []
list_data_only_reviews = []
list_data_reviewerid = []
result = []
l = []

# One JSON object per line; close the handle deterministically (the
# original leaked the file object).
with open('Automotive_5.json', 'r') as source:
    for line in source:
        list_data.append(json.loads(line))
for item in list_data:
    list_data_only_reviews.append(item['reviewText'])
    list_data_reviewerid.append(item['reviewerID'])

# Persist the raw review texts; the context manager guarantees the CSV is
# flushed and closed before it is re-read below.
with open('review_file.csv', 'w+') as file:
    df = pd.DataFrame(list_data_only_reviews, columns=['Reviews'])
    df.to_csv(file, index=False)

npr = pd.read_csv('review_file.csv')
tfidf = TfidfVectorizer(max_df=0.8, min_df=5, stop_words='english')
dtm = tfidf.fit_transform(npr['Reviews'].values.astype('U'))
nmf_model = NMF(n_components=20, random_state=50)
nmf_model.fit(dtm)

# Hoist the vocabulary lookup: get_feature_names() rebuilds the whole
# feature list on every call, and the original invoked it once per
# candidate word of every topic.
# NOTE: renamed to get_feature_names_out() in scikit-learn >= 1.0 and
# removed in 1.2 -- update this call if the sklearn version is bumped.
feature_names = tfidf.get_feature_names()

# For each topic, print the 30 highest-weight words longer than 5 chars.
for i, topic in enumerate(nmf_model.components_):
    print(f"THE TOP 30 WORDS FOR TOPIC #{i}")
    print([feature_names[j] for j in topic.argsort()[-30:]
           if len(feature_names[j]) > 5])
    print('\n')

# Assign each document to its highest-probability topic.
topic_results = nmf_model.transform(dtm)
npr['Topic'] = topic_results.argmax(axis=1)

topic_label = {0:'plastic', 1:'winter batteries', 2:'engines', 3:'liquid', 4:'wind', 5:'shipping', 6:'light',
               7:'quality', 8:'instructions', 9:'worked', 10:'rubber', 11:'cleaning', 12:'pressure', 13:'washing',
               14:'recommendation', 15:'advertise', 16:'bucket', 17:'camp', 18:'brush', 19:'travel'}
npr['Topic Label'] = npr['Topic'].map(topic_label)

# Replace the review-text column with reviewer ids for the exported mapping.
npr = npr.assign(Reviews=list_data_reviewerid)
npr.to_csv('classified_output.csv')
|
normal
|
{
"blob_id": "43b519d7db2e46a0bf9317eddac1f5cf6b7b79e3",
"index": 6417,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in open('Automotive_5.json', 'r'):\n list_data.append(json.loads(line))\nfor item in list_data:\n list_data_only_reviews.append(item['reviewText'])\n list_data_reviewerid.append(item['reviewerID'])\n<mask token>\nwith file:\n df = pd.DataFrame(list_data_only_reviews, columns=['Reviews'])\n df.to_csv(file, index=False)\n<mask token>\nnmf_model.fit(dtm)\nfor i, topic in enumerate(nmf_model.components_):\n print(f'THE TOP 30 WORDS FOR TOPIC #{i}')\n print([tfidf.get_feature_names()[i] for i in topic.argsort()[-30:] if \n len(tfidf.get_feature_names()[i]) > 5])\n print('\\n')\n<mask token>\nnpr.to_csv('classified_output.csv')\n",
"step-3": "<mask token>\nnlp = spacy.load('en_core_web_sm')\nlist_data = []\nlist_data_only_reviews = []\nlist_data_reviewerid = []\nresult = []\nl = []\nfor line in open('Automotive_5.json', 'r'):\n list_data.append(json.loads(line))\nfor item in list_data:\n list_data_only_reviews.append(item['reviewText'])\n list_data_reviewerid.append(item['reviewerID'])\nfile = open('review_file.csv', 'w+')\nwith file:\n df = pd.DataFrame(list_data_only_reviews, columns=['Reviews'])\n df.to_csv(file, index=False)\nnpr = pd.read_csv('review_file.csv')\ntfidf = TfidfVectorizer(max_df=0.8, min_df=5, stop_words='english')\ndtm = tfidf.fit_transform(npr['Reviews'].values.astype('U'))\nnmf_model = NMF(n_components=20, random_state=50)\nnmf_model.fit(dtm)\nfor i, topic in enumerate(nmf_model.components_):\n print(f'THE TOP 30 WORDS FOR TOPIC #{i}')\n print([tfidf.get_feature_names()[i] for i in topic.argsort()[-30:] if \n len(tfidf.get_feature_names()[i]) > 5])\n print('\\n')\ntopic_results = nmf_model.transform(dtm)\nnpr['Topic'] = topic_results.argmax(axis=1)\ntopic_label = {(0): 'plastic', (1): 'winter batteries', (2): 'engines', (3):\n 'liquid', (4): 'wind', (5): 'shipping', (6): 'light', (7): 'quality', (\n 8): 'instructions', (9): 'worked', (10): 'rubber', (11): 'cleaning', (\n 12): 'pressure', (13): 'washing', (14): 'recommendation', (15):\n 'advertise', (16): 'bucket', (17): 'camp', (18): 'brush', (19): 'travel'}\nnpr['Topic Label'] = npr['Topic'].map(topic_label)\nnpr = npr.assign(Reviews=list_data_reviewerid)\nnpr.to_csv('classified_output.csv')\n",
"step-4": "import pandas as pd\nimport json\nimport spacy\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.decomposition import NMF\nnlp = spacy.load('en_core_web_sm')\nlist_data = []\nlist_data_only_reviews = []\nlist_data_reviewerid = []\nresult = []\nl = []\nfor line in open('Automotive_5.json', 'r'):\n list_data.append(json.loads(line))\nfor item in list_data:\n list_data_only_reviews.append(item['reviewText'])\n list_data_reviewerid.append(item['reviewerID'])\nfile = open('review_file.csv', 'w+')\nwith file:\n df = pd.DataFrame(list_data_only_reviews, columns=['Reviews'])\n df.to_csv(file, index=False)\nnpr = pd.read_csv('review_file.csv')\ntfidf = TfidfVectorizer(max_df=0.8, min_df=5, stop_words='english')\ndtm = tfidf.fit_transform(npr['Reviews'].values.astype('U'))\nnmf_model = NMF(n_components=20, random_state=50)\nnmf_model.fit(dtm)\nfor i, topic in enumerate(nmf_model.components_):\n print(f'THE TOP 30 WORDS FOR TOPIC #{i}')\n print([tfidf.get_feature_names()[i] for i in topic.argsort()[-30:] if \n len(tfidf.get_feature_names()[i]) > 5])\n print('\\n')\ntopic_results = nmf_model.transform(dtm)\nnpr['Topic'] = topic_results.argmax(axis=1)\ntopic_label = {(0): 'plastic', (1): 'winter batteries', (2): 'engines', (3):\n 'liquid', (4): 'wind', (5): 'shipping', (6): 'light', (7): 'quality', (\n 8): 'instructions', (9): 'worked', (10): 'rubber', (11): 'cleaning', (\n 12): 'pressure', (13): 'washing', (14): 'recommendation', (15):\n 'advertise', (16): 'bucket', (17): 'camp', (18): 'brush', (19): 'travel'}\nnpr['Topic Label'] = npr['Topic'].map(topic_label)\nnpr = npr.assign(Reviews=list_data_reviewerid)\nnpr.to_csv('classified_output.csv')\n",
"step-5": "import pandas as pd\nimport json\nimport spacy\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.decomposition import NMF\n\n\n\nnlp = spacy.load('en_core_web_sm')\nlist_data = []\nlist_data_only_reviews = []\nlist_data_reviewerid = []\nresult = []\nl = []\n\nfor line in open('Automotive_5.json', 'r'):\n list_data.append(json.loads(line))\n\nfor item in list_data:\n list_data_only_reviews.append(item['reviewText'])\n list_data_reviewerid.append(item['reviewerID'])\n \n\n# opening the csv file in 'w+' mode \nfile = open('review_file.csv', 'w+') \n \n# writing the data into the file \nwith file: \n df = pd.DataFrame(list_data_only_reviews, columns=['Reviews'])\n df.to_csv(file,index=False)\n\nnpr = pd.read_csv('review_file.csv')\n\n\ntfidf = TfidfVectorizer(max_df=0.8,min_df=5,stop_words='english')\n\ndtm = tfidf.fit_transform(npr['Reviews'].values.astype('U'))\n\nnmf_model = NMF(n_components=20,random_state=50)\nnmf_model.fit(dtm)\n\n#returns index positions that sort the array\n#checking which word in the topic has high probability\nfor i,topic in enumerate(nmf_model.components_):\n print(f\"THE TOP 30 WORDS FOR TOPIC #{i}\")\n print([tfidf.get_feature_names()[i] for i in topic.argsort()[-30:] if len(tfidf.get_feature_names()[i]) > 5])\n print('\\n')\n\n#probability of a document belonging to a topic\ntopic_results = nmf_model.transform(dtm)\n\n\nnpr['Topic'] = topic_results.argmax(axis=1)\n\ntopic_label = {0:'plastic', 1:'winter batteries', 2:'engines', 3:'liquid', 4:'wind', 5:'shipping', 6:'light',\n 7:'quality', 8:'instructions', 9:'worked', 10:'rubber', 11:'cleaning', 12:'pressure', 13:'washing',\n 14:'recommendation', 15:'advertise', 16:'bucket', 17:'camp', 18:'brush', 19:'travel'}\nnpr['Topic Label'] = npr['Topic'].map(topic_label)\n\nnpr = npr.assign(Reviews=list_data_reviewerid)\n\nnpr.to_csv('classified_output.csv')\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ServerProfileLearning(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def process_distance(self, streaming_data):
t0 = time.time()
cluster_name = self.hostname + '_general'
t = self.server_profile[cluster_name]
anomaly, max_spread, min_spread, d, date, threshold, quant = (t.
compute_distance_profile(streaming_data, self.distribution,
self.measures, self.train_mode, self.verbose))
return anomaly, max_spread, min_spread, d, date, threshold, quant
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ServerProfileLearning(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def preprocess_data(self, data):
data_prep = data.drop(self.data.columns[1:len(self.data.columns) -
1], axis=1)
data_prep = data_prep.groupby(['label'])
return data_prep
<|reserved_special_token_0|>
def process_distance(self, streaming_data):
t0 = time.time()
cluster_name = self.hostname + '_general'
t = self.server_profile[cluster_name]
anomaly, max_spread, min_spread, d, date, threshold, quant = (t.
compute_distance_profile(streaming_data, self.distribution,
self.measures, self.train_mode, self.verbose))
return anomaly, max_spread, min_spread, d, date, threshold, quant
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ServerProfileLearning(object):
<|reserved_special_token_0|>
def initdict(self):
d = defaultdict(dict)
for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):
d[i] = {}
d[i]['Area_Difference'] = []
d[i]['Max_Spread'] = []
return d
def preprocess_data(self, data):
data_prep = data.drop(self.data.columns[1:len(self.data.columns) -
1], axis=1)
data_prep = data_prep.groupby(['label'])
return data_prep
<|reserved_special_token_0|>
def process_distance(self, streaming_data):
t0 = time.time()
cluster_name = self.hostname + '_general'
t = self.server_profile[cluster_name]
anomaly, max_spread, min_spread, d, date, threshold, quant = (t.
compute_distance_profile(streaming_data, self.distribution,
self.measures, self.train_mode, self.verbose))
return anomaly, max_spread, min_spread, d, date, threshold, quant
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ServerProfileLearning(object):
<|reserved_special_token_0|>
def initdict(self):
d = defaultdict(dict)
for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):
d[i] = {}
d[i]['Area_Difference'] = []
d[i]['Max_Spread'] = []
return d
def preprocess_data(self, data):
data_prep = data.drop(self.data.columns[1:len(self.data.columns) -
1], axis=1)
data_prep = data_prep.groupby(['label'])
return data_prep
def set_profile(self):
t0 = time.time()
t = tsl.TimesSeriesLearning(self.parameters[0, :], self.
distribution_period, self.level_threshold, self.
timestamp_anomaly, self.processus)
t.set_profile(self.data)
self.server_profile[self.hostname + '_general'] = t
print('Learning Server' + self.hostname + ' Done in ' + str(time.
time() - t0))
def process_distance(self, streaming_data):
t0 = time.time()
cluster_name = self.hostname + '_general'
t = self.server_profile[cluster_name]
anomaly, max_spread, min_spread, d, date, threshold, quant = (t.
compute_distance_profile(streaming_data, self.distribution,
self.measures, self.train_mode, self.verbose))
return anomaly, max_spread, min_spread, d, date, threshold, quant
<|reserved_special_token_1|>
import times_series_learning as tsl
import numpy as np
import time
import datetime as dt
import sortedcontainers
import pandas as pd
from collections import defaultdict
class ServerProfileLearning(object):
    """Per-server activity profile used to score streaming data for anomalies.

    Profile fitting and distance computation are delegated to
    ``times_series_learning.TimesSeriesLearning``; this class keeps one such
    model per server under the key ``<hostname>_general``.
    """

    def __init__(self, data, parameters, distribution, distribution_period, level_threshold,
                 processus=True, moving_window=60, train_mode=True, verbose=False):
        # Label clustering is currently disabled: a single "general" profile
        # is maintained per server.
        self.label_number = 1
        self.label = 1
        self.data = data
        # One parameter row per label plus one for the general profile; see
        # times_series_learning for the meaning of the four columns.
        self.parameters = np.ones((self.label_number + 1, 4)) * parameters
        self.data_prep = None
        # Hostname is read from the second column of the first record.
        self.hostname = self.data.iloc[0, 1]
        self.server_profile = dict()
        # Distance distribution shared across servers/clusters (the caller
        # passes sorted containers -- do not replace with a plain list).
        self.distribution = distribution
        # Length (minutes) of each bucket over which metrics are computed.
        self.distribution_period = distribution_period
        # Quantile level used to decide what counts as an outlier.
        self.level_threshold = level_threshold
        self.verbose = verbose
        self.processus = processus
        self.moving_window = moving_window
        self.train_mode = train_mode
        # Must come after distribution_period: initdict() reads it.
        self.measures = self.initdict()
        self.timestamp_anomaly = pd.DataFrame(columns=['Timestamp', 'Area_Difference'])

    def initdict(self):
        """Pre-build one empty measurement bucket per distribution period.

        The bucket count covers a 24 * 6 * 60 minute horizon (plus one
        spill-over slot).
        """
        buckets = defaultdict(dict)
        bucket_count = int(24 * 6 * 60 / self.distribution_period) + 1
        for slot in range(bucket_count):
            buckets[slot] = {'Area_Difference': [], 'Max_Spread': []}
        return buckets

    def preprocess_data(self, data):
        """Drop the feature columns (keeping first column and label) and
        group the remaining rows by label.

        NOTE(review): the columns to drop are taken from ``self.data``, not
        from *data* -- this assumes both frames share the same schema;
        confirm against callers.
        """
        drop_cols = self.data.columns[1:len(self.data.columns) - 1]
        trimmed = data.drop(drop_cols, axis=1)
        return trimmed.groupby(['label'])

    def set_profile(self):
        """Fit the general time-series profile for this server."""
        start = time.time()
        profile = tsl.TimesSeriesLearning(self.parameters[0, :],
                                          self.distribution_period,
                                          self.level_threshold,
                                          self.timestamp_anomaly,
                                          self.processus)
        profile.set_profile(self.data)
        self.server_profile[self.hostname + "_general"] = profile
        print("Learning Server" + self.hostname + " Done in " + str(time.time() - start))

    def process_distance(self, streaming_data):
        """Score *streaming_data* against the server's general profile.

        Returns the tuple produced by
        ``TimesSeriesLearning.compute_distance_profile``:
        (anomaly, max_spread, min_spread, d, date, threshold, quant).
        """
        profile = self.server_profile[self.hostname + "_general"]
        return profile.compute_distance_profile(streaming_data,
                                                self.distribution,
                                                self.measures,
                                                self.train_mode,
                                                self.verbose)
|
flexible
|
{
"blob_id": "53dd753356d8a8d60975c8f4cdaf20de66c2db46",
"index": 3486,
"step-1": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-2": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n <mask token>\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-3": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-4": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n\n def set_profile(self):\n t0 = time.time()\n t = tsl.TimesSeriesLearning(self.parameters[0, :], self.\n distribution_period, self.level_threshold, self.\n timestamp_anomaly, self.processus)\n t.set_profile(self.data)\n self.server_profile[self.hostname + '_general'] = t\n print('Learning Server' + self.hostname + ' Done in ' + str(time.\n time() - t0))\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-5": "import times_series_learning as tsl\nimport numpy as np\nimport time\nimport datetime as dt\nimport sortedcontainers\nimport pandas as pd\nfrom collections import defaultdict\n\n\nclass ServerProfileLearning(object):\n\n def __init__(self, data, parameters, distribution, distribution_period, level_threshold,\n processus=True, moving_window=60,train_mode=True, verbose=False):\n self.label_number = 1 #len(np.unique(data['label'].values))\n self.label = 1 #np.unique(data['label'].values)\n self.data = data\n self.parameters = np.ones((self.label_number + 1, 4)) * parameters # see parameters in times_series_learning\n self.data_prep = None\n self.hostname = self.data.iloc[0, 1]\n self.server_profile = dict()\n self.distribution = distribution # distribution of distance list same for all servers all clusters be carefull sorted containers\n self.distribution_period = distribution_period # distribution period where we compute metrics\n self.level_threshold = level_threshold # level we consider for outliers\n self.verbose = verbose\n self.processus = processus\n self.moving_window = moving_window\n self.train_mode = train_mode\n self.measures = self.initdict()\n self.timestamp_anomaly = pd.DataFrame(columns=['Timestamp','Area_Difference'])\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int((24*6*60)/self.distribution_period)+1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n\n # sortedcontainers.SortedDict(sortedcontainers.SortedList())\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n\n def set_profile(self):\n t0 = time.time()\n t = tsl.TimesSeriesLearning(self.parameters[0, :],\n self.distribution_period,\n self.level_threshold, self.timestamp_anomaly, self.processus)\n t.set_profile(self.data)\n self.server_profile[self.hostname + \"_general\"] = t\n #self.data_prep = 
self.preprocess_data(self.data)\n # i = 0\n # for k, v in self.data_prep:\n # t = tsl.TimesSeriesLearning(self.parameters[i, :],\n # self.distribution_period, self.level_threshold, self.processus)\n # t.set_profile(v)\n # self.server_profile[self.hostname + \"_\" + str(k)] = t\n # print('cluster number ' + str(k) + ' of hostname: ' + self.hostname)\n # i += 1\n print(\"Learning Server\" + self.hostname + \" Done in \" + str(time.time() - t0))\n\n # Process distance and update distribution\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + \"_general\"\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(streaming_data,\n self.distribution,\n self.measures,\n self.train_mode,\n self.verbose)\n #streaming_data_prep = self.preprocess_data(streaming_data)\n # for k, v in streaming_data_prep:\n # cluster_name = self.hostname + \"_\" + str(k)\n # if cluster_name in self.server_profile.keys():\n # t = self.server_profile[cluster_name]\n # anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(v,\n # self.distribution,\n # self.train_mode,\n # self.verbose)\n # #if anomaly:\n # # break\n # else:\n # print('cluster: ',k)\n # print(\"Logs does not belong to any cluster\")\n # break\n #print(\"stream proccessed in :\", time.time()-t0)\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n\n # def simulate_streaming(self, streaming_data,date_start):\n # streaming_data.index = pd.to_datetime(streaming_data.timestamp, format='%Y-%m-%d %H:%M:%S')\n # streaming_data = streaming_data.sort_index()\n # data_list = []\n # date = streaming_data.index[0]\n # while date < streaming_data.index[-1]:\n # data_to_add = streaming_data.loc[date.isoformat():\n # (date + dt.timedelta(minutes=self.parameters[2, 0]))].reset_index(drop=True)\n # if data_to_add.shape[0]>0:\n # data_list.append(data_to_add)\n # date += 
dt.timedelta(minutes=self.parameters[0, 2])\n #\n # return data\n",
"step-ids": [
2,
3,
4,
5,
8
]
}
|
[
2,
3,
4,
5,
8
] |
from config import Config
def test_stf_3_2_1_pos(fixture):
    """Happy path: submitting the real seed for exercise 3-2-1 passes."""
    run_seed = fixture.common.get_seed()
    fixture.stf.open_stf_exercise('3-2-1', run_seed)
    fixture.stf.open_solution_url(run_seed)
    assert fixture.stf.get_solution() == Config.test_pass_text
    fixture.common.back_to_main_page()
def test_stf_3_2_1_neg(fixture):
    """Negative path: submitting a bogus answer for exercise 3-2-1 fails."""
    run_seed = fixture.common.get_seed()
    fixture.stf.open_stf_exercise('3-2-1', run_seed)
    fixture.stf.open_solution_url('test')
    assert fixture.stf.get_solution() == Config.test_fail_text
    fixture.common.back_to_main_page()
__author__ = 'GiSDeCain'
|
normal
|
{
"blob_id": "028b38a07c71232eb42bedecd734cf7188550239",
"index": 9602,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_stf_3_2_1_neg(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url('test')\n assert fixture.stf.get_solution() == Config.test_fail_text\n fixture.common.back_to_main_page()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_stf_3_2_1_pos(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url(seed)\n assert fixture.stf.get_solution() == Config.test_pass_text\n fixture.common.back_to_main_page()\n\n\ndef test_stf_3_2_1_neg(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url('test')\n assert fixture.stf.get_solution() == Config.test_fail_text\n fixture.common.back_to_main_page()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef test_stf_3_2_1_pos(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url(seed)\n assert fixture.stf.get_solution() == Config.test_pass_text\n fixture.common.back_to_main_page()\n\n\ndef test_stf_3_2_1_neg(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url('test')\n assert fixture.stf.get_solution() == Config.test_fail_text\n fixture.common.back_to_main_page()\n\n\n__author__ = 'GiSDeCain'\n",
"step-5": "from config import Config\n\n\ndef test_stf_3_2_1_pos(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url(seed)\n assert fixture.stf.get_solution() == Config.test_pass_text\n fixture.common.back_to_main_page()\n\n\ndef test_stf_3_2_1_neg(fixture):\n seed = fixture.common.get_seed()\n fixture.stf.open_stf_exercise('3-2-1', seed)\n fixture.stf.open_solution_url('test')\n assert fixture.stf.get_solution() == Config.test_fail_text\n fixture.common.back_to_main_page()\n\n\n__author__ = 'GiSDeCain'\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while case_num - 1 < T:
data = map(int, input_file.readline().rstrip('\n').split(' '))
typed = data[0]
length = data[1]
probs = map(float, input_file.readline().rstrip('\n').split(' '))
assert that(len(probs)).equals(typed)
enter = 1
def product(probs):
if not probs:
return 1
return reduce(lambda x, y: x * y, probs)
def expected_strokes(typed, length):
finish = length - typed + enter
retype = finish + length + enter
correct = product(probs[:typed])
strokes = correct * finish + (1 - correct) * retype
return strokes
def get_min_backspace_stroke_count(typed, length):
min_strokes = 99999999999999
for backspaces in range(typed + 1):
min_strokes = min(backspaces + expected_strokes(typed -
backspaces, length), min_strokes)
return min_strokes
result = min(length + 2, get_min_backspace_stroke_count(typed, length))
output_file.write('Case #{}: {}\n'.format(case_num, result))
case_num += 1
<|reserved_special_token_1|>
<|reserved_special_token_0|>
input_file = open('input1.txt', 'r')
output_file = open('output1.txt', 'w')
T = int(input_file.readline().rstrip('\n'))
case_num = 1
while case_num - 1 < T:
data = map(int, input_file.readline().rstrip('\n').split(' '))
typed = data[0]
length = data[1]
probs = map(float, input_file.readline().rstrip('\n').split(' '))
assert that(len(probs)).equals(typed)
enter = 1
def product(probs):
if not probs:
return 1
return reduce(lambda x, y: x * y, probs)
def expected_strokes(typed, length):
finish = length - typed + enter
retype = finish + length + enter
correct = product(probs[:typed])
strokes = correct * finish + (1 - correct) * retype
return strokes
def get_min_backspace_stroke_count(typed, length):
min_strokes = 99999999999999
for backspaces in range(typed + 1):
min_strokes = min(backspaces + expected_strokes(typed -
backspaces, length), min_strokes)
return min_strokes
result = min(length + 2, get_min_backspace_stroke_count(typed, length))
output_file.write('Case #{}: {}\n'.format(case_num, result))
case_num += 1
<|reserved_special_token_1|>
import os
import numpy as np
import scipy as sp
import sys
from sure import that
from itertools import combinations, permutations
input_file = open('input1.txt', 'r')
output_file = open('output1.txt', 'w')
T = int(input_file.readline().rstrip('\n'))
case_num = 1
while case_num - 1 < T:
data = map(int, input_file.readline().rstrip('\n').split(' '))
typed = data[0]
length = data[1]
probs = map(float, input_file.readline().rstrip('\n').split(' '))
assert that(len(probs)).equals(typed)
enter = 1
def product(probs):
if not probs:
return 1
return reduce(lambda x, y: x * y, probs)
def expected_strokes(typed, length):
finish = length - typed + enter
retype = finish + length + enter
correct = product(probs[:typed])
strokes = correct * finish + (1 - correct) * retype
return strokes
def get_min_backspace_stroke_count(typed, length):
min_strokes = 99999999999999
for backspaces in range(typed + 1):
min_strokes = min(backspaces + expected_strokes(typed -
backspaces, length), min_strokes)
return min_strokes
result = min(length + 2, get_min_backspace_stroke_count(typed, length))
output_file.write('Case #{}: {}\n'.format(case_num, result))
case_num += 1
<|reserved_special_token_1|>
import os
import numpy as np
import scipy as sp
import sys
from sure import that
from itertools import combinations, permutations
input_file = open('input1.txt', 'r')
output_file = open('output1.txt', 'w')
T = int(input_file.readline().rstrip('\n'))
case_num = 1
while case_num - 1 < T:
# Parse data
data = map(int, input_file.readline().rstrip('\n').split(' '))
typed = data[0]
length = data[1]
probs = map(float, input_file.readline().rstrip('\n').split(' '))
assert that(len(probs)).equals(typed)
enter = 1
def product(probs):
if not probs:
return 1
return reduce(lambda x, y: x * y, probs)
def expected_strokes(typed, length):
finish = length - typed + enter
retype = finish + length + enter
correct = product(probs[:typed])
strokes = correct * finish + (1 - correct) * retype
return strokes
def get_min_backspace_stroke_count(typed, length):
min_strokes = 99999999999999
for backspaces in range(typed + 1):
min_strokes = min(backspaces + expected_strokes(typed - backspaces, length), min_strokes)
return min_strokes
result = min(length + 2, get_min_backspace_stroke_count(typed, length))
# Write result
output_file.write('Case #{}: {}\n'.format(case_num, result))
case_num += 1
|
flexible
|
{
"blob_id": "10c8316aee2107dc84ce7c1427dd62f52a2ce697",
"index": 4549,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile case_num - 1 < T:\n data = map(int, input_file.readline().rstrip('\\n').split(' '))\n typed = data[0]\n length = data[1]\n probs = map(float, input_file.readline().rstrip('\\n').split(' '))\n assert that(len(probs)).equals(typed)\n enter = 1\n\n def product(probs):\n if not probs:\n return 1\n return reduce(lambda x, y: x * y, probs)\n\n def expected_strokes(typed, length):\n finish = length - typed + enter\n retype = finish + length + enter\n correct = product(probs[:typed])\n strokes = correct * finish + (1 - correct) * retype\n return strokes\n\n def get_min_backspace_stroke_count(typed, length):\n min_strokes = 99999999999999\n for backspaces in range(typed + 1):\n min_strokes = min(backspaces + expected_strokes(typed -\n backspaces, length), min_strokes)\n return min_strokes\n result = min(length + 2, get_min_backspace_stroke_count(typed, length))\n output_file.write('Case #{}: {}\\n'.format(case_num, result))\n case_num += 1\n",
"step-3": "<mask token>\ninput_file = open('input1.txt', 'r')\noutput_file = open('output1.txt', 'w')\nT = int(input_file.readline().rstrip('\\n'))\ncase_num = 1\nwhile case_num - 1 < T:\n data = map(int, input_file.readline().rstrip('\\n').split(' '))\n typed = data[0]\n length = data[1]\n probs = map(float, input_file.readline().rstrip('\\n').split(' '))\n assert that(len(probs)).equals(typed)\n enter = 1\n\n def product(probs):\n if not probs:\n return 1\n return reduce(lambda x, y: x * y, probs)\n\n def expected_strokes(typed, length):\n finish = length - typed + enter\n retype = finish + length + enter\n correct = product(probs[:typed])\n strokes = correct * finish + (1 - correct) * retype\n return strokes\n\n def get_min_backspace_stroke_count(typed, length):\n min_strokes = 99999999999999\n for backspaces in range(typed + 1):\n min_strokes = min(backspaces + expected_strokes(typed -\n backspaces, length), min_strokes)\n return min_strokes\n result = min(length + 2, get_min_backspace_stroke_count(typed, length))\n output_file.write('Case #{}: {}\\n'.format(case_num, result))\n case_num += 1\n",
"step-4": "import os\nimport numpy as np\nimport scipy as sp\nimport sys\nfrom sure import that\nfrom itertools import combinations, permutations\ninput_file = open('input1.txt', 'r')\noutput_file = open('output1.txt', 'w')\nT = int(input_file.readline().rstrip('\\n'))\ncase_num = 1\nwhile case_num - 1 < T:\n data = map(int, input_file.readline().rstrip('\\n').split(' '))\n typed = data[0]\n length = data[1]\n probs = map(float, input_file.readline().rstrip('\\n').split(' '))\n assert that(len(probs)).equals(typed)\n enter = 1\n\n def product(probs):\n if not probs:\n return 1\n return reduce(lambda x, y: x * y, probs)\n\n def expected_strokes(typed, length):\n finish = length - typed + enter\n retype = finish + length + enter\n correct = product(probs[:typed])\n strokes = correct * finish + (1 - correct) * retype\n return strokes\n\n def get_min_backspace_stroke_count(typed, length):\n min_strokes = 99999999999999\n for backspaces in range(typed + 1):\n min_strokes = min(backspaces + expected_strokes(typed -\n backspaces, length), min_strokes)\n return min_strokes\n result = min(length + 2, get_min_backspace_stroke_count(typed, length))\n output_file.write('Case #{}: {}\\n'.format(case_num, result))\n case_num += 1\n",
"step-5": "import os\nimport numpy as np\nimport scipy as sp\nimport sys\nfrom sure import that\nfrom itertools import combinations, permutations\n\n\ninput_file = open('input1.txt', 'r')\noutput_file = open('output1.txt', 'w')\n\nT = int(input_file.readline().rstrip('\\n'))\ncase_num = 1\nwhile case_num - 1 < T:\n # Parse data\n data = map(int, input_file.readline().rstrip('\\n').split(' '))\n typed = data[0]\n length = data[1]\n probs = map(float, input_file.readline().rstrip('\\n').split(' '))\n assert that(len(probs)).equals(typed)\n\n enter = 1\n\n def product(probs):\n if not probs:\n return 1\n return reduce(lambda x, y: x * y, probs)\n \n def expected_strokes(typed, length):\n finish = length - typed + enter\n retype = finish + length + enter\n correct = product(probs[:typed])\n strokes = correct * finish + (1 - correct) * retype\n return strokes\n\n def get_min_backspace_stroke_count(typed, length):\n min_strokes = 99999999999999\n for backspaces in range(typed + 1):\n min_strokes = min(backspaces + expected_strokes(typed - backspaces, length), min_strokes)\n return min_strokes\n\n result = min(length + 2, get_min_backspace_stroke_count(typed, length))\n\n # Write result\n output_file.write('Case #{}: {}\\n'.format(case_num, result))\n case_num += 1\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__author__ = 'vidma'
<|reserved_special_token_1|>
"""
Contain meta-data related functions:
* accessing integration schema: fields, values, constraints on inputs/queries
* tracking fields available
* tracking known (input field) values
"""
# coding=utf-8
__author__ = 'vidma'
|
flexible
|
{
"blob_id": "abdedad2c2b42b54cdba0e61e095ba3df0783b81",
"index": 1172,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__author__ = 'vidma'\n",
"step-3": "\"\"\"\nContain meta-data related functions:\n\n* accessing integration schema: fields, values, constraints on inputs/queries\n* tracking fields available\n* tracking known (input field) values\n\"\"\"\n# coding=utf-8\n__author__ = 'vidma'\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import sys
def isPalin(s):
result = True
for i in range(len(s)/2):
if s[i] != s[-(i + 1)]:
result = False
break
return result
def main():
curr_large = 0
for i in xrange(900, 1000):
for j in xrange(900, 1000):
prod = i * j
# Turns out list comprehension is more succint, but I
# leave the traditional up method anyway
if str(prod) == str(prod)[::-1] and prod > curr_large:
curr_large = prod
print curr_large
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "1c171c67ca5ef0e9b5f2941eec7a625a8823271f",
"index": 8463,
"step-1": "import sys\n\ndef isPalin(s):\n result = True\n for i in range(len(s)/2):\n if s[i] != s[-(i + 1)]:\n result = False\n break\n return result\n\n\ndef main():\n curr_large = 0\n for i in xrange(900, 1000):\n for j in xrange(900, 1000):\n prod = i * j\n # Turns out list comprehension is more succint, but I \n # leave the traditional up method anyway\n if str(prod) == str(prod)[::-1] and prod > curr_large:\n curr_large = prod\n print curr_large\n\n\nif __name__ == '__main__':\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pandas as pd
import sweetviz as sv
b = pd.read_csv("final_cricket_players.csv", low_memory=False)
b = b.replace(to_replace="-",value="")
b = b.replace(to_replace="[]",value="")
b = b.replace(to_replace="{}",value="")
b.drop(b.columns[b.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
b.to_csv('Cleaned_dataset.csv', index=False)
report = sv.analyze(b, pairwise_analysis='off')
report.show_html()
|
normal
|
{
"blob_id": "f93b7f2939bbee9b0cb5402d3e5f5d6c482d37c4",
"index": 6983,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\n<mask token>\nreport.show_html()\n",
"step-3": "<mask token>\nb = pd.read_csv('final_cricket_players.csv', low_memory=False)\nb = b.replace(to_replace='-', value='')\nb = b.replace(to_replace='[]', value='')\nb = b.replace(to_replace='{}', value='')\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\nreport = sv.analyze(b, pairwise_analysis='off')\nreport.show_html()\n",
"step-4": "import pandas as pd\nimport sweetviz as sv\nb = pd.read_csv('final_cricket_players.csv', low_memory=False)\nb = b.replace(to_replace='-', value='')\nb = b.replace(to_replace='[]', value='')\nb = b.replace(to_replace='{}', value='')\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\nreport = sv.analyze(b, pairwise_analysis='off')\nreport.show_html()\n",
"step-5": "import pandas as pd\r\nimport sweetviz as sv\r\nb = pd.read_csv(\"final_cricket_players.csv\", low_memory=False)\r\nb = b.replace(to_replace=\"-\",value=\"\")\r\nb = b.replace(to_replace=\"[]\",value=\"\")\r\nb = b.replace(to_replace=\"{}\",value=\"\")\r\n\r\nb.drop(b.columns[b.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)\r\nb.to_csv('Cleaned_dataset.csv', index=False)\r\nreport = sv.analyze(b, pairwise_analysis='off')\r\nreport.show_html()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
"""
A package that determines the current day of the week.
"""
from datetime import date
import calendar
# Set the first day of the week as Sunday.
calendar.firstday(calendar.SUNDAY)
def day_of_the_week(arg):
"""
Returns the current day of the week.
"""
if arg == "day":
day_of_the_week = calendar.day_name[date.today().weekday()]
print("Today is " + day_of_the_week + ".")
#Raise exception for invalid argument
else:
raise Exception ("Invalid argument for day of the week")
def info():
"""
Returns information about the package.
"""
info = "This package determines the day of the week."
print(info)
if __name__ == "__main__"
day("today")
info()
|
normal
|
{
"blob_id": "7e23f5598ccfe9aff74d43eb662f860b0404b7ec",
"index": 8333,
"step-1": "#!/usr/bin/env python\n\n\"\"\"\nA package that determines the current day of the week.\n\"\"\"\n\nfrom datetime import date \nimport calendar\n\n# Set the first day of the week as Sunday.\n\ncalendar.firstday(calendar.SUNDAY)\n\ndef day_of_the_week(arg):\n\n\t\"\"\"\n\tReturns the current day of the week.\n\t\"\"\"\n \n if arg == \"day\":\n day_of_the_week = calendar.day_name[date.today().weekday()]\n print(\"Today is \" + day_of_the_week + \".\")\t\n\n\n #Raise exception for invalid argument\n else:\n \traise Exception (\"Invalid argument for day of the week\")\t\n\n\n def info():\n \n \"\"\"\n Returns information about the package.\n\t\"\"\"\n\n\t info = \"This package determines the day of the week.\"\n\t print(info)\n\nif __name__ == \"__main__\"\n\tday(\"today\")\n\tinfo()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class BucketDatasetGenerator:
"""
Provide data distribution of different gears for the bert network.
Args:
data_set (Dataset): The training dataset.
batch_size (Int): The training batchsize.
bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.
"""
def __init__(self, data_set, batch_size, bucket_list=None):
self.dataset = data_set
self.batch_size = batch_size
self.bucket_list = bucket_list
self.data_bucket = {bucket: [] for bucket in bucket_list}
bucket_size = len(bucket_list)
self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,
size=self.__len__())
self.random_list = (self.random_list + 2) % bucket_size
self.random_list = [bucket_list[i] for i in self.random_list]
self.iter = 0
def __next__(self):
for item in self.iterator:
for seq_length in self.bucket_list:
if np.sum(item[1]) <= seq_length:
self.data_bucket[seq_length].append(item)
break
for key in self.data_bucket.keys():
data = self.data_bucket[key]
if len(data) >= self.batch_size and self.random_list[self.iter
] == key:
self.data_bucket[key] = self.data_bucket[key][self.
batch_size:]
arr = data[0]
for i in range(1, self.batch_size):
current_data = data[i]
for j in range(len(current_data)):
arr[j] = np.concatenate((arr[j], current_data[j]))
res = ()
for label in arr:
newlabel = np.reshape(label, (self.batch_size, -1))
res += newlabel,
res += np.array(key, np.int32),
self.iter += 1
return res
raise StopIteration
def __iter__(self):
self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)
return self
def __len__(self):
return self.dataset.get_dataset_size() // self.batch_size - 1
<|reserved_special_token_0|>
def create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,
schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,
rank_id=0):
"""create finetune or evaluation dataset"""
type_cast_op = C.TypeCast(mstype.int32)
if is_training:
print('data_file_path: ', data_file_path)
print('rank_id: ', rank_id)
ds = de.MindDataset([data_file_path], columns_list=['input_ids',
'input_mask', 'segment_ids', 'start_positions', 'end_positions',
'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=
rank_size, shard_id=rank_id)
ds = ds.map(operations=type_cast_op, input_columns='start_positions')
ds = ds.map(operations=type_cast_op, input_columns='end_positions')
else:
ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=
do_shuffle, column_names=['input_ids', 'input_mask',
'segment_ids', 'unique_ids'])
ds = ds.map(operations=type_cast_op, input_columns='input_ids')
ds = ds.map(operations=type_cast_op, input_columns='input_mask')
ds = ds.map(operations=type_cast_op, input_columns='segment_ids')
ds = ds.map(operations=type_cast_op, input_columns='unique_ids')
ds = ds.repeat(repeat_count)
ds = ds.batch(batch_size, drop_remainder=True)
return ds
def create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,
schema_dir=None):
"""create evaluation dataset"""
data_files = []
if os.path.isdir(data_dir):
files = os.listdir(data_dir)
for file_name in files:
if 'tfrecord' in file_name:
data_files.append(os.path.join(data_dir, file_name))
else:
data_files.append(data_dir)
data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)
ori_dataset_size = data_set.get_dataset_size()
print('origin eval size: ', ori_dataset_size)
dtypes = data_set.output_types()
shapes = data_set.output_shapes()
output_batches = math.ceil(ori_dataset_size / device_num / batchsize)
padded_num = output_batches * device_num * batchsize - ori_dataset_size
print('padded num: ', padded_num)
if padded_num > 0:
item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':
np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[
2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],
dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4
]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),
'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}
padded_samples = [item for x in range(padded_num)]
padded_ds = de.PaddedDataset(padded_samples)
eval_ds = data_set + padded_ds
sampler = de.DistributedSampler(num_shards=device_num, shard_id=
rank, shuffle=False)
eval_ds.use_sampler(sampler)
else:
eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,
shard_id=rank, shard_equal_rows=True)
type_cast_op = C.TypeCast(mstype.int32)
eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)
eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)
eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)
eval_ds = eval_ds.batch(batchsize, drop_remainder=True)
print('eval data size: {}'.format(eval_ds.get_dataset_size()))
print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))
return eval_ds
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BucketDatasetGenerator:
"""
Provide data distribution of different gears for the bert network.
Args:
data_set (Dataset): The training dataset.
batch_size (Int): The training batchsize.
bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.
"""
def __init__(self, data_set, batch_size, bucket_list=None):
self.dataset = data_set
self.batch_size = batch_size
self.bucket_list = bucket_list
self.data_bucket = {bucket: [] for bucket in bucket_list}
bucket_size = len(bucket_list)
self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,
size=self.__len__())
self.random_list = (self.random_list + 2) % bucket_size
self.random_list = [bucket_list[i] for i in self.random_list]
self.iter = 0
def __next__(self):
for item in self.iterator:
for seq_length in self.bucket_list:
if np.sum(item[1]) <= seq_length:
self.data_bucket[seq_length].append(item)
break
for key in self.data_bucket.keys():
data = self.data_bucket[key]
if len(data) >= self.batch_size and self.random_list[self.iter
] == key:
self.data_bucket[key] = self.data_bucket[key][self.
batch_size:]
arr = data[0]
for i in range(1, self.batch_size):
current_data = data[i]
for j in range(len(current_data)):
arr[j] = np.concatenate((arr[j], current_data[j]))
res = ()
for label in arr:
newlabel = np.reshape(label, (self.batch_size, -1))
res += newlabel,
res += np.array(key, np.int32),
self.iter += 1
return res
raise StopIteration
def __iter__(self):
self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)
return self
def __len__(self):
return self.dataset.get_dataset_size() // self.batch_size - 1
def create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir
=None, schema_dir=None, batch_size=32, bucket_list=None):
"""create train dataset"""
files = os.listdir(data_dir)
data_files = []
for file_name in files:
if 'tfrecord' in file_name:
data_files.append(os.path.join(data_dir, file_name))
data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], shuffle=de.Shuffle.FILES if
do_shuffle == 'true' else False, num_shards=device_num, shard_id=
rank, shard_equal_rows=True)
if bucket_list:
bucket_dataset = BucketDatasetGenerator(data_set, batch_size,
bucket_list=bucket_list)
data_set = de.GeneratorDataset(bucket_dataset, column_names=[
'input_ids', 'input_mask', 'segment_ids',
'next_sentence_labels', 'masked_lm_positions', 'masked_lm_ids',
'masked_lm_weights', 'sentence_flag'], shuffle=False)
else:
data_set = data_set.batch(batch_size, drop_remainder=True)
ori_dataset_size = data_set.get_dataset_size()
print('origin dataset size: ', ori_dataset_size)
type_cast_op = C.TypeCast(mstype.int32)
data_set = data_set.map(operations=type_cast_op, input_columns=
'masked_lm_ids')
data_set = data_set.map(operations=type_cast_op, input_columns=
'masked_lm_positions')
data_set = data_set.map(operations=type_cast_op, input_columns=
'next_sentence_labels')
data_set = data_set.map(operations=type_cast_op, input_columns=
'segment_ids')
data_set = data_set.map(operations=type_cast_op, input_columns='input_mask'
)
data_set = data_set.map(operations=type_cast_op, input_columns='input_ids')
logger.info('data size: {}'.format(data_set.get_dataset_size()))
logger.info('repeat count: {}'.format(data_set.get_repeat_count()))
return data_set
def create_classification_dataset(batch_size=1, repeat_count=1,
assessment_method='accuracy', data_file_path=None, schema_file_path=
None, do_shuffle=True, rank_size=1, rank_id=0):
"""create finetune or evaluation dataset"""
type_cast_op = C.TypeCast(mstype.int32)
ds = de.MindDataset([data_file_path], columns_list=['input_ids',
'input_mask', 'segment_ids', 'label_ids'], shuffle=do_shuffle,
num_shards=rank_size, shard_id=rank_id)
if assessment_method == 'Spearman_correlation':
type_cast_op_float = C.TypeCast(mstype.float32)
ds = ds.map(operations=type_cast_op_float, input_columns='label_ids')
else:
ds = ds.map(operations=type_cast_op, input_columns='label_ids')
ds = ds.map(operations=type_cast_op, input_columns='segment_ids')
ds = ds.map(operations=type_cast_op, input_columns='input_mask')
ds = ds.map(operations=type_cast_op, input_columns='input_ids')
ds = ds.repeat(repeat_count)
ds = ds.batch(batch_size, drop_remainder=True)
return ds
<|reserved_special_token_0|>
def generator_squad_train(data_features):
for feature in data_features:
yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.start_position, feature.end_position, feature.unique_id, feature.is_impossible
def create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,
schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,
rank_id=0):
"""create finetune or evaluation dataset"""
type_cast_op = C.TypeCast(mstype.int32)
if is_training:
print('data_file_path: ', data_file_path)
print('rank_id: ', rank_id)
ds = de.MindDataset([data_file_path], columns_list=['input_ids',
'input_mask', 'segment_ids', 'start_positions', 'end_positions',
'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=
rank_size, shard_id=rank_id)
ds = ds.map(operations=type_cast_op, input_columns='start_positions')
ds = ds.map(operations=type_cast_op, input_columns='end_positions')
else:
ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=
do_shuffle, column_names=['input_ids', 'input_mask',
'segment_ids', 'unique_ids'])
ds = ds.map(operations=type_cast_op, input_columns='input_ids')
ds = ds.map(operations=type_cast_op, input_columns='input_mask')
ds = ds.map(operations=type_cast_op, input_columns='segment_ids')
ds = ds.map(operations=type_cast_op, input_columns='unique_ids')
ds = ds.repeat(repeat_count)
ds = ds.batch(batch_size, drop_remainder=True)
return ds
def create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,
schema_dir=None):
"""create evaluation dataset"""
data_files = []
if os.path.isdir(data_dir):
files = os.listdir(data_dir)
for file_name in files:
if 'tfrecord' in file_name:
data_files.append(os.path.join(data_dir, file_name))
else:
data_files.append(data_dir)
data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)
ori_dataset_size = data_set.get_dataset_size()
print('origin eval size: ', ori_dataset_size)
dtypes = data_set.output_types()
shapes = data_set.output_shapes()
output_batches = math.ceil(ori_dataset_size / device_num / batchsize)
padded_num = output_batches * device_num * batchsize - ori_dataset_size
print('padded num: ', padded_num)
if padded_num > 0:
item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':
np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[
2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],
dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4
]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),
'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}
padded_samples = [item for x in range(padded_num)]
padded_ds = de.PaddedDataset(padded_samples)
eval_ds = data_set + padded_ds
sampler = de.DistributedSampler(num_shards=device_num, shard_id=
rank, shuffle=False)
eval_ds.use_sampler(sampler)
else:
eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,
shard_id=rank, shard_equal_rows=True)
type_cast_op = C.TypeCast(mstype.int32)
eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=
type_cast_op)
eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)
eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)
eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)
eval_ds = eval_ds.batch(batchsize, drop_remainder=True)
print('eval data size: {}'.format(eval_ds.get_dataset_size()))
print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))
return eval_ds
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BucketDatasetGenerator:
"""
Provide data distribution of different gears for the bert network.
Args:
data_set (Dataset): The training dataset.
batch_size (Int): The training batchsize.
bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.
"""
def __init__(self, data_set, batch_size, bucket_list=None):
self.dataset = data_set
self.batch_size = batch_size
self.bucket_list = bucket_list
self.data_bucket = {bucket: [] for bucket in bucket_list}
bucket_size = len(bucket_list)
self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,
size=self.__len__())
self.random_list = (self.random_list + 2) % bucket_size
self.random_list = [bucket_list[i] for i in self.random_list]
self.iter = 0
def __next__(self):
for item in self.iterator:
for seq_length in self.bucket_list:
if np.sum(item[1]) <= seq_length:
self.data_bucket[seq_length].append(item)
break
for key in self.data_bucket.keys():
data = self.data_bucket[key]
if len(data) >= self.batch_size and self.random_list[self.iter
] == key:
self.data_bucket[key] = self.data_bucket[key][self.
batch_size:]
arr = data[0]
for i in range(1, self.batch_size):
current_data = data[i]
for j in range(len(current_data)):
arr[j] = np.concatenate((arr[j], current_data[j]))
res = ()
for label in arr:
newlabel = np.reshape(label, (self.batch_size, -1))
res += newlabel,
res += np.array(key, np.int32),
self.iter += 1
return res
raise StopIteration
def __iter__(self):
self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)
return self
def __len__(self):
return self.dataset.get_dataset_size() // self.batch_size - 1
def create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir
=None, schema_dir=None, batch_size=32, bucket_list=None):
"""create train dataset"""
files = os.listdir(data_dir)
data_files = []
for file_name in files:
if 'tfrecord' in file_name:
data_files.append(os.path.join(data_dir, file_name))
data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=
'' else None, columns_list=['input_ids', 'input_mask',
'segment_ids', 'next_sentence_labels', 'masked_lm_positions',
'masked_lm_ids', 'masked_lm_weights'], shuffle=de.Shuffle.FILES if
do_shuffle == 'true' else False, num_shards=device_num, shard_id=
rank, shard_equal_rows=True)
if bucket_list:
bucket_dataset = BucketDatasetGenerator(data_set, batch_size,
bucket_list=bucket_list)
data_set = de.GeneratorDataset(bucket_dataset, column_names=[
'input_ids', 'input_mask', 'segment_ids',
'next_sentence_labels', 'masked_lm_positions', 'masked_lm_ids',
'masked_lm_weights', 'sentence_flag'], shuffle=False)
else:
data_set = data_set.batch(batch_size, drop_remainder=True)
ori_dataset_size = data_set.get_dataset_size()
print('origin dataset size: ', ori_dataset_size)
type_cast_op = C.TypeCast(mstype.int32)
data_set = data_set.map(operations=type_cast_op, input_columns=
'masked_lm_ids')
data_set = data_set.map(operations=type_cast_op, input_columns=
'masked_lm_positions')
data_set = data_set.map(operations=type_cast_op, input_columns=
'next_sentence_labels')
data_set = data_set.map(operations=type_cast_op, input_columns=
'segment_ids')
data_set = data_set.map(operations=type_cast_op, input_columns='input_mask'
)
data_set = data_set.map(operations=type_cast_op, input_columns='input_ids')
logger.info('data size: {}'.format(data_set.get_dataset_size()))
logger.info('repeat count: {}'.format(data_set.get_repeat_count()))
return data_set
def create_classification_dataset(batch_size=1, repeat_count=1, assessment_method='accuracy',
                                  data_file_path=None, schema_file_path=None,
                                  do_shuffle=True, rank_size=1, rank_id=0):
    """Build the finetune or evaluation dataset for a classification task.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        assessment_method (str): 'Spearman_correlation' keeps float labels
            (regression-style metric); anything else casts labels to int32.
        data_file_path (str): MindRecord file holding the task data.
        schema_file_path (str): Unused here; kept for interface compatibility.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards.
        rank_id (int): Shard id of this device.

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    ds = de.MindDataset([data_file_path],
                        columns_list=['input_ids', 'input_mask', 'segment_ids', 'label_ids'],
                        shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
    if assessment_method == 'Spearman_correlation':
        # Spearman correlation is computed on continuous scores.
        ds = ds.map(operations=C.TypeCast(mstype.float32), input_columns='label_ids')
    else:
        ds = ds.map(operations=type_cast_op, input_columns='label_ids')
    for column in ('segment_ids', 'input_mask', 'input_ids'):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def generator_squad(data_features):
    """Yield (input_ids, input_mask, segment_ids, unique_id) per evaluation feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids, item.unique_id)
def generator_squad_train(data_features):
    """Yield the full training tuple (ids, mask, segments, start, end,
    unique_id, is_impossible) per feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids,
               item.start_position, item.end_position, item.unique_id,
               item.is_impossible)
def create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,
                         schema_file_path=None, is_training=True, do_shuffle=True,
                         rank_size=1, rank_id=0):
    """Build the finetune (training) or evaluation dataset for SQuAD.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        data_file_path: MindRecord path (training) or a list of evaluation
            features consumed through ``generator_squad`` (evaluation).
        schema_file_path (str): Unused here; kept for interface compatibility.
        is_training (bool): Selects the training (MindRecord) source.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards (training only).
        rank_id (int): Shard id of this device (training only).

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    if is_training:
        print('data_file_path: ', data_file_path)
        print('rank_id: ', rank_id)
        ds = de.MindDataset([data_file_path],
                            columns_list=['input_ids', 'input_mask', 'segment_ids',
                                          'start_positions', 'end_positions',
                                          'unique_ids', 'is_impossible'],
                            shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
        for column in ('start_positions', 'end_positions'):
            ds = ds.map(operations=type_cast_op, input_columns=column)
    else:
        ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=do_shuffle,
                                 column_names=['input_ids', 'input_mask',
                                               'segment_ids', 'unique_ids'])
    for column in ('input_ids', 'input_mask', 'segment_ids', 'unique_ids'):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None, schema_dir=None):
    """Build the evaluation dataset, padding so every device sees full batches.

    Args:
        batchsize (int): Samples per batch.
        device_num (int): Number of devices sharing the data.
        rank (int): Shard id of this device.
        data_dir (str): A tfrecord file, or a directory of tfrecord files.
        schema_dir (str): Schema file path; '' means no schema.

    Returns:
        Dataset: batched, type-cast evaluation dataset.
    """
    if os.path.isdir(data_dir):
        data_files = [os.path.join(data_dir, name)
                      for name in os.listdir(data_dir) if 'tfrecord' in name]
    else:
        data_files = [data_dir]
    columns = ['input_ids', 'input_mask', 'segment_ids', 'next_sentence_labels',
               'masked_lm_positions', 'masked_lm_ids', 'masked_lm_weights']
    schema = schema_dir if schema_dir != '' else None
    data_set = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                  shard_equal_rows=True)
    ori_dataset_size = data_set.get_dataset_size()
    print('origin eval size: ', ori_dataset_size)
    dtypes = data_set.output_types()
    shapes = data_set.output_shapes()
    # Pad with zero samples until the size divides evenly into per-device batches.
    output_batches = math.ceil(ori_dataset_size / device_num / batchsize)
    padded_num = output_batches * device_num * batchsize - ori_dataset_size
    print('padded num: ', padded_num)
    if padded_num > 0:
        item = {name: np.zeros(shapes[i], dtypes[i]) for i, name in enumerate(columns)}
        padded_ds = de.PaddedDataset([item] * padded_num)
        eval_ds = data_set + padded_ds
        eval_ds.use_sampler(de.DistributedSampler(num_shards=device_num,
                                                  shard_id=rank, shuffle=False))
    else:
        # No padding needed: re-open the source directly with sharding applied.
        eval_ds = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                     num_shards=device_num, shard_id=rank,
                                     shard_equal_rows=True)
    type_cast_op = C.TypeCast(mstype.int32)
    for column in ('masked_lm_ids', 'masked_lm_positions', 'next_sentence_labels',
                   'segment_ids', 'input_mask', 'input_ids'):
        eval_ds = eval_ds.map(input_columns=column, operations=type_cast_op)
    eval_ds = eval_ds.batch(batchsize, drop_remainder=True)
    print('eval data size: {}'.format(eval_ds.get_dataset_size()))
    print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))
    return eval_ds
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
import math
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.dataset as de
import mindspore.dataset.transforms as C
from mindspore import log as logger
class BucketDatasetGenerator:
    """Iterator wrapper that serves bucketed, fixed-size batches for bert.

    Incoming samples are filed into the smallest length bucket ("gear") that
    covers their mask sum; a batch is emitted as soon as the bucket pre-drawn
    for the current step holds ``batch_size`` samples, each column reshaped to
    ``(batch_size, -1)`` and followed by a sentence_flag scalar carrying the
    bucket length.

    Args:
        data_set (Dataset): The training dataset.
        batch_size (int): Number of samples per emitted batch.
        bucket_list (list): Sentence lengths, e.g. [128, 256, 512]. Default: None.
    """

    def __init__(self, data_set, batch_size, bucket_list=None):
        self.dataset = data_set
        self.batch_size = batch_size
        self.bucket_list = bucket_list
        # One pending-sample list per bucket length.
        self.data_bucket = {gear: [] for gear in bucket_list}
        num_gears = len(bucket_list)
        # Pre-draw which bucket every step of the epoch should be served from.
        draws = np.random.binomial(n=num_gears - 1, p=0.5, size=self.__len__())
        draws = (draws + 2) % num_gears
        self.random_list = [bucket_list[d] for d in draws]
        self.iter = 0

    def __next__(self):
        for sample in self.iterator:
            # File the sample under the smallest bucket that fits its mask sum.
            for gear in self.bucket_list:
                if np.sum(sample[1]) <= gear:
                    self.data_bucket[gear].append(sample)
                    break
            for gear, pending in self.data_bucket.items():
                if len(pending) >= self.batch_size and self.random_list[self.iter] == gear:
                    # Take one batch worth of samples out of this bucket.
                    self.data_bucket[gear] = pending[self.batch_size:]
                    merged = pending[0]
                    for other in pending[1:self.batch_size]:
                        for col in range(len(other)):
                            merged[col] = np.concatenate((merged[col], other[col]))
                    batch = tuple(np.reshape(column, (self.batch_size, -1))
                                  for column in merged)
                    batch += (np.array(gear, np.int32),)
                    self.iter += 1
                    return batch
        raise StopIteration

    def __iter__(self):
        """Start a fresh pass by opening a numpy tuple iterator on the dataset."""
        self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)
        return self

    def __len__(self):
        """Batches per epoch; one batch is held back to avoid a partial tail."""
        return self.dataset.get_dataset_size() // self.batch_size - 1
def create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir=None,
                          schema_dir=None, batch_size=32, bucket_list=None):
    """Build the ALBERT pre-training dataset from TFRecord shards.

    Args:
        device_num (int): Number of devices the data is sharded across.
        rank (int): Shard id served to this device.
        do_shuffle (str): 'true' enables file-level shuffling.
        data_dir (str): Directory containing the tfrecord shard files.
        schema_dir (str): Schema file path; '' means no schema.
        batch_size (int): Samples per batch.
        bucket_list (list): Sentence-length buckets; enables bucketed batching.

    Returns:
        Dataset: batched dataset with every integer column cast to int32.
    """
    # Every file with "tfrecord" in its name is a shard of the training data.
    data_files = [os.path.join(data_dir, name)
                  for name in os.listdir(data_dir) if 'tfrecord' in name]
    columns = ['input_ids', 'input_mask', 'segment_ids', 'next_sentence_labels',
               'masked_lm_positions', 'masked_lm_ids', 'masked_lm_weights']
    data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir != '' else None,
                                  columns_list=columns,
                                  shuffle=de.Shuffle.FILES if do_shuffle == 'true' else False,
                                  num_shards=device_num, shard_id=rank,
                                  shard_equal_rows=True)
    if bucket_list:
        # Bucketed mode: batches are assembled per sentence-length gear, and a
        # sentence_flag column carries the chosen bucket length.
        bucket_dataset = BucketDatasetGenerator(data_set, batch_size, bucket_list=bucket_list)
        data_set = de.GeneratorDataset(bucket_dataset,
                                       column_names=columns + ['sentence_flag'],
                                       shuffle=False)
    else:
        data_set = data_set.batch(batch_size, drop_remainder=True)
    print('origin dataset size: ', data_set.get_dataset_size())
    # The network expects int32 for every integer-valued column.
    type_cast_op = C.TypeCast(mstype.int32)
    for column in ('masked_lm_ids', 'masked_lm_positions', 'next_sentence_labels',
                   'segment_ids', 'input_mask', 'input_ids'):
        data_set = data_set.map(operations=type_cast_op, input_columns=column)
    logger.info('data size: {}'.format(data_set.get_dataset_size()))
    logger.info('repeat count: {}'.format(data_set.get_repeat_count()))
    return data_set
def create_classification_dataset(batch_size=1, repeat_count=1, assessment_method='accuracy',
                                  data_file_path=None, schema_file_path=None,
                                  do_shuffle=True, rank_size=1, rank_id=0):
    """Build the finetune or evaluation dataset for a classification task.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        assessment_method (str): 'Spearman_correlation' keeps float labels
            (regression-style metric); anything else casts labels to int32.
        data_file_path (str): MindRecord file holding the task data.
        schema_file_path (str): Unused here; kept for interface compatibility.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards.
        rank_id (int): Shard id of this device.

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    ds = de.MindDataset([data_file_path],
                        columns_list=['input_ids', 'input_mask', 'segment_ids', 'label_ids'],
                        shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
    if assessment_method == 'Spearman_correlation':
        # Spearman correlation is computed on continuous scores.
        ds = ds.map(operations=C.TypeCast(mstype.float32), input_columns='label_ids')
    else:
        ds = ds.map(operations=type_cast_op, input_columns='label_ids')
    for column in ('segment_ids', 'input_mask', 'input_ids'):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def generator_squad(data_features):
    """Yield (input_ids, input_mask, segment_ids, unique_id) per evaluation feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids, item.unique_id)
def generator_squad_train(data_features):
    """Yield the full training tuple (ids, mask, segments, start, end,
    unique_id, is_impossible) per feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids,
               item.start_position, item.end_position, item.unique_id,
               item.is_impossible)
def create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,
                         schema_file_path=None, is_training=True, do_shuffle=True,
                         rank_size=1, rank_id=0):
    """Build the finetune (training) or evaluation dataset for SQuAD.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        data_file_path: MindRecord path (training) or a list of evaluation
            features consumed through ``generator_squad`` (evaluation).
        schema_file_path (str): Unused here; kept for interface compatibility.
        is_training (bool): Selects the training (MindRecord) source.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards (training only).
        rank_id (int): Shard id of this device (training only).

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    if is_training:
        print('data_file_path: ', data_file_path)
        print('rank_id: ', rank_id)
        ds = de.MindDataset([data_file_path],
                            columns_list=['input_ids', 'input_mask', 'segment_ids',
                                          'start_positions', 'end_positions',
                                          'unique_ids', 'is_impossible'],
                            shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
        for column in ('start_positions', 'end_positions'):
            ds = ds.map(operations=type_cast_op, input_columns=column)
    else:
        ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=do_shuffle,
                                 column_names=['input_ids', 'input_mask',
                                               'segment_ids', 'unique_ids'])
    for column in ('input_ids', 'input_mask', 'segment_ids', 'unique_ids'):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None, schema_dir=None):
    """Build the evaluation dataset, padding so every device sees full batches.

    Args:
        batchsize (int): Samples per batch.
        device_num (int): Number of devices sharing the data.
        rank (int): Shard id of this device.
        data_dir (str): A tfrecord file, or a directory of tfrecord files.
        schema_dir (str): Schema file path; '' means no schema.

    Returns:
        Dataset: batched, type-cast evaluation dataset.
    """
    if os.path.isdir(data_dir):
        data_files = [os.path.join(data_dir, name)
                      for name in os.listdir(data_dir) if 'tfrecord' in name]
    else:
        data_files = [data_dir]
    columns = ['input_ids', 'input_mask', 'segment_ids', 'next_sentence_labels',
               'masked_lm_positions', 'masked_lm_ids', 'masked_lm_weights']
    schema = schema_dir if schema_dir != '' else None
    data_set = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                  shard_equal_rows=True)
    ori_dataset_size = data_set.get_dataset_size()
    print('origin eval size: ', ori_dataset_size)
    dtypes = data_set.output_types()
    shapes = data_set.output_shapes()
    # Pad with zero samples until the size divides evenly into per-device batches.
    output_batches = math.ceil(ori_dataset_size / device_num / batchsize)
    padded_num = output_batches * device_num * batchsize - ori_dataset_size
    print('padded num: ', padded_num)
    if padded_num > 0:
        item = {name: np.zeros(shapes[i], dtypes[i]) for i, name in enumerate(columns)}
        padded_ds = de.PaddedDataset([item] * padded_num)
        eval_ds = data_set + padded_ds
        eval_ds.use_sampler(de.DistributedSampler(num_shards=device_num,
                                                  shard_id=rank, shuffle=False))
    else:
        # No padding needed: re-open the source directly with sharding applied.
        eval_ds = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                     num_shards=device_num, shard_id=rank,
                                     shard_equal_rows=True)
    type_cast_op = C.TypeCast(mstype.int32)
    for column in ('masked_lm_ids', 'masked_lm_positions', 'next_sentence_labels',
                   'segment_ids', 'input_mask', 'input_ids'):
        eval_ds = eval_ds.map(input_columns=column, operations=type_cast_op)
    eval_ds = eval_ds.batch(batchsize, drop_remainder=True)
    print('eval data size: {}'.format(eval_ds.get_dataset_size()))
    print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))
    return eval_ds
<|reserved_special_token_1|>
# Copyright 2021-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Data operations, will be used in run_pretrain.py
"""
import os
import math
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.dataset as de
import mindspore.dataset.transforms as C
from mindspore import log as logger
class BucketDatasetGenerator:
    """
    Provide data distribution of different gears for the bert network.

    Samples are filed into the smallest bucket whose length covers their mask
    sum; whenever the bucket pre-drawn for the current step holds a full batch,
    that batch is concatenated, reshaped to (batch_size, -1) per column, and
    emitted together with a sentence_flag scalar carrying the bucket length.

    Args:
        data_set (Dataset): The training dataset.
        batch_size (Int): The training batchsize.
        bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.
    """
    def __init__(self, data_set, batch_size, bucket_list=None):
        self.dataset = data_set
        self.batch_size = batch_size
        self.bucket_list = bucket_list
        # One pending-sample list per bucket length.
        self.data_bucket = {bucket: [] for bucket in bucket_list}
        bucket_size = len(bucket_list)
        # Pre-draw the bucket used at every step of the epoch.
        self.random_list = np.random.binomial(n=(bucket_size - 1), p=0.5, size=self.__len__())
        self.random_list = (self.random_list + 2) % bucket_size
        self.random_list = [bucket_list[i] for i in self.random_list]
        self.iter = 0

    def __next__(self):
        # Fix: stop cleanly once every pre-drawn step has been consumed; the
        # previous code indexed past the end of random_list and crashed with
        # IndexError instead of ending iteration.
        if self.iter >= len(self.random_list):
            raise StopIteration
        for item in self.iterator:
            # File the sample under the smallest bucket that fits its mask sum.
            for seq_length in self.bucket_list:
                if np.sum(item[1]) <= seq_length:
                    self.data_bucket[seq_length].append(item)
                    break
            for key in self.data_bucket.keys():
                data = self.data_bucket[key]
                if len(data) >= self.batch_size and self.random_list[self.iter] == key:
                    # Take one batch worth of samples out of this bucket.
                    self.data_bucket[key] = self.data_bucket[key][self.batch_size:]
                    arr = data[0]
                    for i in range(1, self.batch_size):
                        current_data = data[i]
                        for j in range(len(current_data)):
                            arr[j] = np.concatenate((arr[j], current_data[j]))
                    res = ()
                    for label in arr:
                        newlabel = np.reshape(label, (self.batch_size, -1))
                        res += (newlabel,)
                    res += (np.array(key, np.int32),)
                    self.iter += 1
                    return res
        raise StopIteration

    def __iter__(self):
        # Restart iteration over the wrapped dataset, yielding numpy tuples.
        self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)
        return self

    def __len__(self):
        # One batch is dropped to stay clear of a possibly partial tail batch.
        return (self.dataset.get_dataset_size() // self.batch_size) - 1
def create_albert_dataset(device_num=1, rank=0, do_shuffle="true", data_dir=None,
                          schema_dir=None, batch_size=32, bucket_list=None):
    """Build the ALBERT pre-training dataset from TFRecord shards.

    Args:
        device_num (int): Number of devices the data is sharded across.
        rank (int): Shard id served to this device.
        do_shuffle (str): "true" enables file-level shuffling.
        data_dir (str): Directory containing the tfrecord shard files.
        schema_dir (str): Schema file path; "" means no schema.
        batch_size (int): Samples per batch.
        bucket_list (list): Sentence-length buckets; enables bucketed batching.

    Returns:
        Dataset: batched dataset with every integer column cast to int32.
    """
    # Every file with "tfrecord" in its name is a shard of the training data.
    data_files = [os.path.join(data_dir, name)
                  for name in os.listdir(data_dir) if "tfrecord" in name]
    columns = ["input_ids", "input_mask", "segment_ids", "next_sentence_labels",
               "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"]
    data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir != "" else None,
                                  columns_list=columns,
                                  shuffle=de.Shuffle.FILES if do_shuffle == "true" else False,
                                  num_shards=device_num, shard_id=rank,
                                  shard_equal_rows=True)
    if bucket_list:
        # Bucketed mode: batches are assembled per sentence-length gear, and a
        # sentence_flag column carries the chosen bucket length.
        bucket_dataset = BucketDatasetGenerator(data_set, batch_size, bucket_list=bucket_list)
        data_set = de.GeneratorDataset(bucket_dataset,
                                       column_names=columns + ["sentence_flag"],
                                       shuffle=False)
    else:
        data_set = data_set.batch(batch_size, drop_remainder=True)
    print('origin dataset size: ', data_set.get_dataset_size())
    # The network expects int32 for every integer-valued column.
    type_cast_op = C.TypeCast(mstype.int32)
    for column in ("masked_lm_ids", "masked_lm_positions", "next_sentence_labels",
                   "segment_ids", "input_mask", "input_ids"):
        data_set = data_set.map(operations=type_cast_op, input_columns=column)
    logger.info("data size: {}".format(data_set.get_dataset_size()))
    logger.info("repeat count: {}".format(data_set.get_repeat_count()))
    return data_set
def create_classification_dataset(batch_size=1, repeat_count=1, assessment_method="accuracy",
                                  data_file_path=None, schema_file_path=None,
                                  do_shuffle=True, rank_size=1, rank_id=0):
    """Build the finetune or evaluation dataset for a classification task.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        assessment_method (str): "Spearman_correlation" keeps float labels
            (regression-style metric); anything else casts labels to int32.
        data_file_path (str): MindRecord file holding the task data.
        schema_file_path (str): Unused here; kept for interface compatibility.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards.
        rank_id (int): Shard id of this device.

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    ds = de.MindDataset([data_file_path],
                        columns_list=["input_ids", "input_mask", "segment_ids", "label_ids"],
                        shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
    if assessment_method == "Spearman_correlation":
        # Spearman correlation is computed on continuous scores.
        ds = ds.map(operations=C.TypeCast(mstype.float32), input_columns="label_ids")
    else:
        ds = ds.map(operations=type_cast_op, input_columns="label_ids")
    for column in ("segment_ids", "input_mask", "input_ids"):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def generator_squad(data_features):
    """Yield (input_ids, input_mask, segment_ids, unique_id) per evaluation feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids, item.unique_id)
def generator_squad_train(data_features):
    """Yield the full training tuple (ids, mask, segments, start, end,
    unique_id, is_impossible) per feature."""
    for item in data_features:
        yield (item.input_ids, item.input_mask, item.segment_ids,
               item.start_position, item.end_position, item.unique_id,
               item.is_impossible)
def create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,
                         schema_file_path=None, is_training=True, do_shuffle=True,
                         rank_size=1, rank_id=0):
    """Build the finetune (training) or evaluation dataset for SQuAD.

    Args:
        batch_size (int): Samples per batch.
        repeat_count (int): Number of epochs to repeat the data.
        data_file_path: MindRecord path (training) or a list of evaluation
            features consumed through ``generator_squad`` (evaluation).
        schema_file_path (str): Unused here; kept for interface compatibility.
        is_training (bool): Selects the training (MindRecord) source.
        do_shuffle (bool): Whether to shuffle samples.
        rank_size (int): Number of shards (training only).
        rank_id (int): Shard id of this device (training only).

    Returns:
        Dataset: batched, type-cast dataset.
    """
    type_cast_op = C.TypeCast(mstype.int32)
    if is_training:
        print("data_file_path: ", data_file_path)
        print("rank_id: ", rank_id)
        ds = de.MindDataset([data_file_path],
                            columns_list=["input_ids", "input_mask", "segment_ids",
                                          "start_positions", "end_positions",
                                          "unique_ids", "is_impossible"],
                            shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)
        for column in ("start_positions", "end_positions"):
            ds = ds.map(operations=type_cast_op, input_columns=column)
    else:
        ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=do_shuffle,
                                 column_names=["input_ids", "input_mask",
                                               "segment_ids", "unique_ids"])
    for column in ("input_ids", "input_mask", "segment_ids", "unique_ids"):
        ds = ds.map(operations=type_cast_op, input_columns=column)
    ds = ds.repeat(repeat_count)
    return ds.batch(batch_size, drop_remainder=True)
def create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None, schema_dir=None):
    """Build the evaluation dataset, padding so every device sees full batches.

    Args:
        batchsize (int): Samples per batch.
        device_num (int): Number of devices sharing the data.
        rank (int): Shard id of this device.
        data_dir (str): A tfrecord file, or a directory of tfrecord files.
        schema_dir (str): Schema file path; "" means no schema.

    Returns:
        Dataset: batched, type-cast evaluation dataset.
    """
    if os.path.isdir(data_dir):
        data_files = [os.path.join(data_dir, name)
                      for name in os.listdir(data_dir) if "tfrecord" in name]
    else:
        data_files = [data_dir]
    columns = ["input_ids", "input_mask", "segment_ids", "next_sentence_labels",
               "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"]
    schema = schema_dir if schema_dir != "" else None
    data_set = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                  shard_equal_rows=True)
    ori_dataset_size = data_set.get_dataset_size()
    print("origin eval size: ", ori_dataset_size)
    dtypes = data_set.output_types()
    shapes = data_set.output_shapes()
    # Pad with zero samples until the size divides evenly into per-device batches.
    output_batches = math.ceil(ori_dataset_size / device_num / batchsize)
    padded_num = output_batches * device_num * batchsize - ori_dataset_size
    print("padded num: ", padded_num)
    if padded_num > 0:
        item = {name: np.zeros(shapes[i], dtypes[i]) for i, name in enumerate(columns)}
        padded_ds = de.PaddedDataset([item] * padded_num)
        eval_ds = data_set + padded_ds
        eval_ds.use_sampler(de.DistributedSampler(num_shards=device_num,
                                                  shard_id=rank, shuffle=False))
    else:
        # No padding needed: re-open the source directly with sharding applied.
        eval_ds = de.TFRecordDataset(data_files, schema, columns_list=columns,
                                     num_shards=device_num, shard_id=rank,
                                     shard_equal_rows=True)
    type_cast_op = C.TypeCast(mstype.int32)
    for column in ("masked_lm_ids", "masked_lm_positions", "next_sentence_labels",
                   "segment_ids", "input_mask", "input_ids"):
        eval_ds = eval_ds.map(input_columns=column, operations=type_cast_op)
    eval_ds = eval_ds.batch(batchsize, drop_remainder=True)
    print("eval data size: {}".format(eval_ds.get_dataset_size()))
    print("eval repeat count: {}".format(eval_ds.get_repeat_count()))
    return eval_ds
|
flexible
|
{
"blob_id": "8ae10aada79b0a687732e341d275eb3823ec0e4a",
"index": 9475,
"step-1": "<mask token>\n\n\nclass BucketDatasetGenerator:\n \"\"\"\n Provide data distribution of different gears for the bert network.\n\n Args:\n data_set (Dataset): The training dataset.\n batch_size (Int): The training batchsize.\n bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.\n \"\"\"\n\n def __init__(self, data_set, batch_size, bucket_list=None):\n self.dataset = data_set\n self.batch_size = batch_size\n self.bucket_list = bucket_list\n self.data_bucket = {bucket: [] for bucket in bucket_list}\n bucket_size = len(bucket_list)\n self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,\n size=self.__len__())\n self.random_list = (self.random_list + 2) % bucket_size\n self.random_list = [bucket_list[i] for i in self.random_list]\n self.iter = 0\n\n def __next__(self):\n for item in self.iterator:\n for seq_length in self.bucket_list:\n if np.sum(item[1]) <= seq_length:\n self.data_bucket[seq_length].append(item)\n break\n for key in self.data_bucket.keys():\n data = self.data_bucket[key]\n if len(data) >= self.batch_size and self.random_list[self.iter\n ] == key:\n self.data_bucket[key] = self.data_bucket[key][self.\n batch_size:]\n arr = data[0]\n for i in range(1, self.batch_size):\n current_data = data[i]\n for j in range(len(current_data)):\n arr[j] = np.concatenate((arr[j], current_data[j]))\n res = ()\n for label in arr:\n newlabel = np.reshape(label, (self.batch_size, -1))\n res += newlabel,\n res += np.array(key, np.int32),\n self.iter += 1\n return res\n raise StopIteration\n\n def __iter__(self):\n self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)\n return self\n\n def __len__(self):\n return self.dataset.get_dataset_size() // self.batch_size - 1\n\n\n<mask token>\n\n\ndef create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,\n schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,\n rank_id=0):\n \"\"\"create finetune or evaluation 
dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n if is_training:\n print('data_file_path: ', data_file_path)\n print('rank_id: ', rank_id)\n ds = de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'start_positions', 'end_positions',\n 'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=\n rank_size, shard_id=rank_id)\n ds = ds.map(operations=type_cast_op, input_columns='start_positions')\n ds = ds.map(operations=type_cast_op, input_columns='end_positions')\n else:\n ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=\n do_shuffle, column_names=['input_ids', 'input_mask',\n 'segment_ids', 'unique_ids'])\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='unique_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,\n schema_dir=None):\n \"\"\"create evaluation dataset\"\"\"\n data_files = []\n if os.path.isdir(data_dir):\n files = os.listdir(data_dir)\n for file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n else:\n data_files.append(data_dir)\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin eval size: ', ori_dataset_size)\n dtypes = data_set.output_types()\n shapes = data_set.output_shapes()\n output_batches = math.ceil(ori_dataset_size / device_num / batchsize)\n padded_num = output_batches * device_num * batchsize - ori_dataset_size\n print('padded 
num: ', padded_num)\n if padded_num > 0:\n item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':\n np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[\n 2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],\n dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4\n ]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),\n 'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}\n padded_samples = [item for x in range(padded_num)]\n padded_ds = de.PaddedDataset(padded_samples)\n eval_ds = data_set + padded_ds\n sampler = de.DistributedSampler(num_shards=device_num, shard_id=\n rank, shuffle=False)\n eval_ds.use_sampler(sampler)\n else:\n eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,\n shard_id=rank, shard_equal_rows=True)\n type_cast_op = C.TypeCast(mstype.int32)\n eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)\n eval_ds = eval_ds.batch(batchsize, drop_remainder=True)\n print('eval data size: {}'.format(eval_ds.get_dataset_size()))\n print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))\n return eval_ds\n",
"step-2": "<mask token>\n\n\nclass BucketDatasetGenerator:\n \"\"\"\n Provide data distribution of different gears for the bert network.\n\n Args:\n data_set (Dataset): The training dataset.\n batch_size (Int): The training batchsize.\n bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.\n \"\"\"\n\n def __init__(self, data_set, batch_size, bucket_list=None):\n self.dataset = data_set\n self.batch_size = batch_size\n self.bucket_list = bucket_list\n self.data_bucket = {bucket: [] for bucket in bucket_list}\n bucket_size = len(bucket_list)\n self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,\n size=self.__len__())\n self.random_list = (self.random_list + 2) % bucket_size\n self.random_list = [bucket_list[i] for i in self.random_list]\n self.iter = 0\n\n def __next__(self):\n for item in self.iterator:\n for seq_length in self.bucket_list:\n if np.sum(item[1]) <= seq_length:\n self.data_bucket[seq_length].append(item)\n break\n for key in self.data_bucket.keys():\n data = self.data_bucket[key]\n if len(data) >= self.batch_size and self.random_list[self.iter\n ] == key:\n self.data_bucket[key] = self.data_bucket[key][self.\n batch_size:]\n arr = data[0]\n for i in range(1, self.batch_size):\n current_data = data[i]\n for j in range(len(current_data)):\n arr[j] = np.concatenate((arr[j], current_data[j]))\n res = ()\n for label in arr:\n newlabel = np.reshape(label, (self.batch_size, -1))\n res += newlabel,\n res += np.array(key, np.int32),\n self.iter += 1\n return res\n raise StopIteration\n\n def __iter__(self):\n self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)\n return self\n\n def __len__(self):\n return self.dataset.get_dataset_size() // self.batch_size - 1\n\n\ndef create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir\n =None, schema_dir=None, batch_size=32, bucket_list=None):\n \"\"\"create train dataset\"\"\"\n files = os.listdir(data_dir)\n data_files = []\n for 
file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shuffle=de.Shuffle.FILES if \n do_shuffle == 'true' else False, num_shards=device_num, shard_id=\n rank, shard_equal_rows=True)\n if bucket_list:\n bucket_dataset = BucketDatasetGenerator(data_set, batch_size,\n bucket_list=bucket_list)\n data_set = de.GeneratorDataset(bucket_dataset, column_names=[\n 'input_ids', 'input_mask', 'segment_ids',\n 'next_sentence_labels', 'masked_lm_positions', 'masked_lm_ids',\n 'masked_lm_weights', 'sentence_flag'], shuffle=False)\n else:\n data_set = data_set.batch(batch_size, drop_remainder=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin dataset size: ', ori_dataset_size)\n type_cast_op = C.TypeCast(mstype.int32)\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_positions')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'next_sentence_labels')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'segment_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns='input_mask'\n )\n data_set = data_set.map(operations=type_cast_op, input_columns='input_ids')\n logger.info('data size: {}'.format(data_set.get_dataset_size()))\n logger.info('repeat count: {}'.format(data_set.get_repeat_count()))\n return data_set\n\n\ndef create_classification_dataset(batch_size=1, repeat_count=1,\n assessment_method='accuracy', data_file_path=None, schema_file_path=\n None, do_shuffle=True, rank_size=1, rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n ds = 
de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'label_ids'], shuffle=do_shuffle,\n num_shards=rank_size, shard_id=rank_id)\n if assessment_method == 'Spearman_correlation':\n type_cast_op_float = C.TypeCast(mstype.float32)\n ds = ds.map(operations=type_cast_op_float, input_columns='label_ids')\n else:\n ds = ds.map(operations=type_cast_op, input_columns='label_ids')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\n<mask token>\n\n\ndef generator_squad_train(data_features):\n for feature in data_features:\n yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.start_position, feature.end_position, feature.unique_id, feature.is_impossible\n\n\ndef create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,\n schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,\n rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n if is_training:\n print('data_file_path: ', data_file_path)\n print('rank_id: ', rank_id)\n ds = de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'start_positions', 'end_positions',\n 'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=\n rank_size, shard_id=rank_id)\n ds = ds.map(operations=type_cast_op, input_columns='start_positions')\n ds = ds.map(operations=type_cast_op, input_columns='end_positions')\n else:\n ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=\n do_shuffle, column_names=['input_ids', 'input_mask',\n 'segment_ids', 'unique_ids'])\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = 
ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='unique_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,\n schema_dir=None):\n \"\"\"create evaluation dataset\"\"\"\n data_files = []\n if os.path.isdir(data_dir):\n files = os.listdir(data_dir)\n for file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n else:\n data_files.append(data_dir)\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin eval size: ', ori_dataset_size)\n dtypes = data_set.output_types()\n shapes = data_set.output_shapes()\n output_batches = math.ceil(ori_dataset_size / device_num / batchsize)\n padded_num = output_batches * device_num * batchsize - ori_dataset_size\n print('padded num: ', padded_num)\n if padded_num > 0:\n item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':\n np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[\n 2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],\n dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4\n ]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),\n 'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}\n padded_samples = [item for x in range(padded_num)]\n padded_ds = de.PaddedDataset(padded_samples)\n eval_ds = data_set + padded_ds\n sampler = de.DistributedSampler(num_shards=device_num, shard_id=\n rank, shuffle=False)\n eval_ds.use_sampler(sampler)\n else:\n eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 
'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,\n shard_id=rank, shard_equal_rows=True)\n type_cast_op = C.TypeCast(mstype.int32)\n eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)\n eval_ds = eval_ds.batch(batchsize, drop_remainder=True)\n print('eval data size: {}'.format(eval_ds.get_dataset_size()))\n print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))\n return eval_ds\n",
"step-3": "<mask token>\n\n\nclass BucketDatasetGenerator:\n \"\"\"\n Provide data distribution of different gears for the bert network.\n\n Args:\n data_set (Dataset): The training dataset.\n batch_size (Int): The training batchsize.\n bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.\n \"\"\"\n\n def __init__(self, data_set, batch_size, bucket_list=None):\n self.dataset = data_set\n self.batch_size = batch_size\n self.bucket_list = bucket_list\n self.data_bucket = {bucket: [] for bucket in bucket_list}\n bucket_size = len(bucket_list)\n self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,\n size=self.__len__())\n self.random_list = (self.random_list + 2) % bucket_size\n self.random_list = [bucket_list[i] for i in self.random_list]\n self.iter = 0\n\n def __next__(self):\n for item in self.iterator:\n for seq_length in self.bucket_list:\n if np.sum(item[1]) <= seq_length:\n self.data_bucket[seq_length].append(item)\n break\n for key in self.data_bucket.keys():\n data = self.data_bucket[key]\n if len(data) >= self.batch_size and self.random_list[self.iter\n ] == key:\n self.data_bucket[key] = self.data_bucket[key][self.\n batch_size:]\n arr = data[0]\n for i in range(1, self.batch_size):\n current_data = data[i]\n for j in range(len(current_data)):\n arr[j] = np.concatenate((arr[j], current_data[j]))\n res = ()\n for label in arr:\n newlabel = np.reshape(label, (self.batch_size, -1))\n res += newlabel,\n res += np.array(key, np.int32),\n self.iter += 1\n return res\n raise StopIteration\n\n def __iter__(self):\n self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)\n return self\n\n def __len__(self):\n return self.dataset.get_dataset_size() // self.batch_size - 1\n\n\ndef create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir\n =None, schema_dir=None, batch_size=32, bucket_list=None):\n \"\"\"create train dataset\"\"\"\n files = os.listdir(data_dir)\n data_files = []\n for 
file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shuffle=de.Shuffle.FILES if \n do_shuffle == 'true' else False, num_shards=device_num, shard_id=\n rank, shard_equal_rows=True)\n if bucket_list:\n bucket_dataset = BucketDatasetGenerator(data_set, batch_size,\n bucket_list=bucket_list)\n data_set = de.GeneratorDataset(bucket_dataset, column_names=[\n 'input_ids', 'input_mask', 'segment_ids',\n 'next_sentence_labels', 'masked_lm_positions', 'masked_lm_ids',\n 'masked_lm_weights', 'sentence_flag'], shuffle=False)\n else:\n data_set = data_set.batch(batch_size, drop_remainder=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin dataset size: ', ori_dataset_size)\n type_cast_op = C.TypeCast(mstype.int32)\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_positions')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'next_sentence_labels')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'segment_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns='input_mask'\n )\n data_set = data_set.map(operations=type_cast_op, input_columns='input_ids')\n logger.info('data size: {}'.format(data_set.get_dataset_size()))\n logger.info('repeat count: {}'.format(data_set.get_repeat_count()))\n return data_set\n\n\ndef create_classification_dataset(batch_size=1, repeat_count=1,\n assessment_method='accuracy', data_file_path=None, schema_file_path=\n None, do_shuffle=True, rank_size=1, rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n ds = 
de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'label_ids'], shuffle=do_shuffle,\n num_shards=rank_size, shard_id=rank_id)\n if assessment_method == 'Spearman_correlation':\n type_cast_op_float = C.TypeCast(mstype.float32)\n ds = ds.map(operations=type_cast_op_float, input_columns='label_ids')\n else:\n ds = ds.map(operations=type_cast_op, input_columns='label_ids')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef generator_squad(data_features):\n for feature in data_features:\n yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.unique_id\n\n\ndef generator_squad_train(data_features):\n for feature in data_features:\n yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.start_position, feature.end_position, feature.unique_id, feature.is_impossible\n\n\ndef create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,\n schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,\n rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n if is_training:\n print('data_file_path: ', data_file_path)\n print('rank_id: ', rank_id)\n ds = de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'start_positions', 'end_positions',\n 'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=\n rank_size, shard_id=rank_id)\n ds = ds.map(operations=type_cast_op, input_columns='start_positions')\n ds = ds.map(operations=type_cast_op, input_columns='end_positions')\n else:\n ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=\n do_shuffle, column_names=['input_ids', 'input_mask',\n 'segment_ids', 'unique_ids'])\n ds = 
ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='unique_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,\n schema_dir=None):\n \"\"\"create evaluation dataset\"\"\"\n data_files = []\n if os.path.isdir(data_dir):\n files = os.listdir(data_dir)\n for file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n else:\n data_files.append(data_dir)\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin eval size: ', ori_dataset_size)\n dtypes = data_set.output_types()\n shapes = data_set.output_shapes()\n output_batches = math.ceil(ori_dataset_size / device_num / batchsize)\n padded_num = output_batches * device_num * batchsize - ori_dataset_size\n print('padded num: ', padded_num)\n if padded_num > 0:\n item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':\n np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[\n 2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],\n dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4\n ]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),\n 'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}\n padded_samples = [item for x in range(padded_num)]\n padded_ds = de.PaddedDataset(padded_samples)\n eval_ds = data_set + padded_ds\n sampler = de.DistributedSampler(num_shards=device_num, shard_id=\n rank, shuffle=False)\n eval_ds.use_sampler(sampler)\n else:\n eval_ds = 
de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,\n shard_id=rank, shard_equal_rows=True)\n type_cast_op = C.TypeCast(mstype.int32)\n eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)\n eval_ds = eval_ds.batch(batchsize, drop_remainder=True)\n print('eval data size: {}'.format(eval_ds.get_dataset_size()))\n print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))\n return eval_ds\n",
"step-4": "<mask token>\nimport os\nimport math\nimport numpy as np\nimport mindspore.common.dtype as mstype\nimport mindspore.dataset as de\nimport mindspore.dataset.transforms as C\nfrom mindspore import log as logger\n\n\nclass BucketDatasetGenerator:\n \"\"\"\n Provide data distribution of different gears for the bert network.\n\n Args:\n data_set (Dataset): The training dataset.\n batch_size (Int): The training batchsize.\n bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. Default: None.\n \"\"\"\n\n def __init__(self, data_set, batch_size, bucket_list=None):\n self.dataset = data_set\n self.batch_size = batch_size\n self.bucket_list = bucket_list\n self.data_bucket = {bucket: [] for bucket in bucket_list}\n bucket_size = len(bucket_list)\n self.random_list = np.random.binomial(n=bucket_size - 1, p=0.5,\n size=self.__len__())\n self.random_list = (self.random_list + 2) % bucket_size\n self.random_list = [bucket_list[i] for i in self.random_list]\n self.iter = 0\n\n def __next__(self):\n for item in self.iterator:\n for seq_length in self.bucket_list:\n if np.sum(item[1]) <= seq_length:\n self.data_bucket[seq_length].append(item)\n break\n for key in self.data_bucket.keys():\n data = self.data_bucket[key]\n if len(data) >= self.batch_size and self.random_list[self.iter\n ] == key:\n self.data_bucket[key] = self.data_bucket[key][self.\n batch_size:]\n arr = data[0]\n for i in range(1, self.batch_size):\n current_data = data[i]\n for j in range(len(current_data)):\n arr[j] = np.concatenate((arr[j], current_data[j]))\n res = ()\n for label in arr:\n newlabel = np.reshape(label, (self.batch_size, -1))\n res += newlabel,\n res += np.array(key, np.int32),\n self.iter += 1\n return res\n raise StopIteration\n\n def __iter__(self):\n self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)\n return self\n\n def __len__(self):\n return self.dataset.get_dataset_size() // self.batch_size - 1\n\n\ndef 
create_albert_dataset(device_num=1, rank=0, do_shuffle='true', data_dir\n =None, schema_dir=None, batch_size=32, bucket_list=None):\n \"\"\"create train dataset\"\"\"\n files = os.listdir(data_dir)\n data_files = []\n for file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shuffle=de.Shuffle.FILES if \n do_shuffle == 'true' else False, num_shards=device_num, shard_id=\n rank, shard_equal_rows=True)\n if bucket_list:\n bucket_dataset = BucketDatasetGenerator(data_set, batch_size,\n bucket_list=bucket_list)\n data_set = de.GeneratorDataset(bucket_dataset, column_names=[\n 'input_ids', 'input_mask', 'segment_ids',\n 'next_sentence_labels', 'masked_lm_positions', 'masked_lm_ids',\n 'masked_lm_weights', 'sentence_flag'], shuffle=False)\n else:\n data_set = data_set.batch(batch_size, drop_remainder=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin dataset size: ', ori_dataset_size)\n type_cast_op = C.TypeCast(mstype.int32)\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'masked_lm_positions')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'next_sentence_labels')\n data_set = data_set.map(operations=type_cast_op, input_columns=\n 'segment_ids')\n data_set = data_set.map(operations=type_cast_op, input_columns='input_mask'\n )\n data_set = data_set.map(operations=type_cast_op, input_columns='input_ids')\n logger.info('data size: {}'.format(data_set.get_dataset_size()))\n logger.info('repeat count: {}'.format(data_set.get_repeat_count()))\n return data_set\n\n\ndef create_classification_dataset(batch_size=1, repeat_count=1,\n 
assessment_method='accuracy', data_file_path=None, schema_file_path=\n None, do_shuffle=True, rank_size=1, rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n ds = de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'label_ids'], shuffle=do_shuffle,\n num_shards=rank_size, shard_id=rank_id)\n if assessment_method == 'Spearman_correlation':\n type_cast_op_float = C.TypeCast(mstype.float32)\n ds = ds.map(operations=type_cast_op_float, input_columns='label_ids')\n else:\n ds = ds.map(operations=type_cast_op, input_columns='label_ids')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef generator_squad(data_features):\n for feature in data_features:\n yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.unique_id\n\n\ndef generator_squad_train(data_features):\n for feature in data_features:\n yield feature.input_ids, feature.input_mask, feature.segment_ids, feature.start_position, feature.end_position, feature.unique_id, feature.is_impossible\n\n\ndef create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None,\n schema_file_path=None, is_training=True, do_shuffle=True, rank_size=1,\n rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n if is_training:\n print('data_file_path: ', data_file_path)\n print('rank_id: ', rank_id)\n ds = de.MindDataset([data_file_path], columns_list=['input_ids',\n 'input_mask', 'segment_ids', 'start_positions', 'end_positions',\n 'unique_ids', 'is_impossible'], shuffle=do_shuffle, num_shards=\n rank_size, shard_id=rank_id)\n ds = ds.map(operations=type_cast_op, input_columns='start_positions')\n ds = 
ds.map(operations=type_cast_op, input_columns='end_positions')\n else:\n ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=\n do_shuffle, column_names=['input_ids', 'input_mask',\n 'segment_ids', 'unique_ids'])\n ds = ds.map(operations=type_cast_op, input_columns='input_ids')\n ds = ds.map(operations=type_cast_op, input_columns='input_mask')\n ds = ds.map(operations=type_cast_op, input_columns='segment_ids')\n ds = ds.map(operations=type_cast_op, input_columns='unique_ids')\n ds = ds.repeat(repeat_count)\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None,\n schema_dir=None):\n \"\"\"create evaluation dataset\"\"\"\n data_files = []\n if os.path.isdir(data_dir):\n files = os.listdir(data_dir)\n for file_name in files:\n if 'tfrecord' in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n else:\n data_files.append(data_dir)\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], shard_equal_rows=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin eval size: ', ori_dataset_size)\n dtypes = data_set.output_types()\n shapes = data_set.output_shapes()\n output_batches = math.ceil(ori_dataset_size / device_num / batchsize)\n padded_num = output_batches * device_num * batchsize - ori_dataset_size\n print('padded num: ', padded_num)\n if padded_num > 0:\n item = {'input_ids': np.zeros(shapes[0], dtypes[0]), 'input_mask':\n np.zeros(shapes[1], dtypes[1]), 'segment_ids': np.zeros(shapes[\n 2], dtypes[2]), 'next_sentence_labels': np.zeros(shapes[3],\n dtypes[3]), 'masked_lm_positions': np.zeros(shapes[4], dtypes[4\n ]), 'masked_lm_ids': np.zeros(shapes[5], dtypes[5]),\n 'masked_lm_weights': np.zeros(shapes[6], dtypes[6])}\n padded_samples = [item for x in 
range(padded_num)]\n padded_ds = de.PaddedDataset(padded_samples)\n eval_ds = data_set + padded_ds\n sampler = de.DistributedSampler(num_shards=device_num, shard_id=\n rank, shuffle=False)\n eval_ds.use_sampler(sampler)\n else:\n eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir !=\n '' else None, columns_list=['input_ids', 'input_mask',\n 'segment_ids', 'next_sentence_labels', 'masked_lm_positions',\n 'masked_lm_ids', 'masked_lm_weights'], num_shards=device_num,\n shard_id=rank, shard_equal_rows=True)\n type_cast_op = C.TypeCast(mstype.int32)\n eval_ds = eval_ds.map(input_columns='masked_lm_ids', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='masked_lm_positions', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='next_sentence_labels', operations=\n type_cast_op)\n eval_ds = eval_ds.map(input_columns='segment_ids', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_mask', operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns='input_ids', operations=type_cast_op)\n eval_ds = eval_ds.batch(batchsize, drop_remainder=True)\n print('eval data size: {}'.format(eval_ds.get_dataset_size()))\n print('eval repeat count: {}'.format(eval_ds.get_repeat_count()))\n return eval_ds\n",
"step-5": "# Copyright 2021-2022 Huawei Technologies Co., Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\"\"\"\nData operations, will be used in run_pretrain.py\n\"\"\"\nimport os\nimport math\nimport numpy as np\nimport mindspore.common.dtype as mstype\nimport mindspore.dataset as de\nimport mindspore.dataset.transforms as C\nfrom mindspore import log as logger\n\n\nclass BucketDatasetGenerator:\n \"\"\"\n Provide data distribution of different gears for the bert network.\n\n Args:\n data_set (Dataset): The training dataset.\n batch_size (Int): The training batchsize.\n bucket_list (List): List of different sentence lengths,such as [128, 256, 512]. 
Default: None.\n \"\"\"\n\n def __init__(self, data_set, batch_size, bucket_list=None):\n self.dataset = data_set\n self.batch_size = batch_size\n self.bucket_list = bucket_list\n self.data_bucket = {bucket: [] for bucket in bucket_list}\n bucket_size = len(bucket_list)\n self.random_list = np.random.binomial(n=(bucket_size - 1), p=0.5, size=self.__len__())\n self.random_list = (self.random_list + 2) % bucket_size\n self.random_list = [bucket_list[i] for i in self.random_list]\n self.iter = 0\n\n def __next__(self):\n for item in self.iterator:\n for seq_length in self.bucket_list:\n if np.sum(item[1]) <= seq_length:\n self.data_bucket[seq_length].append(item)\n break\n for key in self.data_bucket.keys():\n data = self.data_bucket[key]\n if len(data) >= self.batch_size and self.random_list[self.iter] == key:\n self.data_bucket[key] = self.data_bucket[key][self.batch_size:]\n arr = data[0]\n for i in range(1, self.batch_size):\n current_data = data[i]\n for j in range(len(current_data)):\n arr[j] = np.concatenate((arr[j], current_data[j]))\n res = ()\n for label in arr:\n newlabel = np.reshape(label, (self.batch_size, -1))\n res += (newlabel,)\n res += (np.array(key, np.int32),)\n self.iter += 1\n return res\n raise StopIteration\n\n def __iter__(self):\n self.iterator = self.dataset.create_tuple_iterator(output_numpy=True)\n return self\n\n def __len__(self):\n return (self.dataset.get_dataset_size() // self.batch_size) - 1\n\n\ndef create_albert_dataset(device_num=1, rank=0, do_shuffle=\"true\", data_dir=None, schema_dir=None, batch_size=32,\n bucket_list=None):\n \"\"\"create train dataset\"\"\"\n # apply repeat operations\n files = os.listdir(data_dir)\n data_files = []\n for file_name in files:\n if \"tfrecord\" in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir != \"\" else None,\n columns_list=[\"input_ids\", \"input_mask\", \"segment_ids\", \"next_sentence_labels\",\n 
\"masked_lm_positions\", \"masked_lm_ids\", \"masked_lm_weights\"],\n shuffle=de.Shuffle.FILES if do_shuffle == \"true\" else False,\n num_shards=device_num, shard_id=rank, shard_equal_rows=True)\n if bucket_list:\n bucket_dataset = BucketDatasetGenerator(data_set, batch_size, bucket_list=bucket_list)\n data_set = de.GeneratorDataset(bucket_dataset,\n column_names=[\"input_ids\", \"input_mask\", \"segment_ids\",\n \"next_sentence_labels\",\n \"masked_lm_positions\", \"masked_lm_ids\", \"masked_lm_weights\",\n \"sentence_flag\"],\n shuffle=False)\n else:\n data_set = data_set.batch(batch_size, drop_remainder=True)\n ori_dataset_size = data_set.get_dataset_size()\n print('origin dataset size: ', ori_dataset_size)\n type_cast_op = C.TypeCast(mstype.int32)\n data_set = data_set.map(operations=type_cast_op, input_columns=\"masked_lm_ids\")\n data_set = data_set.map(operations=type_cast_op, input_columns=\"masked_lm_positions\")\n data_set = data_set.map(operations=type_cast_op, input_columns=\"next_sentence_labels\")\n data_set = data_set.map(operations=type_cast_op, input_columns=\"segment_ids\")\n data_set = data_set.map(operations=type_cast_op, input_columns=\"input_mask\")\n data_set = data_set.map(operations=type_cast_op, input_columns=\"input_ids\")\n # apply batch operations\n logger.info(\"data size: {}\".format(data_set.get_dataset_size()))\n logger.info(\"repeat count: {}\".format(data_set.get_repeat_count()))\n return data_set\n\n\ndef create_classification_dataset(batch_size=1, repeat_count=1, assessment_method=\"accuracy\",\n data_file_path=None, schema_file_path=None, do_shuffle=True,\n rank_size=1, rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n ds = de.MindDataset([data_file_path],\n columns_list=[\"input_ids\", \"input_mask\", \"segment_ids\", \"label_ids\"], shuffle=do_shuffle,\n num_shards=rank_size, shard_id=rank_id)\n if assessment_method == \"Spearman_correlation\":\n type_cast_op_float = 
C.TypeCast(mstype.float32)\n ds = ds.map(operations=type_cast_op_float, input_columns=\"label_ids\")\n else:\n ds = ds.map(operations=type_cast_op, input_columns=\"label_ids\")\n ds = ds.map(operations=type_cast_op, input_columns=\"segment_ids\")\n ds = ds.map(operations=type_cast_op, input_columns=\"input_mask\")\n ds = ds.map(operations=type_cast_op, input_columns=\"input_ids\")\n ds = ds.repeat(repeat_count)\n # apply batch operations\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef generator_squad(data_features):\n for feature in data_features:\n yield (feature.input_ids, feature.input_mask, feature.segment_ids, feature.unique_id)\n\n\ndef generator_squad_train(data_features):\n for feature in data_features:\n yield (feature.input_ids, feature.input_mask, feature.segment_ids, feature.start_position, feature.end_position,\n feature.unique_id, feature.is_impossible)\n\n\ndef create_squad_dataset(batch_size=1, repeat_count=1, data_file_path=None, schema_file_path=None,\n is_training=True, do_shuffle=True, rank_size=1,\n rank_id=0):\n \"\"\"create finetune or evaluation dataset\"\"\"\n type_cast_op = C.TypeCast(mstype.int32)\n if is_training:\n print(\"data_file_path: \", data_file_path)\n print(\"rank_id: \", rank_id)\n ds = de.MindDataset([data_file_path],\n columns_list=[\"input_ids\", \"input_mask\", \"segment_ids\", \"start_positions\",\n \"end_positions\", \"unique_ids\", \"is_impossible\"],\n shuffle=do_shuffle, num_shards=rank_size, shard_id=rank_id)\n ds = ds.map(operations=type_cast_op, input_columns=\"start_positions\")\n ds = ds.map(operations=type_cast_op, input_columns=\"end_positions\")\n else:\n ds = de.GeneratorDataset(generator_squad(data_file_path), shuffle=do_shuffle,\n column_names=[\"input_ids\", \"input_mask\", \"segment_ids\", \"unique_ids\"])\n\n ds = ds.map(operations=type_cast_op, input_columns=\"input_ids\")\n ds = ds.map(operations=type_cast_op, input_columns=\"input_mask\")\n ds = ds.map(operations=type_cast_op, 
input_columns=\"segment_ids\")\n ds = ds.map(operations=type_cast_op, input_columns=\"unique_ids\")\n ds = ds.repeat(repeat_count)\n # apply batch operations\n ds = ds.batch(batch_size, drop_remainder=True)\n return ds\n\n\ndef create_eval_dataset(batchsize=32, device_num=1, rank=0, data_dir=None, schema_dir=None):\n \"\"\"create evaluation dataset\"\"\"\n data_files = []\n if os.path.isdir(data_dir):\n files = os.listdir(data_dir)\n for file_name in files:\n if \"tfrecord\" in file_name:\n data_files.append(os.path.join(data_dir, file_name))\n else:\n data_files.append(data_dir)\n data_set = de.TFRecordDataset(data_files, schema_dir if schema_dir != \"\" else None,\n columns_list=[\"input_ids\", \"input_mask\", \"segment_ids\", \"next_sentence_labels\",\n \"masked_lm_positions\", \"masked_lm_ids\", \"masked_lm_weights\"],\n shard_equal_rows=True)\n ori_dataset_size = data_set.get_dataset_size()\n print(\"origin eval size: \", ori_dataset_size)\n dtypes = data_set.output_types()\n shapes = data_set.output_shapes()\n output_batches = math.ceil(ori_dataset_size / device_num / batchsize)\n padded_num = output_batches * device_num * batchsize - ori_dataset_size\n print(\"padded num: \", padded_num)\n if padded_num > 0:\n item = {\"input_ids\": np.zeros(shapes[0], dtypes[0]),\n \"input_mask\": np.zeros(shapes[1], dtypes[1]),\n \"segment_ids\": np.zeros(shapes[2], dtypes[2]),\n \"next_sentence_labels\": np.zeros(shapes[3], dtypes[3]),\n \"masked_lm_positions\": np.zeros(shapes[4], dtypes[4]),\n \"masked_lm_ids\": np.zeros(shapes[5], dtypes[5]),\n \"masked_lm_weights\": np.zeros(shapes[6], dtypes[6])}\n padded_samples = [item for x in range(padded_num)]\n padded_ds = de.PaddedDataset(padded_samples)\n eval_ds = data_set + padded_ds\n sampler = de.DistributedSampler(num_shards=device_num, shard_id=rank, shuffle=False)\n eval_ds.use_sampler(sampler)\n else:\n eval_ds = de.TFRecordDataset(data_files, schema_dir if schema_dir != \"\" else None,\n columns_list=[\"input_ids\", 
\"input_mask\", \"segment_ids\",\n \"next_sentence_labels\",\n \"masked_lm_positions\", \"masked_lm_ids\", \"masked_lm_weights\"],\n num_shards=device_num, shard_id=rank, shard_equal_rows=True)\n\n type_cast_op = C.TypeCast(mstype.int32)\n eval_ds = eval_ds.map(input_columns=\"masked_lm_ids\", operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns=\"masked_lm_positions\", operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns=\"next_sentence_labels\", operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns=\"segment_ids\", operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns=\"input_mask\", operations=type_cast_op)\n eval_ds = eval_ds.map(input_columns=\"input_ids\", operations=type_cast_op)\n eval_ds = eval_ds.batch(batchsize, drop_remainder=True)\n print(\"eval data size: {}\".format(eval_ds.get_dataset_size()))\n print(\"eval repeat count: {}\".format(eval_ds.get_repeat_count()))\n return eval_ds\n",
"step-ids": [
8,
11,
12,
13,
14
]
}
|
[
8,
11,
12,
13,
14
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def crawler(pid):
    """Fetch the replies of PKU Hole post *pid*.

    Relies on module-level ``url``, ``head``, ``requests`` and ``json``.

    Returns:
        (cids, texts, names): parallel lists of reply ids (int), reply
        texts, and author names. On any failure, prints an error marker
        and returns whatever was collected so far (possibly empty lists).
    """
    print('hole reply start!')
    cids = []
    texts = []
    names = []
    try:
        # NOTE(review): API token is hard-coded; consider moving to config.
        para = {'action': 'getcomment', 'pid': pid, 'token':
            'pnh3dmks5fmo00u0177qplsre44qo4fk'}
        r = requests.get(url, headers=head, params=para)
        data = json.loads(r.text)['data']
        for t in data:
            cids.append(int(t['cid']))
            texts.append(t['text'])
            names.append(t['name'])
        print('hole reply end!')
        return cids, texts, names
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; still best-effort on network/parse errors.
        print('HOLE REPLY ERROR!!!!!!')
        return cids, texts, names
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# HTTP headers mimicking the official PKU Helper iOS client
# (presumably required for the API to accept requests -- TODO confirm).
head = {'Host': 'www.pkuhelper.com', 'Accept': '*/*', 'Accept-Language':
    'zh-Hans-CN;q=1', 'Connection': 'keep-alive', 'Accept-Encoding':
    'gzip, deflate', 'User-Agent':
    'PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)'}
# PKU Hole API endpoint, addressed by raw IP rather than hostname.
url = 'http://162.105.205.61/services/pkuhole/api.php'
def crawler(pid):
    """Download all replies of PKU Hole post *pid*.

    Returns a tuple ``(cids, texts, names)`` of parallel lists; on any
    failure an error marker is printed and the lists collected so far
    (possibly empty) are returned.
    """
    print('hole reply start!')
    cids, texts, names = [], [], []
    try:
        query = {
            'action': 'getcomment',
            'pid': pid,
            'token': 'pnh3dmks5fmo00u0177qplsre44qo4fk',
        }
        response = requests.get(url, headers=head, params=query)
        for comment in json.loads(response.text)['data']:
            cids.append(int(comment['cid']))
            texts.append(comment['text'])
            names.append(comment['name'])
        print('hole reply end!')
        return cids, texts, names
    except:
        print('HOLE REPLY ERROR!!!!!!')
        return cids, texts, names
<|reserved_special_token_1|>
import urllib.request
import http.cookiejar
import requests
import re
import sys
import time
import json
from bs4 import BeautifulSoup
# HTTP headers mimicking the official PKU Helper iOS client
# (presumably required for the API to accept requests -- TODO confirm).
head = {'Host': 'www.pkuhelper.com', 'Accept': '*/*', 'Accept-Language':
    'zh-Hans-CN;q=1', 'Connection': 'keep-alive', 'Accept-Encoding':
    'gzip, deflate', 'User-Agent':
    'PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)'}
# PKU Hole API endpoint, addressed by raw IP rather than hostname.
url = 'http://162.105.205.61/services/pkuhole/api.php'
def crawler(pid):
    """Fetch the replies of PKU Hole post *pid*.

    Uses the module-level ``url`` endpoint and ``head`` headers.

    Returns:
        (cids, texts, names): parallel lists of reply ids (int), reply
        texts, and author names; on failure, whatever was collected so
        far (possibly empty lists) is returned.
    """
    print('hole reply start!')
    cids = []
    texts = []
    names = []
    try:
        # NOTE(review): hard-coded API token.
        para = {'action': 'getcomment', 'pid': pid, 'token':
            'pnh3dmks5fmo00u0177qplsre44qo4fk'}
        r = requests.get(url, headers=head, params=para)
        data = json.loads(r.text)['data']
        for t in data:
            cids.append(int(t['cid']))
            texts.append(t['text'])
            names.append(t['name'])
        print('hole reply end!')
        return cids, texts, names
    except:
        # Bare except: deliberately best-effort, but swallows everything.
        print('HOLE REPLY ERROR!!!!!!')
        return cids, texts, names
<|reserved_special_token_1|>
import urllib.request
import http.cookiejar
import requests
import re
import sys
import time
import json
from bs4 import BeautifulSoup
# HTTP headers mimicking the official PKU Helper iOS client.
head = {
    "Host": "www.pkuhelper.com",
    "Accept": "*/*",
    "Accept-Language": "zh-Hans-CN;q=1",
    "Connection": "keep-alive",
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)"
}
# PKU Hole API endpoint, addressed by raw IP rather than hostname.
url = "http://162.105.205.61/services/pkuhole/api.php"


# PKU Hole reply crawler: fetches reply id, text, and author name for a post.
# (Translated from the original Chinese comment.)
def crawler(pid):
    """Fetch all replies of PKU Hole post *pid*.

    Returns:
        (cids, texts, names): parallel lists of reply ids (int), reply
        texts, and author names; on failure, whatever was collected so
        far (possibly empty lists) is returned.
    """
    print("hole reply start!")
    cids = []
    texts = []
    names = []
    try:
        # NOTE(review): API token is hard-coded; consider moving to config.
        para = {"action": "getcomment", "pid": pid, "token": "pnh3dmks5fmo00u0177qplsre44qo4fk"}
        r = requests.get(url, headers=head, params=para)
        data = json.loads(r.text)["data"]
        for t in data:
            cids.append(int(t["cid"]))
            texts.append(t["text"])
            names.append(t["name"])
        print("hole reply end!")
        return cids, texts, names
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; network/parse errors stay best-effort.
        print("HOLE REPLY ERROR!!!!!!")
        return cids, texts, names
|
flexible
|
{
"blob_id": "a74653f01b62445c74c8121739bd9185ce21c85a",
"index": 2764,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef crawler(pid):\n print('hole reply start!')\n cids = []\n texts = []\n names = []\n try:\n para = {'action': 'getcomment', 'pid': pid, 'token':\n 'pnh3dmks5fmo00u0177qplsre44qo4fk'}\n r = requests.get(url, headers=head, params=para)\n data = json.loads(r.text)['data']\n for t in data:\n cids.append(int(t['cid']))\n texts.append(t['text'])\n names.append(t['name'])\n print('hole reply end!')\n return cids, texts, names\n except:\n print('HOLE REPLY ERROR!!!!!!')\n return cids, texts, names\n",
"step-3": "<mask token>\nhead = {'Host': 'www.pkuhelper.com', 'Accept': '*/*', 'Accept-Language':\n 'zh-Hans-CN;q=1', 'Connection': 'keep-alive', 'Accept-Encoding':\n 'gzip, deflate', 'User-Agent':\n 'PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)'}\nurl = 'http://162.105.205.61/services/pkuhole/api.php'\n\n\ndef crawler(pid):\n print('hole reply start!')\n cids = []\n texts = []\n names = []\n try:\n para = {'action': 'getcomment', 'pid': pid, 'token':\n 'pnh3dmks5fmo00u0177qplsre44qo4fk'}\n r = requests.get(url, headers=head, params=para)\n data = json.loads(r.text)['data']\n for t in data:\n cids.append(int(t['cid']))\n texts.append(t['text'])\n names.append(t['name'])\n print('hole reply end!')\n return cids, texts, names\n except:\n print('HOLE REPLY ERROR!!!!!!')\n return cids, texts, names\n",
"step-4": "import urllib.request\nimport http.cookiejar\nimport requests\nimport re\nimport sys\nimport time\nimport json\nfrom bs4 import BeautifulSoup\nhead = {'Host': 'www.pkuhelper.com', 'Accept': '*/*', 'Accept-Language':\n 'zh-Hans-CN;q=1', 'Connection': 'keep-alive', 'Accept-Encoding':\n 'gzip, deflate', 'User-Agent':\n 'PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)'}\nurl = 'http://162.105.205.61/services/pkuhole/api.php'\n\n\ndef crawler(pid):\n print('hole reply start!')\n cids = []\n texts = []\n names = []\n try:\n para = {'action': 'getcomment', 'pid': pid, 'token':\n 'pnh3dmks5fmo00u0177qplsre44qo4fk'}\n r = requests.get(url, headers=head, params=para)\n data = json.loads(r.text)['data']\n for t in data:\n cids.append(int(t['cid']))\n texts.append(t['text'])\n names.append(t['name'])\n print('hole reply end!')\n return cids, texts, names\n except:\n print('HOLE REPLY ERROR!!!!!!')\n return cids, texts, names\n",
"step-5": "import urllib.request\nimport http.cookiejar\nimport requests\nimport re\nimport sys\nimport time\nimport json\nfrom bs4 import BeautifulSoup\n\nhead = {\n\t\"Host\": \"www.pkuhelper.com\",\n\t\"Accept\": \"*/*\",\n\t\"Accept-Language\": \"zh-Hans-CN;q=1\",\n\t\"Connection\": \"keep-alive\",\n\t\"Accept-Encoding\": \"gzip, deflate\",\n\t\"User-Agent\": \"PKU Helper/2.3.8 (iPhone; iOS 12.1; Scale/3.00)\"\n}\nurl = \"http://162.105.205.61/services/pkuhole/api.php\"\n\n#树洞回复爬虫,爬取树洞回复号、内容、姓名\ndef crawler(pid):\n\tprint(\"hole reply start!\")\n\tcids = []\n\ttexts = []\n\tnames = []\n\n\ttry:\n\t\tpara = {\"action\": \"getcomment\", \"pid\": pid, \"token\": \"pnh3dmks5fmo00u0177qplsre44qo4fk\"}\n\t\tr = requests.get(url, headers=head, params=para)\n\t\tdata = json.loads(r.text)[\"data\"]\n\t\tfor t in data:\n\t\t\tcids.append(int(t[\"cid\"]))\n\t\t\ttexts.append(t[\"text\"])\n\t\t\tnames.append(t[\"name\"])\n\n\t\tprint(\"hole reply end!\")\n\n\t\treturn cids, texts, names\n\texcept:\n\t\tprint(\"HOLE REPLY ERROR!!!!!!\")\n\t\treturn cids, texts, names",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Compare head counts and print the verdict.
# NOTE(review): relies on `people` and `cats` being defined earlier.
if people < cats:
    print('Too many cats')
else:
    print('Not many cats' if people > cats else 'we cannnot decide')
<|reserved_special_token_1|>
# Head counts for the comparison below.
people = 20
cats = 30
dogs = 15

# Compare the cat and people counts and print the verdict.
if cats > people:
    print('Too many cats')
elif cats < people:
    print('Not many cats')
else:
    print('we cannnot decide')
<|reserved_special_token_1|>
# Head counts for the comparison below.
people = 20
cats = 30
dogs = 15

# Compare the number of people with the number of cats and report the result.
if people < cats:
    print("Too many cats")
elif people > cats:
    print("Not many cats")
else:
    # Typo fix in the user-facing message: "cannnot" -> "cannot".
    print("we cannot decide")
|
flexible
|
{
"blob_id": "0465e33d65c2ce47ebffeec38db6908826bf4934",
"index": 299,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif people < cats:\n print('Too many cats')\nelif people > cats:\n print('Not many cats')\nelse:\n print('we cannnot decide')\n",
"step-3": "people = 20\ncats = 30\ndogs = 15\nif people < cats:\n print('Too many cats')\nelif people > cats:\n print('Not many cats')\nelse:\n print('we cannnot decide')\n",
"step-4": "people = 20\ncats = 30\ndogs = 15\n\nif people < cats:\n print(\"Too many cats\")\nelif people > cats:\n print(\"Not many cats\")\nelse:\n print(\"we cannnot decide\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import torch
def pad_sequences_1d(sequences, dtype=torch.long, device=torch.device("cpu"), fixed_length=None):
    """Pad a single-nested list or a sequence of n-d arrays (torch.Tensor or
    np.ndarray) into one (n+1)-d array; only the first dim may vary in length.

    Args:
        sequences: list of n-d tensors/arrays, or list of lists of scalars.
        dtype: np.dtype or torch.dtype; must match the input kind
            (torch dtype for tensors, numpy dtype for arrays).
        device: target device for torch outputs.
        fixed_length: if given, pad every sequence to this length (all
            sequences must be no longer than it).

    Returns:
        padded_seqs: (n+1)-d tensor/array, zero padded.
        mask: 2-d float tensor/array matching the first two dims of
            padded_seqs; 1 marks valid positions, 0 padding.
    """
    # Promote plain lists to tensors/arrays of the requested kind first.
    if isinstance(sequences[0], list):
        if "torch" in str(dtype):
            sequences = [torch.tensor(s, dtype=dtype, device=device) for s in sequences]
        else:
            sequences = [np.asarray(s, dtype=dtype) for s in sequences]

    # All elements share the trailing dims; only dim 0 varies.
    trailing_dims = sequences[0].shape[1:]
    seq_lens = [len(s) for s in sequences]
    pad_len = fixed_length if fixed_length is not None else max(seq_lens)
    batch = len(sequences)

    if isinstance(sequences[0], torch.Tensor):
        assert "torch" in str(dtype), "dtype and input type does not match"
        padded_seqs = torch.zeros((batch, pad_len) + trailing_dims, dtype=dtype, device=device)
        mask = torch.zeros((batch, pad_len), dtype=torch.float32, device=device)
    else:
        assert "numpy" in str(dtype), "dtype and input type does not match"
        padded_seqs = np.zeros((batch, pad_len) + trailing_dims, dtype=dtype)
        mask = np.zeros((batch, pad_len), dtype=np.float32)

    for i, (seq, n) in enumerate(zip(sequences, seq_lens)):
        padded_seqs[i, :n] = seq
        mask[i, :n] = 1
    return padded_seqs, mask
def pad_sequences_2d(sequences, dtype=torch.long):
    """Pad a double-nested list or a sequence of n-d torch tensors into one
    (n+1)-d tensor; only the first two dims may have variable lengths.

    Args:
        sequences: list of (list of n-d tensors/lists) or list of n-d tensors.
        dtype: torch.long for word indices / torch.float (float32) otherwise.

    Returns:
        padded_seqs: (bsz, max_para_len, max_sen_len, *extra) tensor, zero padded.
        mask: (bsz, max_para_len, max_sen_len) float tensor; 1 marks valid
            positions, 0 padding.

    # TODO add support for numpy array
    """
    bsz = len(sequences)
    para_lengths = [len(seq) for seq in sequences]
    max_para_len = max(para_lengths)
    sen_lengths = [[len(word_seq) for word_seq in seq] for seq in sequences]
    max_sen_len = max([max(e) for e in sen_lengths])

    if isinstance(sequences[0], torch.Tensor):
        extra_dims = sequences[0].shape[2:]
    elif isinstance(sequences[0][0], torch.Tensor):
        extra_dims = sequences[0][0].shape[1:]
    else:
        # BUG FIX: torch.Tensor() does not accept a `dtype` keyword and
        # raised TypeError on plain-list input; torch.tensor() does.
        sequences = [[torch.tensor(word_seq, dtype=dtype) for word_seq in seq]
                     for seq in sequences]
        extra_dims = ()

    padded_seqs = torch.zeros((bsz, max_para_len, max_sen_len) + extra_dims, dtype=dtype)
    mask = torch.zeros(bsz, max_para_len, max_sen_len).float()
    for b_i in range(bsz):
        for sen_i, sen_l in enumerate(sen_lengths[b_i]):
            padded_seqs[b_i, sen_i, :sen_l] = sequences[b_i][sen_i]
            mask[b_i, sen_i, :sen_l] = 1
    return padded_seqs, mask
def find_max_triples(st_prob, ed_prob, top_n=5, prob_thd=None, tensor_type="torch"):
    """Per batch element, find the top (k1, k2) pairs with k1 < k2 that
    maximize st_prob[k1] * ed_prob[k2].

    Args:
        st_prob (torch.Tensor or np.ndarray): (N, L) batched start_idx probabilities.
        ed_prob (torch.Tensor or np.ndarray): (N, L) batched end_idx probabilities.
        top_n (int): number of highest-valued pairs to return.
        prob_thd (float or None): optional confidence threshold.
        tensor_type (str): "torch" or "np".

    Returns:
        batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]
    """
    if tensor_type == "torch":
        st_prob = st_prob.data.numpy()
        ed_prob = ed_prob.data.numpy()
    # Pairwise scores: scores[b, m, n] = st_prob[b, m] * ed_prob[b, n]
    scores = np.einsum("bm,bn->bmn", st_prob, ed_prob)
    # Keep only the strict upper triangle so start_idx < end_idx.
    scores = np.triu(scores, k=1)
    return find_max_triples_from_upper_triangle_product(scores, top_n=top_n, prob_thd=prob_thd)
def find_max_triples_from_upper_triangle_product(upper_product, top_n=5, prob_thd=None):
    """Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2].

    Args:
        upper_product (torch.Tensor or np.ndarray): (N, L, L); the lower part
            is zeros, so end_idx > start_idx.
        top_n (int): return topN pairs with highest values.
        prob_thd (float or None): drop triples whose confidence is below this.

    Returns:
        batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]
    """
    batched_sorted_triple = []
    for e in upper_product:
        sorted_triple = top_n_array_2d(e, top_n=top_n)
        if prob_thd is not None:
            # BUG FIX: the confidence is column 2 of the (top_n, 3) result.
            # `sorted_triple[2]` selected ROW 2, yielding a wrong/ill-shaped
            # boolean mask; filter rows by the confidence column instead.
            sorted_triple = sorted_triple[sorted_triple[:, 2] >= prob_thd]
        batched_sorted_triple.append(sorted_triple)
    return batched_sorted_triple
def top_n_array_2d(array_2d, top_n):
    """Return the topN entries of a 2-d array as an (N, 3) array of
    [row_idx, col_idx, value] rows, ordered from highest value down.
    """
    # Descending flat order of all entries, truncated to the top_n best.
    flat_order = np.argsort(array_2d, axis=None)[::-1][:top_n]
    rows, cols = np.unravel_index(flat_order, array_2d.shape)
    values = array_2d[rows, cols]
    return np.stack([rows, cols, values], axis=1)
|
normal
|
{
"blob_id": "788d9fa03c4311a8077d492b1a2b06d1f88826a3",
"index": 5570,
"step-1": "<mask token>\n\n\ndef pad_sequences_1d(sequences, dtype=torch.long, device=torch.device('cpu'\n ), fixed_length=None):\n \"\"\" Pad a single-nested list or a sequence of n-d array (torch.tensor or np.ndarray)\n into a (n+1)-d array, only allow the first dim has variable lengths.\n Args:\n sequences: list(n-d tensor or list)\n dtype: np.dtype or torch.dtype\n device:\n fixed_length: pad all seq in sequences to fixed length. All seq should have a length <= fixed_length.\n return will be of shape [len(sequences), fixed_length, ...]\n Returns:\n padded_seqs: ((n+1)-d tensor) padded with zeros\n mask: (2d tensor) of the same shape as the first two dims of padded_seqs,\n 1 indicate valid, 0 otherwise\n Examples:\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=torch.long)\n >>> test_data_3d = [torch.randn(2,3,4), torch.randn(4,3,4), torch.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=torch.float)\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=np.float32)\n >>> test_data_3d = [np.random.randn(2,3,4), np.random.randn(4,3,4), np.random.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=np.float32)\n \"\"\"\n if isinstance(sequences[0], list):\n if 'torch' in str(dtype):\n sequences = [torch.tensor(s, dtype=dtype, device=device) for s in\n sequences]\n else:\n sequences = [np.asarray(s, dtype=dtype) for s in sequences]\n extra_dims = sequences[0].shape[1:]\n lengths = [len(seq) for seq in sequences]\n if fixed_length is not None:\n max_length = fixed_length\n else:\n max_length = max(lengths)\n if isinstance(sequences[0], torch.Tensor):\n assert 'torch' in str(dtype), 'dtype and input type does not match'\n padded_seqs = torch.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype, device=device)\n mask = torch.zeros((len(sequences), max_length), dtype=torch.\n float32, device=device)\n else:\n assert 'numpy' in str(dtype), 'dtype and input type 
does not match'\n padded_seqs = np.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype)\n mask = np.zeros((len(sequences), max_length), dtype=np.float32)\n for idx, seq in enumerate(sequences):\n end = lengths[idx]\n padded_seqs[idx, :end] = seq\n mask[idx, :end] = 1\n return padded_seqs, mask\n\n\n<mask token>\n\n\ndef find_max_triples_from_upper_triangle_product(upper_product, top_n=5,\n prob_thd=None):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2]\n Args:\n upper_product (torch.Tensor or np.ndarray): (N, L, L), the lower part becomes zeros, end_idx > start_idx\n top_n (int): return topN pairs with highest values\n prob_thd (float or None):\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n batched_sorted_triple = []\n for idx, e in enumerate(upper_product):\n sorted_triple = top_n_array_2d(e, top_n=top_n)\n if prob_thd is not None:\n sorted_triple = sorted_triple[sorted_triple[2] >= prob_thd]\n batched_sorted_triple.append(sorted_triple)\n return batched_sorted_triple\n\n\ndef top_n_array_2d(array_2d, top_n):\n \"\"\" Get topN indices and values of a 2d array, return a tuple of indices and their values,\n ranked by the value\n \"\"\"\n row_indices, column_indices = np.unravel_index(np.argsort(array_2d,\n axis=None), array_2d.shape)\n row_indices = row_indices[::-1][:top_n]\n column_indices = column_indices[::-1][:top_n]\n sorted_values = array_2d[row_indices, column_indices]\n return np.stack([row_indices, column_indices, sorted_values], axis=1)\n",
"step-2": "<mask token>\n\n\ndef pad_sequences_1d(sequences, dtype=torch.long, device=torch.device('cpu'\n ), fixed_length=None):\n \"\"\" Pad a single-nested list or a sequence of n-d array (torch.tensor or np.ndarray)\n into a (n+1)-d array, only allow the first dim has variable lengths.\n Args:\n sequences: list(n-d tensor or list)\n dtype: np.dtype or torch.dtype\n device:\n fixed_length: pad all seq in sequences to fixed length. All seq should have a length <= fixed_length.\n return will be of shape [len(sequences), fixed_length, ...]\n Returns:\n padded_seqs: ((n+1)-d tensor) padded with zeros\n mask: (2d tensor) of the same shape as the first two dims of padded_seqs,\n 1 indicate valid, 0 otherwise\n Examples:\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=torch.long)\n >>> test_data_3d = [torch.randn(2,3,4), torch.randn(4,3,4), torch.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=torch.float)\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=np.float32)\n >>> test_data_3d = [np.random.randn(2,3,4), np.random.randn(4,3,4), np.random.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=np.float32)\n \"\"\"\n if isinstance(sequences[0], list):\n if 'torch' in str(dtype):\n sequences = [torch.tensor(s, dtype=dtype, device=device) for s in\n sequences]\n else:\n sequences = [np.asarray(s, dtype=dtype) for s in sequences]\n extra_dims = sequences[0].shape[1:]\n lengths = [len(seq) for seq in sequences]\n if fixed_length is not None:\n max_length = fixed_length\n else:\n max_length = max(lengths)\n if isinstance(sequences[0], torch.Tensor):\n assert 'torch' in str(dtype), 'dtype and input type does not match'\n padded_seqs = torch.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype, device=device)\n mask = torch.zeros((len(sequences), max_length), dtype=torch.\n float32, device=device)\n else:\n assert 'numpy' in str(dtype), 'dtype and input type 
does not match'\n padded_seqs = np.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype)\n mask = np.zeros((len(sequences), max_length), dtype=np.float32)\n for idx, seq in enumerate(sequences):\n end = lengths[idx]\n padded_seqs[idx, :end] = seq\n mask[idx, :end] = 1\n return padded_seqs, mask\n\n\ndef pad_sequences_2d(sequences, dtype=torch.long):\n \"\"\" Pad a double-nested list or a sequence of n-d torch tensor into a (n+1)-d tensor,\n only allow the first two dims has variable lengths\n Args:\n sequences: list(n-d tensor or list)\n dtype: torch.long for word indices / torch.float (float32) for other cases\n Returns:\n Examples:\n >>> test_data_list = [[[1, 3, 5], [3, 7, 4, 1]], [[98, 34, 11, 89, 90], [22], [34, 56]],]\n >>> pad_sequences_2d(test_data_list, dtype=torch.long) # torch.Size([2, 3, 5])\n >>> test_data_3d = [torch.randn(2,2,4), torch.randn(4,3,4), torch.randn(1,5,4)]\n >>> pad_sequences_2d(test_data_3d, dtype=torch.float) # torch.Size([2, 3, 5])\n >>> test_data_3d2 = [[torch.randn(2,4), ], [torch.randn(3,4), torch.randn(5,4)]]\n >>> pad_sequences_2d(test_data_3d2, dtype=torch.float) # torch.Size([2, 3, 5])\n # TODO add support for numpy array\n \"\"\"\n bsz = len(sequences)\n para_lengths = [len(seq) for seq in sequences]\n max_para_len = max(para_lengths)\n sen_lengths = [[len(word_seq) for word_seq in seq] for seq in sequences]\n max_sen_len = max([max(e) for e in sen_lengths])\n if isinstance(sequences[0], torch.Tensor):\n extra_dims = sequences[0].shape[2:]\n elif isinstance(sequences[0][0], torch.Tensor):\n extra_dims = sequences[0][0].shape[1:]\n else:\n sequences = [[torch.Tensor(word_seq, dtype=dtype) for word_seq in\n seq] for seq in sequences]\n extra_dims = ()\n padded_seqs = torch.zeros((bsz, max_para_len, max_sen_len) + extra_dims,\n dtype=dtype)\n mask = torch.zeros(bsz, max_para_len, max_sen_len).float()\n for b_i in range(bsz):\n for sen_i, sen_l in enumerate(sen_lengths[b_i]):\n padded_seqs[b_i, sen_i, :sen_l] = 
sequences[b_i][sen_i]\n mask[b_i, sen_i, :sen_l] = 1\n return padded_seqs, mask\n\n\n<mask token>\n\n\ndef find_max_triples_from_upper_triangle_product(upper_product, top_n=5,\n prob_thd=None):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2]\n Args:\n upper_product (torch.Tensor or np.ndarray): (N, L, L), the lower part becomes zeros, end_idx > start_idx\n top_n (int): return topN pairs with highest values\n prob_thd (float or None):\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n batched_sorted_triple = []\n for idx, e in enumerate(upper_product):\n sorted_triple = top_n_array_2d(e, top_n=top_n)\n if prob_thd is not None:\n sorted_triple = sorted_triple[sorted_triple[2] >= prob_thd]\n batched_sorted_triple.append(sorted_triple)\n return batched_sorted_triple\n\n\ndef top_n_array_2d(array_2d, top_n):\n \"\"\" Get topN indices and values of a 2d array, return a tuple of indices and their values,\n ranked by the value\n \"\"\"\n row_indices, column_indices = np.unravel_index(np.argsort(array_2d,\n axis=None), array_2d.shape)\n row_indices = row_indices[::-1][:top_n]\n column_indices = column_indices[::-1][:top_n]\n sorted_values = array_2d[row_indices, column_indices]\n return np.stack([row_indices, column_indices, sorted_values], axis=1)\n",
"step-3": "<mask token>\n\n\ndef pad_sequences_1d(sequences, dtype=torch.long, device=torch.device('cpu'\n ), fixed_length=None):\n \"\"\" Pad a single-nested list or a sequence of n-d array (torch.tensor or np.ndarray)\n into a (n+1)-d array, only allow the first dim has variable lengths.\n Args:\n sequences: list(n-d tensor or list)\n dtype: np.dtype or torch.dtype\n device:\n fixed_length: pad all seq in sequences to fixed length. All seq should have a length <= fixed_length.\n return will be of shape [len(sequences), fixed_length, ...]\n Returns:\n padded_seqs: ((n+1)-d tensor) padded with zeros\n mask: (2d tensor) of the same shape as the first two dims of padded_seqs,\n 1 indicate valid, 0 otherwise\n Examples:\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=torch.long)\n >>> test_data_3d = [torch.randn(2,3,4), torch.randn(4,3,4), torch.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=torch.float)\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=np.float32)\n >>> test_data_3d = [np.random.randn(2,3,4), np.random.randn(4,3,4), np.random.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=np.float32)\n \"\"\"\n if isinstance(sequences[0], list):\n if 'torch' in str(dtype):\n sequences = [torch.tensor(s, dtype=dtype, device=device) for s in\n sequences]\n else:\n sequences = [np.asarray(s, dtype=dtype) for s in sequences]\n extra_dims = sequences[0].shape[1:]\n lengths = [len(seq) for seq in sequences]\n if fixed_length is not None:\n max_length = fixed_length\n else:\n max_length = max(lengths)\n if isinstance(sequences[0], torch.Tensor):\n assert 'torch' in str(dtype), 'dtype and input type does not match'\n padded_seqs = torch.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype, device=device)\n mask = torch.zeros((len(sequences), max_length), dtype=torch.\n float32, device=device)\n else:\n assert 'numpy' in str(dtype), 'dtype and input type 
does not match'\n padded_seqs = np.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype)\n mask = np.zeros((len(sequences), max_length), dtype=np.float32)\n for idx, seq in enumerate(sequences):\n end = lengths[idx]\n padded_seqs[idx, :end] = seq\n mask[idx, :end] = 1\n return padded_seqs, mask\n\n\ndef pad_sequences_2d(sequences, dtype=torch.long):\n \"\"\" Pad a double-nested list or a sequence of n-d torch tensor into a (n+1)-d tensor,\n only allow the first two dims has variable lengths\n Args:\n sequences: list(n-d tensor or list)\n dtype: torch.long for word indices / torch.float (float32) for other cases\n Returns:\n Examples:\n >>> test_data_list = [[[1, 3, 5], [3, 7, 4, 1]], [[98, 34, 11, 89, 90], [22], [34, 56]],]\n >>> pad_sequences_2d(test_data_list, dtype=torch.long) # torch.Size([2, 3, 5])\n >>> test_data_3d = [torch.randn(2,2,4), torch.randn(4,3,4), torch.randn(1,5,4)]\n >>> pad_sequences_2d(test_data_3d, dtype=torch.float) # torch.Size([2, 3, 5])\n >>> test_data_3d2 = [[torch.randn(2,4), ], [torch.randn(3,4), torch.randn(5,4)]]\n >>> pad_sequences_2d(test_data_3d2, dtype=torch.float) # torch.Size([2, 3, 5])\n # TODO add support for numpy array\n \"\"\"\n bsz = len(sequences)\n para_lengths = [len(seq) for seq in sequences]\n max_para_len = max(para_lengths)\n sen_lengths = [[len(word_seq) for word_seq in seq] for seq in sequences]\n max_sen_len = max([max(e) for e in sen_lengths])\n if isinstance(sequences[0], torch.Tensor):\n extra_dims = sequences[0].shape[2:]\n elif isinstance(sequences[0][0], torch.Tensor):\n extra_dims = sequences[0][0].shape[1:]\n else:\n sequences = [[torch.Tensor(word_seq, dtype=dtype) for word_seq in\n seq] for seq in sequences]\n extra_dims = ()\n padded_seqs = torch.zeros((bsz, max_para_len, max_sen_len) + extra_dims,\n dtype=dtype)\n mask = torch.zeros(bsz, max_para_len, max_sen_len).float()\n for b_i in range(bsz):\n for sen_i, sen_l in enumerate(sen_lengths[b_i]):\n padded_seqs[b_i, sen_i, :sen_l] = 
sequences[b_i][sen_i]\n mask[b_i, sen_i, :sen_l] = 1\n return padded_seqs, mask\n\n\ndef find_max_triples(st_prob, ed_prob, top_n=5, prob_thd=None, tensor_type=\n 'torch'):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of st_prob[k1] * ed_prob[k2]\n Args:\n st_prob (torch.Tensor or np.ndarray): (N, L) batched start_idx probabilities\n ed_prob (torch.Tensor or np.ndarray): (N, L) batched end_idx probabilities\n top_n (int): return topN pairs with highest values\n prob_thd (float):\n tensor_type: str, np or torch\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n if tensor_type == 'torch':\n st_prob, ed_prob = st_prob.data.numpy(), ed_prob.data.numpy()\n product = np.einsum('bm,bn->bmn', st_prob, ed_prob)\n upper_product = np.triu(product, k=1)\n return find_max_triples_from_upper_triangle_product(upper_product,\n top_n=top_n, prob_thd=prob_thd)\n\n\ndef find_max_triples_from_upper_triangle_product(upper_product, top_n=5,\n prob_thd=None):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2]\n Args:\n upper_product (torch.Tensor or np.ndarray): (N, L, L), the lower part becomes zeros, end_idx > start_idx\n top_n (int): return topN pairs with highest values\n prob_thd (float or None):\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n batched_sorted_triple = []\n for idx, e in enumerate(upper_product):\n sorted_triple = top_n_array_2d(e, top_n=top_n)\n if prob_thd is not None:\n sorted_triple = sorted_triple[sorted_triple[2] >= prob_thd]\n batched_sorted_triple.append(sorted_triple)\n return batched_sorted_triple\n\n\ndef top_n_array_2d(array_2d, top_n):\n \"\"\" Get topN indices and values of a 2d array, return a tuple of indices and their values,\n ranked by the value\n \"\"\"\n row_indices, column_indices = np.unravel_index(np.argsort(array_2d,\n axis=None), array_2d.shape)\n row_indices = row_indices[::-1][:top_n]\n column_indices = 
column_indices[::-1][:top_n]\n sorted_values = array_2d[row_indices, column_indices]\n return np.stack([row_indices, column_indices, sorted_values], axis=1)\n",
"step-4": "import numpy as np\nimport torch\n\n\ndef pad_sequences_1d(sequences, dtype=torch.long, device=torch.device('cpu'\n ), fixed_length=None):\n \"\"\" Pad a single-nested list or a sequence of n-d array (torch.tensor or np.ndarray)\n into a (n+1)-d array, only allow the first dim has variable lengths.\n Args:\n sequences: list(n-d tensor or list)\n dtype: np.dtype or torch.dtype\n device:\n fixed_length: pad all seq in sequences to fixed length. All seq should have a length <= fixed_length.\n return will be of shape [len(sequences), fixed_length, ...]\n Returns:\n padded_seqs: ((n+1)-d tensor) padded with zeros\n mask: (2d tensor) of the same shape as the first two dims of padded_seqs,\n 1 indicate valid, 0 otherwise\n Examples:\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=torch.long)\n >>> test_data_3d = [torch.randn(2,3,4), torch.randn(4,3,4), torch.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=torch.float)\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=np.float32)\n >>> test_data_3d = [np.random.randn(2,3,4), np.random.randn(4,3,4), np.random.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=np.float32)\n \"\"\"\n if isinstance(sequences[0], list):\n if 'torch' in str(dtype):\n sequences = [torch.tensor(s, dtype=dtype, device=device) for s in\n sequences]\n else:\n sequences = [np.asarray(s, dtype=dtype) for s in sequences]\n extra_dims = sequences[0].shape[1:]\n lengths = [len(seq) for seq in sequences]\n if fixed_length is not None:\n max_length = fixed_length\n else:\n max_length = max(lengths)\n if isinstance(sequences[0], torch.Tensor):\n assert 'torch' in str(dtype), 'dtype and input type does not match'\n padded_seqs = torch.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype, device=device)\n mask = torch.zeros((len(sequences), max_length), dtype=torch.\n float32, device=device)\n else:\n assert 'numpy' in str(dtype), 
'dtype and input type does not match'\n padded_seqs = np.zeros((len(sequences), max_length) + extra_dims,\n dtype=dtype)\n mask = np.zeros((len(sequences), max_length), dtype=np.float32)\n for idx, seq in enumerate(sequences):\n end = lengths[idx]\n padded_seqs[idx, :end] = seq\n mask[idx, :end] = 1\n return padded_seqs, mask\n\n\ndef pad_sequences_2d(sequences, dtype=torch.long):\n \"\"\" Pad a double-nested list or a sequence of n-d torch tensor into a (n+1)-d tensor,\n only allow the first two dims has variable lengths\n Args:\n sequences: list(n-d tensor or list)\n dtype: torch.long for word indices / torch.float (float32) for other cases\n Returns:\n Examples:\n >>> test_data_list = [[[1, 3, 5], [3, 7, 4, 1]], [[98, 34, 11, 89, 90], [22], [34, 56]],]\n >>> pad_sequences_2d(test_data_list, dtype=torch.long) # torch.Size([2, 3, 5])\n >>> test_data_3d = [torch.randn(2,2,4), torch.randn(4,3,4), torch.randn(1,5,4)]\n >>> pad_sequences_2d(test_data_3d, dtype=torch.float) # torch.Size([2, 3, 5])\n >>> test_data_3d2 = [[torch.randn(2,4), ], [torch.randn(3,4), torch.randn(5,4)]]\n >>> pad_sequences_2d(test_data_3d2, dtype=torch.float) # torch.Size([2, 3, 5])\n # TODO add support for numpy array\n \"\"\"\n bsz = len(sequences)\n para_lengths = [len(seq) for seq in sequences]\n max_para_len = max(para_lengths)\n sen_lengths = [[len(word_seq) for word_seq in seq] for seq in sequences]\n max_sen_len = max([max(e) for e in sen_lengths])\n if isinstance(sequences[0], torch.Tensor):\n extra_dims = sequences[0].shape[2:]\n elif isinstance(sequences[0][0], torch.Tensor):\n extra_dims = sequences[0][0].shape[1:]\n else:\n sequences = [[torch.Tensor(word_seq, dtype=dtype) for word_seq in\n seq] for seq in sequences]\n extra_dims = ()\n padded_seqs = torch.zeros((bsz, max_para_len, max_sen_len) + extra_dims,\n dtype=dtype)\n mask = torch.zeros(bsz, max_para_len, max_sen_len).float()\n for b_i in range(bsz):\n for sen_i, sen_l in enumerate(sen_lengths[b_i]):\n padded_seqs[b_i, 
sen_i, :sen_l] = sequences[b_i][sen_i]\n mask[b_i, sen_i, :sen_l] = 1\n return padded_seqs, mask\n\n\ndef find_max_triples(st_prob, ed_prob, top_n=5, prob_thd=None, tensor_type=\n 'torch'):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of st_prob[k1] * ed_prob[k2]\n Args:\n st_prob (torch.Tensor or np.ndarray): (N, L) batched start_idx probabilities\n ed_prob (torch.Tensor or np.ndarray): (N, L) batched end_idx probabilities\n top_n (int): return topN pairs with highest values\n prob_thd (float):\n tensor_type: str, np or torch\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n if tensor_type == 'torch':\n st_prob, ed_prob = st_prob.data.numpy(), ed_prob.data.numpy()\n product = np.einsum('bm,bn->bmn', st_prob, ed_prob)\n upper_product = np.triu(product, k=1)\n return find_max_triples_from_upper_triangle_product(upper_product,\n top_n=top_n, prob_thd=prob_thd)\n\n\ndef find_max_triples_from_upper_triangle_product(upper_product, top_n=5,\n prob_thd=None):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2]\n Args:\n upper_product (torch.Tensor or np.ndarray): (N, L, L), the lower part becomes zeros, end_idx > start_idx\n top_n (int): return topN pairs with highest values\n prob_thd (float or None):\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n batched_sorted_triple = []\n for idx, e in enumerate(upper_product):\n sorted_triple = top_n_array_2d(e, top_n=top_n)\n if prob_thd is not None:\n sorted_triple = sorted_triple[sorted_triple[2] >= prob_thd]\n batched_sorted_triple.append(sorted_triple)\n return batched_sorted_triple\n\n\ndef top_n_array_2d(array_2d, top_n):\n \"\"\" Get topN indices and values of a 2d array, return a tuple of indices and their values,\n ranked by the value\n \"\"\"\n row_indices, column_indices = np.unravel_index(np.argsort(array_2d,\n axis=None), array_2d.shape)\n row_indices = row_indices[::-1][:top_n]\n 
column_indices = column_indices[::-1][:top_n]\n sorted_values = array_2d[row_indices, column_indices]\n return np.stack([row_indices, column_indices, sorted_values], axis=1)\n",
"step-5": "import numpy as np\nimport torch\n\n\ndef pad_sequences_1d(sequences, dtype=torch.long, device=torch.device(\"cpu\"), fixed_length=None):\n \"\"\" Pad a single-nested list or a sequence of n-d array (torch.tensor or np.ndarray)\n into a (n+1)-d array, only allow the first dim has variable lengths.\n Args:\n sequences: list(n-d tensor or list)\n dtype: np.dtype or torch.dtype\n device:\n fixed_length: pad all seq in sequences to fixed length. All seq should have a length <= fixed_length.\n return will be of shape [len(sequences), fixed_length, ...]\n Returns:\n padded_seqs: ((n+1)-d tensor) padded with zeros\n mask: (2d tensor) of the same shape as the first two dims of padded_seqs,\n 1 indicate valid, 0 otherwise\n Examples:\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=torch.long)\n >>> test_data_3d = [torch.randn(2,3,4), torch.randn(4,3,4), torch.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=torch.float)\n >>> test_data_list = [[1,2,3], [1,2], [3,4,7,9]]\n >>> pad_sequences_1d(test_data_list, dtype=np.float32)\n >>> test_data_3d = [np.random.randn(2,3,4), np.random.randn(4,3,4), np.random.randn(1,3,4)]\n >>> pad_sequences_1d(test_data_3d, dtype=np.float32)\n \"\"\"\n if isinstance(sequences[0], list):\n if \"torch\" in str(dtype):\n sequences = [torch.tensor(s, dtype=dtype, device=device) for s in sequences]\n else:\n sequences = [np.asarray(s, dtype=dtype) for s in sequences]\n\n extra_dims = sequences[0].shape[1:] # the extra dims should be the same for all elements\n lengths = [len(seq) for seq in sequences]\n if fixed_length is not None:\n max_length = fixed_length\n else:\n max_length = max(lengths)\n if isinstance(sequences[0], torch.Tensor):\n assert \"torch\" in str(dtype), \"dtype and input type does not match\"\n padded_seqs = torch.zeros((len(sequences), max_length) + extra_dims, dtype=dtype, device=device)\n mask = torch.zeros((len(sequences), max_length), dtype=torch.float32, 
device=device)\n else: # np\n assert \"numpy\" in str(dtype), \"dtype and input type does not match\"\n padded_seqs = np.zeros((len(sequences), max_length) + extra_dims, dtype=dtype)\n mask = np.zeros((len(sequences), max_length), dtype=np.float32)\n\n for idx, seq in enumerate(sequences):\n end = lengths[idx]\n padded_seqs[idx, :end] = seq\n mask[idx, :end] = 1\n return padded_seqs, mask # , lengths\n\n\ndef pad_sequences_2d(sequences, dtype=torch.long):\n \"\"\" Pad a double-nested list or a sequence of n-d torch tensor into a (n+1)-d tensor,\n only allow the first two dims has variable lengths\n Args:\n sequences: list(n-d tensor or list)\n dtype: torch.long for word indices / torch.float (float32) for other cases\n Returns:\n Examples:\n >>> test_data_list = [[[1, 3, 5], [3, 7, 4, 1]], [[98, 34, 11, 89, 90], [22], [34, 56]],]\n >>> pad_sequences_2d(test_data_list, dtype=torch.long) # torch.Size([2, 3, 5])\n >>> test_data_3d = [torch.randn(2,2,4), torch.randn(4,3,4), torch.randn(1,5,4)]\n >>> pad_sequences_2d(test_data_3d, dtype=torch.float) # torch.Size([2, 3, 5])\n >>> test_data_3d2 = [[torch.randn(2,4), ], [torch.randn(3,4), torch.randn(5,4)]]\n >>> pad_sequences_2d(test_data_3d2, dtype=torch.float) # torch.Size([2, 3, 5])\n # TODO add support for numpy array\n \"\"\"\n bsz = len(sequences)\n para_lengths = [len(seq) for seq in sequences]\n max_para_len = max(para_lengths)\n sen_lengths = [[len(word_seq) for word_seq in seq] for seq in sequences]\n max_sen_len = max([max(e) for e in sen_lengths])\n\n if isinstance(sequences[0], torch.Tensor):\n extra_dims = sequences[0].shape[2:]\n elif isinstance(sequences[0][0], torch.Tensor):\n extra_dims = sequences[0][0].shape[1:]\n else:\n sequences = [[torch.Tensor(word_seq, dtype=dtype) for word_seq in seq] for seq in sequences]\n extra_dims = ()\n\n padded_seqs = torch.zeros((bsz, max_para_len, max_sen_len) + extra_dims, dtype=dtype)\n mask = torch.zeros(bsz, max_para_len, max_sen_len).float()\n\n for b_i in 
range(bsz):\n for sen_i, sen_l in enumerate(sen_lengths[b_i]):\n padded_seqs[b_i, sen_i, :sen_l] = sequences[b_i][sen_i]\n mask[b_i, sen_i, :sen_l] = 1\n return padded_seqs, mask # , sen_lengths\n\n\ndef find_max_triples(st_prob, ed_prob, top_n=5, prob_thd=None, tensor_type=\"torch\"):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of st_prob[k1] * ed_prob[k2]\n Args:\n st_prob (torch.Tensor or np.ndarray): (N, L) batched start_idx probabilities\n ed_prob (torch.Tensor or np.ndarray): (N, L) batched end_idx probabilities\n top_n (int): return topN pairs with highest values\n prob_thd (float):\n tensor_type: str, np or torch\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n if tensor_type == \"torch\":\n st_prob, ed_prob = st_prob.data.numpy(), ed_prob.data.numpy()\n product = np.einsum(\"bm,bn->bmn\", st_prob, ed_prob)\n # (N, L, L) the lower part becomes zeros, start_idx < ed_idx\n upper_product = np.triu(product, k=1)\n return find_max_triples_from_upper_triangle_product(upper_product, top_n=top_n, prob_thd=prob_thd)\n\n\ndef find_max_triples_from_upper_triangle_product(upper_product, top_n=5, prob_thd=None):\n \"\"\" Find a list of (k1, k2) where k1 < k2 with the maximum values of p1[k1] * p2[k2]\n Args:\n upper_product (torch.Tensor or np.ndarray): (N, L, L), the lower part becomes zeros, end_idx > start_idx\n top_n (int): return topN pairs with highest values\n prob_thd (float or None):\n Returns:\n batched_sorted_triple: N * [(st_idx, ed_idx, confidence), ...]\n \"\"\"\n batched_sorted_triple = []\n for idx, e in enumerate(upper_product):\n sorted_triple = top_n_array_2d(e, top_n=top_n)\n if prob_thd is not None:\n sorted_triple = sorted_triple[sorted_triple[2] >= prob_thd]\n batched_sorted_triple.append(sorted_triple)\n return batched_sorted_triple\n\n\ndef top_n_array_2d(array_2d, top_n):\n \"\"\" Get topN indices and values of a 2d array, return a tuple of indices and their values,\n ranked by 
the value\n \"\"\"\n row_indices, column_indices = np.unravel_index(np.argsort(array_2d, axis=None), array_2d.shape)\n row_indices = row_indices[::-1][:top_n]\n column_indices = column_indices[::-1][:top_n]\n sorted_values = array_2d[row_indices, column_indices]\n return np.stack([row_indices, column_indices, sorted_values], axis=1) # (N, 3)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# i have created this file-hitu
from django.http import HttpResponse
from django.shortcuts import render
from .forms import Sign_Up, Login
from .models import Student
# render is used to create and impot the templates
# render takes first arg = request, 2nd arg = name of the file you want to import, 3rd arg = parameters or variable name
def index(request):
return render(request, 'index.html')
def get_name(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = Sign_Up(request.POST)
# check whether it's valid:
if form.is_valid():
firstName = form.cleaned_data['first_name']
lastName = form.cleaned_data['last_name']
email = form.cleaned_data['email']
password = form.cleaned_data['password']
details = Student(first_name=firstName, last_name=lastName, email=email,
password=password) # these are models variable in red
# process the data in form.cleaned_data as required
details.save() # this is used to save all the details
# ...
# redirect to a new URL:
return render(request, 'login/new_index.html', {'form': form})
# if a GET (or any other method) we'll create a blank form
else:
form = Sign_Up()
return render(request, 'login/new_index.html', {'form': form})
def login_name(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = Login(request.POST)
# check whether it's valid:
if form.is_valid():
email = form.cleaned_data['email']
password = form.cleaned_data['password']
return render(request, 'login/new_index.html', {'form': form})
# if a GET (or any other method) we'll create a blank form
else:
form = Login()
return render(request, 'login/new_index.html', {'form': form})
|
normal
|
{
"blob_id": "cbbb314a3262713f6cb2bb2dd90709d7bf1ca8eb",
"index": 6095,
"step-1": "<mask token>\n\n\ndef login_name(request):\n if request.method == 'POST':\n form = Login(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Login()\n return render(request, 'login/new_index.html', {'form': form})\n",
"step-2": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\n<mask token>\n\n\ndef login_name(request):\n if request.method == 'POST':\n form = Login(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Login()\n return render(request, 'login/new_index.html', {'form': form})\n",
"step-3": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef get_name(request):\n if request.method == 'POST':\n form = Sign_Up(request.POST)\n if form.is_valid():\n firstName = form.cleaned_data['first_name']\n lastName = form.cleaned_data['last_name']\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n details = Student(first_name=firstName, last_name=lastName,\n email=email, password=password)\n details.save()\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Sign_Up()\n return render(request, 'login/new_index.html', {'form': form})\n\n\ndef login_name(request):\n if request.method == 'POST':\n form = Login(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Login()\n return render(request, 'login/new_index.html', {'form': form})\n",
"step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .forms import Sign_Up, Login\nfrom .models import Student\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef get_name(request):\n if request.method == 'POST':\n form = Sign_Up(request.POST)\n if form.is_valid():\n firstName = form.cleaned_data['first_name']\n lastName = form.cleaned_data['last_name']\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n details = Student(first_name=firstName, last_name=lastName,\n email=email, password=password)\n details.save()\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Sign_Up()\n return render(request, 'login/new_index.html', {'form': form})\n\n\ndef login_name(request):\n if request.method == 'POST':\n form = Login(request.POST)\n if form.is_valid():\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n return render(request, 'login/new_index.html', {'form': form})\n else:\n form = Login()\n return render(request, 'login/new_index.html', {'form': form})\n",
"step-5": "# i have created this file-hitu\nfrom django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .forms import Sign_Up, Login\nfrom .models import Student\n\n\n# render is used to create and impot the templates\n# render takes first arg = request, 2nd arg = name of the file you want to import, 3rd arg = parameters or variable name\ndef index(request):\n return render(request, 'index.html')\n\n\ndef get_name(request):\n # if this is a POST request we need to process the form data\n if request.method == 'POST':\n # create a form instance and populate it with data from the request:\n form = Sign_Up(request.POST)\n # check whether it's valid:\n if form.is_valid():\n firstName = form.cleaned_data['first_name']\n lastName = form.cleaned_data['last_name']\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n details = Student(first_name=firstName, last_name=lastName, email=email,\n password=password) # these are models variable in red\n # process the data in form.cleaned_data as required\n details.save() # this is used to save all the details\n # ...\n # redirect to a new URL:\n return render(request, 'login/new_index.html', {'form': form})\n\n # if a GET (or any other method) we'll create a blank form\n else:\n form = Sign_Up()\n\n return render(request, 'login/new_index.html', {'form': form})\n\n\ndef login_name(request):\n # if this is a POST request we need to process the form data\n if request.method == 'POST':\n # create a form instance and populate it with data from the request:\n form = Login(request.POST)\n # check whether it's valid:\n if form.is_valid():\n email = form.cleaned_data['email']\n password = form.cleaned_data['password']\n\n return render(request, 'login/new_index.html', {'form': form})\n\n # if a GET (or any other method) we'll create a blank form\n else:\n form = Login()\n\n return render(request, 'login/new_index.html', {'form': form})\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def hexdigest_sha256(*args):
r = hashlib.sha256()
for arg in args:
r.update(str(arg).encode('utf-8'))
return r.hexdigest()
<|reserved_special_token_0|>
def notify_by_email(template, data, subject, sender, dests, message_id, ref
=None):
if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
data.update({'answering': True})
text_message = render_to_string('conversations/emails/%s.txt' %
template, data)
html_message = render_to_string('conversations/emails/%s.html' %
template, data)
from_email = '{name} <{email}>'.format(name=sender.get_full_name() or
sender.username, email=settings.DEFAULT_FROM_EMAIL)
headers = {'Message-ID': '<%s.%s>' % (message_id, settings.
DEFAULT_FROM_EMAIL)}
if ref:
headers.update({'References': '<%s.%s>' % (ref, settings.
DEFAULT_FROM_EMAIL)})
mails = []
for dest in dests:
if not dest.email:
continue
reply_to = get_reply_addr(message_id, dest)
mails += [(subject, (text_message, html_message), from_email, [dest
.email], reply_to, headers)]
messages = []
for subject, message, from_email, dest_emails, reply_to, headers in mails:
text_message, html_message = message
msg = EmailMultiAlternatives(subject, text_message, from_email,
dest_emails, reply_to=reply_to, headers=headers)
msg.attach_alternative(html_message, 'text/html')
messages += [msg]
with mail.get_connection() as connection:
connection.send_messages(messages)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def hexdigest_sha256(*args):
r = hashlib.sha256()
for arg in args:
r.update(str(arg).encode('utf-8'))
return r.hexdigest()
<|reserved_special_token_0|>
def generate_message_token():
return get_random_string(length=60, allowed_chars=
'abcdefghijklmnopqrstuvwxyz0123456789')
def notify_by_email(template, data, subject, sender, dests, message_id, ref
=None):
if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
data.update({'answering': True})
text_message = render_to_string('conversations/emails/%s.txt' %
template, data)
html_message = render_to_string('conversations/emails/%s.html' %
template, data)
from_email = '{name} <{email}>'.format(name=sender.get_full_name() or
sender.username, email=settings.DEFAULT_FROM_EMAIL)
headers = {'Message-ID': '<%s.%s>' % (message_id, settings.
DEFAULT_FROM_EMAIL)}
if ref:
headers.update({'References': '<%s.%s>' % (ref, settings.
DEFAULT_FROM_EMAIL)})
mails = []
for dest in dests:
if not dest.email:
continue
reply_to = get_reply_addr(message_id, dest)
mails += [(subject, (text_message, html_message), from_email, [dest
.email], reply_to, headers)]
messages = []
for subject, message, from_email, dest_emails, reply_to, headers in mails:
text_message, html_message = message
msg = EmailMultiAlternatives(subject, text_message, from_email,
dest_emails, reply_to=reply_to, headers=headers)
msg.attach_alternative(html_message, 'text/html')
messages += [msg]
with mail.get_connection() as connection:
connection.send_messages(messages)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def hexdigest_sha256(*args):
r = hashlib.sha256()
for arg in args:
r.update(str(arg).encode('utf-8'))
return r.hexdigest()
def get_reply_addr(message_id, dest):
if not hasattr(settings, 'REPLY_EMAIL'):
return []
addr = settings.REPLY_EMAIL
pos = addr.find('@')
name = addr[:pos]
domain = addr[pos:]
key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]
return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,
key, domain)]
def generate_message_token():
return get_random_string(length=60, allowed_chars=
'abcdefghijklmnopqrstuvwxyz0123456789')
def notify_by_email(template, data, subject, sender, dests, message_id, ref
=None):
if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
data.update({'answering': True})
text_message = render_to_string('conversations/emails/%s.txt' %
template, data)
html_message = render_to_string('conversations/emails/%s.html' %
template, data)
from_email = '{name} <{email}>'.format(name=sender.get_full_name() or
sender.username, email=settings.DEFAULT_FROM_EMAIL)
headers = {'Message-ID': '<%s.%s>' % (message_id, settings.
DEFAULT_FROM_EMAIL)}
if ref:
headers.update({'References': '<%s.%s>' % (ref, settings.
DEFAULT_FROM_EMAIL)})
mails = []
for dest in dests:
if not dest.email:
continue
reply_to = get_reply_addr(message_id, dest)
mails += [(subject, (text_message, html_message), from_email, [dest
.email], reply_to, headers)]
messages = []
for subject, message, from_email, dest_emails, reply_to, headers in mails:
text_message, html_message = message
msg = EmailMultiAlternatives(subject, text_message, from_email,
dest_emails, reply_to=reply_to, headers=headers)
msg.attach_alternative(html_message, 'text/html')
messages += [msg]
with mail.get_connection() as connection:
connection.send_messages(messages)
<|reserved_special_token_1|>
import hashlib
from django.conf import settings
from django.core import mail
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.crypto import get_random_string
def hexdigest_sha256(*args):
r = hashlib.sha256()
for arg in args:
r.update(str(arg).encode('utf-8'))
return r.hexdigest()
def get_reply_addr(message_id, dest):
if not hasattr(settings, 'REPLY_EMAIL'):
return []
addr = settings.REPLY_EMAIL
pos = addr.find('@')
name = addr[:pos]
domain = addr[pos:]
key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]
return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,
key, domain)]
def generate_message_token():
return get_random_string(length=60, allowed_chars=
'abcdefghijklmnopqrstuvwxyz0123456789')
def notify_by_email(template, data, subject, sender, dests, message_id, ref
=None):
if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
data.update({'answering': True})
text_message = render_to_string('conversations/emails/%s.txt' %
template, data)
html_message = render_to_string('conversations/emails/%s.html' %
template, data)
from_email = '{name} <{email}>'.format(name=sender.get_full_name() or
sender.username, email=settings.DEFAULT_FROM_EMAIL)
headers = {'Message-ID': '<%s.%s>' % (message_id, settings.
DEFAULT_FROM_EMAIL)}
if ref:
headers.update({'References': '<%s.%s>' % (ref, settings.
DEFAULT_FROM_EMAIL)})
mails = []
for dest in dests:
if not dest.email:
continue
reply_to = get_reply_addr(message_id, dest)
mails += [(subject, (text_message, html_message), from_email, [dest
.email], reply_to, headers)]
messages = []
for subject, message, from_email, dest_emails, reply_to, headers in mails:
text_message, html_message = message
msg = EmailMultiAlternatives(subject, text_message, from_email,
dest_emails, reply_to=reply_to, headers=headers)
msg.attach_alternative(html_message, 'text/html')
messages += [msg]
with mail.get_connection() as connection:
connection.send_messages(messages)
<|reserved_special_token_1|>
import hashlib
from django.conf import settings
from django.core import mail
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.crypto import get_random_string
def hexdigest_sha256(*args):
r = hashlib.sha256()
for arg in args:
r.update(str(arg).encode('utf-8'))
return r.hexdigest()
def get_reply_addr(message_id, dest):
if not hasattr(settings, 'REPLY_EMAIL'):
return []
addr = settings.REPLY_EMAIL
pos = addr.find('@')
name = addr[:pos]
domain = addr[pos:]
key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]
return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id, key, domain)]
def generate_message_token():
return get_random_string(length=60, allowed_chars='abcdefghijklmnopqrstuvwxyz0123456789')
def notify_by_email(template, data, subject, sender, dests, message_id, ref=None):
if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
data.update({'answering': True})
text_message = render_to_string('conversations/emails/%s.txt' % template, data)
html_message = render_to_string('conversations/emails/%s.html' % template, data)
from_email = '{name} <{email}>'.format(
name=sender.get_full_name() or sender.username,
email=settings.DEFAULT_FROM_EMAIL)
# Generating headers
headers = {'Message-ID': "<%s.%s>" % (message_id, settings.DEFAULT_FROM_EMAIL)}
if ref:
# This email reference a previous one
headers.update({
'References': '<%s.%s>' % (ref, settings.DEFAULT_FROM_EMAIL),
})
mails = []
for dest in dests:
if not dest.email:
continue
reply_to = get_reply_addr(message_id, dest)
mails += [(subject, (text_message, html_message), from_email, [dest.email], reply_to, headers)]
messages = []
for subject, message, from_email, dest_emails, reply_to, headers in mails:
text_message, html_message = message
msg = EmailMultiAlternatives(subject, text_message, from_email, dest_emails, reply_to=reply_to,
headers=headers)
msg.attach_alternative(html_message, 'text/html')
messages += [msg]
with mail.get_connection() as connection:
connection.send_messages(messages)
|
flexible
|
{
"blob_id": "a35004e2b306ba1a8649ce66a1612f63a2b6bf39",
"index": 2673,
"step-1": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\n<mask token>\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-2": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\n<mask token>\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-3": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,\n key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-4": "import hashlib\nfrom django.conf import settings\nfrom django.core import mail\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template.loader import render_to_string\nfrom django.utils.crypto import get_random_string\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,\n key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, 
headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-5": "import hashlib\n\nfrom django.conf import settings\nfrom django.core import mail\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template.loader import render_to_string\nfrom django.utils.crypto import get_random_string\n\n\ndef hexdigest_sha256(*args):\n\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id, key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars='abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref=None):\n\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n\n text_message = render_to_string('conversations/emails/%s.txt' % template, data)\n html_message = render_to_string('conversations/emails/%s.html' % template, data)\n\n from_email = '{name} <{email}>'.format(\n name=sender.get_full_name() or sender.username,\n email=settings.DEFAULT_FROM_EMAIL)\n\n # Generating headers\n headers = {'Message-ID': \"<%s.%s>\" % (message_id, settings.DEFAULT_FROM_EMAIL)}\n if ref:\n # This email reference a previous one\n headers.update({\n 'References': '<%s.%s>' % (ref, settings.DEFAULT_FROM_EMAIL),\n })\n\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n\n reply_to = get_reply_addr(message_id, dest)\n\n mails += [(subject, (text_message, html_message), from_email, [dest.email], reply_to, headers)]\n\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = 
EmailMultiAlternatives(subject, text_message, from_email, dest_emails, reply_to=reply_to,\n headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import logging.config
import os
import sys
import yaml
sys.path.append(os.path.join(os.path.abspath('.'), '..', '..'))
def setup_logging(default_path='common/config/logging.yaml'):
path = default_path
if os.path.exists(path):
with open(path, 'rt') as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)
|
normal
|
{
"blob_id": "6657f0b51bc021e6b5867bbdd1a520c2b0cb92b3",
"index": 2367,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n",
"step-3": "<mask token>\nsys.path.append(os.path.join(os.path.abspath('.'), '..', '..'))\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n",
"step-4": "import logging.config\nimport os\nimport sys\nimport yaml\nsys.path.append(os.path.join(os.path.abspath('.'), '..', '..'))\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pwm1.freq(60)
pwm1.duty(0)
<|reserved_special_token_0|>
for i in range(10):
while step < 1000:
pwm1.duty(step)
time.sleep_ms(500)
step += 100
while step > 0:
pwm1.duty(step)
time.sleep_ms(500)
step -= 200
pwm1.deinit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
PinNum = 5
pwm1 = PWM(Pin(PinNum))
pwm1.freq(60)
pwm1.duty(0)
step = 100
for i in range(10):
while step < 1000:
pwm1.duty(step)
time.sleep_ms(500)
step += 100
while step > 0:
pwm1.duty(step)
time.sleep_ms(500)
step -= 200
pwm1.deinit()
<|reserved_special_token_1|>
from machine import Pin, PWM
import time
PinNum = 5
pwm1 = PWM(Pin(PinNum))
pwm1.freq(60)
pwm1.duty(0)
step = 100
for i in range(10):
while step < 1000:
pwm1.duty(step)
time.sleep_ms(500)
step += 100
while step > 0:
pwm1.duty(step)
time.sleep_ms(500)
step -= 200
pwm1.deinit()
<|reserved_special_token_1|>
from machine import Pin, PWM
import time
# externe LED zit op pin D1 (GPIO5)
PinNum = 5
# pwm initialisatie
pwm1 = PWM(Pin(PinNum))
pwm1.freq(60)
pwm1.duty(0)
step = 100
for i in range(10):
# oplichten
while step < 1000:
pwm1.duty(step)
time.sleep_ms(500)
step+=100
# uitdoven
while step > 0:
pwm1.duty(step)
time.sleep_ms(500)
step-=200
# pwm resetten
pwm1.deinit()
|
flexible
|
{
"blob_id": "9f31694d80f2dcc50a76b32aa296871694d3644d",
"index": 7838,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npwm1.freq(60)\npwm1.duty(0)\n<mask token>\nfor i in range(10):\n while step < 1000:\n pwm1.duty(step)\n time.sleep_ms(500)\n step += 100\n while step > 0:\n pwm1.duty(step)\n time.sleep_ms(500)\n step -= 200\npwm1.deinit()\n",
"step-3": "<mask token>\nPinNum = 5\npwm1 = PWM(Pin(PinNum))\npwm1.freq(60)\npwm1.duty(0)\nstep = 100\nfor i in range(10):\n while step < 1000:\n pwm1.duty(step)\n time.sleep_ms(500)\n step += 100\n while step > 0:\n pwm1.duty(step)\n time.sleep_ms(500)\n step -= 200\npwm1.deinit()\n",
"step-4": "from machine import Pin, PWM\nimport time\nPinNum = 5\npwm1 = PWM(Pin(PinNum))\npwm1.freq(60)\npwm1.duty(0)\nstep = 100\nfor i in range(10):\n while step < 1000:\n pwm1.duty(step)\n time.sleep_ms(500)\n step += 100\n while step > 0:\n pwm1.duty(step)\n time.sleep_ms(500)\n step -= 200\npwm1.deinit()\n",
"step-5": "from machine import Pin, PWM\nimport time\n\n# externe LED zit op pin D1 (GPIO5)\nPinNum = 5\n\n# pwm initialisatie\npwm1 = PWM(Pin(PinNum))\npwm1.freq(60)\npwm1.duty(0)\n\nstep = 100\nfor i in range(10):\n # oplichten\n while step < 1000:\n pwm1.duty(step)\n time.sleep_ms(500)\n step+=100\n # uitdoven \n while step > 0:\n pwm1.duty(step)\n time.sleep_ms(500)\n step-=200\n\n# pwm resetten \npwm1.deinit()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MyDaemon(DaemonBase):
<|reserved_special_token_0|>
def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',
stdout='/dev/null', stderr='/dev/null'):
self.api_url = api_url
self.monitor_port = monitor_port
super().__init__(pidfile, stdin, stdout, stderr)
@staticmethod
def get_host_addrs(family):
for nic, snics in psutil.net_if_addrs().items():
for snic in snics:
if snic.family == family:
yield nic, snic.address
<|reserved_special_token_0|>
def tasks(self):
pnic_before = get_net_io_counters()
while 1:
time.sleep(60)
pnic_after = get_net_io_counters()
send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self
.get_host_addrs(socket.AF_INET) if n[0] == self.
monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':
get_mem_usage(), 'disk_perf': get_disk_usage(),
'disk_speed': get_disk_speed(), 'net_perf':
get_network_traffic(pnic_before, pnic_after)}
self.do_post(send_datas)
pnic_before = get_net_io_counters()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyDaemon(DaemonBase):
<|reserved_special_token_0|>
def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',
stdout='/dev/null', stderr='/dev/null'):
self.api_url = api_url
self.monitor_port = monitor_port
super().__init__(pidfile, stdin, stdout, stderr)
@staticmethod
def get_host_addrs(family):
for nic, snics in psutil.net_if_addrs().items():
for snic in snics:
if snic.family == family:
yield nic, snic.address
<|reserved_special_token_0|>
def tasks(self):
pnic_before = get_net_io_counters()
while 1:
time.sleep(60)
pnic_after = get_net_io_counters()
send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self
.get_host_addrs(socket.AF_INET) if n[0] == self.
monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':
get_mem_usage(), 'disk_perf': get_disk_usage(),
'disk_speed': get_disk_speed(), 'net_perf':
get_network_traffic(pnic_before, pnic_after)}
self.do_post(send_datas)
pnic_before = get_net_io_counters()
def run(self):
sys.stdout.write('Daemon started with pid %s\n' % os.getpid())
_p = Process(target=self.tasks, daemon=True)
_p.start()
p = psutil.Process(_p.pid)
while 1:
current_cpu = p.cpu_percent()
current_mem = p.memory_percent()
if p.is_running() and (current_mem > 1 or current_cpu > 1):
p.terminate()
p.wait()
with open('/tmp/test_daemon.log', 'a') as f:
f.write('CPU: %s - MEM: %s - at: %s\n' % (current_cpu,
current_mem, time.ctime()))
_p = Process(target=self.tasks, daemon=True)
_p.start()
sys.stdout.write('The subprocess restart pid %s\n' % _p.pid)
p = psutil.Process(_p.pid)
time.sleep(60)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyDaemon(DaemonBase):
"""Real Daemon class"""
def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',
stdout='/dev/null', stderr='/dev/null'):
self.api_url = api_url
self.monitor_port = monitor_port
super().__init__(pidfile, stdin, stdout, stderr)
@staticmethod
def get_host_addrs(family):
for nic, snics in psutil.net_if_addrs().items():
for snic in snics:
if snic.family == family:
yield nic, snic.address
def do_post(self, params):
data = json.dumps(params)
data = parse.urlencode({'data': data})
req = request.Request(self.api_url, data=data.encode('utf-8'))
try:
with request.urlopen(req, timeout=3) as resp:
return resp.status
except Exception as e:
with open('/tmp/test_daemon.err', 'a') as f:
print('%s at: %s' % (e, time.ctime()), file=f)
def tasks(self):
pnic_before = get_net_io_counters()
while 1:
time.sleep(60)
pnic_after = get_net_io_counters()
send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self
.get_host_addrs(socket.AF_INET) if n[0] == self.
monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':
get_mem_usage(), 'disk_perf': get_disk_usage(),
'disk_speed': get_disk_speed(), 'net_perf':
get_network_traffic(pnic_before, pnic_after)}
self.do_post(send_datas)
pnic_before = get_net_io_counters()
def run(self):
sys.stdout.write('Daemon started with pid %s\n' % os.getpid())
_p = Process(target=self.tasks, daemon=True)
_p.start()
p = psutil.Process(_p.pid)
while 1:
current_cpu = p.cpu_percent()
current_mem = p.memory_percent()
if p.is_running() and (current_mem > 1 or current_cpu > 1):
p.terminate()
p.wait()
with open('/tmp/test_daemon.log', 'a') as f:
f.write('CPU: %s - MEM: %s - at: %s\n' % (current_cpu,
current_mem, time.ctime()))
_p = Process(target=self.tasks, daemon=True)
_p.start()
sys.stdout.write('The subprocess restart pid %s\n' % _p.pid)
p = psutil.Process(_p.pid)
time.sleep(60)
<|reserved_special_token_1|>
import os
import sys
import time
import json
import socket
from urllib import request, parse
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Process
import psutil
from daemon import DaemonBase
from host_performence import *
class MyDaemon(DaemonBase):
"""Real Daemon class"""
def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',
stdout='/dev/null', stderr='/dev/null'):
self.api_url = api_url
self.monitor_port = monitor_port
super().__init__(pidfile, stdin, stdout, stderr)
@staticmethod
def get_host_addrs(family):
for nic, snics in psutil.net_if_addrs().items():
for snic in snics:
if snic.family == family:
yield nic, snic.address
def do_post(self, params):
data = json.dumps(params)
data = parse.urlencode({'data': data})
req = request.Request(self.api_url, data=data.encode('utf-8'))
try:
with request.urlopen(req, timeout=3) as resp:
return resp.status
except Exception as e:
with open('/tmp/test_daemon.err', 'a') as f:
print('%s at: %s' % (e, time.ctime()), file=f)
def tasks(self):
pnic_before = get_net_io_counters()
while 1:
time.sleep(60)
pnic_after = get_net_io_counters()
send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self
.get_host_addrs(socket.AF_INET) if n[0] == self.
monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':
get_mem_usage(), 'disk_perf': get_disk_usage(),
'disk_speed': get_disk_speed(), 'net_perf':
get_network_traffic(pnic_before, pnic_after)}
self.do_post(send_datas)
pnic_before = get_net_io_counters()
def run(self):
sys.stdout.write('Daemon started with pid %s\n' % os.getpid())
_p = Process(target=self.tasks, daemon=True)
_p.start()
p = psutil.Process(_p.pid)
while 1:
current_cpu = p.cpu_percent()
current_mem = p.memory_percent()
if p.is_running() and (current_mem > 1 or current_cpu > 1):
p.terminate()
p.wait()
with open('/tmp/test_daemon.log', 'a') as f:
f.write('CPU: %s - MEM: %s - at: %s\n' % (current_cpu,
current_mem, time.ctime()))
_p = Process(target=self.tasks, daemon=True)
_p.start()
sys.stdout.write('The subprocess restart pid %s\n' % _p.pid)
p = psutil.Process(_p.pid)
time.sleep(60)
<|reserved_special_token_1|>
import os
import sys
import time
import json
import socket
from urllib import request, parse
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Process
import psutil
from daemon import DaemonBase
from host_performence import *
class MyDaemon(DaemonBase):
"""Real Daemon class"""
def __init__(self,
api_url,
monitor_port,
pidfile,
stdin='/dev/null',
stdout='/dev/null',
stderr='/dev/null'):
self.api_url = api_url
self.monitor_port = monitor_port
super().__init__(pidfile, stdin, stdout, stderr)
@staticmethod
def get_host_addrs(family):
for nic, snics in psutil.net_if_addrs().items():
for snic in snics:
if snic.family == family:
yield (nic, snic.address)
def do_post(self, params):
data = json.dumps(params)
# Json Post
# headers = {'Content-Type': 'application/json'}
# req = request.Request(self.api_url, data=data.encode('utf-8'), headers=headers)
# Form Post eg. ?data=params&code=1
data = parse.urlencode({'data': data})
req = request.Request(self.api_url, data=data.encode('utf-8'))
try:
with request.urlopen(req, timeout=3) as resp:
# print(resp.read().decode('utf-8'))
return resp.status
except Exception as e:
with open('/tmp/test_daemon.err', 'a') as f:
print('%s at: %s' % (e, time.ctime()), file=f)
def tasks(self):
pnic_before = get_net_io_counters()
while 1:
time.sleep(60)
pnic_after = get_net_io_counters()
send_datas = {
'type': 8,
'ip_addr': ''.join([
n[1] for n in self.get_host_addrs(socket.AF_INET)
if n[0] == self.monitor_port
]),
'cpu_perf': get_cpu_percent(),
'mem_perf': get_mem_usage(),
'disk_perf': get_disk_usage(),
'disk_speed': get_disk_speed(),
'net_perf': get_network_traffic(pnic_before, pnic_after)
}
self.do_post(send_datas)
pnic_before = get_net_io_counters()
def run(self):
sys.stdout.write('Daemon started with pid %s\n' % os.getpid())
_p = Process(target=self.tasks, daemon=True)
_p.start()
p = psutil.Process(_p.pid)
while 1:
current_cpu = p.cpu_percent()
current_mem = p.memory_percent()
# print(current_cpu, current_mem, time.ctime(), p.pid, p.ppid())
if p.is_running() and (current_mem > 1 or current_cpu > 1):
p.terminate()
p.wait()
with open('/tmp/test_daemon.log', 'a') as f:
f.write('CPU: %s - MEM: %s - at: %s\n' %
(current_cpu, current_mem, time.ctime()))
_p = Process(target=self.tasks, daemon=True)
_p.start()
sys.stdout.write('The subprocess restart pid %s\n' % _p.pid)
p = psutil.Process(_p.pid)
time.sleep(60)
|
flexible
|
{
"blob_id": "6e253747182716f84aa6326aafe15ff82be17378",
"index": 1351,
"step-1": "<mask token>\n\n\nclass MyDaemon(DaemonBase):\n <mask token>\n\n def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',\n stdout='/dev/null', stderr='/dev/null'):\n self.api_url = api_url\n self.monitor_port = monitor_port\n super().__init__(pidfile, stdin, stdout, stderr)\n\n @staticmethod\n def get_host_addrs(family):\n for nic, snics in psutil.net_if_addrs().items():\n for snic in snics:\n if snic.family == family:\n yield nic, snic.address\n <mask token>\n\n def tasks(self):\n pnic_before = get_net_io_counters()\n while 1:\n time.sleep(60)\n pnic_after = get_net_io_counters()\n send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self\n .get_host_addrs(socket.AF_INET) if n[0] == self.\n monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':\n get_mem_usage(), 'disk_perf': get_disk_usage(),\n 'disk_speed': get_disk_speed(), 'net_perf':\n get_network_traffic(pnic_before, pnic_after)}\n self.do_post(send_datas)\n pnic_before = get_net_io_counters()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MyDaemon(DaemonBase):\n <mask token>\n\n def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',\n stdout='/dev/null', stderr='/dev/null'):\n self.api_url = api_url\n self.monitor_port = monitor_port\n super().__init__(pidfile, stdin, stdout, stderr)\n\n @staticmethod\n def get_host_addrs(family):\n for nic, snics in psutil.net_if_addrs().items():\n for snic in snics:\n if snic.family == family:\n yield nic, snic.address\n <mask token>\n\n def tasks(self):\n pnic_before = get_net_io_counters()\n while 1:\n time.sleep(60)\n pnic_after = get_net_io_counters()\n send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self\n .get_host_addrs(socket.AF_INET) if n[0] == self.\n monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':\n get_mem_usage(), 'disk_perf': get_disk_usage(),\n 'disk_speed': get_disk_speed(), 'net_perf':\n get_network_traffic(pnic_before, pnic_after)}\n self.do_post(send_datas)\n pnic_before = get_net_io_counters()\n\n def run(self):\n sys.stdout.write('Daemon started with pid %s\\n' % os.getpid())\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n p = psutil.Process(_p.pid)\n while 1:\n current_cpu = p.cpu_percent()\n current_mem = p.memory_percent()\n if p.is_running() and (current_mem > 1 or current_cpu > 1):\n p.terminate()\n p.wait()\n with open('/tmp/test_daemon.log', 'a') as f:\n f.write('CPU: %s - MEM: %s - at: %s\\n' % (current_cpu,\n current_mem, time.ctime()))\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n sys.stdout.write('The subprocess restart pid %s\\n' % _p.pid)\n p = psutil.Process(_p.pid)\n time.sleep(60)\n",
"step-3": "<mask token>\n\n\nclass MyDaemon(DaemonBase):\n \"\"\"Real Daemon class\"\"\"\n\n def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',\n stdout='/dev/null', stderr='/dev/null'):\n self.api_url = api_url\n self.monitor_port = monitor_port\n super().__init__(pidfile, stdin, stdout, stderr)\n\n @staticmethod\n def get_host_addrs(family):\n for nic, snics in psutil.net_if_addrs().items():\n for snic in snics:\n if snic.family == family:\n yield nic, snic.address\n\n def do_post(self, params):\n data = json.dumps(params)\n data = parse.urlencode({'data': data})\n req = request.Request(self.api_url, data=data.encode('utf-8'))\n try:\n with request.urlopen(req, timeout=3) as resp:\n return resp.status\n except Exception as e:\n with open('/tmp/test_daemon.err', 'a') as f:\n print('%s at: %s' % (e, time.ctime()), file=f)\n\n def tasks(self):\n pnic_before = get_net_io_counters()\n while 1:\n time.sleep(60)\n pnic_after = get_net_io_counters()\n send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self\n .get_host_addrs(socket.AF_INET) if n[0] == self.\n monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':\n get_mem_usage(), 'disk_perf': get_disk_usage(),\n 'disk_speed': get_disk_speed(), 'net_perf':\n get_network_traffic(pnic_before, pnic_after)}\n self.do_post(send_datas)\n pnic_before = get_net_io_counters()\n\n def run(self):\n sys.stdout.write('Daemon started with pid %s\\n' % os.getpid())\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n p = psutil.Process(_p.pid)\n while 1:\n current_cpu = p.cpu_percent()\n current_mem = p.memory_percent()\n if p.is_running() and (current_mem > 1 or current_cpu > 1):\n p.terminate()\n p.wait()\n with open('/tmp/test_daemon.log', 'a') as f:\n f.write('CPU: %s - MEM: %s - at: %s\\n' % (current_cpu,\n current_mem, time.ctime()))\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n sys.stdout.write('The subprocess restart pid %s\\n' % _p.pid)\n p = psutil.Process(_p.pid)\n 
time.sleep(60)\n",
"step-4": "import os\nimport sys\nimport time\nimport json\nimport socket\nfrom urllib import request, parse\nfrom concurrent.futures import ThreadPoolExecutor\nfrom multiprocessing import Process\nimport psutil\nfrom daemon import DaemonBase\nfrom host_performence import *\n\n\nclass MyDaemon(DaemonBase):\n \"\"\"Real Daemon class\"\"\"\n\n def __init__(self, api_url, monitor_port, pidfile, stdin='/dev/null',\n stdout='/dev/null', stderr='/dev/null'):\n self.api_url = api_url\n self.monitor_port = monitor_port\n super().__init__(pidfile, stdin, stdout, stderr)\n\n @staticmethod\n def get_host_addrs(family):\n for nic, snics in psutil.net_if_addrs().items():\n for snic in snics:\n if snic.family == family:\n yield nic, snic.address\n\n def do_post(self, params):\n data = json.dumps(params)\n data = parse.urlencode({'data': data})\n req = request.Request(self.api_url, data=data.encode('utf-8'))\n try:\n with request.urlopen(req, timeout=3) as resp:\n return resp.status\n except Exception as e:\n with open('/tmp/test_daemon.err', 'a') as f:\n print('%s at: %s' % (e, time.ctime()), file=f)\n\n def tasks(self):\n pnic_before = get_net_io_counters()\n while 1:\n time.sleep(60)\n pnic_after = get_net_io_counters()\n send_datas = {'type': 8, 'ip_addr': ''.join([n[1] for n in self\n .get_host_addrs(socket.AF_INET) if n[0] == self.\n monitor_port]), 'cpu_perf': get_cpu_percent(), 'mem_perf':\n get_mem_usage(), 'disk_perf': get_disk_usage(),\n 'disk_speed': get_disk_speed(), 'net_perf':\n get_network_traffic(pnic_before, pnic_after)}\n self.do_post(send_datas)\n pnic_before = get_net_io_counters()\n\n def run(self):\n sys.stdout.write('Daemon started with pid %s\\n' % os.getpid())\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n p = psutil.Process(_p.pid)\n while 1:\n current_cpu = p.cpu_percent()\n current_mem = p.memory_percent()\n if p.is_running() and (current_mem > 1 or current_cpu > 1):\n p.terminate()\n p.wait()\n with open('/tmp/test_daemon.log', 'a') as 
f:\n f.write('CPU: %s - MEM: %s - at: %s\\n' % (current_cpu,\n current_mem, time.ctime()))\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n sys.stdout.write('The subprocess restart pid %s\\n' % _p.pid)\n p = psutil.Process(_p.pid)\n time.sleep(60)\n",
"step-5": "import os\nimport sys\nimport time\nimport json\nimport socket\nfrom urllib import request, parse\nfrom concurrent.futures import ThreadPoolExecutor\nfrom multiprocessing import Process\n\nimport psutil\n\nfrom daemon import DaemonBase\nfrom host_performence import *\n\n\nclass MyDaemon(DaemonBase):\n \"\"\"Real Daemon class\"\"\"\n\n def __init__(self,\n api_url,\n monitor_port,\n pidfile,\n stdin='/dev/null',\n stdout='/dev/null',\n stderr='/dev/null'):\n self.api_url = api_url\n self.monitor_port = monitor_port\n super().__init__(pidfile, stdin, stdout, stderr)\n\n @staticmethod\n def get_host_addrs(family):\n for nic, snics in psutil.net_if_addrs().items():\n for snic in snics:\n if snic.family == family:\n yield (nic, snic.address)\n\n def do_post(self, params):\n data = json.dumps(params)\n # Json Post\n # headers = {'Content-Type': 'application/json'}\n # req = request.Request(self.api_url, data=data.encode('utf-8'), headers=headers) \n # Form Post eg. ?data=params&code=1\n data = parse.urlencode({'data': data})\n req = request.Request(self.api_url, data=data.encode('utf-8'))\n try:\n with request.urlopen(req, timeout=3) as resp:\n # print(resp.read().decode('utf-8'))\n return resp.status\n except Exception as e:\n with open('/tmp/test_daemon.err', 'a') as f:\n print('%s at: %s' % (e, time.ctime()), file=f)\n\n def tasks(self):\n pnic_before = get_net_io_counters()\n while 1:\n time.sleep(60)\n pnic_after = get_net_io_counters()\n send_datas = {\n 'type': 8,\n 'ip_addr': ''.join([\n n[1] for n in self.get_host_addrs(socket.AF_INET)\n if n[0] == self.monitor_port\n ]),\n 'cpu_perf': get_cpu_percent(),\n 'mem_perf': get_mem_usage(),\n 'disk_perf': get_disk_usage(),\n 'disk_speed': get_disk_speed(),\n 'net_perf': get_network_traffic(pnic_before, pnic_after)\n }\n self.do_post(send_datas)\n pnic_before = get_net_io_counters()\n\n def run(self):\n sys.stdout.write('Daemon started with pid %s\\n' % os.getpid())\n _p = Process(target=self.tasks, 
daemon=True)\n _p.start()\n p = psutil.Process(_p.pid)\n while 1:\n current_cpu = p.cpu_percent()\n current_mem = p.memory_percent()\n # print(current_cpu, current_mem, time.ctime(), p.pid, p.ppid())\n if p.is_running() and (current_mem > 1 or current_cpu > 1):\n p.terminate()\n p.wait()\n with open('/tmp/test_daemon.log', 'a') as f:\n f.write('CPU: %s - MEM: %s - at: %s\\n' %\n (current_cpu, current_mem, time.ctime()))\n _p = Process(target=self.tasks, daemon=True)\n _p.start()\n sys.stdout.write('The subprocess restart pid %s\\n' % _p.pid)\n p = psutil.Process(_p.pid)\n time.sleep(60)",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
#!/usr/bin/env python
# encoding: utf-8
"""
PreScaledTriggers.py
Created by Bryn Mathias on 2011-11-02.
Copyright (c) 2011 Imperial College. All rights reserved.
"""
import sys
import os
from plottingUtils import *
# HLT_HT600_v1Pre_1_HLT_HT300_v9Pre_210
def main():
c1 = Print("HLT_HT550_HLT_HT250.pdf")
c1.open()
# c1.Print()
diffList = []
cumuList = []
histList = ("HT_Nom","HT_Denom")
dirs = [
"HLT_HT550_v11_HLT_HT250_v11",
"HLT_HT550_v2_HLT_HT250_v2",
"HLT_HT550_v3_HLT_HT250_v3",
"HLT_HT550_v4_HLT_HT250_v4",
"HLT_HT550_v5_HLT_HT250_v5",
"HLT_HT550_v6_HLT_HT250_v6",
"HLT_HT550_v7_HLT_HT250_v7",
"HLT_HT550_v8_HLT_HT250_v8" ,
]
# dirs = [ "HT275_HLT_HT250_AlphaT0p53_v2_HLT_Mu15_HT200_v2", "HT275_HLT_HT250_AlphaT0p53_v3_HLT_Mu15_HT200_v3",
# "HT275_HLT_HT250_AlphaT0p53_v4_HLT_Mu15_HT200_v4", "HT275_HLT_HT250_AlphaT0p53_v5_HLT_Mu30_HT200_v1",
# "HT275_HLT_HT250_AlphaT0p53_v6_HLT_Mu40_HT200_v4", "HT275_HLT_HT250_AlphaT0p55_v1_HLT_Mu5_HT200_v4" ,
# "HT275_HLT_HT250_AlphaT0p55_v2_HLT_Mu40_HT200_v4"]
# weights = [138.018/2760.509,444.633/2760.509,4.291/2760.509,179.041/2760.509,1799.0/2760.509,233.808/2760.509,1799.0/2760.509]
weights = [1.0,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,]
mg = None
c1.cd()
c1.Clear()
Nom = GetSumHist(File = ["4fbHTTriggers.root"], Directories = dirs, Hist = histList[0], Col = r.kBlack, Norm = weights, LegendText = "")
Nom.HideOverFlow()
Denom = GetSumHist(File = ["4fbHTTriggers.root"], Directories = dirs, Hist = histList[1], Col = r.kRed, Norm = weights, LegendText = "")
Denom.HideOverFlow()
Nom.Rebin(25,None)
Denom.Rebin(25,None)
Nom.hObj.GetXaxis().SetRangeUser(0.,1200.)
Denom.hObj.GetXaxis().SetRangeUser(0.,1200.)
Denom.hObj.SetTitle("HLT_HT550_HLT_HT250")
Denom.Draw("h")
Denom.hObj.GetXaxis().SetTitle("H_{T}")
Denom.hObj.GetYaxis().SetTitle("Number of Trigger events / %f"%(Denom.hObj.GetBinWidth(1)))
Denom.hObj.GetYaxis().SetTitleOffset(1.15)
Nom.hObj.SetMarkerStyle(20)
Nom.Draw("psame")
c1.Print()
c1.toFile(Nom.hObj,"Nom_Standard_All")
c1.toFile(Denom.hObj,"Denom_Standard_All")
turnon = TurnOn(Nom,Denom)
# c1.Clear()
turnon.setRange(0.,1200.)
c1.cd()
turnon.DifferentialTurnOn().GetXaxis().SetRangeUser(0.,1200.)
turnon.DifferentialTurnOn().Draw("ap")
diffList.append(turnon.DifferentialTurnOn())
c1.toFile(turnon.DifferentialTurnOn(),"HLT_HT550_HLT_HT250")
c1.Print()
# leg = Legend()
# print float(pair.split("_")[7])/float((pair.split("_")[3:4])[0])
# if float(pair.split("_")[7])%float((pair.split("_")[3:4])[0]) == 0:
cumNom = Nom.CumulativeHist()
cumDenom = Denom.CumulativeHist()
cumDenom.GetYaxis().SetTitle("")
cumDenom.Draw("h")
cumNom.Draw("psame")
c1.Print()
cumuTurnOn = r.TGraphAsymmErrors()
cumuTurnOn.Divide(cumNom,cumDenom)
cumuTurnOn.GetXaxis().SetTitle("H_{T}^{cut} ")
cumuTurnOn.GetXaxis().SetTitleSize(0.05)
cumuTurnOn.GetYaxis().SetTitle("Cumulative efficiency")
cumuTurnOn.GetYaxis().SetTitleOffset(1.5)
cumuTurnOn.GetXaxis().SetRangeUser(0.,1200.)
cumuTurnOn.SetMarkerStyle(20)
cumuTurnOn.SetMarkerSize(0.5)
cumuTurnOn.SetTitle("Cumulative HLT_HT550_HLT_HT250")
cumuList.append(cumuTurnOn)
c1.toFile(cumNom,"CumuNom_All")
c1.toFile(cumDenom,"CumuDenom_All")
cumuTurnOn.Draw("ap")
cumuTurnOn.GetXaxis().SetRangeUser(0.,1200.)
c1.canvas.Update()
c1.Print()
c1.toFile(cumuTurnOn,"Cumulative HLT_HT550_HLT_HT250")
c1.Clear()
c1.close()
pass
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "e748420dfdb77fa8661111a92fc48b79f64bff10",
"index": 4128,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n c1 = Print('HLT_HT550_HLT_HT250.pdf')\n c1.open()\n diffList = []\n cumuList = []\n histList = 'HT_Nom', 'HT_Denom'\n dirs = ['HLT_HT550_v11_HLT_HT250_v11', 'HLT_HT550_v2_HLT_HT250_v2',\n 'HLT_HT550_v3_HLT_HT250_v3', 'HLT_HT550_v4_HLT_HT250_v4',\n 'HLT_HT550_v5_HLT_HT250_v5', 'HLT_HT550_v6_HLT_HT250_v6',\n 'HLT_HT550_v7_HLT_HT250_v7', 'HLT_HT550_v8_HLT_HT250_v8']\n weights = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \n 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]\n mg = None\n c1.cd()\n c1.Clear()\n Nom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[0], Col=r.kBlack, Norm=weights, LegendText='')\n Nom.HideOverFlow()\n Denom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[1], Col=r.kRed, Norm=weights, LegendText='')\n Denom.HideOverFlow()\n Nom.Rebin(25, None)\n Denom.Rebin(25, None)\n Nom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.SetTitle('HLT_HT550_HLT_HT250')\n Denom.Draw('h')\n Denom.hObj.GetXaxis().SetTitle('H_{T}')\n Denom.hObj.GetYaxis().SetTitle('Number of Trigger events / %f' % Denom.\n hObj.GetBinWidth(1))\n Denom.hObj.GetYaxis().SetTitleOffset(1.15)\n Nom.hObj.SetMarkerStyle(20)\n Nom.Draw('psame')\n c1.Print()\n c1.toFile(Nom.hObj, 'Nom_Standard_All')\n c1.toFile(Denom.hObj, 'Denom_Standard_All')\n turnon = TurnOn(Nom, Denom)\n turnon.setRange(0.0, 1200.0)\n c1.cd()\n turnon.DifferentialTurnOn().GetXaxis().SetRangeUser(0.0, 1200.0)\n turnon.DifferentialTurnOn().Draw('ap')\n diffList.append(turnon.DifferentialTurnOn())\n c1.toFile(turnon.DifferentialTurnOn(), 'HLT_HT550_HLT_HT250')\n c1.Print()\n cumNom = Nom.CumulativeHist()\n cumDenom = Denom.CumulativeHist()\n cumDenom.GetYaxis().SetTitle('')\n cumDenom.Draw('h')\n cumNom.Draw('psame')\n c1.Print()\n cumuTurnOn = r.TGraphAsymmErrors()\n cumuTurnOn.Divide(cumNom, cumDenom)\n cumuTurnOn.GetXaxis().SetTitle('H_{T}^{cut} ')\n 
cumuTurnOn.GetXaxis().SetTitleSize(0.05)\n cumuTurnOn.GetYaxis().SetTitle('Cumulative efficiency')\n cumuTurnOn.GetYaxis().SetTitleOffset(1.5)\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n cumuTurnOn.SetMarkerStyle(20)\n cumuTurnOn.SetMarkerSize(0.5)\n cumuTurnOn.SetTitle('Cumulative HLT_HT550_HLT_HT250')\n cumuList.append(cumuTurnOn)\n c1.toFile(cumNom, 'CumuNom_All')\n c1.toFile(cumDenom, 'CumuDenom_All')\n cumuTurnOn.Draw('ap')\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n c1.canvas.Update()\n c1.Print()\n c1.toFile(cumuTurnOn, 'Cumulative HLT_HT550_HLT_HT250')\n c1.Clear()\n c1.close()\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n c1 = Print('HLT_HT550_HLT_HT250.pdf')\n c1.open()\n diffList = []\n cumuList = []\n histList = 'HT_Nom', 'HT_Denom'\n dirs = ['HLT_HT550_v11_HLT_HT250_v11', 'HLT_HT550_v2_HLT_HT250_v2',\n 'HLT_HT550_v3_HLT_HT250_v3', 'HLT_HT550_v4_HLT_HT250_v4',\n 'HLT_HT550_v5_HLT_HT250_v5', 'HLT_HT550_v6_HLT_HT250_v6',\n 'HLT_HT550_v7_HLT_HT250_v7', 'HLT_HT550_v8_HLT_HT250_v8']\n weights = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \n 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]\n mg = None\n c1.cd()\n c1.Clear()\n Nom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[0], Col=r.kBlack, Norm=weights, LegendText='')\n Nom.HideOverFlow()\n Denom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[1], Col=r.kRed, Norm=weights, LegendText='')\n Denom.HideOverFlow()\n Nom.Rebin(25, None)\n Denom.Rebin(25, None)\n Nom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.SetTitle('HLT_HT550_HLT_HT250')\n Denom.Draw('h')\n Denom.hObj.GetXaxis().SetTitle('H_{T}')\n Denom.hObj.GetYaxis().SetTitle('Number of Trigger events / %f' % Denom.\n hObj.GetBinWidth(1))\n Denom.hObj.GetYaxis().SetTitleOffset(1.15)\n Nom.hObj.SetMarkerStyle(20)\n Nom.Draw('psame')\n c1.Print()\n c1.toFile(Nom.hObj, 'Nom_Standard_All')\n c1.toFile(Denom.hObj, 'Denom_Standard_All')\n turnon = TurnOn(Nom, Denom)\n turnon.setRange(0.0, 1200.0)\n c1.cd()\n turnon.DifferentialTurnOn().GetXaxis().SetRangeUser(0.0, 1200.0)\n turnon.DifferentialTurnOn().Draw('ap')\n diffList.append(turnon.DifferentialTurnOn())\n c1.toFile(turnon.DifferentialTurnOn(), 'HLT_HT550_HLT_HT250')\n c1.Print()\n cumNom = Nom.CumulativeHist()\n cumDenom = Denom.CumulativeHist()\n cumDenom.GetYaxis().SetTitle('')\n cumDenom.Draw('h')\n cumNom.Draw('psame')\n c1.Print()\n cumuTurnOn = r.TGraphAsymmErrors()\n cumuTurnOn.Divide(cumNom, cumDenom)\n cumuTurnOn.GetXaxis().SetTitle('H_{T}^{cut} ')\n 
cumuTurnOn.GetXaxis().SetTitleSize(0.05)\n cumuTurnOn.GetYaxis().SetTitle('Cumulative efficiency')\n cumuTurnOn.GetYaxis().SetTitleOffset(1.5)\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n cumuTurnOn.SetMarkerStyle(20)\n cumuTurnOn.SetMarkerSize(0.5)\n cumuTurnOn.SetTitle('Cumulative HLT_HT550_HLT_HT250')\n cumuList.append(cumuTurnOn)\n c1.toFile(cumNom, 'CumuNom_All')\n c1.toFile(cumDenom, 'CumuDenom_All')\n cumuTurnOn.Draw('ap')\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n c1.canvas.Update()\n c1.Print()\n c1.toFile(cumuTurnOn, 'Cumulative HLT_HT550_HLT_HT250')\n c1.Clear()\n c1.close()\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nimport sys\nimport os\nfrom plottingUtils import *\n\n\ndef main():\n c1 = Print('HLT_HT550_HLT_HT250.pdf')\n c1.open()\n diffList = []\n cumuList = []\n histList = 'HT_Nom', 'HT_Denom'\n dirs = ['HLT_HT550_v11_HLT_HT250_v11', 'HLT_HT550_v2_HLT_HT250_v2',\n 'HLT_HT550_v3_HLT_HT250_v3', 'HLT_HT550_v4_HLT_HT250_v4',\n 'HLT_HT550_v5_HLT_HT250_v5', 'HLT_HT550_v6_HLT_HT250_v6',\n 'HLT_HT550_v7_HLT_HT250_v7', 'HLT_HT550_v8_HLT_HT250_v8']\n weights = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \n 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]\n mg = None\n c1.cd()\n c1.Clear()\n Nom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[0], Col=r.kBlack, Norm=weights, LegendText='')\n Nom.HideOverFlow()\n Denom = GetSumHist(File=['4fbHTTriggers.root'], Directories=dirs, Hist=\n histList[1], Col=r.kRed, Norm=weights, LegendText='')\n Denom.HideOverFlow()\n Nom.Rebin(25, None)\n Denom.Rebin(25, None)\n Nom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.GetXaxis().SetRangeUser(0.0, 1200.0)\n Denom.hObj.SetTitle('HLT_HT550_HLT_HT250')\n Denom.Draw('h')\n Denom.hObj.GetXaxis().SetTitle('H_{T}')\n Denom.hObj.GetYaxis().SetTitle('Number of Trigger events / %f' % Denom.\n hObj.GetBinWidth(1))\n Denom.hObj.GetYaxis().SetTitleOffset(1.15)\n Nom.hObj.SetMarkerStyle(20)\n Nom.Draw('psame')\n c1.Print()\n c1.toFile(Nom.hObj, 'Nom_Standard_All')\n c1.toFile(Denom.hObj, 'Denom_Standard_All')\n turnon = TurnOn(Nom, Denom)\n turnon.setRange(0.0, 1200.0)\n c1.cd()\n turnon.DifferentialTurnOn().GetXaxis().SetRangeUser(0.0, 1200.0)\n turnon.DifferentialTurnOn().Draw('ap')\n diffList.append(turnon.DifferentialTurnOn())\n c1.toFile(turnon.DifferentialTurnOn(), 'HLT_HT550_HLT_HT250')\n c1.Print()\n cumNom = Nom.CumulativeHist()\n cumDenom = Denom.CumulativeHist()\n cumDenom.GetYaxis().SetTitle('')\n cumDenom.Draw('h')\n cumNom.Draw('psame')\n c1.Print()\n cumuTurnOn = r.TGraphAsymmErrors()\n cumuTurnOn.Divide(cumNom, cumDenom)\n 
cumuTurnOn.GetXaxis().SetTitle('H_{T}^{cut} ')\n cumuTurnOn.GetXaxis().SetTitleSize(0.05)\n cumuTurnOn.GetYaxis().SetTitle('Cumulative efficiency')\n cumuTurnOn.GetYaxis().SetTitleOffset(1.5)\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n cumuTurnOn.SetMarkerStyle(20)\n cumuTurnOn.SetMarkerSize(0.5)\n cumuTurnOn.SetTitle('Cumulative HLT_HT550_HLT_HT250')\n cumuList.append(cumuTurnOn)\n c1.toFile(cumNom, 'CumuNom_All')\n c1.toFile(cumDenom, 'CumuDenom_All')\n cumuTurnOn.Draw('ap')\n cumuTurnOn.GetXaxis().SetRangeUser(0.0, 1200.0)\n c1.canvas.Update()\n c1.Print()\n c1.toFile(cumuTurnOn, 'Cumulative HLT_HT550_HLT_HT250')\n c1.Clear()\n c1.close()\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n# encoding: utf-8\n\"\"\"\nPreScaledTriggers.py\n\nCreated by Bryn Mathias on 2011-11-02.\nCopyright (c) 2011 Imperial College. All rights reserved.\n\"\"\"\n\nimport sys\nimport os\nfrom plottingUtils import *\n\n# HLT_HT600_v1Pre_1_HLT_HT300_v9Pre_210\ndef main():\n c1 = Print(\"HLT_HT550_HLT_HT250.pdf\")\n c1.open()\n # c1.Print()\n\n diffList = []\n cumuList = []\n histList = (\"HT_Nom\",\"HT_Denom\")\n dirs = [\n\"HLT_HT550_v11_HLT_HT250_v11\",\n\"HLT_HT550_v2_HLT_HT250_v2\",\n\"HLT_HT550_v3_HLT_HT250_v3\",\n\"HLT_HT550_v4_HLT_HT250_v4\",\n\"HLT_HT550_v5_HLT_HT250_v5\",\n\"HLT_HT550_v6_HLT_HT250_v6\",\n\"HLT_HT550_v7_HLT_HT250_v7\",\n\"HLT_HT550_v8_HLT_HT250_v8\" ,\n\n\n\n]\n\n\n # dirs = [ \"HT275_HLT_HT250_AlphaT0p53_v2_HLT_Mu15_HT200_v2\", \"HT275_HLT_HT250_AlphaT0p53_v3_HLT_Mu15_HT200_v3\",\n # \"HT275_HLT_HT250_AlphaT0p53_v4_HLT_Mu15_HT200_v4\", \"HT275_HLT_HT250_AlphaT0p53_v5_HLT_Mu30_HT200_v1\",\n # \"HT275_HLT_HT250_AlphaT0p53_v6_HLT_Mu40_HT200_v4\", \"HT275_HLT_HT250_AlphaT0p55_v1_HLT_Mu5_HT200_v4\" ,\n # \"HT275_HLT_HT250_AlphaT0p55_v2_HLT_Mu40_HT200_v4\"]\n # weights = [138.018/2760.509,444.633/2760.509,4.291/2760.509,179.041/2760.509,1799.0/2760.509,233.808/2760.509,1799.0/2760.509]\n weights = [1.0,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,]\n\n mg = None\n c1.cd()\n c1.Clear()\n Nom = GetSumHist(File = [\"4fbHTTriggers.root\"], Directories = dirs, Hist = histList[0], Col = r.kBlack, Norm = weights, LegendText = \"\")\n Nom.HideOverFlow()\n Denom = GetSumHist(File = [\"4fbHTTriggers.root\"], Directories = dirs, Hist = histList[1], Col = r.kRed, Norm = weights, LegendText = \"\")\n Denom.HideOverFlow()\n Nom.Rebin(25,None)\n Denom.Rebin(25,None)\n Nom.hObj.GetXaxis().SetRangeUser(0.,1200.)\n Denom.hObj.GetXaxis().SetRangeUser(0.,1200.)\n Denom.hObj.SetTitle(\"HLT_HT550_HLT_HT250\")\n Denom.Draw(\"h\")\n Denom.hObj.GetXaxis().SetTitle(\"H_{T}\")\n Denom.hObj.GetYaxis().SetTitle(\"Number of Trigger 
events / %f\"%(Denom.hObj.GetBinWidth(1)))\n Denom.hObj.GetYaxis().SetTitleOffset(1.15)\n\n Nom.hObj.SetMarkerStyle(20)\n Nom.Draw(\"psame\")\n c1.Print()\n c1.toFile(Nom.hObj,\"Nom_Standard_All\")\n c1.toFile(Denom.hObj,\"Denom_Standard_All\")\n turnon = TurnOn(Nom,Denom)\n # c1.Clear()\n turnon.setRange(0.,1200.)\n c1.cd()\n turnon.DifferentialTurnOn().GetXaxis().SetRangeUser(0.,1200.)\n turnon.DifferentialTurnOn().Draw(\"ap\")\n diffList.append(turnon.DifferentialTurnOn())\n c1.toFile(turnon.DifferentialTurnOn(),\"HLT_HT550_HLT_HT250\")\n c1.Print()\n # leg = Legend()\n # print float(pair.split(\"_\")[7])/float((pair.split(\"_\")[3:4])[0])\n # if float(pair.split(\"_\")[7])%float((pair.split(\"_\")[3:4])[0]) == 0:\n cumNom = Nom.CumulativeHist()\n cumDenom = Denom.CumulativeHist()\n cumDenom.GetYaxis().SetTitle(\"\")\n cumDenom.Draw(\"h\")\n cumNom.Draw(\"psame\")\n c1.Print()\n cumuTurnOn = r.TGraphAsymmErrors()\n cumuTurnOn.Divide(cumNom,cumDenom)\n cumuTurnOn.GetXaxis().SetTitle(\"H_{T}^{cut} \")\n cumuTurnOn.GetXaxis().SetTitleSize(0.05)\n cumuTurnOn.GetYaxis().SetTitle(\"Cumulative efficiency\")\n cumuTurnOn.GetYaxis().SetTitleOffset(1.5)\n cumuTurnOn.GetXaxis().SetRangeUser(0.,1200.)\n cumuTurnOn.SetMarkerStyle(20)\n cumuTurnOn.SetMarkerSize(0.5)\n cumuTurnOn.SetTitle(\"Cumulative HLT_HT550_HLT_HT250\")\n cumuList.append(cumuTurnOn)\n c1.toFile(cumNom,\"CumuNom_All\")\n c1.toFile(cumDenom,\"CumuDenom_All\")\n cumuTurnOn.Draw(\"ap\")\n cumuTurnOn.GetXaxis().SetRangeUser(0.,1200.)\n c1.canvas.Update()\n c1.Print()\n c1.toFile(cumuTurnOn,\"Cumulative HLT_HT550_HLT_HT250\")\n c1.Clear()\n\n c1.close()\n pass\n\n\n\n\nif __name__ == '__main__':\n main()\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def GetAuditedSystemVersion():
    """Return a human-readable version string for the audited system.

    Side effect: populates the module-level OSX_VERSION dict with the
    parsed build/major/minor/patch components.
    """
    global OSX_VERSION
    SysVersion = 'Unknown system version'
    SystemVersionPlist = False
    # core.UniversalReadPlist presumably returns a dict-like plist or a
    # falsy value on failure — TODO confirm against the core module.
    SystemVersionPlist = core.UniversalReadPlist(
        '/System/Library/CoreServices/SystemVersion.plist')
    if SystemVersionPlist:
        if 'ProductName' in SystemVersionPlist:
            SysVersion = SystemVersionPlist['ProductName']
        if 'ProductVersion' in SystemVersionPlist:
            SysVersion += ' ' + SystemVersionPlist['ProductVersion']
        if 'ProductBuildVersion' in SystemVersionPlist:
            SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']
        # NOTE(review): keys below are read unconditionally even though each
        # is guarded above — a plist missing ProductVersion or
        # ProductBuildVersion raises KeyError here.
        OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[
            'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[
            'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[
            'ProductVersion'].split('.')[0]), 'MinorVersion': int(
            SystemVersionPlist['ProductVersion'].split('.')[1]),
            'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(
            '.')[2])}
    else:
        log.PrintAndLog(u'Cannot determine the system version', 'ERROR')
    return SysVersion
def GetAuditedSystemTimezone():
    """ Return the current system timezone """
    # Resolve the etc/localtime symlink under the audited root; the last two
    # path components form the Olson zone name (e.g. 'Europe/Paris').
    Timezone = False
    try:
        Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))
        Timezone = Timezone.split('/')
    except Exception as e:
        # Fix: bare PrintAndLog raised NameError — the logger lives in the
        # log module (see GetAuditedSystemVersion for the same call).
        log.PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(
            'utf-8'), 'ERROR')
    # NOTE(review): if the try block failed, Timezone is still False and the
    # indexing below raises TypeError; also str.decode assumes Python 2 —
    # confirm the target runtime.
    return Timezone[-2] + '/' + Timezone[-1]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def generate_header():
    """Build the report header dict: tool banner, audited path, OS version
    and current timezone, keyed for the report writer."""
    header = {}
    description = ('Report generated by ' + __description__ + ' v' +
        __version__ + ' on ' + time.strftime('%x %X %Z') + ' running as ' +
        Euid + '/' + Egid)
    header['description'] = description
    # NOTE(review): ROOT_PATH is assigned a str literal elsewhere; .decode()
    # only exists on Python 2 str / bytes — confirm target interpreter.
    audit_path = 'Audited system path: ' + ROOT_PATH.decode('utf-8')
    header['audit_path'] = audit_path
    AuditedSystemVersion = GetAuditedSystemVersion()
    sysv = 'Version of the audited system: ' + AuditedSystemVersion
    header['system_version'] = sysv
    Timezone = GetAuditedSystemTimezone()
    tz = 'Current timezone of the audited system: ' + Timezone
    header['timezone'] = tz
    return header
def GetAuditedSystemVersion():
    """Return a readable OS version string; also fills the OSX_VERSION
    global with parsed build/major/minor/patch components."""
    global OSX_VERSION
    plist = core.UniversalReadPlist(
        '/System/Library/CoreServices/SystemVersion.plist')
    if not plist:
        log.PrintAndLog(u'Cannot determine the system version', 'ERROR')
        return 'Unknown system version'
    version_text = 'Unknown system version'
    if 'ProductName' in plist:
        version_text = plist['ProductName']
    if 'ProductVersion' in plist:
        version_text += ' ' + plist['ProductVersion']
    if 'ProductBuildVersion' in plist:
        version_text += ' build ' + plist['ProductBuildVersion']
    # Cache the numeric components for later version checks.
    pieces = plist['ProductVersion'].split('.')
    OSX_VERSION = {
        'ProductBuildVersion': plist['ProductBuildVersion'],
        'ProductVersion': plist['ProductVersion'],
        'MajorVersion': int(pieces[0]),
        'MinorVersion': int(pieces[1]),
        'PatchVersion': int(pieces[2]),
    }
    return version_text
def GetAuditedSystemTimezone():
    """ Return the current system timezone """
    # The zone name is the last two components of the resolved
    # etc/localtime symlink (e.g. 'America/New_York').
    Timezone = False
    try:
        Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))
        Timezone = Timezone.split('/')
    except Exception as e:
        # Fix: PrintAndLog alone is a NameError; the helper is defined in
        # the log module, matching GetAuditedSystemVersion's usage.
        log.PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(
            'utf-8'), 'ERROR')
    # NOTE(review): on failure Timezone remains False and indexing below
    # raises TypeError — consider returning a sentinel instead.
    return Timezone[-2] + '/' + Timezone[-1]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Tool identity metadata, reused in the report header.
__description__ = 'OS X Auditor'
__author__ = 'Atarimaster & @Jipe_'
__version__ = '0.5.0'
# Root of the audited filesystem ('/' means the live system).
ROOT_PATH = '/'
# Effective uid/gid of the auditing process, kept as strings for reporting.
Euid = str(os.geteuid())
Egid = str(os.getegid())
def generate_header():
    """Assemble the report header: description banner, audited path,
    system version and timezone."""
    banner = ('Report generated by ' + __description__ + ' v' +
        __version__ + ' on ' + time.strftime('%x %X %Z') +
        ' running as ' + Euid + '/' + Egid)
    return {
        'description': banner,
        'audit_path': 'Audited system path: ' + ROOT_PATH.decode('utf-8'),
        'system_version': 'Version of the audited system: ' +
            GetAuditedSystemVersion(),
        'timezone': 'Current timezone of the audited system: ' +
            GetAuditedSystemTimezone(),
    }
def GetAuditedSystemVersion():
    """Return 'ProductName ProductVersion build Build' for the audited host.

    Side effect: sets the OSX_VERSION global with numeric components.
    """
    global OSX_VERSION
    SysVersion = 'Unknown system version'
    SystemVersionPlist = False
    SystemVersionPlist = core.UniversalReadPlist(
        '/System/Library/CoreServices/SystemVersion.plist')
    if SystemVersionPlist:
        if 'ProductName' in SystemVersionPlist:
            SysVersion = SystemVersionPlist['ProductName']
        if 'ProductVersion' in SystemVersionPlist:
            SysVersion += ' ' + SystemVersionPlist['ProductVersion']
        if 'ProductBuildVersion' in SystemVersionPlist:
            SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']
        # NOTE(review): unguarded key reads below can raise KeyError on a
        # plist that lacks ProductVersion or ProductBuildVersion.
        OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[
            'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[
            'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[
            'ProductVersion'].split('.')[0]), 'MinorVersion': int(
            SystemVersionPlist['ProductVersion'].split('.')[1]),
            'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(
            '.')[2])}
    else:
        log.PrintAndLog(u'Cannot determine the system version', 'ERROR')
    return SysVersion
def GetAuditedSystemTimezone():
    """ Return the current system timezone """
    # Derive 'Region/City' from the resolved etc/localtime symlink.
    Timezone = False
    try:
        Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))
        Timezone = Timezone.split('/')
    except Exception as e:
        # Fix: route through log.PrintAndLog — the unqualified name was a
        # NameError at runtime.
        log.PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(
            'utf-8'), 'ERROR')
    # NOTE(review): Timezone stays False when the try block fails, making
    # the indexing below raise TypeError.
    return Timezone[-2] + '/' + Timezone[-1]
<|reserved_special_token_1|>
import os
import log
import core
import time
# Program metadata used when rendering the report header.
__description__ = 'OS X Auditor'
__author__ = 'Atarimaster & @Jipe_'
__version__ = '0.5.0'
# Base path of the system being audited.
ROOT_PATH = '/'
# Effective credentials of this process, stringified for display.
Euid = str(os.geteuid())
Egid = str(os.getegid())
def generate_header():
    """Collect report-header fields (banner, audit path, OS version,
    timezone) into a dict."""
    header = {}
    description = ('Report generated by ' + __description__ + ' v' +
        __version__ + ' on ' + time.strftime('%x %X %Z') + ' running as ' +
        Euid + '/' + Egid)
    header['description'] = description
    # NOTE(review): .decode() on a str only works on Python 2 — confirm
    # the intended interpreter before porting.
    audit_path = 'Audited system path: ' + ROOT_PATH.decode('utf-8')
    header['audit_path'] = audit_path
    AuditedSystemVersion = GetAuditedSystemVersion()
    sysv = 'Version of the audited system: ' + AuditedSystemVersion
    header['system_version'] = sysv
    Timezone = GetAuditedSystemTimezone()
    tz = 'Current timezone of the audited system: ' + Timezone
    header['timezone'] = tz
    return header
def GetAuditedSystemVersion():
    """Read SystemVersion.plist and return a readable version string;
    also populates the OSX_VERSION global."""
    global OSX_VERSION
    SysVersion = 'Unknown system version'
    SystemVersionPlist = False
    SystemVersionPlist = core.UniversalReadPlist(
        '/System/Library/CoreServices/SystemVersion.plist')
    if SystemVersionPlist:
        if 'ProductName' in SystemVersionPlist:
            SysVersion = SystemVersionPlist['ProductName']
        if 'ProductVersion' in SystemVersionPlist:
            SysVersion += ' ' + SystemVersionPlist['ProductVersion']
        if 'ProductBuildVersion' in SystemVersionPlist:
            SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']
        # NOTE(review): these key accesses are not guarded like the string
        # building above — missing keys raise KeyError here.
        OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[
            'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[
            'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[
            'ProductVersion'].split('.')[0]), 'MinorVersion': int(
            SystemVersionPlist['ProductVersion'].split('.')[1]),
            'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(
            '.')[2])}
    else:
        log.PrintAndLog(u'Cannot determine the system version', 'ERROR')
    return SysVersion
def GetAuditedSystemTimezone():
    """ Return the current system timezone """
    # The last two components of the resolved symlink are the zone name.
    Timezone = False
    try:
        Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))
        Timezone = Timezone.split('/')
    except Exception as e:
        # Fix: use log.PrintAndLog — the bare PrintAndLog name is undefined
        # in this module (NameError), unlike in GetAuditedSystemVersion.
        log.PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(
            'utf-8'), 'ERROR')
    # NOTE(review): when the try block fails, Timezone is False and the
    # indexing below raises TypeError.
    return Timezone[-2] + '/' + Timezone[-1]
<|reserved_special_token_1|>
import os
import log
import core
import time
# Identity strings shown in the generated report.
__description__ = 'OS X Auditor'
__author__ = 'Atarimaster & @Jipe_'
__version__ = '0.5.0'
# Filesystem root to audit; '/' audits the running system.
ROOT_PATH = '/'
# Effective uid/gid captured once at import time.
Euid = str(os.geteuid())
Egid = str(os.getegid())
def generate_header():
    """Return the report header dict (description, audit path, OS
    version, timezone)."""
    header = {}
    # Description(Audited By)
    description = "Report generated by " + __description__ + " v" + __version__ + " on " + time.strftime('%x %X %Z') + " running as " + Euid + "/" + Egid
    header['description'] = description
    # Audited Path — NOTE(review): str.decode assumes Python 2; confirm.
    audit_path = "Audited system path: " + ROOT_PATH.decode("utf-8")
    header['audit_path'] = audit_path
    # System Version
    AuditedSystemVersion = GetAuditedSystemVersion()
    sysv = "Version of the audited system: " + AuditedSystemVersion
    header['system_version'] = sysv
    # Current Timezone
    Timezone = GetAuditedSystemTimezone()
    tz = "Current timezone of the audited system: " + Timezone
    header['timezone'] = tz
    return header
def GetAuditedSystemVersion():
    """Return 'ProductName ProductVersion build Build'; sets OSX_VERSION."""
    global OSX_VERSION
    SysVersion = "Unknown system version"
    SystemVersionPlist = False
    SystemVersionPlist = core.UniversalReadPlist("/System/Library/CoreServices/SystemVersion.plist")
    if SystemVersionPlist:
        if "ProductName" in SystemVersionPlist: SysVersion = SystemVersionPlist["ProductName"]
        if "ProductVersion" in SystemVersionPlist: SysVersion += " " + SystemVersionPlist["ProductVersion"]
        if "ProductBuildVersion" in SystemVersionPlist: SysVersion += " build " + SystemVersionPlist["ProductBuildVersion"]
        # NOTE(review): unguarded key access — KeyError if ProductVersion or
        # ProductBuildVersion is missing despite the guards above.
        OSX_VERSION = {
            "ProductBuildVersion": SystemVersionPlist["ProductBuildVersion"],
            "ProductVersion": SystemVersionPlist["ProductVersion"],
            "MajorVersion": int(SystemVersionPlist["ProductVersion"].split('.')[0]),
            "MinorVersion": int(SystemVersionPlist["ProductVersion"].split('.')[1]),
            "PatchVersion": int(SystemVersionPlist["ProductVersion"].split('.')[2])
        }
    else:
        log.PrintAndLog(u"Cannot determine the system version", "ERROR")
    return SysVersion
def GetAuditedSystemTimezone():
    """ Return the current system timezone """
    # Zone name is taken from the resolved etc/localtime symlink target.
    Timezone = False
    try:
        Timezone = os.path.realpath(os.path.join(ROOT_PATH, "etc/localtime"))
        Timezone = Timezone.split("/")
    except Exception as e:
        # Fix: qualify as log.PrintAndLog (bare name was a NameError);
        # consistent with GetAuditedSystemVersion's error path.
        log.PrintAndLog(u"Cannot read the timezone" + str(e.args).decode("utf-8"), "ERROR")
    # NOTE(review): Timezone stays False on failure, so the return indexing
    # would raise TypeError.
    return Timezone[-2] + "/" + Timezone[-1]
|
flexible
|
{
"blob_id": "547d67bce7eb05e55e02c73a22342ca572e89f39",
"index": 9959,
"step-1": "<mask token>\n\n\ndef GetAuditedSystemVersion():\n global OSX_VERSION\n SysVersion = 'Unknown system version'\n SystemVersionPlist = False\n SystemVersionPlist = core.UniversalReadPlist(\n '/System/Library/CoreServices/SystemVersion.plist')\n if SystemVersionPlist:\n if 'ProductName' in SystemVersionPlist:\n SysVersion = SystemVersionPlist['ProductName']\n if 'ProductVersion' in SystemVersionPlist:\n SysVersion += ' ' + SystemVersionPlist['ProductVersion']\n if 'ProductBuildVersion' in SystemVersionPlist:\n SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']\n OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[\n 'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[\n 'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[\n 'ProductVersion'].split('.')[0]), 'MinorVersion': int(\n SystemVersionPlist['ProductVersion'].split('.')[1]),\n 'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(\n '.')[2])}\n else:\n log.PrintAndLog(u'Cannot determine the system version', 'ERROR')\n return SysVersion\n\n\ndef GetAuditedSystemTimezone():\n \"\"\" Return the current system timezone \"\"\"\n Timezone = False\n try:\n Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))\n Timezone = Timezone.split('/')\n except Exception as e:\n PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(\n 'utf-8'), 'ERROR')\n return Timezone[-2] + '/' + Timezone[-1]\n",
"step-2": "<mask token>\n\n\ndef generate_header():\n header = {}\n description = ('Report generated by ' + __description__ + ' v' +\n __version__ + ' on ' + time.strftime('%x %X %Z') + ' running as ' +\n Euid + '/' + Egid)\n header['description'] = description\n audit_path = 'Audited system path: ' + ROOT_PATH.decode('utf-8')\n header['audit_path'] = audit_path\n AuditedSystemVersion = GetAuditedSystemVersion()\n sysv = 'Version of the audited system: ' + AuditedSystemVersion\n header['system_version'] = sysv\n Timezone = GetAuditedSystemTimezone()\n tz = 'Current timezone of the audited system: ' + Timezone\n header['timezone'] = tz\n return header\n\n\ndef GetAuditedSystemVersion():\n global OSX_VERSION\n SysVersion = 'Unknown system version'\n SystemVersionPlist = False\n SystemVersionPlist = core.UniversalReadPlist(\n '/System/Library/CoreServices/SystemVersion.plist')\n if SystemVersionPlist:\n if 'ProductName' in SystemVersionPlist:\n SysVersion = SystemVersionPlist['ProductName']\n if 'ProductVersion' in SystemVersionPlist:\n SysVersion += ' ' + SystemVersionPlist['ProductVersion']\n if 'ProductBuildVersion' in SystemVersionPlist:\n SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']\n OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[\n 'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[\n 'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[\n 'ProductVersion'].split('.')[0]), 'MinorVersion': int(\n SystemVersionPlist['ProductVersion'].split('.')[1]),\n 'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(\n '.')[2])}\n else:\n log.PrintAndLog(u'Cannot determine the system version', 'ERROR')\n return SysVersion\n\n\ndef GetAuditedSystemTimezone():\n \"\"\" Return the current system timezone \"\"\"\n Timezone = False\n try:\n Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))\n Timezone = Timezone.split('/')\n except Exception as e:\n PrintAndLog(u'Cannot read the timezone' + 
str(e.args).decode(\n 'utf-8'), 'ERROR')\n return Timezone[-2] + '/' + Timezone[-1]\n",
"step-3": "<mask token>\n__description__ = 'OS X Auditor'\n__author__ = 'Atarimaster & @Jipe_'\n__version__ = '0.5.0'\nROOT_PATH = '/'\nEuid = str(os.geteuid())\nEgid = str(os.getegid())\n\n\ndef generate_header():\n header = {}\n description = ('Report generated by ' + __description__ + ' v' +\n __version__ + ' on ' + time.strftime('%x %X %Z') + ' running as ' +\n Euid + '/' + Egid)\n header['description'] = description\n audit_path = 'Audited system path: ' + ROOT_PATH.decode('utf-8')\n header['audit_path'] = audit_path\n AuditedSystemVersion = GetAuditedSystemVersion()\n sysv = 'Version of the audited system: ' + AuditedSystemVersion\n header['system_version'] = sysv\n Timezone = GetAuditedSystemTimezone()\n tz = 'Current timezone of the audited system: ' + Timezone\n header['timezone'] = tz\n return header\n\n\ndef GetAuditedSystemVersion():\n global OSX_VERSION\n SysVersion = 'Unknown system version'\n SystemVersionPlist = False\n SystemVersionPlist = core.UniversalReadPlist(\n '/System/Library/CoreServices/SystemVersion.plist')\n if SystemVersionPlist:\n if 'ProductName' in SystemVersionPlist:\n SysVersion = SystemVersionPlist['ProductName']\n if 'ProductVersion' in SystemVersionPlist:\n SysVersion += ' ' + SystemVersionPlist['ProductVersion']\n if 'ProductBuildVersion' in SystemVersionPlist:\n SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']\n OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[\n 'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[\n 'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[\n 'ProductVersion'].split('.')[0]), 'MinorVersion': int(\n SystemVersionPlist['ProductVersion'].split('.')[1]),\n 'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(\n '.')[2])}\n else:\n log.PrintAndLog(u'Cannot determine the system version', 'ERROR')\n return SysVersion\n\n\ndef GetAuditedSystemTimezone():\n \"\"\" Return the current system timezone \"\"\"\n Timezone = False\n try:\n Timezone = 
os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))\n Timezone = Timezone.split('/')\n except Exception as e:\n PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(\n 'utf-8'), 'ERROR')\n return Timezone[-2] + '/' + Timezone[-1]\n",
"step-4": "import os\nimport log\nimport core\nimport time\n__description__ = 'OS X Auditor'\n__author__ = 'Atarimaster & @Jipe_'\n__version__ = '0.5.0'\nROOT_PATH = '/'\nEuid = str(os.geteuid())\nEgid = str(os.getegid())\n\n\ndef generate_header():\n header = {}\n description = ('Report generated by ' + __description__ + ' v' +\n __version__ + ' on ' + time.strftime('%x %X %Z') + ' running as ' +\n Euid + '/' + Egid)\n header['description'] = description\n audit_path = 'Audited system path: ' + ROOT_PATH.decode('utf-8')\n header['audit_path'] = audit_path\n AuditedSystemVersion = GetAuditedSystemVersion()\n sysv = 'Version of the audited system: ' + AuditedSystemVersion\n header['system_version'] = sysv\n Timezone = GetAuditedSystemTimezone()\n tz = 'Current timezone of the audited system: ' + Timezone\n header['timezone'] = tz\n return header\n\n\ndef GetAuditedSystemVersion():\n global OSX_VERSION\n SysVersion = 'Unknown system version'\n SystemVersionPlist = False\n SystemVersionPlist = core.UniversalReadPlist(\n '/System/Library/CoreServices/SystemVersion.plist')\n if SystemVersionPlist:\n if 'ProductName' in SystemVersionPlist:\n SysVersion = SystemVersionPlist['ProductName']\n if 'ProductVersion' in SystemVersionPlist:\n SysVersion += ' ' + SystemVersionPlist['ProductVersion']\n if 'ProductBuildVersion' in SystemVersionPlist:\n SysVersion += ' build ' + SystemVersionPlist['ProductBuildVersion']\n OSX_VERSION = {'ProductBuildVersion': SystemVersionPlist[\n 'ProductBuildVersion'], 'ProductVersion': SystemVersionPlist[\n 'ProductVersion'], 'MajorVersion': int(SystemVersionPlist[\n 'ProductVersion'].split('.')[0]), 'MinorVersion': int(\n SystemVersionPlist['ProductVersion'].split('.')[1]),\n 'PatchVersion': int(SystemVersionPlist['ProductVersion'].split(\n '.')[2])}\n else:\n log.PrintAndLog(u'Cannot determine the system version', 'ERROR')\n return SysVersion\n\n\ndef GetAuditedSystemTimezone():\n \"\"\" Return the current system timezone \"\"\"\n Timezone = 
False\n try:\n Timezone = os.path.realpath(os.path.join(ROOT_PATH, 'etc/localtime'))\n Timezone = Timezone.split('/')\n except Exception as e:\n PrintAndLog(u'Cannot read the timezone' + str(e.args).decode(\n 'utf-8'), 'ERROR')\n return Timezone[-2] + '/' + Timezone[-1]\n",
"step-5": "import os\nimport log\nimport core\nimport time\n\n__description__ = 'OS X Auditor'\n__author__ = 'Atarimaster & @Jipe_'\n__version__ = '0.5.0'\n\nROOT_PATH = '/'\n\nEuid = str(os.geteuid())\nEgid = str(os.getegid())\n\ndef generate_header():\n header = {}\n\n # Description(Audited By)\n description = \"Report generated by \" + __description__ + \" v\" + __version__ + \" on \" + time.strftime('%x %X %Z') + \" running as \" + Euid + \"/\" + Egid\n header['description'] = description\n\n # Audited Path\n audit_path = \"Audited system path: \" + ROOT_PATH.decode(\"utf-8\")\n header['audit_path'] = audit_path\n\n # System Version\n AuditedSystemVersion = GetAuditedSystemVersion()\n sysv = \"Version of the audited system: \" + AuditedSystemVersion\n header['system_version'] = sysv\n\n # Current Timezone\n Timezone = GetAuditedSystemTimezone()\n tz = \"Current timezone of the audited system: \" + Timezone\n header['timezone'] = tz\n\n return header\n\ndef GetAuditedSystemVersion():\n global OSX_VERSION\n\n SysVersion = \"Unknown system version\"\n SystemVersionPlist = False\n\n SystemVersionPlist = core.UniversalReadPlist(\"/System/Library/CoreServices/SystemVersion.plist\")\n\n if SystemVersionPlist:\n if \"ProductName\" in SystemVersionPlist: SysVersion = SystemVersionPlist[\"ProductName\"]\n if \"ProductVersion\" in SystemVersionPlist: SysVersion += \" \" + SystemVersionPlist[\"ProductVersion\"]\n if \"ProductBuildVersion\" in SystemVersionPlist: SysVersion += \" build \" + SystemVersionPlist[\"ProductBuildVersion\"]\n\n OSX_VERSION = {\n \"ProductBuildVersion\": SystemVersionPlist[\"ProductBuildVersion\"],\n \"ProductVersion\": SystemVersionPlist[\"ProductVersion\"],\n \"MajorVersion\": int(SystemVersionPlist[\"ProductVersion\"].split('.')[0]),\n \"MinorVersion\": int(SystemVersionPlist[\"ProductVersion\"].split('.')[1]),\n \"PatchVersion\": int(SystemVersionPlist[\"ProductVersion\"].split('.')[2])\n }\n\n else:\n log.PrintAndLog(u\"Cannot determine the 
system version\", \"ERROR\")\n\n return SysVersion\n\ndef GetAuditedSystemTimezone():\n \"\"\" Return the current system timezone \"\"\"\n\n Timezone = False\n try:\n Timezone = os.path.realpath(os.path.join(ROOT_PATH, \"etc/localtime\"))\n Timezone = Timezone.split(\"/\")\n except Exception as e:\n PrintAndLog(u\"Cannot read the timezone\" + str(e.args).decode(\"utf-8\"), \"ERROR\")\n\n return Timezone[-2] + \"/\" + Timezone[-1]",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.contrib import admin
from .models import Profile
from django.contrib.admin.templatetags.admin_list import admin_actions
# Expose the Profile model for CRUD in the Django admin site.
admin.site.register(Profile)
# Register your models here.
|
normal
|
{
"blob_id": "89c44d35559504501e4333ea6ff4d3528f1a4c4f",
"index": 5171,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(Profile)\n",
"step-3": "from django.contrib import admin\nfrom .models import Profile\nfrom django.contrib.admin.templatetags.admin_list import admin_actions\nadmin.site.register(Profile)\n",
"step-4": "from django.contrib import admin\nfrom .models import Profile\nfrom django.contrib.admin.templatetags.admin_list import admin_actions\n\nadmin.site.register(Profile)\n# Register your models here.\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Calculator(object):
<|reserved_special_token_0|>
    def _float_to_string_(self, f, p=40):
        # Format f with p decimals and an explicit sign, then strip trailing
        # zeros while keeping at least one digit after the decimal point.
        result = f'{f:+1.{p}f}'
        if '.' in result:
            result = result.rstrip('0')
            if result[-1] == '.':
                result += '0'
        return result
    def _muldiv_(self, m):
        # Regex callback: fold one matched 'a*b' / 'a/b' pair into a literal.
        op = operator.mul if m.group('op') == '*' else operator.truediv
        return self._float_to_string_(op(float(m.group('n1')), float(m.
            group('n2'))))
    def _subber_(self, search, replace, target):
        # Substitute one match at a time until no match remains, collapsing
        # sign pairs ('--' -> '+', '-+' -> '-') after each rewrite.
        subs = -1
        while subs != 0:
            target, subs = re.subn(search, replace, target, count=1)
            target = target.replace('--', '+')
            target = target.replace('-+', '-')
        return target
    def _evaluate_(self, thing):
        # `thing` is a str, or a regex Match whose group 1 holds the inner
        # parenthesised sub-expression (when used as a _subber_ callback).
        if type(thing) != str:
            thing = thing[1]
        # Innermost parentheses first (recursively), then '*'/'/', then sum
        # whatever signed numbers remain.
        thing = self._subber_('\\(([^\\(\\)]*?)\\)', self._evaluate_, thing)
        thing = self._subber_(
            f'(?P<n1>{self.re_num})(?P<op>\\*|\\/)(?P<n2>{self.re_num})',
            self._muldiv_, thing)
        return self._float_to_string_(sum(float(val[0]) for val in re.
            findall(self.re_num, thing)))
    def evaluate(self, thing):
        """Strip spaces from `thing` and return its numeric value as float."""
        return float(self._evaluate_(thing.replace(' ', '')))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Calculator(object):
    """Evaluate arithmetic expressions (+ - * / and parentheses) by
    repeated regex rewriting rather than parsing into an AST."""
    # One signed decimal number: optional sign, digits, optional fraction.
    re_num = '(([-+])?(\\d+)(\\.\\d+)?)'

    def _float_to_string_(self, f, p=40):
        # Render f with p decimal places and a leading sign, then trim
        # trailing zeros (keeping one digit after the '.').
        result = f'{f:+1.{p}f}'
        if '.' in result:
            result = result.rstrip('0')
            if result[-1] == '.':
                result += '0'
        return result

    def _muldiv_(self, m):
        # re.subn callback: collapse one 'a*b' or 'a/b' match to a literal.
        op = operator.mul if m.group('op') == '*' else operator.truediv
        return self._float_to_string_(op(float(m.group('n1')), float(m.
            group('n2'))))

    def _subber_(self, search, replace, target):
        # Apply `replace` one match at a time until the text is stable,
        # normalising '--' -> '+' and '-+' -> '-' after each pass.
        subs = -1
        while subs != 0:
            target, subs = re.subn(search, replace, target, count=1)
            target = target.replace('--', '+')
            target = target.replace('-+', '-')
        return target

    def _evaluate_(self, thing):
        # Accepts a plain string, or a Match (group 1 = inner expression)
        # when invoked recursively as the parenthesis-rewrite callback.
        if type(thing) != str:
            thing = thing[1]
        thing = self._subber_('\\(([^\\(\\)]*?)\\)', self._evaluate_, thing)
        thing = self._subber_(
            f'(?P<n1>{self.re_num})(?P<op>\\*|\\/)(?P<n2>{self.re_num})',
            self._muldiv_, thing)
        return self._float_to_string_(sum(float(val[0]) for val in re.
            findall(self.re_num, thing)))

    def evaluate(self, thing):
        """Public entry point: strip spaces and return the value as float."""
        return float(self._evaluate_(thing.replace(' ', '')))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Calculator(object):
    """Regex-rewriting arithmetic evaluator: parentheses are reduced
    innermost-first, then '*'/'/', then signed terms are summed."""
    # Matches one signed decimal number.
    re_num = '(([-+])?(\\d+)(\\.\\d+)?)'

    def _float_to_string_(self, f, p=40):
        # Signed fixed-point render; strip trailing zeros but keep '.0'.
        result = f'{f:+1.{p}f}'
        if '.' in result:
            result = result.rstrip('0')
            if result[-1] == '.':
                result += '0'
        return result

    def _muldiv_(self, m):
        # Callback folding one multiplication/division into a literal.
        op = operator.mul if m.group('op') == '*' else operator.truediv
        return self._float_to_string_(op(float(m.group('n1')), float(m.
            group('n2'))))

    def _subber_(self, search, replace, target):
        # One-match-at-a-time substitution loop with sign normalisation.
        subs = -1
        while subs != 0:
            target, subs = re.subn(search, replace, target, count=1)
            target = target.replace('--', '+')
            target = target.replace('-+', '-')
        return target

    def _evaluate_(self, thing):
        # str input, or Match whose group 1 is the inner expression.
        if type(thing) != str:
            thing = thing[1]
        thing = self._subber_('\\(([^\\(\\)]*?)\\)', self._evaluate_, thing)
        thing = self._subber_(
            f'(?P<n1>{self.re_num})(?P<op>\\*|\\/)(?P<n2>{self.re_num})',
            self._muldiv_, thing)
        return self._float_to_string_(sum(float(val[0]) for val in re.
            findall(self.re_num, thing)))

    def evaluate(self, thing):
        """Evaluate `thing` (spaces ignored) and return a float."""
        return float(self._evaluate_(thing.replace(' ', '')))
def calc(expression):
    """Module-level convenience wrapper around Calculator.evaluate."""
    return Calculator().evaluate(expression)
if __name__ == '__main__':
    # Smoke test; expected output is 12.957005441119316.
    print(
        f"result = {calc('-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)')}"
        )
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import operator
import re
class Calculator(object):
    """Arithmetic expression evaluator based on iterative regex rewriting:
    parentheses collapse innermost-first, then '*'/'/' pairs, and the
    surviving signed numbers are summed."""
    # One signed decimal number (optional sign, integer part, optional fraction).
    re_num = r'(([-+])?(\d+)(\.\d+)?)'

    def _float_to_string_(self, f, p=40):
        """Render `f` with an explicit sign and trim trailing zeros."""
        text = f'{f:+1.{p}f}'
        if '.' not in text:
            return text
        text = text.rstrip('0')
        if text.endswith('.'):
            text += '0'
        return text

    def _muldiv_(self, m):
        """re.subn callback collapsing one 'a*b' or 'a/b' into a literal."""
        if m.group('op') == '*':
            combine = operator.mul
        else:
            combine = operator.truediv
        folded = combine(float(m.group('n1')), float(m.group('n2')))
        return self._float_to_string_(folded)

    def _subber_(self, search, replace, target):
        """Apply `replace` one match at a time until the text is stable,
        normalising '--' -> '+' and '-+' -> '-' after every rewrite."""
        hits = -1
        while hits != 0:
            target, hits = re.subn(search, replace, target, count=1)
            target = target.replace('--', '+').replace('-+', '-')
        return target

    def _evaluate_(self, thing):
        """Reduce a (sub-)expression to one signed numeric literal."""
        if type(thing) != str:
            # Called back from _subber_ with a Match: group 1 holds the
            # body of the innermost parenthesised expression.
            thing = thing[1]
        thing = self._subber_(r'\(([^\(\)]*?)\)', self._evaluate_, thing)
        muldiv = f'(?P<n1>{self.re_num})(?P<op>\\*|\\/)(?P<n2>{self.re_num})'
        thing = self._subber_(muldiv, self._muldiv_, thing)
        terms = (float(groups[0]) for groups in re.findall(self.re_num, thing))
        return self._float_to_string_(sum(terms))

    def evaluate(self, thing):
        """Public entry point: strip spaces and return the value as float."""
        return float(self._evaluate_(thing.replace(' ', '')))
def calc(expression):
    """Evaluate the arithmetic `expression` string and return a float."""
    engine = Calculator()
    return engine.evaluate(expression)
if __name__ == '__main__':
    # Demo run; the expression evaluates to 12.957005441119316.
    print(
        f"result = {calc('-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)')}"
        )
<|reserved_special_token_1|>
""" Codewars kata: Evaluate mathematical expression. https://www.codewars.com/kata/52a78825cdfc2cfc87000005/train/python """
#######################################################################################################################
#
# Import
#
#######################################################################################################################
import operator
import re
#######################################################################################################################
#
# Calculator
#
#######################################################################################################################
class Calculator(object):
    """String-rewriting arithmetic evaluator (+ - * / and parentheses)."""
    # One signed decimal number.
    re_num = r"(([-+])?(\d+)(\.\d+)?)"
    def _float_to_string_(self, f, p=40):
        # decimal.Decimal would let us avoid these shenanigans, but it's not available.
        # Signed fixed-point render at p decimals; strip trailing zeros
        # while keeping one digit after the '.'.
        result = f"{f:+1.{p}f}"
        if "." in result:
            result = result.rstrip("0")
            if result[-1] == ".": result += "0"
        return result
    def _muldiv_(self, m):
        # re.subn callback: fold one 'a*b' / 'a/b' match into a literal.
        op = operator.mul if m.group("op") == "*" else operator.truediv
        return self._float_to_string_(op(float(m.group('n1')), float(m.group('n2'))))
    def _subber_(self, search, replace, target):
        # Substitute one match per pass until stable, collapsing sign pairs.
        subs = -1
        while subs != 0:
            target, subs = re.subn(search, replace, target, count=1)
            target = target.replace("--", "+")
            target = target.replace("-+", "-")
        return target
    def _evaluate_(self, thing):
        # str input, or a Match whose group 1 is the inner expression.
        if type(thing) != str:
            thing = thing[1]
        thing = self._subber_(r"\(([^\(\)]*?)\)", self._evaluate_, thing)
        thing = self._subber_(rf"(?P<n1>{self.re_num})(?P<op>\*|\/)(?P<n2>{self.re_num})", self._muldiv_, thing)
        return self._float_to_string_(sum(float(val[0]) for val in re.findall(self.re_num, thing)))
    def evaluate(self, thing):
        """Strip spaces and evaluate `thing`, returning a float."""
        return float(self._evaluate_(thing.replace(" ", "")))
def calc(expression):
    """Convenience wrapper: evaluate *expression* with a fresh Calculator."""
    calculator = Calculator()
    return calculator.evaluate(expression)
#######################################################################################################################
#
# __main__
#
#######################################################################################################################
if __name__ == "__main__":
    # Smoke test from the kata; expected result: 12.957005441119316
    expression = '-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)'
    print(f"result = {calc(expression)}")
|
flexible
|
{
"blob_id": "ac14e88810b848dbf4ff32ea99fd274cd0285e1c",
"index": 3539,
"step-1": "<mask token>\n\n\nclass Calculator(object):\n <mask token>\n\n def _float_to_string_(self, f, p=40):\n result = f'{f:+1.{p}f}'\n if '.' in result:\n result = result.rstrip('0')\n if result[-1] == '.':\n result += '0'\n return result\n\n def _muldiv_(self, m):\n op = operator.mul if m.group('op') == '*' else operator.truediv\n return self._float_to_string_(op(float(m.group('n1')), float(m.\n group('n2'))))\n\n def _subber_(self, search, replace, target):\n subs = -1\n while subs != 0:\n target, subs = re.subn(search, replace, target, count=1)\n target = target.replace('--', '+')\n target = target.replace('-+', '-')\n return target\n\n def _evaluate_(self, thing):\n if type(thing) != str:\n thing = thing[1]\n thing = self._subber_('\\\\(([^\\\\(\\\\)]*?)\\\\)', self._evaluate_, thing)\n thing = self._subber_(\n f'(?P<n1>{self.re_num})(?P<op>\\\\*|\\\\/)(?P<n2>{self.re_num})',\n self._muldiv_, thing)\n return self._float_to_string_(sum(float(val[0]) for val in re.\n findall(self.re_num, thing)))\n\n def evaluate(self, thing):\n return float(self._evaluate_(thing.replace(' ', '')))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Calculator(object):\n re_num = '(([-+])?(\\\\d+)(\\\\.\\\\d+)?)'\n\n def _float_to_string_(self, f, p=40):\n result = f'{f:+1.{p}f}'\n if '.' in result:\n result = result.rstrip('0')\n if result[-1] == '.':\n result += '0'\n return result\n\n def _muldiv_(self, m):\n op = operator.mul if m.group('op') == '*' else operator.truediv\n return self._float_to_string_(op(float(m.group('n1')), float(m.\n group('n2'))))\n\n def _subber_(self, search, replace, target):\n subs = -1\n while subs != 0:\n target, subs = re.subn(search, replace, target, count=1)\n target = target.replace('--', '+')\n target = target.replace('-+', '-')\n return target\n\n def _evaluate_(self, thing):\n if type(thing) != str:\n thing = thing[1]\n thing = self._subber_('\\\\(([^\\\\(\\\\)]*?)\\\\)', self._evaluate_, thing)\n thing = self._subber_(\n f'(?P<n1>{self.re_num})(?P<op>\\\\*|\\\\/)(?P<n2>{self.re_num})',\n self._muldiv_, thing)\n return self._float_to_string_(sum(float(val[0]) for val in re.\n findall(self.re_num, thing)))\n\n def evaluate(self, thing):\n return float(self._evaluate_(thing.replace(' ', '')))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Calculator(object):\n re_num = '(([-+])?(\\\\d+)(\\\\.\\\\d+)?)'\n\n def _float_to_string_(self, f, p=40):\n result = f'{f:+1.{p}f}'\n if '.' in result:\n result = result.rstrip('0')\n if result[-1] == '.':\n result += '0'\n return result\n\n def _muldiv_(self, m):\n op = operator.mul if m.group('op') == '*' else operator.truediv\n return self._float_to_string_(op(float(m.group('n1')), float(m.\n group('n2'))))\n\n def _subber_(self, search, replace, target):\n subs = -1\n while subs != 0:\n target, subs = re.subn(search, replace, target, count=1)\n target = target.replace('--', '+')\n target = target.replace('-+', '-')\n return target\n\n def _evaluate_(self, thing):\n if type(thing) != str:\n thing = thing[1]\n thing = self._subber_('\\\\(([^\\\\(\\\\)]*?)\\\\)', self._evaluate_, thing)\n thing = self._subber_(\n f'(?P<n1>{self.re_num})(?P<op>\\\\*|\\\\/)(?P<n2>{self.re_num})',\n self._muldiv_, thing)\n return self._float_to_string_(sum(float(val[0]) for val in re.\n findall(self.re_num, thing)))\n\n def evaluate(self, thing):\n return float(self._evaluate_(thing.replace(' ', '')))\n\n\ndef calc(expression):\n return Calculator().evaluate(expression)\n\n\nif __name__ == '__main__':\n print(\n f\"result = {calc('-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)')}\"\n )\n",
"step-4": "<mask token>\nimport operator\nimport re\n\n\nclass Calculator(object):\n re_num = '(([-+])?(\\\\d+)(\\\\.\\\\d+)?)'\n\n def _float_to_string_(self, f, p=40):\n result = f'{f:+1.{p}f}'\n if '.' in result:\n result = result.rstrip('0')\n if result[-1] == '.':\n result += '0'\n return result\n\n def _muldiv_(self, m):\n op = operator.mul if m.group('op') == '*' else operator.truediv\n return self._float_to_string_(op(float(m.group('n1')), float(m.\n group('n2'))))\n\n def _subber_(self, search, replace, target):\n subs = -1\n while subs != 0:\n target, subs = re.subn(search, replace, target, count=1)\n target = target.replace('--', '+')\n target = target.replace('-+', '-')\n return target\n\n def _evaluate_(self, thing):\n if type(thing) != str:\n thing = thing[1]\n thing = self._subber_('\\\\(([^\\\\(\\\\)]*?)\\\\)', self._evaluate_, thing)\n thing = self._subber_(\n f'(?P<n1>{self.re_num})(?P<op>\\\\*|\\\\/)(?P<n2>{self.re_num})',\n self._muldiv_, thing)\n return self._float_to_string_(sum(float(val[0]) for val in re.\n findall(self.re_num, thing)))\n\n def evaluate(self, thing):\n return float(self._evaluate_(thing.replace(' ', '')))\n\n\ndef calc(expression):\n return Calculator().evaluate(expression)\n\n\nif __name__ == '__main__':\n print(\n f\"result = {calc('-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)')}\"\n )\n",
"step-5": "\"\"\" Codewars kata: Evaluate mathematical expression. https://www.codewars.com/kata/52a78825cdfc2cfc87000005/train/python \"\"\"\n\n#######################################################################################################################\n#\n# Import\n#\n#######################################################################################################################\n\nimport operator\nimport re\n\n\n#######################################################################################################################\n#\n# Calculator\n#\n#######################################################################################################################\n\nclass Calculator(object):\n re_num = r\"(([-+])?(\\d+)(\\.\\d+)?)\"\n\n def _float_to_string_(self, f, p=40):\n # decimal.Decimal would let us avoid these shenanigans, but it's not available.\n result = f\"{f:+1.{p}f}\"\n if \".\" in result:\n result = result.rstrip(\"0\")\n if result[-1] == \".\": result += \"0\"\n return result\n\n def _muldiv_(self, m):\n op = operator.mul if m.group(\"op\") == \"*\" else operator.truediv\n return self._float_to_string_(op(float(m.group('n1')), float(m.group('n2'))))\n\n def _subber_(self, search, replace, target):\n subs = -1\n while subs != 0:\n target, subs = re.subn(search, replace, target, count=1)\n target = target.replace(\"--\", \"+\")\n target = target.replace(\"-+\", \"-\")\n return target\n\n def _evaluate_(self, thing):\n if type(thing) != str:\n thing = thing[1]\n thing = self._subber_(r\"\\(([^\\(\\)]*?)\\)\", self._evaluate_, thing)\n thing = self._subber_(rf\"(?P<n1>{self.re_num})(?P<op>\\*|\\/)(?P<n2>{self.re_num})\", self._muldiv_, thing)\n return self._float_to_string_(sum(float(val[0]) for val in re.findall(self.re_num, thing)))\n\n def evaluate(self, thing):\n return float(self._evaluate_(thing.replace(\" \", \"\")))\n\n\ndef calc(expression):\n return 
Calculator().evaluate(expression)\n\n\n#######################################################################################################################\n#\n# __main__\n#\n#######################################################################################################################\n\nif __name__ == \"__main__\":\n print(f\"result = {calc('-(-13) - (84 + 51 * (40)) * (5 / ((((83 * -32)))) / -93)')}\") # 12.957005441119316\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.