seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
24845865028 | """
GraphEdge class
"""
import math
from typing import List, Set, Tuple, Optional
from ordered_set import OrderedSet
import numpy as np
from commonroad_geometric.external.map_conversion.osm2cr import config
from commonroad_geometric.external.map_conversion.osm2cr.converter_modules.utility import geometry
from commonroad_geometric.external.map_conversion.osm2cr.converter_modules.utility.custom_types import (
Road_info,
Assumption_info,
)
from ._graph_node import GraphNode
from ._graph_lane import Lane
class GraphEdge:
    """
    Class that represents an edge in the graph structure
    """
    def __init__(
        self,
        id: int,
        node1: GraphNode,
        node2: GraphNode,
        waypoints: List[geometry.Point],
        lane_info: Road_info,
        assumptions: Assumption_info,
        speedlimit: float,
        roadtype: str,
    ):
        """
        creates an edge
        :param id: unique id
        :type id: int
        :param node1: node the edge starts at
        :type node1: GraphNode
        :param node2: node the edge ends at
        :type node2: GraphNode
        :param waypoints: list of waypoints for the course of the edge
        :type waypoints: List[geometry.Point]
        :param lane_info: information about lanes on the edge
        :type lane_info: Road_info
        :param assumptions: assumptions made about the edge
        :type assumptions: Assumption_info
        :param speedlimit: speed limit on the edge
        :type speedlimit: float
        :param roadtype: type of road the edge represents
        :type roadtype: str
        """
        nr_of_lanes, forward_lanes, backward_lanes, oneway, turnlanes, turnlanes_forward, turnlanes_backward = (
            lane_info
        )
        lane_nr_assumed, lanes_assumed, oneway_assumed = assumptions
        self.id: int = id
        self.node1: GraphNode = node1
        self.node2: GraphNode = node2
        self.waypoints: List[geometry.Point] = waypoints
        self.nr_of_lanes: int = nr_of_lanes
        # number of forward lanes
        self.forward_lanes: int = forward_lanes
        # number of backward lanes
        self.backward_lanes: int = backward_lanes
        self.oneway: bool = oneway
        self.speedlimit: float = speedlimit
        self.roadtype: str = roadtype
        self.turnlanes_forward: Optional[List[str]] = turnlanes_forward
        self.turnlanes_backward: Optional[List[str]] = turnlanes_backward
        # flags recording which attributes were assumed rather than given
        self.lane_nr_assumed: bool = lane_nr_assumed
        self.lanes_assumed: bool = lanes_assumed
        self.oneway_assumed: bool = oneway_assumed
        self.lanes: List[Lane] = []
        # filled lazily by get_interpolated_waypoints()
        self.interpolated_waypoints: Optional[List[np.ndarray]] = None
        # indices of the two interpolated waypoints around the edge center
        self.central_points: Optional[Tuple[int, int]] = None
        self.forward_successor: Optional[GraphEdge] = None
        self.backward_successor: Optional[GraphEdge] = None
        self.lanewidth: float = config.LANEWIDTHS[roadtype]
        self.forward_restrictions: Set[str] = set()
        self.backward_restrictions: Set[str] = set()
        self.traffic_signs = []
        self.traffic_lights = []
    def __str__(self):
        return "Graph_edge {}: {}->{}".format(self.id, self.node1.id, self.node2.id)
    def __repr__(self):
        return "Graph_edge {}: {}->{}".format(self.id, self.node1.id, self.node2.id)
    def flip(self) -> None:
        """
        flips the direction of the edge and all its lanes
        this can be used if nr of forward lanes was changed to zero
        only use this if edge has >=1 backward lanes at start
        :return: None
        """
        assert self.backward_lanes > 0 or self.oneway
        if self.oneway:
            # flip behaves differently for oneway streets
            self.node1, self.node2 = self.node2, self.node1
            for lane in self.lanes:
                lane.flip(True)
            self.lanes = self.lanes[::-1]
            if self.waypoints is not None:
                self.waypoints = self.waypoints[::-1]
            if self.interpolated_waypoints is not None:
                self.interpolated_waypoints = self.interpolated_waypoints[::-1]
            self.forward_successor, self.backward_successor = (
                self.backward_successor,
                self.forward_successor,
            )
            # a oneway edge has no backward direction, so restrictions and
            # turn lanes are discarded instead of swapped
            self.forward_restrictions = set()
            self.backward_restrictions = set()
            self.turnlanes_forward = None
            self.turnlanes_backward = None
        else:
            self.node1, self.node2 = self.node2, self.node1
            for lane in self.lanes:
                lane.flip(False)
            self.lanes = self.lanes[::-1]
            if self.waypoints is not None:
                self.waypoints = self.waypoints[::-1]
            if self.interpolated_waypoints is not None:
                self.interpolated_waypoints = self.interpolated_waypoints[::-1]
            self.forward_successor, self.backward_successor = (
                self.backward_successor,
                self.forward_successor,
            )
            # bidirectional edge: everything direction-specific is swapped
            self.forward_restrictions, self.backward_restrictions = (
                self.backward_restrictions,
                self.forward_restrictions,
            )
            self.forward_lanes, self.backward_lanes = (
                self.backward_lanes,
                self.forward_lanes,
            )
            self.turnlanes_forward, self.turnlanes_backward = (
                self.turnlanes_backward,
                self.turnlanes_forward,
            )
            assert self.forward_lanes > 0
    def points_to(self, node: GraphNode) -> bool:
        """
        determines if edge ends at node
        :param node: checked node
        :return: True if edge ends at node, else False
        """
        return node == self.node2
    def get_orientation(self, node: GraphNode) -> float:
        """
        calculates the orientation of an edge at a specified end,
        i.e. the direction of the first edge segment flipped by pi so it
        points outward past *node*
        :param node: node at whose end the orientation is calculated
        :return: orientation in radians
        """
        if len(self.waypoints) < 2:
            raise ValueError(
                "this edge has not enough waypoints to determine its orientation"
            )
        if node == self.node1:
            x = self.waypoints[1].x - self.waypoints[0].x
            y = self.waypoints[1].y - self.waypoints[0].y
        elif node == self.node2:
            x = self.waypoints[-2].x - self.waypoints[-1].x
            y = self.waypoints[-2].y - self.waypoints[-1].y
        else:
            raise ValueError("the given node is not an endpoint of this edge")
        # adding pi reverses the vector, so the result is in [0, 2*pi)
        return np.arctan2(y, x) + np.pi
    def get_compass_degrees(self):
        """
        calculates the compass degrees of an edge as in
        https://en.wikipedia.org/wiki/Points_of_the_compass#/media/File:Compass_Card_B+W.svg
        :return: compass orientation in degrees
        """
        # compute radians
        delta_x = self.node2.x - self.node1.x
        delta_y = self.node2.y - self.node1.y
        radians = np.arctan2(delta_y, delta_x)
        # mirror the mathematical (counter-clockwise) angle into a
        # clockwise one; https://stackoverflow.com/a/7805311
        if radians < 0.0:
            radians = abs(radians)
        else:
            radians = 2 * np.pi - radians
        degrees = math.degrees(radians)
        # rotate so 0 degrees is North instead of East
        degrees += 90.0
        if degrees > 360.0:
            degrees -= 360.0
        # return correctly computed degrees
        return degrees
    def angle_to(self, edge: "GraphEdge", node: GraphNode) -> float:
        """
        calculates the angle between two edges at a given node in radians
        :param edge: the other edge
        :param node: the node at which the angle is calculated
        :return: the angle between the edges, in [0, pi]
        """
        diff1 = abs(self.get_orientation(node) - edge.get_orientation(node))
        # take the smaller of the two angles around the circle
        diff2 = np.pi * 2 - diff1
        return min(diff1, diff2)
    def soft_angle(self, edge: "GraphEdge", node: GraphNode) -> bool:
        """
        determines if the angle to another edge is soft
        :param edge: other edge
        :param node: the node at which the angle is calculated
        :return: True if angle is soft, else False
        """
        threshold = np.deg2rad(config.SOFT_ANGLE_THRESHOLD)
        return self.angle_to(edge, node) > threshold
    def get_width(self) -> float:
        """
        calculates the width of the road the edge represents
        :return: width
        """
        return self.nr_of_lanes * config.LANEWIDTHS[self.roadtype]
    def generate_lanes(self) -> None:
        """
        generates lanes for the edge: backward lanes first (running
        node2->node1), then forward lanes, and wires up the left/right
        adjacency between neighboring lanes
        :return: None
        """
        assert self.forward_lanes + self.backward_lanes == self.nr_of_lanes
        backwardlanes = []
        for count in range(self.backward_lanes):
            turnlane = "none"
            if self.turnlanes_backward is not None:
                # backward turn lanes are listed in forward order, so read from the end
                turnlane = self.turnlanes_backward[-(count + 1)]
            new_lane = Lane(
                self,
                OrderedSet(),
                OrderedSet(),
                turnlane,
                self.lanewidth,
                self.lanewidth,
                self.node2,
                self.node1,
                self.speedlimit,
            )
            new_lane.forward = False
            backwardlanes.append(new_lane)
        forwardlanes = []
        for count in range(self.forward_lanes):
            turnlane = "none"
            if self.turnlanes_forward is not None:
                turnlane = self.turnlanes_forward[count]
            new_lane = Lane(
                self,
                OrderedSet(),
                OrderedSet(),
                turnlane,
                self.lanewidth,
                self.lanewidth,
                self.node1,
                self.node2,
                self.speedlimit,
            )
            new_lane.forward = True
            forwardlanes.append(new_lane)
        # adjacency among same-direction backward lanes
        for index, lane in enumerate(backwardlanes[:-1]):
            lane.adjacent_left = backwardlanes[index + 1]
            lane.adjacent_left_direction_equal = True
            backwardlanes[index + 1].adjacent_right = lane
            backwardlanes[index + 1].adjacent_right_direction_equal = True
        # adjacency among same-direction forward lanes
        for index, lane in enumerate(forwardlanes[:-1]):
            lane.adjacent_right = forwardlanes[index + 1]
            lane.adjacent_right_direction_equal = True
            forwardlanes[index + 1].adjacent_left = lane
            forwardlanes[index + 1].adjacent_left_direction_equal = True
        # link the innermost backward and forward lanes across the center line
        if len(forwardlanes) > 0 and len(backwardlanes) > 0:
            backwardlanes[-1].adjacent_left = forwardlanes[0]
            backwardlanes[-1].adjacent_left_direction_equal = False
            forwardlanes[0].adjacent_left = backwardlanes[-1]
            forwardlanes[0].adjacent_left_direction_equal = False
        self.lanes = backwardlanes + forwardlanes
        assert len(self.lanes) == self.nr_of_lanes
    def get_interpolated_waypoints(self, save=True) -> List[np.ndarray]:
        """
        loads the interpolated waypoints if already generated
        interpolates waypoints, otherwise

        Two-point edges are interpolated linearly; longer polylines are
        smoothed segment-wise with cubic bezier curves.
        NOTE(review): an edge with fewer than 2 waypoints raises IndexError
        here -- confirm callers guarantee len(waypoints) >= 2.

        :param save: set to true if the edge should save the waypoints, default is true
        :return: interpolated waypoints
        """
        if self.interpolated_waypoints is not None:
            return self.interpolated_waypoints
        else:
            point_distance = config.INTERPOLATION_DISTANCE_INTERNAL
            d = config.BEZIER_PARAMETER
            result = []
            if len(self.waypoints) <= 2:
                # straight edge: evenly spaced points on the segment p1->p2
                p1 = self.waypoints[0].get_array()
                p2 = self.waypoints[1].get_array()
                n = max(int(np.linalg.norm(p1 - p2) / point_distance), 2)
                for index in range(n):
                    result.append(p1 + (p2 - p1) * index / n)
                result.append(p2)
                if save:
                    self.interpolated_waypoints = result
                    self.central_points = (int(len(result) / 2 - 1), int(len(result) / 2))
                return result
            for index in range(len(self.waypoints) - 1):
                # build control points p1..p4 for the bezier of this segment;
                # the first and last segments have only one interior neighbor
                if index == 0:
                    p1, p4 = (
                        self.waypoints[0].get_array(),
                        self.waypoints[1].get_array(),
                    )
                    p2 = p1 + (p4 - p1) * d
                    p3 = geometry.get_inner_bezier_point(
                        self.waypoints[2].get_array(), p4, p1, d
                    )
                elif index == len(self.waypoints) - 2:
                    p1, p4 = (
                        self.waypoints[index].get_array(),
                        self.waypoints[index + 1].get_array(),
                    )
                    p2 = geometry.get_inner_bezier_point(
                        self.waypoints[index - 1].get_array(), p1, p4, d
                    )
                    p3 = p4 + (p1 - p4) * d
                else:
                    segment_points = []
                    for i in range(4):
                        segment_points.append(self.waypoints[index + i - 1])
                    segment_points = [x.get_array() for x in segment_points]
                    p1, p2, p3, p4 = geometry.get_bezier_points_of_segment(
                        np.array(segment_points), d
                    )
                n = max(int(np.linalg.norm(p1 - p4) / point_distance), 2)
                result += geometry.evaluate_bezier(np.array([p1, p2, p3, p4]), n)
            if save:
                self.interpolated_waypoints = result
                self.central_points = (int(len(result) / 2 - 1), int(len(result) / 2))
            return result
    def get_crop_index(self, node: GraphNode, distance: float) -> Tuple[int, int]:
        """
        calculates the index to which the edge needs to be cropped to have a specified distance to a node
        :param node: the node, the distance refers to
        :param distance: the desired distance to the node
        :return: index of new start and end of waypoints
        """
        point = np.array([node.x, node.y])
        waypoints = self.get_interpolated_waypoints()
        if self.node2 == node:
            # walk inward from the far (node2) end until outside the radius
            index = len(waypoints) - 1
            while (index >= 0 and np.linalg.norm(waypoints[index] - point) < distance):
                index -= 1
            return 0, index
        else:
            # walk inward from the near (node1) end until outside the radius
            index = 0
            while (
                index < len(waypoints)
                and np.linalg.norm(waypoints[index] - point) < distance
            ):
                index += 1
            return index, len(waypoints) - 1
    def crop(
        self, index1: int, index2: int, edges_to_delete: List["GraphEdge"]
    ) -> None:
        """
        crops waypoints of edge to given indices
        if remaining interval is empty, it is set to the center two elements
        also the edge is added to the list of edges that will be deleted

        NOTE(review): despite the docstring, the entry assertion requires
        index2 < len(waypoints), while the recomputed index2 may equal
        len(waypoints) -- confirm which bound callers rely on.

        :param index1: index of first waypoint included
        :param index2: index of first waypoint excluded
        :param edges_to_delete: list of edges that will be deleted
        :return: None
        """
        waypoints = self.get_interpolated_waypoints()
        assert index1 in range(len(waypoints))
        assert index2 in range(len(waypoints))
        if index1 >= index2 - 1:
            # degenerate interval: keep the two central points and mark the
            # edge for deletion
            if self not in edges_to_delete:
                edges_to_delete.append(self)
            middle = int((index1 + index2) / 2)
            index1 = max(0, middle - 1)
            index2 = index1 + 2
        assert index1 in range(len(waypoints))
        assert index2 in range(len(waypoints) + 1)
        self.interpolated_waypoints = waypoints[index1:index2]
    def exchange_node(self, node_old: GraphNode, node_new: GraphNode) -> None:
        """
        Exchanges a node of an edge with a new node, in the edge itself and
        in all of its lanes
        :param node_old: Node to be replaced
        :param node_new: Node to replace with
        :return: None
        """
        if node_old == self.node1:
            self.node1 = node_new
        elif node_old == self.node2:
            self.node2 = node_new
        else:
            raise ValueError("node_old is not assigned to Edge")
        for lane in self.lanes:
            lane.exchange_node(node_old, node_new)
        return
    def common_node(self, other_edge: "GraphEdge") -> Optional[GraphNode]:
        """
        finds the common node between two edges
        :param other_edge:
        :return: the common node, None if there is no common node
        """
        if other_edge.node1 == self.node1 or other_edge.node2 == self.node1:
            return self.node1
        elif other_edge.node1 == self.node2 or other_edge.node2 == self.node2:
            return self.node2
    def get_waypoints(self) -> np.ndarray:
        """
        returns the waypoints as a numpy array
        :return: waypoints as np array
        """
        return np.array([p.get_array() for p in self.waypoints])
    def add_traffic_sign(self, sign: "GraphTrafficSign"):
        """
        adds traffic signs to all lanes of the edge
        :param sign: the sign to add
        :return: None
        """
        # TODO handle direction for traffic signs where no direction is given (e.g parsed maxspeed from OSM).
        # Currently, every sign of these is added to the forward lane only
        self.traffic_signs.append(sign)
        forward = True
        sign_direction = sign.direction
        # add traffic sign to direction wise lane if direction is given.
        # This is the case for all mapillary signs
        if sign_direction is not None:
            # get compass degrees of edge
            edge_orientation = self.get_compass_degrees()
            if abs(sign_direction-edge_orientation) < 180:
                forward = False
        for lane in self.lanes:
            # add sign to forward lanes
            if lane.forward and forward:
                lane.add_traffic_sign(sign)
            # add to backward lanes
            elif (not lane.forward) and (not forward):
                lane.add_traffic_sign(sign)
    def add_traffic_light(self, light: "GraphTrafficLight", forward):
        """
        adds traffic light to all lanes of the edge that run in the given
        direction
        :param light: the light to add
        :param forward: direction the light applies to
        :return: None
        """
        self.traffic_lights.append(light)
        for lane in self.lanes:
            if lane.forward == forward:
                lane.add_traffic_light(light)
| CommonRoad/crgeo | commonroad_geometric/external/map_conversion/osm2cr/converter_modules/graph_operations/road_graph/_graph_edge.py | _graph_edge.py | py | 18,504 | python | en | code | 25 | github-code | 13 |
41593821062 | #url: https://www.hackerrank.com/challenges/py-check-strict-superset/problem
# Enter your code here. Read input from STDIN. Print output to STDOUT
# Check whether A is a *strict* superset of every test set: A must contain
# all elements of x AND not be equal to x. Python's ">" operator on sets is
# exactly this test; the previous issuperset() call wrongly accepted A == x.
N = set(input().split(" "))
n = int(input())
j = 0
for i in range(n):
    x = set(input().split(" "))
    if N > x:
        j += 1
print(j == n)
| Huido1/Hackerrank | Python/04 - Sets/13 - Check Strict Superset.py | 13 - Check Strict Superset.py | py | 326 | python | en | code | 0 | github-code | 13 |
def maxSequence(arr):
    """Return the maximum sum over all contiguous subarrays of *arr*.

    The empty subarray (sum 0) is always a candidate, so the result is
    never negative and an empty list yields 0 — the same results the old
    O(n^3) enumeration of every window produced, now in O(n) via Kadane's
    algorithm.

    :param arr: list of integers
    :return: largest contiguous-subarray sum (>= 0)
    """
    best = 0      # best sum seen so far (empty subarray allowed)
    current = 0   # best sum of a subarray ending at the current element
    for value in arr:
        # extend the running subarray, or restart it once it goes negative
        current = max(0, current + value)
        best = max(best, current)
    return best
| agharsh21/codewars_solutions | 5kyu/Maximum subarray sum/maximum-subarray-sum.py | maximum-subarray-sum.py | py | 384 | python | en | code | 0 | github-code | 13 |
1452557180 | import re
import mariadb
import dbcreds
from flask import request, Response
import json
from app import app
import datetime
def _dish_to_dict(row, date_key="dateCreated"):
    """Map a (id, dish_name, price, category, date) row to a response dict."""
    return {
        "dishId": row[0],
        "dishName": row[1],
        "price": row[2],
        "category": row[3],
        date_key: row[4],
    }


def _get_position(cursor, login_token):
    """Return the position of the user owning *login_token*, or None.

    The previous code indexed cursor.fetchone()[0] unconditionally, which
    raised TypeError for an unknown token and fell into the catch-all
    handler instead of returning the intended error response.
    """
    cursor.execute(
        "SELECT position FROM users INNER JOIN user_login ON users.id=user_login.user_id WHERE login_token=?",
        [login_token])
    row = cursor.fetchone()
    return row[0] if row is not None else None


@app.route("/api/dishes", methods = ["GET", "POST", "PATCH", "DELETE"])
def dishes():
    """CRUD endpoint for dishes.

    GET    ?dishId=<id> returns one dish; without a matching id the full
           dish list is returned instead.
    POST   (manager only) creates a dish from the JSON body.
    PATCH  (manager only) updates one field (dishName, price or category).
    DELETE (manager only) removes a dish.
    """
    cursor = None
    conn = None
    try:
        conn = mariadb.connect(
            user=dbcreds.user,
            password=dbcreds.password,
            host=dbcreds.host,
            port=dbcreds.port,
            database=dbcreds.database
        )
        cursor = conn.cursor()
        if request.method == "GET":
            params = request.args
            cursor.execute(
                "SELECT id, dish_name, price, category, date_created FROM dishes WHERE id=?",
                [params.get("dishId")])
            dish_row = cursor.fetchone()
            if dish_row is not None:
                return Response(json.dumps(_dish_to_dict(dish_row), default=str),
                                mimetype="application/json",
                                status=200)
            # no (or unknown) dishId: fall back to listing every dish
            cursor.execute("SELECT id, dish_name, price, category, date_created FROM dishes")
            dish_list = [_dish_to_dict(row) for row in cursor.fetchall()]
            return Response(json.dumps(dish_list, default=str),
                            mimetype="application/json",
                            status=200)
        elif request.method == "POST":
            data = request.json
            date_created = datetime.datetime.today()
            position = _get_position(cursor, data.get("loginToken"))
            if position is None:
                return Response("Invalid data sent",
                                mimetype="text/html",
                                status=500)
            if position != "manager":
                return Response("You are not authorized",
                                mimetype="text/html",
                                status=400)
            cursor.execute(
                "INSERT INTO dishes(dish_name, price, category, date_created) VALUES(?,?,?,?)",
                [data.get("dishName"), data.get("price"), data.get("category"), date_created])
            conn.commit()
            # capture lastrowid before the next execute overwrites it
            new_dish_id = cursor.lastrowid
            cursor.execute("SELECT * FROM dishes WHERE id=?", [new_dish_id])
            new_row = cursor.fetchone()
            return Response(json.dumps(_dish_to_dict(new_row), default=str),
                            mimetype="application/json",
                            status=200)
        elif request.method == "PATCH":
            data = request.json
            date_modified = datetime.datetime.today()
            position = _get_position(cursor, data.get("loginToken"))
            cursor.execute("SELECT id FROM dishes WHERE id=?", [data.get("dishId")])
            dish_row = cursor.fetchone()
            if position is None or dish_row is None:
                return Response("Invalid data sent",
                                mimetype="text/html",
                                status=400)
            if position != "manager":
                return Response("You are not authorized!",
                                mimetype="text/html",
                                status=400)
            dish_id = dish_row[0]
            # exactly one field is updated per request; first non-empty wins
            if data.get("dishName") is not None and data.get("dishName") != "":
                cursor.execute("UPDATE dishes SET dish_name=?, date_modified=? WHERE id=?",
                               [data.get("dishName"), date_modified, dish_id])
            elif data.get("price") is not None and data.get("price") != "":
                cursor.execute("UPDATE dishes SET price=?, date_modified=? WHERE id=?",
                               [data.get("price"), date_modified, dish_id])
            elif data.get("category") is not None and data.get("category") != "":
                cursor.execute("UPDATE dishes SET category=?, date_modified=? WHERE id=?",
                               [data.get("category"), date_modified, dish_id])
            else:
                return Response("Field cannot be empty",
                                mimetype="text/html",
                                status=400)
            conn.commit()
            # the most recently modified dish is the one we just updated
            cursor.execute(
                "SELECT id, dish_name, price, category, date_modified FROM dishes ORDER BY date_modified DESC")
            updated_row = cursor.fetchone()
            return Response(json.dumps(_dish_to_dict(updated_row, "dateModified"), default=str),
                            mimetype="application/json",
                            status=200)
        elif request.method == "DELETE":
            data = request.json
            position = _get_position(cursor, data.get("loginToken"))
            cursor.execute("SELECT id FROM dishes WHERE id=?", [data.get("dishId")])
            dish_row = cursor.fetchone()
            if position is None or dish_row is None:
                return Response("Invalid data sent",
                                mimetype="text/html",
                                status=500)
            if position != "manager":
                return Response("You are not authorized",
                                mimetype="text/html",
                                status=400)
            cursor.execute("DELETE FROM dishes WHERE id=?", [dish_row[0]])
            conn.commit()
            return Response("Deleted successfully",
                            mimetype="text/html",
                            status=200)
        else:
            return Response("Method not allowed",
                            mimetype="text/html",
                            status=500)
    except mariadb.DataError:
        print("Something wrong with your data")
    except mariadb.OperationalError:
        # covers both query-level and connection-level operational failures
        # (the old code listed this exception twice; the second was dead)
        print("Something wrong with the connection")
    except mariadb.ProgrammingError:
        print("Your query was wrong")
    except mariadb.IntegrityError:
        print("Your query would have broken the database and we stopped it")
    except Exception:
        print("Something went wrong")
    finally:
        if cursor is not None:
            cursor.close()
        else:
            print("There was never a cursor to begin with")
        if conn is not None:
            # roll back anything uncommitted before closing; committed
            # changes are unaffected
            conn.rollback()
            conn.close()
        else:
            print("The connection never opened, nothing to close here")
print("The connection never opened, nothing to close here") | aldwin101/posbackend | endpoints/dishes.py | dishes.py | py | 9,174 | python | en | code | 0 | github-code | 13 |
5594621790 | import fastcli
import sys
import time
def lancer_test_connexion():
    """Run a speed test against the closest server and print the results."""
    print("Le test de connexion va être lancé...")
    best = fastcli.get_best_server()
    print("Serveur le plus proche : {}".format(best['sponsor']))
    down, up, latency = fastcli.test_speed(best['url'], callback=print_progression)
    print("Vitesse de téléchargement : {:.2f} Mbps".format(down))
    print("Vitesse de déversement : {:.2f} Mbps".format(up))
    print("Ping : {} ms".format(latency))
def print_progression(current, total):
    """Render an in-place textual progress bar on stdout."""
    progress = (current / total) * 100
    bar = '#' * int(progress / 10)
    sys.stdout.write('\r[{0}] {1}%'.format(bar, progress))
    sys.stdout.flush()
def menu():
    """Display the menu and return the raw choice typed by the user."""
    for line in (
        "\nMenu :",
        "\n\033[35m--------------------------------------------------------------------",
        "1. Lancer le test de connexion",
        "2. Retourner au script main.py",
        "--------------------------------------------------------------------\033[0m",
    ):
        print(line)
    return input("\nEntrez votre choix : ")
if __name__ == "__main__":
    # Simple menu loop: keep prompting until the user picks "2" (quit).
    while True:
        choix = menu()
        if choix == "1":
            try:
                lancer_test_connexion()
            except KeyboardInterrupt:
                # Ctrl+C aborts only the running test, not the menu loop
                print("\nLe test a été interrompu.")
        elif choix == "2":
            break
        else:
            print("Choix invalide. Veuillez réessayer.")
| BreakingTechFr/IpTools-BreakingTech | Scripts/8-vitesseconnexion.py | 8-vitesseconnexion.py | py | 1,433 | python | en | code | 0 | github-code | 13 |
14833289496 | import re
import paho.mqtt.client as mqtt
# Configure the MQTT broker and topic
mqtt_broker = "13.54.15.49"  # broker host/IP
mqtt_port = 1883  # default unencrypted MQTT port
mqtt_topic = "sensor_data/all"  # topic carrying the combined sensor payload
def on_connect(client, userdata, flags, rc):
    """paho-mqtt connect callback: log the result code and subscribe."""
    print("Connected with result code " + str(rc))
    # subscribing inside on_connect re-subscribes after every reconnect
    client.subscribe(mqtt_topic)
def on_message(client, userdata, message):
    """Decode an incoming sensor payload and print any readings found.

    The payload is expected to contain "<Label>: <float>" fragments for
    temperature, humidity and soil moisture; each present reading is
    extracted with a regex and echoed.
    """
    data = message.payload.decode()
    print(f'Received data: {data}')
    readings = (
        (r"Temperature:\s+(\d+\.\d+)", "Extracted temperature: {}°C"),
        (r"Humidity:\s+(\d+\.\d+)", "Extracted humidity: {}%"),
        (r"Soil Moisture:\s+(\d+\.\d+)", "Extracted soil moisture: {}%"),
    )
    for pattern, template in readings:
        match = re.search(pattern, data)
        if match:
            print(template.format(float(match.group(1))))
# Create and configure the MQTT client
mqtt_client = mqtt.Client()
mqtt_client.on_connect = on_connect
mqtt_client.on_message = on_message
# Connect to the MQTT broker (60 s keepalive) and start the network loop
mqtt_client.connect(mqtt_broker, mqtt_port, 60)
mqtt_client.loop_start()
try:
    import time
    # Keep the script alive: messages arrive on loop_start()'s background
    # thread, so sleep instead of the old "while True: pass" busy-wait,
    # which pinned a full CPU core doing nothing.
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    print("Exiting gracefully")
# Stop the loop before finishing
mqtt_client.loop_stop()
| pkeyur9978/IFN649_Tutorials | Soil_project/subscribe.py | subscribe.py | py | 1,650 | python | en | code | 0 | github-code | 13 |
20421032919 | # -*- coding: utf-8 -*-
"""
Created on Thu May 11 21:09:33 2023
@author: alexa
"""
import pygame
from settings import *
class Player(pygame.sprite.Sprite):
    """Player sprite with arrow-key movement and obstacle collision."""
    def __init__(self,pos,groups, obstacle_sprites):
        """Create the player at *pos* and register it with *groups*.

        NOTE(review): the sprite image is loaded from a hard-coded absolute
        Windows path -- this breaks on any other machine; should be a path
        relative to the project root.
        """
        super().__init__(groups)
        self.image = pygame.image.load('C:/Users/alexa/OneDrive/Desktop/Python-RPG-Game/graphics/player/left_idle/idle_left.png').convert_alpha()
        self.rect = self.image.get_rect(topleft = pos)
        # hitbox is shrunk vertically so the sprite can overlap obstacles a bit
        self.hitbox = self.rect.inflate(0,-26)
        self.direction = pygame.math.Vector2()
        self.speed = 5
        self.obstacle_sprites =obstacle_sprites
    def key_input(self):
        """Translate the currently pressed arrow keys into a direction vector."""
        keys = pygame.key.get_pressed()
        # "UP" or "DOWN" key inputs, modify direction.y
        if keys[pygame.K_UP]:
            self.direction.y = -1
        elif keys[pygame.K_DOWN]:
            self.direction.y = 1
        else:
            self.direction.y = 0
        # "RIGHT" or "LEFT" key inputs, modify direction.x
        if keys[pygame.K_RIGHT]:
            self.direction.x = 1
        elif keys[pygame.K_LEFT]:
            self.direction.x = -1
        else:
            self.direction.x = 0
    def move(self, speed):
        """Move the hitbox by direction*speed, resolving collisions per axis."""
        # Normalize angled movement so that it is not faster than horiz/vert
        if self.direction.magnitude() != 0:
            self.direction = self.direction.normalize()
        # Adjust speed and set up collision; each axis is moved and resolved
        # separately so sliding along walls works
        self.hitbox.x += self.direction.x*speed
        self.collision('horizontal')
        self.hitbox.y += self.direction.y*speed
        self.collision('vertical')
        self.rect.center = self.hitbox.center
    def collision(self, direction):
        """Push the hitbox out of any overlapping obstacle along one axis."""
        if direction == 'horizontal':
            for sprite in self.obstacle_sprites:
                if sprite.hitbox.colliderect(self.hitbox):
                    # if moving right, prevent player from overlapping
                    if self.direction.x > 0:
                        self.hitbox.right = sprite.hitbox.left
                    # if moving left
                    if self.direction.x < 0:
                        self.hitbox.left = sprite.hitbox.right
        if direction == 'vertical':
            for sprite in self.obstacle_sprites:
                if sprite.hitbox.colliderect(self.hitbox):
                    # if moving down
                    if self.direction.y > 0:
                        self.hitbox.bottom = sprite.hitbox.top
                    # if moving up
                    if self.direction.y < 0:
                        self.hitbox.top = sprite.hitbox.bottom
    def update(self):
        """Per-frame update: read input, then move."""
        self.key_input()
        self.move(self.speed)
9620750165 | # ================================== Imports =====================================
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
# =============================== Window Setting ==================================
Window = Tk()
Window.title("Temp Converter")
# fixed 520x309 window placed at screen offset +300+115
Window.geometry("520x309+300+115")
Window.resizable(False, False)
# ================================= Variables =====================================
# NOTE(review): IntVar is used although conversions produce floats; .get()
# raises on non-integer text, which the Convert_* except blocks rely on.
Faren_Input = IntVar()
Cels_Input = IntVar()
Cels_Result = StringVar()
Farn_Result = StringVar()
bg_Color = "brown"
fg_Color = "white"
Font = "Arial"
# ================================= Functions =====================================
def Error_Box():
    """Show a modal error dialog for non-numeric input."""
    messagebox.showerror("Invalid Value", "You Must Input A Number . . . !")
def ClearForm():
    """Clear both conversion forms.

    The Celsius-side reset runs first because it defines the module-level
    ``Clear``/``Zero`` globals the Fahrenheit-side reset reads.
    """
    Clear_Error_CelsForm()
    Clear_Error_FarenForm()
def Clear_Error_CelsForm():
    """Reset the Fahrenheit input field and its Celsius result display.

    NOTE(review): this also defines the module-level globals ``Zero`` and
    ``Clear`` that Clear_Error_FarenForm depends on -- calling the other
    function before this one raises NameError.
    """
    global Zero, Clear
    Clear = ''
    Zero = 0
    Farn_Result.set(Clear)
    Faren_Input.set(Zero)
def Clear_Error_FarenForm():
    """Reset the Celsius input field and its Fahrenheit result display.

    Previously this read the module-level ``Clear``/``Zero`` globals that
    only exist after Clear_Error_CelsForm() has run, so calling it on its
    own (e.g. from Convert_To_Farn's error path before any Fahrenheit
    conversion failed) raised NameError. The reset values are now local.
    """
    Cels_Result.set('')
    Cels_Input.set(0)
def Convert_To_Cels():
    """Convert the Fahrenheit entry to Celsius: C = F*5/9 - 160/9."""
    try:
        Farn_Result.set(round((((float(Faren_Input.get()))*5/9)-(160/9)), 2))
    except:
        # non-numeric entry: show the error dialog, clear the form and
        # re-raise so the failure is still visible on the console
        Error_Box()
        Clear_Error_CelsForm()
        raise ValueError  # This line is not in first and second ".exe" file
def Convert_To_Farn():
    """Convert the Celsius entry to Fahrenheit: F = C*9/5 + 32."""
    try:
        Cels_Result.set(round((float(Cels_Input.get()))*9/5+32, 2))
    except:
        # non-numeric entry: show the error dialog, clear the form and
        # re-raise so the failure is still visible on the console
        Error_Box()
        Clear_Error_FarenForm()
        raise ValueError  # This line is not in first and second ".exe" file
def Confirm_Box():
    """Ask for confirmation and close the application on "yes"."""
    # use a local name for the dialog result instead of shadowing the
    # function's own name, as the original code did
    answer = messagebox.askquestion(
        "Confirm", "Are You Sure Do You Want To Exit?")
    if answer == "yes":
        Window.destroy()
# =================================== Labels =====================================
WelcomeLabel = Label(Window, text="Welcome To My Application", font=(19))
WelcomeLabel.grid(row=0, column=1, pady=(7, 7))
F1 = Label(Window, text="Fahrenheit Temperature : ")
F1.grid(row=1, column=0, padx=(7, 0), pady=(3, 0))
C1 = Label(Window, text="Celsius Temperature : ")
C1.grid(row=1, column=2, pady=(3, 0))
F_Res = Label(Window, text="Your Result In Celsius : ")
F_Res.grid(row=4, column=0, pady=(10, 0))
C_Res = Label(Window, text="Your Result In Fahrenheit : ")
C_Res.grid(row=4, column=2, pady=(10, 0))
CopyRight = Label(Window, text="""
©2020 Esfandiar Kiani, All rights reserved.
""")
CopyRight.grid(row=7, column=1)
# =================================== Buttons ====================================
To_C_Btn = ttk.Button(Window, text="Convert To Celsius",
cursor="hand2", command=Convert_To_Cels)
To_C_Btn.grid(row=3, column=0, pady=(7, 7))
To_F_Btn = ttk.Button(Window, text="Convert To Fahrenheit",
cursor="hand2", command=Convert_To_Farn)
To_F_Btn.grid(row=3, column=2, pady=(7, 7))
Clear_Btn = Button(Window, width=8, text="Clear All",
bd=4, font=(10), command=ClearForm)
Clear_Btn.grid(row=3, column=1, pady=(7, 7))
Exit_Btn = Button(Window, width=8, text="Exit", bd=3,
bg=bg_Color, fg=fg_Color, command=Confirm_Box)
Exit_Btn.grid(row=6, column=2, pady=(15, 7))
# =================================== Entries ====================================
F1 = Entry(Window, width=12, textvariable=Faren_Input,
bd=3, font=(Font, 15))
F1.grid(row=2, column=0, padx=(3, 0), pady=(0, 12))
Res_F = Entry(Window, width=12, textvariable=Farn_Result,
bd=3, font=(Font, 15))
Res_F.grid(row=5, column=0, pady=(0, 10))
C1 = Entry(Window, width=12, textvariable=Cels_Input, bd=3, font=(Font, 15))
C1.grid(row=2, column=2, padx=(3, 0), pady=(0, 12))
Res_C = Entry(Window, width=12, textvariable=Cels_Result,
bd=3, font=(Font, 15))
Res_C.grid(row=5, column=2, pady=(0, 10))
Window.mainloop()
| Es-Kiani/Temperature-Converter | My Temp Converter Dev.py | My Temp Converter Dev.py | py | 3,984 | python | en | code | 0 | github-code | 13 |
45022384874 | import urllib.request
import os
from bs4 import BeautifulSoup
from selenium import webdriver
import time
from multiprocessing import Pool
import csv
urls = {}
# boards.csv rows appear to be [username, base_url, board1..board4];
# TODO confirm the column layout against the actual file
with open('boards.csv', newline='', encoding="ISO-8859-1") as f:
    reader = csv.reader(f)
    for row in reader:
        url = []
        # each board path is appended to the user's base URL
        url.append(row[1] + row[2])
        url.append(row[1] + row[3])
        url.append(row[1] + row[4])
        url.append(row[1] + row[5])
        urls[row[0]] = url
# list of usernames, in CSV order
lofu = list(urls.keys())
ucount = 0
def createdir(dirname):
    """Create *dirname*, including any missing parent directories.

    ``exist_ok=True`` makes the call idempotent, so a directory created by
    a concurrent worker between the caller's ``os.path.exists`` check and
    this call no longer raises FileExistsError.
    """
    os.makedirs(dirname, exist_ok=True)
def savetrainImages(username, boardname, num, link):
    """Download one training image to F:/Data/Train/<user>/<board>/<num>.jpg.

    The download is best-effort: a failed fetch is skipped silently. The
    old bare ``except:`` also swallowed KeyboardInterrupt/SystemExit; only
    ordinary exceptions are ignored now.
    """
    datapath = 'F:/Data/Train'
    userpath = datapath + '/' + username
    bpath = userpath + '/' + boardname
    if not os.path.exists(userpath):
        createdir(userpath)
    if not os.path.exists(bpath):
        createdir(bpath)
    path = bpath + '/' + str(num) + '.jpg'
    try:
        urllib.request.urlretrieve(link, path)
    except Exception:
        pass
def savetestImages(username, boardname, num, link):
    """Download one test image to F:/Data/Test/<user>/<board>/<num>.jpg.

    Best-effort like savetrainImages: failed fetches are skipped, but
    KeyboardInterrupt/SystemExit are no longer swallowed by a bare except.
    """
    datapath = 'F:/Data/Test'
    userpath = datapath + '/' + username
    bpath = userpath + '/' + boardname
    if not os.path.exists(userpath):
        createdir(userpath)
    if not os.path.exists(bpath):
        createdir(bpath)
    path = bpath + '/' + str(num) + '.jpg'
    try:
        urllib.request.urlretrieve(link, path)
    except Exception:
        pass
def saveCVImages(username, boardname, num, link):
    """Download one validation image to F:/Data/CrossValidation/<user>/<board>/<num>.jpg.

    Best-effort like savetrainImages: failed fetches are skipped, but
    KeyboardInterrupt/SystemExit are no longer swallowed by a bare except.
    """
    datapath = 'F:/Data/CrossValidation'
    userpath = datapath + '/' + username
    bpath = userpath + '/' + boardname
    if not os.path.exists(userpath):
        createdir(userpath)
    if not os.path.exists(bpath):
        createdir(bpath)
    path = bpath + '/' + str(num) + '.jpg'
    try:
        urllib.request.urlretrieve(link, path)
    except Exception:
        pass
def url_scrp(links, uname):
    """Scrape image URLs from each board page of user *uname* and save them.

    For every board link: open the page in Chrome, scroll five times (with
    waits) so lazily loaded images render, collect all <img> tags, then —
    when at least 64 images were found — split them into train (50),
    test (6) and cross-validation (6) downloads.
    """
    for link in links:
        count = 0
        driver = webdriver.Chrome()
        # park the window off-screen instead of using headless mode
        driver.set_window_position(-2000, 0)
        driver.get(link)
        for i in range(0,5):
            driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
            time.sleep(5)  # give lazy-loaded images time to appear
        page_source = driver.page_source
        soup = BeautifulSoup(page_source)
        imglinks = soup.find_all('img')
        # board name is the last path component of the URL
        bname = link.split('/')[-1]
        driver.close()
        if len(imglinks) > 63:
            for i in imglinks[:50]:
                count = count + 1
                savetrainImages(uname, bname, count, i['src'])
            for i in imglinks[50:56]:
                count = count + 1
                savetestImages(uname, bname, count, i['src'])
            for i in imglinks[56:62]:
                count = count + 1
                saveCVImages(uname, bname, count, i['src'])
if __name__ == '__main__':
    # Sequential crawl, one user at a time; the Pool-based parallel
    # version is kept below but disabled.
    #p = Pool(5)
    for i in lofu:
        #p.map(url_scrp, urls[i])
        url_scrp(urls[i], i)
| Sachin-Ramesh10/Image-Recommendation-System | Crawling/SaveImagesCrawl.py | SaveImagesCrawl.py | py | 2,953 | python | en | code | 0 | github-code | 13 |
3086277385 | from keras.models import Sequential
from keras.layers import Activation
from keras.optimizers import SGD
from keras.layers import Dense
from keras.layers import Dropout
from keras.constraints import maxnorm
from sklearn.metrics import make_scorer
from scipy.stats import spearmanr
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import train_test_split
from keras.utils import np_utils
from matplotlib import pyplot as plt
from keras.wrappers.scikit_learn import KerasClassifier
import scipy
from imutils import paths
import numpy as np
import argparse
import cv2
import os
def spearman(ground_truth, predictions):
    """Spearman rank-correlation coefficient between two score sequences."""
    rho, _pvalue = spearmanr(ground_truth, predictions)
    return rho
class LearnNN():
    """Two-hidden-layer Keras classifier over 300-dim features, predicting
    one of 6 'Validation Scores' classes.

    Train/test DataFrames must contain a 'Validation Scores' column; all
    other columns are used as features.
    """
    @staticmethod
    def create_model(optimizer='adam', learn_rate=0.01, momentum=0, init_mode_1='uniform', init_mode_2='uniform',
                     init_mode_3='uniform', activation_1='relu', activation_2='relu', activation_3='relu',
                     dropout_rate_1=0.0, dropout_rate_2=0.0, weight_constraint_1=0, weight_constraint_2=0,
                     neurons_1=100, neurons_2=100):
        """Build and compile the Sequential model; every hyper-parameter is
        exposed so the method can serve as a KerasClassifier build_fn."""
        model = Sequential()
        model.add(Dense(neurons_1, input_dim=300, kernel_initializer=init_mode_1, activation=activation_1,
                        kernel_constraint=maxnorm(weight_constraint_1)))
        model.add(Dropout(dropout_rate_1))
        model.add(Dense(neurons_2, input_dim=neurons_1, kernel_initializer=init_mode_2, activation=activation_2,
                        kernel_constraint=maxnorm(weight_constraint_2)))
        model.add(Dropout(dropout_rate_2))
        model.add(Dense(6, kernel_initializer=init_mode_3, activation=activation_3))
        # NOTE(review): this SGD instance is never passed to compile(); the
        # 'optimizer' argument is used instead -- confirm intent.
        sgd = SGD(momentum=momentum, lr=learn_rate)
        model.compile(loss="binary_crossentropy", optimizer=optimizer, metrics=["accuracy"])
        return model

    def __init__(self, train_data, test_data, optimizer='adam', learn_rate=0.01, momentum=0, init_mode_1='uniform', init_mode_2='uniform',
                 init_mode_3='uniform', activation_1='relu', activation_2='relu', activation_3='relu',
                 dropout_rate_1=0.0, dropout_rate_2=0.0, weight_constraint_1=0, weight_constraint_2=0,
                 neurons_1=100, neurons_2=100):
        """Split features/labels from the DataFrames and build the model."""
        self.X_train = train_data.drop(['Validation Scores'], axis=1)
        self.y_train = np_utils.to_categorical(train_data['Validation Scores'], 6)
        self.X_test = test_data.drop(['Validation Scores'], axis=1)
        self.y_test = test_data['Validation Scores']
        self.model = LearnNN.create_model(optimizer, learn_rate, momentum, init_mode_1, init_mode_2,
                                          init_mode_3, activation_1, activation_2, activation_3,
                                          dropout_rate_1, dropout_rate_2, weight_constraint_1, weight_constraint_2,
                                          neurons_1, neurons_2)
        self.scoring = make_scorer(spearman, greater_is_better=True)

    def tune(self, grid, redefine=False):
        """Grid-search hyper-parameters on the training set and print the
        results.  NOTE(review): 'redefine' is currently unused."""
        cv_model = KerasClassifier(build_fn=LearnNN.create_model, verbose=0)
        grid = GridSearchCV(estimator=cv_model, param_grid=grid, n_jobs=-1, verbose=1)
        grid_result = grid.fit(self.X_train, self.y_train)
        print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
        means = grid_result.cv_results_['mean_test_score']
        stds = grid_result.cv_results_['std_test_score']
        params = grid_result.cv_results_['params']
        for mean, stdev, param in zip(means, stds, params):
            print("%f (%f) with: %r" % (mean, stdev, param))

    def predict(self):
        """Fit on the training set, predict each test row, and print the
        Spearman correlation between predictions and true scores."""
        self.model.fit(self.X_train, self.y_train, batch_size=128, epochs=50, verbose=1)
        preds = []
        # Bug fix: DataFrame.as_matrix() was removed in pandas 1.0;
        # .values is the long-supported equivalent.
        for num, compound in enumerate(self.X_test.values):
            compound = compound.reshape(1, -1)
            probs = self.model.predict(compound)[0]
            # print(probs)
            prediction = probs.argmax(axis=0)
            # print(prediction, labels_test[num])
            preds.append(prediction)
        print(preds)
        print(self.y_test.tolist())
        print('Spearman coef:', scipy.stats.spearmanr(preds, self.y_test.tolist())[0])
| uhh-lt/poincare | Supervised Evaluation/learners_NN.py | learners_NN.py | py | 4,202 | python | en | code | 0 | github-code | 13 |
14734327185 | '''
217. Contains Duplicate
Solution
'''
class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        """Return True if any value appears at least twice in nums."""
        # A set gives O(1) membership tests and lets us exit on the first
        # repeat, instead of building full counts for every element.
        seen = set()
        for value in nums:
            if value in seen:
                return True
            seen.add(value)
        return False
29320825183 | import logging
import random
from signal import SIGHUP, SIGINT, SIGTERM, signal
from threading import Event, Thread
from typing import List
from rich.logging import RichHandler
from techflurry.controller.mqtt_client import MQTTClient
log = logging.getLogger(__name__)
FORMAT = "%(message)s"
logging.basicConfig(
    level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
)
# Worker threads spawned in __main__; joined by stop_sensors().
THREADS: List[Thread] = []
# Set once at shutdown to stop every start_sensor() loop.
STOP_EVENT: Event = Event()
# Shared subscriber client; topic pattern matches all "test/..." topics.
MQTT_CLIENT: MQTTClient = MQTTClient(topic="test/#")
def graceful_exit():
    """Shut down in order: signal sensor loops, close MQTT, join threads."""
    log.debug("Graceful shutdown")
    STOP_EVENT.set()  # wake every start_sensor() wait() so the loops exit
    MQTT_CLIENT.shutdown()
    stop_sensors()  # join workers after they have observed the event
def signal_catcher(signal_number, frame):
    """Translate POSIX signals into application actions.

    SIGTERM and SIGINT trigger a full graceful shutdown; SIGHUP is only
    logged (no restart logic is implemented here).
    """
    if signal_number == SIGTERM:
        log.info("SIGTERM received! Quitting.")
        graceful_exit()
    elif signal_number == SIGHUP:
        log.info("SIGHUP received. Restarting.")
    elif signal_number == SIGINT:
        log.info("SIGINT received. Quitting.")
        graceful_exit()
def stop_sensors():
    """Join every worker thread in THREADS, logging each one."""
    for worker in THREADS:
        log.debug("Stopping thread '%s'", worker.name)
        worker.join()
    log.debug("All sensors stopped")
def start_sensor(
    exit_event: Event,
    mqtt_topic: str = None,
    payload_min: int = 6,
    payload_max: int = 10,
):
    """Publish a uniform-random payload in [payload_min, payload_max] to
    *mqtt_topic* every 5 seconds until *exit_event* is set.

    NOTE(review): mqtt_topic defaults to None, which would publish to a
    None topic; __main__ always passes one -- confirm the default is safe.
    """
    client = MQTTClient()
    client.safe_connect("localhost")
    while not exit_event.is_set():
        payload = random.uniform(payload_min, payload_max)
        client.publish(topic=mqtt_topic, payload=payload)
        exit_event.wait(5)  # responsive sleep: returns early on shutdown
if __name__ == "__main__":
    # Install signal handlers before spawning workers so a signal during
    # startup still shuts down cleanly.
    signal(SIGTERM, signal_catcher)
    signal(SIGHUP, signal_catcher)
    signal(SIGINT, signal_catcher)
    # Five simulated sensors, each with its own topic and payload range.
    for i in range(1, 6):
        log.debug("Starting thread 'Thread-%d'", i)
        thread = Thread(
            target=start_sensor,
            args=(
                STOP_EVENT,
                f"test/topic-{i}",
                5 * i,
                10 * i,
            ),
        )
        thread.start()
        THREADS.append(thread)
    # both threads completely executed
    MQTT_CLIENT.safe_connect("localhost")
    MQTT_CLIENT.loop_start()
    # graceful_exit()
    # mqtt_topic = "test/topic"
    # client = MQTTClient()
    # client.safe_connect("localhost")
    # while True:
    #     payload = random.uniform(6, 9)
    #     client.publish(topic=mqtt_topic, payload=payload, qos=0, retain=False)
    #     log.info("Publishing to '%s' with payload %.2f ", mqtt_topic, payload)
    #     time.sleep(5)
| atraides/techflurry-controller | examples/mqtt_threaded.py | mqtt_threaded.py | py | 2,422 | python | en | code | 0 | github-code | 13 |
43103613152 | '''
Created on Jul 12, 2017
@author: xgo
'''
import sys
import random
def sample_protein_database(filename_str: str, sample_rate_float: float, output_file_str: str) -> None:
    """Copy a random subset of FASTA records from one file to another.

    Each record (a '>' header line plus its following sequence lines) is
    kept with probability *sample_rate_float*.
    """
    keep = False
    with open(filename_str, 'r') as src, open(output_file_str, 'w') as dst:
        for line in src:
            if line.startswith('>'):
                # Decide once per record, at its header.
                keep = random.random() < sample_rate_float
                if keep:
                    dst.write(line)
            elif keep:
                dst.write(line)
    print('sample_protein_database is done.')
def main():
    """Sample half of an E. coli protein database into a new FASTA file.

    NOTE(review): input/output paths are hard-coded to a local machine.
    """
    filename_str = '/media/xgo/Seagate/Proteomics/Data/Ecoli/Ecoli_K12_MG1655.fasta'
    sample_rate_float = 0.5
    output_file_str = '/media/xgo/Seagate/Proteomics/Experiments/SiprosEnsemble/Ecoli/Data/DB/Ecoli_K12_MG1655_sample_0.5.fasta'
    sample_protein_database(filename_str, sample_rate_float, output_file_str)
if __name__ == '__main__':
sys.exit(main()) | guo-xuan/SiprosBenchmark | src/sample_proteins.py | sample_proteins.py | py | 1,159 | python | en | code | 0 | github-code | 13 |
41130624683 | import pygame
class ScoreStage:
    def __init__(self, screen : pygame.Surface, x : int, y: int ,score: int)-> None:
        """
        Represents the score display for a game stage.

        Args:
            screen (pygame.Surface): Surface of the game screen.
            x (int): x position of the score text.
            y (int): y position of the score text.
            score (int): Current score value.

        Returns: None
        """
        self.type_fuente = "DSEG"          # font family name
        self.tamanio_fuente = 55           # font size in points
        self.color_texto = (255, 60, 60)   # RGB text colour
        self.fuente = pygame.font.SysFont(self.type_fuente, self.tamanio_fuente)
        self.texto_score = "Score: "
        self.score_text = self.fuente.render(self.texto_score, True, self.color_texto)
        self.game_over = False
        self.screen = screen
        self.x = x
        self.y = y
        self.elapsed_time = 0
        self.score = score
    def draw_score(self)-> None:
        """
        Render the current score and blit it onto the screen.

        Receives: None
        Returns: None
        """
        self.score_text = self.fuente.render("Score: {0}".format(self.score), True, self.color_texto)
        self.screen.blit(self.score_text, (self.x, self.y))
    def update_score(self)-> None:
        """
        Refresh the score display on the screen.

        Receives: None
        Returns: None
        """
        self.draw_score()
| HoracioxBarrios/mi_juego_final_limpio | class_score.py | class_score.py | py | 1,469 | python | es | code | 2 | github-code | 13 |
2255836727 | from hyggepowermeter.services.mqtt.mqtt_base_client import MQTTClient
from hyggepowermeter.services.mqtt.topics.topics_factory import TopicFactory
from hyggepowermeter.utils.logger import logger
class EnergySubscriberClient(MQTTClient):
    """MQTT subscriber that dispatches incoming messages to topic-specific
    handlers, giving each a handle to the power-meter database."""

    def on_message(self, _, __, msg):
        """paho-style message callback: resolve the topic handler and run it.

        Bug fix: catching BaseException also swallowed SystemExit and
        KeyboardInterrupt; Exception keeps the intended "log and keep the
        client alive" behaviour without hiding interpreter shutdown.
        """
        try:
            topic = TopicFactory.get_topic_subscriber(msg.topic)
            topic.do_action(msg, self._db_client)
        except Exception as err:
            logger.exception(str(err))

    def __init__(self, config, power_meter_db):
        """Wire the callback and remember config + database handle."""
        super().__init__(config.mqtt)
        self.client.on_message = self.on_message
        self.client_id = config.mqtt.id
        self._db_client = power_meter_db
        self.config = config
| julianhygge/hygge-power-meter-back | hyggepowermeter/services/mqtt/subscriber_client.py | subscriber_client.py | py | 727 | python | en | code | 0 | github-code | 13 |
2074565697 | import datetime
import asyncio
import functools
from .log import *
from .cli import *
from .token import *
def aslist(x):
    """Normalize *x* to a sequence: lists/tuples pass through, None becomes
    an empty list, anything else is wrapped in a one-element list."""
    if isinstance(x, (list, tuple)):
        return x
    return [] if x is None else [x]
# data parsing
def pack_entries(data: list, sid=None, ts=None) -> tuple:
    '''Pack multiple byte objects into a single bytearray with numeric offsets.

    Returns (offsets, entries) where each offset is the cumulative end
    position of an entry.  When *sid* is given, offsets become (sid, offset)
    pairs, or (sid, ts, offset) triples when *ts* is given too.
    '''
    entries = bytearray()
    offsets = []
    offset = 0
    for d in aslist(data):
        offset += len(d)
        offsets.append(offset)
        entries += d
    if sid:
        sid = aslist(sid)
        ts = aslist(ts) if ts else None
        # Bug fix: the original failure diagnostics evaluated len(ts) even
        # when ts was None, turning a sid-length mismatch into a TypeError
        # instead of an AssertionError.
        assert len(sid) == len(offsets) and (not ts or len(ts) == len(offsets)), \
            (len(offsets), len(sid), len(ts) if ts else None)
        offsets = list(zip(sid, ts, offsets)) if ts else list(zip(sid, offsets))
    return offsets, entries
def unpack_entries(offsets: list, content: bytes) -> list:
    '''Unpack a single bytearray with numeric offsets into multiple byte objects.

    Each (sid, ts, start) entry is paired with the next entry's offset as
    its end; the final entry runs to the end of *content*.
    '''
    bounds = list(offsets) + [(None, None, None)]  # sentinel: slice to end
    return [
        (sid, ts, content[start:stop])
        for (sid, ts, start), (_, _, stop) in zip(bounds, bounds[1:])
    ]
def parse_time(tid: str):
    '''Convert a redis stream timestamp (e.g. "1651500000000-0") to a local
    datetime object.'''
    return datetime.datetime.fromtimestamp(parse_epoch_time(tid))
def parse_epoch_time(tid: str):
    '''Convert a redis stream timestamp ("<ms>-<seq>") to epoch seconds.'''
    millis, _sep, _seq = tid.partition('-')
    return int(millis) / 1000
ts2datetime = parse_time # Deprecated alias kept for backward compatibility; use parse_time instead.
def format_time(dt: datetime.datetime):
    '''Format a datetime object as a redis stream timestamp ("<ms>-0").'''
    return format_epoch_time(dt.timestamp())
def format_epoch_time(tid: float):
    '''Format epoch seconds as a redis stream timestamp ("<ms>-0").'''
    millis = int(tid * 1000)
    return '{}-0'.format(millis)
# misc
def filternone(d: dict):
    '''Filter None values from a dictionary. Useful for updating only a few
    fields; non-dict inputs are returned unchanged.'''
    if not isinstance(d, dict):
        return d
    return {key: value for key, value in d.items() if value is not None}
def interruptable(func):
    '''Decorator that turns a KeyboardInterrupt inside *func* into a quiet
    "Interrupted." message (returning None) instead of a traceback.'''
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            print('\nInterrupted.')
    return wrapper
# asyncio
def async2sync(func):
    '''Wraps an async function with a synchronous call.

    The original coroutine function stays reachable via the ``.asyncio``
    attribute of the returned wrapper.
    '''
    @functools.wraps(func)
    def runner(*args, **kwargs):
        return asyncio.run(func(*args, **kwargs))
    runner.asyncio = func
    return runner
def async_run_safe(future):
    """Run *future* to completion on the current event loop, cancelling it
    cleanly on Ctrl-C and re-raising the KeyboardInterrupt.

    NOTE(review): after task.cancel() the loop is restarted with
    run_forever(); nothing here calls loop.stop(), so verify this actually
    returns once the cancellation settles.
    """
    loop = asyncio.get_event_loop()
    # import signal
    # def ask_exit(signame, loop):
    #     print("got signal %s: exit" % signame)
    #     loop.stop()
    # for signame in {'SIGINT', 'SIGTERM'}:
    #     loop.add_signal_handler(getattr(signal, signame), functools.partial(ask_exit, signame, loop))
    task = asyncio.ensure_future(future)
    try:
        return loop.run_until_complete(task)
    except KeyboardInterrupt:
        print('Interrupted asyncio loop')
        task.cancel()
        loop.run_forever()
        task.exception()  # retrieve the exception so no warning is logged
        raise
    finally:
        loop.close()
async def async_first_done(*unfinished):
    '''Returns when the first task finishes and cancels the rest.
    This is used when both sending and receiving data and you interrupt one of them, they should all exit.
    '''
    finished, unfinished = await asyncio.wait(unfinished, return_when=asyncio.FIRST_COMPLETED)
    try:
        # First non-None result among the completed tasks, else None.
        return next((x for x in (t.result() for t in finished) if x is not None), None)
    finally:
        # Cancel the stragglers and wait for the cancellations to settle.
        for task in unfinished:
            task.cancel()
        await asyncio.wait(unfinished)
def pretty_bytes(b, scale=1000, names=['b', 'kb', 'mb', 'gb', 'tb']):
    """Human-readable size: the largest unit keeping the value below *scale*.

    Values beyond the last unit are still expressed in that unit.
    (*names* is never mutated, so the mutable default is safe here.)
    """
    for i, name in enumerate(names):
        if b / (scale ** (i + 1)) < 1:
            return f'{b / (scale ** i):.1f}{name}'
    # Bug fix: the overflow fallback now uses the same one-decimal format
    # as the in-range branches (it previously printed the raw float repr).
    return f'{b / (scale ** (len(names) - 1)):.1f}{names[-1]}'
| VIDA-NYU/ptgctl | ptgctl/util/__init__.py | __init__.py | py | 3,709 | python | en | code | 0 | github-code | 13 |
13667860633 | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from keras.models import Model, Sequential
from keras.layers import Input, Flatten, Dense, Dropout, Lambda, Conv2D, MaxPooling2D, TimeDistributed, LSTM, Conv1D
from keras.optimizers import RMSprop
from keras import backend as K
from keras.optimizers import SGD
import re
import os.path
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
# Hyper-parameters shared by the models below.
BATCH_START = 0   # NOTE(review): unused in this file -- presumably a data cursor
TIME_STEPS = 20   # sequence length fed to the RNN
BATCH_SIZE = 50
INPUT_SIZE = 300  # feature dimension per time step
OUTPUT_SIZE = 1
CELL_SIZE = 10    # LSTM hidden-state size
LR = 0.006        # learning rate for the Adam optimizer in LSTMRNN
class CNN:
    """Static builder for a small Conv2D feature extractor (functional API).

    NOTE(review): uses the legacy Keras 1 'border_mode' argument; modern
    Keras calls this 'padding' -- confirm the installed Keras version.
    """
    @staticmethod
    def build(input):
        x = Conv2D(32, (3, 3), border_mode='same')(input)
        x = Conv2D(64, (3, 3),strides=(4, 4))(x)
        x = MaxPooling2D((2, 2))(x)
        x = Conv2D(32, (3, 3), border_mode='same')(x)
        x = Conv2D(30, (5, 1), strides=(5, 28), border_mode='same', activation='relu')(x)
        x = Flatten()(x)
        # x = Conv1D(100,
        #            kernel_size=80,
        #            strides=4,
        #            padding="same")(x)
        # x = TimeDistributed(Dense(1, activation=None))(x)
        return x
class kerasLSTM():
    """Static builder for a single-LSTM-layer regressor (functional API):
    an LSTM over the input sequence followed by a per-timestep Dense(1)."""
    @staticmethod
    def build(input):
        x = LSTM(128, input_shape=(INPUT_SIZE, 1), return_sequences=True)(input)
        # x = LSTM(64, return_sequences=True)(input)
        # x = Dense(640)(input)
        # x = Dense(300)(input)
        # x = Dropout(0.5)
        # x = Dense(300)(input)
        out = TimeDistributed(Dense(1))(x)
        # out = Dense(1)(x)
        return out
class LSTMRNN(object):
    """TensorFlow 1.x graph-mode LSTM regressor.

    Builds an input projection, a BasicLSTMCell unrolled with dynamic_rnn,
    an output projection, a sequence MSE cost, and an Adam train op.
    NOTE(review): relies on tf.contrib / placeholders, i.e. TF 1.x only.
    """
    def __init__(self, n_steps, input_size, output_size, cell_size, batch_size):
        self.n_steps = n_steps
        self.input_size = input_size
        self.output_size = output_size
        self.cell_size = cell_size
        self.batch_size = batch_size
        with tf.name_scope('inputs'):
            self.xs = tf.placeholder(tf.float32, [None, n_steps, input_size], name='xs')
            self.ys = tf.placeholder(tf.float32, [None, n_steps, output_size], name='ys')
        with tf.variable_scope('in_hidden'):
            self.add_input_layer()
        with tf.variable_scope('LSTM_cell'):
            self.add_cell()
        with tf.variable_scope('out_hidden'):
            self.add_output_layer()
        with tf.name_scope('cost'):
            self.compute_cost()
        with tf.name_scope('train'):
            self.train_op = tf.train.AdamOptimizer(LR).minimize(self.cost)
    def add_input_layer(self,):
        """Project (batch, steps, input_size) to cell_size via one Dense layer."""
        l_in_x = tf.reshape(self.xs, [-1, self.input_size], name='2_2D')
        # (batch*n_step, in_size)
        # # Ws (in_size, cell_size)
        Ws_in = self._weight_variable([self.input_size, self.cell_size])
        # bs (cell_size, )
        bs_in = self._bias_variable([self.cell_size,])
        # l_in_y = (batch * n_steps, cell_size)
        with tf.name_scope('Wx_plus_b'):
            l_in_y = tf.matmul(l_in_x, Ws_in) + bs_in
        # reshape l_in_y ==> (batch, n_steps, cell_size)
        self.l_in_y = tf.reshape(l_in_y, [-1, self.n_steps, self.cell_size], name='2_3D')
    def add_cell(self):
        """Unroll a BasicLSTMCell over the projected inputs."""
        lstm_cell = tf.contrib.rnn.BasicLSTMCell(self.cell_size, forget_bias=1.0, state_is_tuple=True)
        with tf.name_scope('initial_state'):
            self.cell_init_state = lstm_cell.zero_state(self.batch_size, dtype=tf.float32)
        self.cell_outputs, self.cell_final_state = tf.nn.dynamic_rnn(
            lstm_cell, self.l_in_y, initial_state=self.cell_init_state, time_major=False)
    def add_output_layer(self):
        """Project cell outputs to output_size predictions per timestep."""
        # shape = (batch * steps, cell_size)
        l_out_x = tf.reshape(self.cell_outputs, [-1, self.cell_size], name='2_2D')
        Ws_out = self._weight_variable([self.cell_size, self.output_size])
        bs_out = self._bias_variable([self.output_size, ])
        # shape = (batch * steps, output_size)
        with tf.name_scope('Wx_plus_b'):
            self.pred = tf.matmul(l_out_x, Ws_out) + bs_out
    def compute_cost(self):
        """Average per-example squared-error sequence loss over the batch."""
        losses = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
            [tf.reshape(self.pred, [-1], name='reshape_pred')],
            [tf.reshape(self.ys, [-1], name='reshape_target')],
            [tf.ones([self.batch_size * self.n_steps], dtype=tf.float32)],
            average_across_timesteps=True,
            softmax_loss_function=self.ms_error,
            name='losses'
        )
        with tf.name_scope('average_cost'):
            self.cost = tf.div(
                tf.reduce_sum(losses, name='losses_sum'),
                self.batch_size,
                name='average_cost')
            tf.summary.scalar('cost', self.cost)
    @staticmethod
    def ms_error(labels, logits):
        """Element-wise squared error, used in place of the softmax loss."""
        return tf.square(tf.subtract(labels, logits))
    def _weight_variable(self, shape, name='weights'):
        # Weights drawn from N(0, 1).
        initializer = tf.random_normal_initializer(mean=0., stddev=1.,)
        return tf.get_variable(shape=shape, initializer=initializer, name=name)
    def _bias_variable(self, shape, name='biases'):
        # Small positive bias to keep ReLU-style units initially active.
        initializer = tf.constant_initializer(0.1)
        return tf.get_variable(name=name, shape=shape, initializer=initializer)
| Pykeeper/practice | path_learning_v2/path_learning/model_set.py | model_set.py | py | 5,858 | python | en | code | 0 | github-code | 13 |
72248889939 | #---------------------------------Hi Reader 👋------------------------------------------
# This project contains two projects , means ( Project-112 & Project-113)
#-------------------------------PROJECT - 112-----------------------------------------
import pandas as pd
import plotly.express as pe
import csv
import plotly.graph_objects as go
import statistics as st
import numpy as np
import plotly.figure_factory as pf
import random
#import seaborn as sns
data = pd.read_csv("data.csv")
savings = data["quant_saved"].tolist()
female = data["female"].tolist()
graph = pe.scatter(data , y = savings , color = female)
#graph.show()
# Re-read the same CSV as raw rows to count rows by gender flag (column 2).
with open("data.csv") as file :
    read = csv.reader(file)
    savingsData=list(read)
    print(savingsData[0])
savingsData.pop(0)
totalFemales = 0
totalMale = 0
for i in savingsData:
    if int(i[2]) == 1:
        totalFemales = totalFemales + 1
    else :
        totalMale = totalMale + 1
# NOTE(review): x holds ONE label "Females , Male" for two y values --
# probably meant ["Females", "Male"].
graph = go.Figure(go.Bar(x = ["Females , Male"] , y = [totalFemales , totalMale]))
#graph.show()
#------------------------------Mean / Mode / Median of savings of all people----------------
mean = st.mean(savings)
mode = st.mode(savings)
median = st.median(savings)
print("---------------All People-----------------")
print("Mean is " , mean)
print("Mode is ", mode)
print("Median is ", median)
#------------------------------Mean / Mode / Median of females------------------------------
# Split the savings column (column 0) by the gender flag (column 2).
females = []
males = []
for i in savingsData:
    if int(i[2]) == 1:
        females.append( float(i[0]) )
    else :
        males.append( float(i[0]) )
mean = st.mean(females)
mode = st.mode(females)
median = st.median(females)
print("---------------Only Females-----------------")
print("Mean is " , mean)
print("Mode is ", mode)
print("Median is ", median)
#------------------------------Mean / Mode / Median of males--------------------------------
mean = st.mean(males)
mode = st.mode(males)
median = st.median(males)
print("---------------Only Males-----------------")
print("Mean is " , mean)
print("Mode is ", mode)
print("Median is ", median)
#-------------------------------------Correlation--------------------------------------------
highSchoolCompletion = data["highschool_completed"].tolist()
correlation = np.corrcoef(highSchoolCompletion , savings)
# Off-diagonal element of the 2x2 correlation matrix.
print("Correlation is" , correlation[0,1])
#---------------------Not at all relation 🤣🤣----------------------------------------
graph = pf.create_distplot([savings] , ["Savings"] , show_hist=False)
#graph.show()
#---------------------------------A New Start 😇-----------------------------------------
#--------------------------------PROJECT - 113-----------------------------------------
q1 = data["quant_saved"].quantile(0.25)
q3 = data["quant_saved"].quantile(0.75)
iqr = q3-q1
#------------------------------Project-113
print(q1,q3,iqr)
lowerWhisker = q1 - 1.5*iqr
upperWhisker = q3 + 1.5*iqr
print("Lower Whisker is ", lowerWhisker)
print("Upper Whisker is ", upperWhisker)
#---------------------------------Creating New Data -----------------------------------------------
newData = data[data["quant_saved"] < upperWhisker]
newSavings = newData["quant_saved"]
mean = st.mean(newSavings)
mode = st.mode(newSavings)
median = st.median(newSavings)
stdev = st.stdev(newSavings)
print("Mean of New Df is" , mean)
print("Mode of New Df is" , mode)
print("Median of New Df is" , median)
print("Stdev of New Df is" , stdev)
graph = pf.create_distplot( [newSavings] , ["Savings"] , show_hist=False )
#graph.show()
#----------------------------------Sampling 🧪🧪-------------------------------------
meanList=[]
for i in range(1000):
dataSet=[]
for i in range(100):
id = random.choice(newSavings)
dataSet.append(id)
meanList.append(st.mean(dataSet))
meanOfSample = st.mean(meanList)
stdevOfSample = st.stdev(meanList)
print("-------------------------------------------")
print("Mean of sample is" , meanOfSample)
print("Stdev of sample is" , stdevOfSample)
graph = pf.create_distplot( [meanList] , ["Savings"] , show_hist=False )
graph.show()
#----------------------------------Completed 🎊🎉--------------------------------------------------
| GargiJadhav/Project---112-113 | code.py | code.py | py | 4,490 | python | en | code | 0 | github-code | 13 |
37248575394 | from hashlib import new
from re import L
from this import d
import numpy as np
from collections import defaultdict, namedtuple
from network import Network
import scipy.stats as stats
from scipy import signal
import matplotlib.pyplot as plt
class Admissible_Path(object):
    """Empty placeholder class; Algorithm.__init__ builds its own
    'Admissible_Path' namedtuple instead of using this."""

    def __init__(self):
        super().__init__()
class Algorithm(object):
    """Label-correcting search over a network with random link travel times.

    Per node it keeps a set of "admissible" paths whose travel-time pmfs
    (discretised on a fixed grid) are mutually non-dominated under
    first-order stochastic dominance (compared via their CDFs).
    """
    def __init__(self, network, search_type):
        super(Algorithm, self).__init__()
        self.search_type = search_type  # "BSFS", "DFS" or "BFS"
        self.network = network
        self.inf = 1e+8
        # convolution parameters: start, end, steps
        self.start = -50
        self.end = 50
        self.dx = 0.01
        self.grid = np.arange(self.start, self.end, self.dx)
        self.Admissible_Path = namedtuple(
            'Admissible_Path', ["pre_node", "pmf"])
    def plot(self, F, conved_F):
        """Plot two CDFs over the discretisation grid (blocks on plt.show)."""
        fig, ax = plt.subplots(1, 1)
        ax.plot(self.grid, F, 'g-', lw=3, alpha=0.6, label='F')
        ax.plot(self.grid, conved_F, 'b-', lw=3,
                alpha=0.6, label='conved_F')
        plt.legend()
        plt.show()
    def determine_next_node(self):
        """Pick the next node to scan from seq_list per the search strategy.

        NOTE(review): the "BSFS" branch reads self.n_labels, which is never
        initialised in this file -- it would raise AttributeError.
        """
        # Dijkstra
        if self.search_type == "BSFS":
            comp = 1e+8
            best_n = None
            for n in self.seq_list:
                if self.n_labels[n] <= comp:
                    comp = self.n_labels[n]
                    best_n = n
            return best_n
        # NOTE here I used a list to manipulate like "stack" and "heap"
        # the computational efficieny is not good. you can improve it easily, e.g., a deque structure
        # depth-first search
        elif self.search_type == "DFS":
            return self.seq_list[-1]
        # breadth-first search
        elif self.search_type == "BFS":
            return self.seq_list[0]
    def conv_link(self, f_pmf, link_info):
        """Convolve a path pmf with one link's travel-time pmf.

        A length-1 f_pmf is treated as a deterministic time and expanded to
        an impulse on the grid.  NOTE(review): link_pmf is unbound for link
        types other than "deterministic"/"gamma".
        """
        if len(f_pmf) == 1:
            loc = int((f_pmf[0]-self.start) / self.dx)
            f_pmf = signal.unit_impulse(self.grid.shape, loc)
        if link_info.name == "deterministic":
            loc = int((link_info.mean-self.start) / self.dx)
            link_pmf = signal.unit_impulse(self.grid.shape, loc)
        elif link_info.name == "gamma":
            # Gamma parameterised by mean and coefficient of variation.
            shape = 1 / (link_info.cv ** 2)
            scale = link_info.mean / shape
            gamma = stats.gamma(a=shape, scale=scale)
            link_pmf = gamma.pdf(self.grid) * self.dx
        conv_pmf = signal.fftconvolve(f_pmf, link_pmf, 'same')
        conv_pmf = conv_pmf/sum(conv_pmf)  # renormalise after truncation
        return conv_pmf
    def comp_dominance(self, conved_pmf, j):
        """Compare a candidate pmf against node j's existing paths.

        Returns (indices of existing paths the candidate dominates,
        flag: candidate is dominated by some existing path).
        """
        conved_F = np.cumsum(conved_pmf)
        dominating_ps = []
        drop_flag = False
        if len(self.n_label_path_dict[j]) == 0:
            return [], False
        for p_idx, path in self.n_label_path_dict[j].items():
            F = np.cumsum(path.pmf)
            # FSD with a small numerical tolerance on the CDF comparison.
            if np.all(F - conved_F > -0.001):
                print("conved is dominated")
                drop_flag = True
            if np.all(conved_F - F > -0.001):
                print("conved is dominating")
                dominating_ps.append(p_idx)
        self.plot(F, conved_F)
        print("res:", dominating_ps, drop_flag)
        return dominating_ps, drop_flag
    def init_algo(self):
        ''' initialization '''
        # source and destination are set to be 0
        # maintain "node -> admissible paths"
        self.n_label_path_dict = defaultdict(dict)
        # pmf of the dummy path
        loc = int((0-self.start) / self.dx)
        pmf_ss = signal.unit_impulse(self.grid.shape, loc)
        ad_path = self.Admissible_Path(None, pmf_ss)
        self.n_label_path_dict[self.network.s] = {0: ad_path}
        self.seq_list = [self.network.s] # starting from source node
    def main_loop(self):
        """Scan nodes until seq_list empties, extending every admissible
        path of the scanned node over each outgoing link and keeping only
        non-dominated extensions at the child."""
        count = 0
        self.visited_nodes = []
        while len(self.seq_list) > 0:
            print("-------------------", count, "-------------------")
            # select the best node by current label
            i = self.determine_next_node()
            child_nodes = self.network.get_children(i)
            print("scan list:", self.seq_list)
            print("node i :", i)
            print("child js:", child_nodes)
            for j in child_nodes:
                # if j in self.visited_nodes:
                #     continue
                print("updating", j, "....")
                link_info = self.network.link_info_dict[(i, j)]
                for _, existing_path in self.n_label_path_dict[i].items():
                    conved_pmf = self.conv_link(existing_path.pmf, link_info)
                    dominating_ps, drop_flag = self.comp_dominance(
                        conved_pmf, j)
                    # if k is not dominated by any exisiting path
                    # add it
                    if not drop_flag:
                        # delete the path dominated by k
                        for p in dominating_ps:
                            self.n_label_path_dict[j].pop(p, None)
                        # add k to "Gamma" set
                        new_p = self.Admissible_Path(
                            pre_node=i, pmf=conved_pmf)
                        if len(self.n_label_path_dict[j]) == 0:
                            self.n_label_path_dict[j][0] = new_p
                        else:
                            exist_max = max(self.n_label_path_dict[j])
                            self.n_label_path_dict[j][exist_max+1] = new_p
                # update scan node list
                # if j not in self.seq_list and j is not self.network.d:
                if j not in self.seq_list:
                    self.seq_list.append(j)
            self.seq_list.remove(i)
            self.visited_nodes.append(i)
            count += 1
            # if count >= 10:
            #     break
    def retrieve_res(self):
        """Print every node's admissible paths (predecessor nodes only)."""
        for node, path_dict in self.n_label_path_dict.items():
            print('----------', node, "-----------")
            for _, path in path_dict.items():
                print(path.pre_node)
        # node = self.network.d
        # while node is not None:
        #     print(node)
        #     for _, path in self.n_label_path_dict[node].items():
        #         if path.pre_node != self.network.s:
        #             node = path.pre_node
        #         else:
        #             node = None
        # for node, paths in self.n_label_path_dict.items():
        #     print(node, len(paths))
if __name__ == "__main__":
    # Run the search on the Braess network with depth-first node selection.
    # network = Network("pentagram", source=1, destination=4)
    network = Network("Braess", source=0, destination=3)
    # "best-first search" is not defined for random travel time
    # search_type = "BSFS"
    search_type = "DFS"
    # search_type = "BFS"
    algo = Algorithm(network, search_type)
    algo.init_algo()
    algo.main_loop()
    algo.retrieve_res()
| Minyu-Shen/SD_shortest_path | main.py | main.py | py | 6,902 | python | en | code | 0 | github-code | 13 |
15470330019 |
import re

from classes import th, pi, os, time #Modules
from classes import low, high, dev_pins, music_path, water_v, play_v, device, operation
from classes import er_pin, u_e, o_e, o_f #Variables
class switch():
    """A single GPIO-backed on/off device looked up by name in dev_pins."""

    def __init__(self, name):
        global dev_pins
        self.name = name
        print(self.name)
        self.pin = dev_pins[name]
        print(self.pin)
        # Bug fix: the original called the bare name get() (NameError, and
        # get() requires a pin); read this device's own pin instead.
        self.__status = self.get(self.pin)

    def get(self, g_pin):
        """Return the current digital level (low/high) of *g_pin*."""
        return pi.digitalRead(g_pin)

    def toggle(self, state, t_pin):
        """Drive *t_pin* high when *state* is truthy, low otherwise, and
        record the new level once the read-back confirms it."""
        global low, high
        i = low
        if state:
            i = high
        # Bug fix: the original wrote to an undefined name 'pin'.
        pi.digitalWrite(t_pin, i)
        if self.get(t_pin) == i:  # bug fix: was a bare get() call
            self.__status = i
class fan(switch):
    """Two-pin fan controller driven by the global device/operation command.

    Speed encoding on (l_bit, h_bit) inferred from the transitions below --
    confirm against the wiring: off=(0,0), low=(1,0), mid=(0,1), high=(1,1).
    """
    __f_state = 0  # last applied operation code (0 off, 1 on, 2 up, 3 down)

    def __init__(self, name):
        global dev_pins
        self.name = name
        self.l_bit = dev_pins[name][0]  # low-order speed pin
        self.h_bit = dev_pins[name][1]  # high-order speed pin

    def fan_d(self):
        """Apply the current global *operation* to this fan.

        Bug fix: every reference to __f_state, get and toggle previously
        used bare names (NameError at runtime); they are class/instance
        members and are now accessed through self.
        """
        global low, high, device, operation
        fan_ops = {False: 0, True: 1, "up": 2, "down": 3}
        try:
            var = fan_ops[operation]
        except KeyError:
            var = 4  # unknown command: fall back to a plain on/off toggle
        if self.__f_state == var:
            return True
        elif var == 1:  # turn on (at high speed)
            if not self.__f_state:
                # self.toggle(high, self.l_bit)  # fan speed
                self.toggle(high, self.h_bit)
            self.__f_state = var
        elif not var:  # turn off: clear both speed bits
            self.toggle(low, self.l_bit)
            self.toggle(low, self.h_bit)
            self.__f_state = var
        elif var == 2:  # step speed up
            if self.get(self.l_bit) and (not self.get(self.h_bit)):
                self.toggle(high, self.h_bit)
                self.toggle(low, self.l_bit)
            elif (not self.get(self.l_bit)) and self.get(self.h_bit):
                self.toggle(high, self.l_bit)
            elif not (self.get(self.l_bit) and self.get(self.h_bit)):
                self.toggle(high, self.l_bit)
                self.toggle(low, self.h_bit)
            self.__f_state = var
        elif var == 3:  # step speed down
            if self.get(self.l_bit) and self.get(self.h_bit):
                self.toggle(low, self.l_bit)
            elif (not self.get(self.l_bit)) and self.get(self.h_bit):
                self.toggle(low, self.h_bit)
                self.toggle(high, self.l_bit)
            elif self.get(self.l_bit) and (not self.get(self.h_bit)):
                self.toggle(low, self.l_bit)
            self.__f_state = var
        elif var == 4:  # unrecognised command: toggle on/off
            if not self.__f_state:
                # self.toggle(high, self.l_bit)  # fan speed
                self.toggle(high, self.h_bit)
                self.__f_state = high
                var = high
            else:
                self.toggle(low, self.l_bit)
                self.toggle(low, self.h_bit)
                self.__f_state = low
                var = low
        if self.__f_state != var:
            # NOTE(review): every branch above assigns __f_state = var, so
            # this condition is always False and the log never fires --
            # the pre-update state was probably intended; confirm.
            log("Executing the command\n\t"+str(device+" "+operation)+".").start()
        return True
class play(th.Thread):
    # Thread wrapper: plays the album matching *dir_n* via play_d() in the
    # background once start() is called.
    def __init__(self, dir_n):
        th.Thread.__init__(self)
        self.dir_name = dir_n
    def run(self):
        play_d(self.dir_name)
def play_d(dir_name):
    """Find the first directory under *music_path* whose name contains
    *dir_name* (case-insensitive) and print a vlc command per file in it.
    Logs an error (and blinks the LED) if no matching album is found.
    """
    global music_path
    file_list = []
    check_name = re.compile('.*'+dir_name+'.*', re.I)
    for root, dirs, files in os.walk(music_path):
        # Bug fix: in Python 3 filter() returns a lazy iterator, which is
        # always truthy and has no .pop(); materialise it as a list first.
        found = [d for d in dirs if check_name.match(d)]
        if found:
            path = os.path.join(root, found.pop())
            file_list = os.listdir(path)
            break
    if file_list:
        for i in file_list:
            # Bug fix: the missing space after "vlc" made the command unusable.
            s_cmd = "vlc " + path + '/' + i
            print(s_cmd)
            # os.system(s_cmd)
    else:
        log("Album not found with name \""+dir_name+"\".").start()
    return
def error_led():
    """Blink the error LED (er_pin) three times at 0.5 s intervals,
    leaving it low afterwards."""
    global low, high, er_pin
    pi.digitalWrite(er_pin, low)  # make pin 13 low
    for i in range(0, 3):  # blink LED 3 times
        time.sleep(0.5)  # delay 0.5sec
        pi.digitalWrite(er_pin, high)  # make pin high
        time.sleep(0.5)
        pi.digitalWrite(er_pin, low)  # make pin low
class log(th.Thread):
    """Append *error* with a timestamp to Err_log.txt immediately; once
    start()ed, blink the error LED in a background thread."""

    def __init__(self, error):
        # Bug fix: Thread.__init__ was never called, so .start() raised
        # "thread.__init__() not called".
        th.Thread.__init__(self)
        self.error = error
        self.log_d()  # write the entry even if the thread is never started

    def run(self):
        error_led()

    def log_d(self):
        """Append a timestamped error line to the log file."""
        t_data = time.localtime()
        with open("Err_log.txt", 'a') as f:
            f.write("Date & Time: "+str(t_data[2])+"-"+ str(t_data[1])+ ", "+
                    str(t_data[3])+ ":"+ str(t_data[4])+ ":"+ str(t_data[5])+"\n\tError: "+self.error+"\n")
def water_d(sw):
    """Water-tank fill-control loop for motor switch *sw*.

    Presumably u_e/o_e are under/over level-sensor pins and o_f an
    overflow pin -- confirm against the 'classes' module.

    Bug fix: switch.get() and switch.toggle() require a pin argument; the
    original bare sw.get() / sw.toggle(high) calls raised TypeError.  The
    argument-less calls are read as referring to the motor's own pin.
    """
    global water_v, low, high, u_e, o_e, o_f
    water_v = high
    while water_v:
        if ((not sw.get(o_e)) and sw.get(u_e)) or sw.get(sw.pin):
            if not sw.get(sw.pin):
                sw.toggle(high, sw.pin)  # turn on
            while sw.get(u_e) and water_v:
                if sw.get(o_f):
                    break
                elif not sw.get(sw.pin):
                    break
                time.sleep(5)  # 5 seconds
            if sw.get(sw.pin):
                sw.toggle(low, sw.pin)  # turn off
        time.sleep(300)  # 5 minutes
class water(th.Thread, switch):
    # Thread running the water_d() fill loop for one motor pin.
    # NOTE(review): switch.__init__ is intentionally not called, so only
    # self.pin (set here) and the inherited get/toggle methods are usable.
    def __init__(self, pin):
        th.Thread.__init__(self)
        self.pin = pin
    def run(self):
        water_d(self)
def motor():
    """Start the water-filling thread unless one is already running, and
    log a failure if it does not come alive within two seconds."""
    global water_v, low  # bug fix: water_v was assigned without 'global'
    if not water_v:
        # NOTE(review): the original called water() with no pin argument
        # (TypeError); the intended pin cannot be determined from this
        # file -- confirm the dev_pins key against the 'classes' module.
        w = water(dev_pins["water"])  # create water thread
        w.start()  # bug fix: was w.Start()
        time.sleep(2)
        # Bug fix: 'not w.isAlive' tested the truthy method object (always
        # False); call the modern is_alive() instead.
        if not w.is_alive():
            water_v = low
            log("Unable to start water thread.").start()
| rushendranadh/Jarvis | jun_30/new_file.py | new_file.py | py | 4,238 | python | en | code | 1 | github-code | 13 |
31145422596 | import os
import numpy
import module3d
import exportutils
import log
class CProxyRefVert:
    """A proxy-mesh vertex expressed as a weighted combination of up to
    three human-mesh vertices plus a scaled offset vector."""

    def __init__(self, parent, scale):
        self._parent = parent  # human mesh; .coord is indexed by vertex number
        self._scale = scale    # factor applied to the stored offset

    def fromSingle(self, words, vnum, proxy):
        """Initialise from a one-vertex spec ['v0']; proxy vertex *vnum*
        follows human vertex v0 exactly.  Returns self."""
        self._exact = True
        v0 = int(words[0])
        self._verts = (v0, v0, v0)
        self._weights = (1, 0, 0)
        self._offset = numpy.array((0, 0, 0), float)
        self.addProxyVertWeight(proxy, v0, vnum, 1)
        return self

    def fromTriple(self, words, vnum, proxy):
        """Initialise from a spec 'v0 v1 v2 w0 w1 w2 [d0 d1 d2]' -- three
        vertex indices, three weights, optional offset.  Returns self."""
        self._exact = False
        v0, v1, v2 = int(words[0]), int(words[1]), int(words[2])
        w0, w1, w2 = float(words[3]), float(words[4]), float(words[5])
        if len(words) > 6:
            d0, d1, d2 = float(words[6]), float(words[7]), float(words[8])
        else:
            (d0, d1, d2) = (0, 0, 0)
        self._verts = (v0, v1, v2)
        self._weights = (w0, w1, w2)
        self._offset = numpy.array((d0, d1, d2), float)
        self.addProxyVertWeight(proxy, v0, vnum, w0)
        self.addProxyVertWeight(proxy, v1, vnum, w1)
        self.addProxyVertWeight(proxy, v2, vnum, w2)
        return self

    def addProxyVertWeight(self, proxy, v, pv, w):
        """Record that human vertex *v* influences proxy vertex *pv* with
        weight *w*."""
        # setdefault replaces the original try/except-KeyError pattern and
        # the dead trailing 'return'.
        proxy.vertWeights.setdefault(v, []).append((pv, w))

    def getHumanVerts(self):
        return self._verts

    def getWeights(self):
        return self._weights

    def getOffset(self):
        return self._offset

    def getCoord(self):
        """Weighted sum of the three parent coords plus the scaled offset."""
        rv0, rv1, rv2 = self._verts
        v0 = self._parent.coord[rv0]
        v1 = self._parent.coord[rv1]
        v2 = self._parent.coord[rv2]
        w0, w1, w2 = self._weights
        return (w0 * v0 + w1 * v1 + w2 * v2 + self._scale * self._offset)
#
# class CProxy
#
class CProxy:
    """A proxy mesh (clothes, alternative topology, ...) attached to the human mesh.

    Holds the geometry (refVerts/faces/texVerts), material and texture file
    references, per-layer UV data, and modifier/deletion metadata parsed by
    readProxyFile().
    """
    def __init__(self, file, typ, layer):
        self.name = None
        self.type = typ
        self.file = file
        self.uuid = None
        self.basemesh = "alpha_7"
        self.tags = []
        self.vertWeights = {} # (proxy-vert, weight) list for each parent vert
        self.refVerts = []
        # Optional per-axis scale calibration data: (vert1, vert2, denominator).
        self.xScaleData = None
        self.yScaleData = None
        self.zScaleData = None
        self.z_depth = 50
        self.cull = False
        self.transparent = False
        self.layer = layer
        self.material = CMaterial()
        self.faces = []
        self.texFaces = []
        self.texVerts = []
        # UV data per layer index; texFaces/texVerts alias the active layer.
        self.texFacesLayers = {}
        self.texVertsLayers = {}
        self.useBaseMaterials = False
        self.faceNumbers = []
        self.rig = None
        # Texture channel file references: (folder, filename) tuples or None.
        self.mask = None
        self.texture = None
        self.specular = None
        self.bump = None
        self.normal = None
        self.displacement = None
        self.transparency = None
        self.specularStrength = 1.0
        self.bumpStrength = 1.0
        self.normalStrength = 1.0
        self.dispStrength = 0.2
        self.obj_file = None
        self.material_file = None
        self.maskLayer = -1
        self.textureLayer = 0
        self.objFileLayer = 0
        self.uvtexLayerName = {0 : "UVTex"}
        self.materials = []
        self.constraints = []
        # Vertex adjacency map, built lazily by selectConnected().
        self.neighbors = {}
        self.deleteGroups = []
        self.deleteVerts = None
        self.wire = False
        self.cage = False
        self.modifiers = []
        self.shapekeys = []
        self.weights = None
        self.clothings = []
        self.transparencies = dict()
        self.textures = []
        return
    def __repr__(self):
        return ("<CProxy %s %s %s %s>" % (self.name, self.type, self.file, self.uuid))
    def update(self, obj):
        """Recompute all proxy vertex positions and push them into mesh `obj`."""
        coords = [refVert.getCoord() for refVert in self.refVerts]
        obj.changeCoords(coords)
    def getUuid(self):
        """Return the uuid if one was given, falling back to the name."""
        if self.uuid:
            return self.uuid
        else:
            return self.name
#
# classes CMaterial, CTexture
#
class CTexture:
    """A texture image path plus the (channel, value) influence pairs that use it."""

    def __init__(self, fname):
        self.file = fname
        self.types = []
class CMaterial:
    """Material description: raw setting lists, shading colors and transparency."""

    def __init__(self):
        self.name = None
        # Raw (key, value) setting lists populated by readMaterial().
        self.settings = []
        self.textureSettings = []
        self.mtexSettings = []
        # Shading defaults (Blender-style).
        self.diffuse_color = (0.8, 0.8, 0.8)
        self.diffuse_intensity = 0.8
        self.specular_color = (1, 1, 1)
        self.specular_intensity = 0.1
        self.specular_hardness = 25
        # Transparency defaults.
        self.transparency = 1
        self.translucency = 0.0
        self.ambient_color = (0, 0, 0)
        self.emit_color = (0, 0, 0)
        self.use_transparency = False
        self.alpha = 1
        # CTexture instances attached to this material.
        self.textures = []
#
# class CMeshInfo:
#
class CMeshInfo:
    """A mesh plus its vertex-group weights and shapekeys, ready for export."""
    def __init__(self, name):
        self.name = name
        self.object = None
        self.weights = {}
        self.shapes = []
        self.vertexMask = None
        self.faceMask = None
        self.vertexMapping = None # Maps vertex index of original object to the attached filtered object
    def fromProxy(self, coords, texVerts, faceVerts, faceUvs, weights, shapes):
        """Build a new Object3D from proxy geometry; quads only.

        Raises NameError when a face does not have exactly four corners.
        """
        obj = self.object = module3d.Object3D(self.name)
        obj.setCoords(coords)
        obj.setUVs(texVerts)
        for fv in faceVerts:
            if len(fv) != 4:
                raise NameError("Mesh %s has non-quad faces and can not be handled by MakeHuman" % self.name)
        obj.createFaceGroup("Full Object")
        obj.setFaces(faceVerts, faceUvs)
        self.weights = weights
        self.shapes = shapes
        return self
    def fromObject(self, object3d, weights, shapes):
        """Wrap an existing Object3D (no geometry rebuilt); adopts its name."""
        self.object = object3d
        self.name = object3d.name
        self.weights = weights
        self.shapes = shapes
        return self
    def __repr__(self):
        return ("<CMeshInfo %s w %d t %d>" % (self.object, len(self.weights), len(self.shapes)))
def getFileName(folder, file, suffix):
    """Resolve `folder` and ensure `file` carries an extension.

    Returns (resolved_folder, filename), appending `suffix` when the file
    name has no extension.

    Bug fix: os.path.split was used instead of os.path.splitext, so `ext`
    held the basename (almost always truthy) and the suffix was never
    appended.
    """
    folder = os.path.realpath(os.path.expanduser(folder))
    (name, ext) = os.path.splitext(file)
    if ext:
        return (folder, file)
    else:
        return (folder, file + suffix)
#
# readProxyFile(obj, file, evalOnLoad=False, scale=1.0):
#
# Parser states shared by readProxyFile() and CUvSet.read(): a "# <section>"
# header line switches the current state, which selects how subsequent data
# lines are interpreted.
doFaces = 2
doMaterial = 3
doTexVerts = 4
doObjData = 5
doWeights = 6
doRefVerts = 7
doFaceNumbers = 8
doTexFaces = 9
doDeleteVerts = 10
def readProxyFile(obj, file, evalOnLoad=False, scale=1.0):
    """Parse a proxy/clothes definition file into a CProxy.

    obj is the parent (human) mesh; file is either a path string or a
    CProxyFile-like object.  Returns the populated CProxy, a placeholder
    CProxy when file is empty, or None when the file (or its obj file when
    evalOnLoad is set) cannot be read.
    """
    if not file:
        return CProxy(None, 'Proxy', 2)
    elif isinstance(file, basestring):
        pfile = exportutils.config.CProxyFile()
        pfile.file = file
    else:
        pfile = file
    folder = os.path.dirname(pfile.file)
    objfile = None
    try:
        tmpl = open(pfile.file, "rU")
    except:
        tmpl = None
    if tmpl == None:
        log.error("*** Cannot open %s", pfile.file)
        # Bug fix: an unreachable second return statement referencing the
        # undefined name `proxy` used to follow here; it has been removed.
        return None
    locations = {}
    tails = {}
    proxy = CProxy(pfile.file, pfile.type, pfile.layer)
    proxy.deleteVerts = numpy.zeros(len(obj.coord), bool)
    proxy.name = "MyProxy"
    useProjection = True
    ignoreOffset = False
    scales = numpy.array((1.0,1.0,1.0), float)
    status = 0
    vnum = 0
    for line in tmpl:
        words= line.split()
        if len(words) == 0:
            pass
        elif words[0] == '#':
            # A "# <keyword> ..." header: switch parser state or set metadata.
            theGroup = None
            if len(words) == 1:
                continue
            key = words[1]
            if key == 'verts':
                status = doRefVerts
            elif key == 'faces':
                status = doFaces
            elif key == 'weights':
                status = doWeights
                if proxy.weights == None:
                    proxy.weights = {}
                weights = []
                proxy.weights[words[2]] = weights
            elif key == 'material':
                status = doMaterial
                proxy.material.name = " ".join(words[2:])
            elif key == 'useBaseMaterials':
                proxy.useBaseMaterials = True
            elif key == 'faceNumbers':
                status = doFaceNumbers
            elif key == 'texVerts':
                status = doTexVerts
                if len(words) > 2:
                    layer = int(words[2])
                else:
                    layer = 0
                proxy.texVerts = []
                proxy.texVertsLayers[layer] = proxy.texVerts
            elif key == 'texFaces':
                status = doTexFaces
                if len(words) > 2:
                    layer = int(words[2])
                else:
                    layer = 0
                proxy.texFaces = []
                proxy.texFacesLayers[layer] = proxy.texFaces
            elif key == 'name':
                proxy.name = " ".join(words[2:])
            elif key == 'uuid':
                proxy.uuid = " ".join(words[2:])
            elif key == 'tag':
                proxy.tags.append( " ".join(words[2:]) )
            elif key == 'z_depth':
                proxy.z_depth = int(words[2])
            elif key == 'wire':
                proxy.wire = True
            elif key == 'cage':
                proxy.cage = True
            elif key == 'x_scale':
                proxy.xScaleData = getScaleData(words)
                scales[0] = getScale(proxy.xScaleData, obj, 0)
            elif key == 'y_scale':
                proxy.yScaleData = getScaleData(words)
                scales[1] = getScale(proxy.yScaleData, obj, 1)
            elif key == 'z_scale':
                proxy.zScaleData = getScaleData(words)
                scales[2] = getScale(proxy.zScaleData, obj, 2)
            elif key == 'use_projection':
                useProjection = int(words[2])
            elif key == 'ignoreOffset':
                ignoreOffset = int(words[2])
            elif key == 'delete':
                proxy.deleteGroups.append(words[2])
            elif key == 'delete_connected':
                selectConnected(proxy, obj, int(words[2]))
            elif key == "delete_verts":
                status = doDeleteVerts
            elif key == 'rig':
                proxy.rig = getFileName(folder, words[2], ".rig")
            elif key == 'mask':
                proxy.mask = getFileName(folder, words[2], ".png")
                if len(words) > 3:
                    proxy.maskLayer = int(words[3])
            elif key == 'specular':
                proxy.specular = getFileName(folder, words[2], ".png")
                if len(words) > 4:
                    proxy.specularStrength = float(words[4])
            elif key == 'bump':
                proxy.bump = getFileName(folder, words[2], ".png")
                if len(words) > 4:
                    proxy.bumpStrength = float(words[4])
            elif key == 'normal':
                proxy.normal = getFileName(folder, words[2], ".png")
                if len(words) > 4:
                    proxy.normalStrength = float(words[4])
            elif key == 'transparency':
                proxy.transparency = getFileName(folder, words[2], ".png")
            elif key == 'displacement':
                proxy.displacement = getFileName(folder, words[2], ".png")
                if len(words) > 4:
                    proxy.dispStrength = float(words[4])
            elif key == 'texture':
                proxy.texture = getFileName(folder, words[2], ".png")
                if len(words) > 3:
                    proxy.textureLayer = int(words[3])
            elif key == 'objfile_layer':
                proxy.objFileLayer = int(words[2])
            elif key == 'uvtex_layer':
                proxy.uvtexLayerName[int(words[2])] = words[3]
            elif key == 'material_file':
                pass
                #proxy.material_file = getFileName(folder, words[2], ".mhx")
            elif key == 'obj_file':
                proxy.obj_file = getFileName(folder, words[2], ".obj")
            elif key == 'backface_culling':
                proxy.cull = words[2].lower() in ["1", "yes", "true", "enable", "enabled"]
            elif key == 'transparent':
                proxy.transparent = words[2].lower() in ["1", "yes", "true", "enable", "enabled"]
            elif key == 'clothing':
                if len(words) > 3:
                    clothingPiece = (words[2], words[3])
                else:
                    clothingPiece = (words[2], None)
                proxy.clothings.append(clothingPiece)
            elif key == 'transparencies':
                uuid = words[2]
                proxy.transparencies[uuid] = words[3].lower() in ["1", "yes", "true", "enable", "enabled"]
            elif key == 'textures':
                proxy.textures.append( (words[2], words[3]) )
            elif key == 'subsurf':
                levels = int(words[2])
                if len(words) > 3:
                    render = int(words[3])
                else:
                    render = levels+1
                proxy.modifiers.append( ['subsurf', levels, render] )
            elif key == 'shrinkwrap':
                offset = float(words[2])
                proxy.modifiers.append( ['shrinkwrap', offset] )
            elif key == 'solidify':
                thickness = float(words[2])
                offset = float(words[3])
                proxy.modifiers.append( ['solidify', thickness, offset] )
            elif key == 'shapekey':
                proxy.shapekeys.append( words[2] )
            elif key == 'basemesh':
                proxy.basemesh = words[2]
            else:
                # Unknown keywords are silently ignored.
                pass
        elif status == doObjData:
            if words[0] == 'vt':
                newTexVert(1, words, proxy)
            elif words[0] == 'f':
                newFace(1, words, theGroup, proxy)
            elif words[0] == 'g':
                theGroup = words[1]
        elif status == doFaceNumbers:
            proxy.faceNumbers.append(line)
        elif status == doRefVerts:
            # One ref vert per line: either a single exact vertex or a
            # weighted triple (optionally with an offset vector).
            refVert = CProxyRefVert(obj, scales)
            proxy.refVerts.append(refVert)
            if len(words) == 1:
                refVert.fromSingle(words, vnum, proxy)
            else:
                refVert.fromTriple(words, vnum, proxy)
            vnum += 1
        elif status == doFaces:
            newFace(0, words, theGroup, proxy)
        elif status == doTexVerts:
            newTexVert(0, words, proxy)
        elif status == doTexFaces:
            newTexFace(words, proxy)
        elif status == doMaterial:
            readMaterial(line, proxy.material, proxy, False)
        elif status == doWeights:
            v = int(words[0])
            w = float(words[1])
            weights.append((v,w))
        elif status == doDeleteVerts:
            # Either single indices or "a - b" inclusive ranges.
            sequence = False
            for v in words:
                if v == "-":
                    sequence = True
                else:
                    v1 = int(v)
                    if sequence:
                        for vn in range(v0,v1+1):
                            proxy.deleteVerts[vn] = True
                        sequence = False
                    else:
                        proxy.deleteVerts[v1] = True
                    v0 = v1
    if evalOnLoad and proxy.obj_file:
        if not copyObjFile(proxy):
            return None
    return proxy
#
# selectConnected(proxy, obj, vn):
#
def selectConnected(proxy, obj, vn):
    """Mark vertex `vn` and everything connected to it for deletion.

    Builds the proxy's vertex adjacency map lazily from the mesh faces on
    first use, then flood-fills proxy.deleteVerts via walkTree().

    Bug fix: the original iterated range(nVerts) with nVerts undefined
    (NameError on first call); the vertex count comes from the parent mesh.
    """
    if not proxy.neighbors:
        for n in range(len(obj.coord)):
            proxy.neighbors[n] = []
        # Every pair of distinct vertices sharing a face are neighbors.
        for fv in obj.fvert:
            for vn1 in fv:
                for vn2 in fv:
                    if vn1 != vn2:
                        proxy.neighbors[vn1].append(vn2)
    walkTree(proxy, vn)
    return
def walkTree(proxy, vn):
    """Flood-fill proxy.deleteVerts from `vn` over the neighbor graph (iterative DFS)."""
    pending = [vn]
    while pending:
        current = pending.pop()
        proxy.deleteVerts[current] = True
        for neighbor in proxy.neighbors[current]:
            if not proxy.deleteVerts[neighbor]:
                pending.append(neighbor)
    return
def deleteGroup(name, groups):
    """Return True when any pattern in `groups` occurs as a substring of `name`."""
    return any(part in name for part in groups)
def copyObjFile(proxy):
    """Load UVs and faces from the proxy's companion .obj file.

    Replaces proxy.texVerts/texFaces (registering them under the proxy's
    objFileLayer) and appends the obj file's faces.  Returns True on
    success, False when the file cannot be opened.
    """
    (folder, name) = proxy.obj_file
    objpath = os.path.join(folder, name)
    try:
        tmpl = open(objpath, "rU")
    except:
        log.error("*** Cannot open %s", objpath)
        return False
    proxy.texVerts = []
    proxy.texFaces = []
    layer = proxy.objFileLayer
    proxy.texVertsLayers[layer] = proxy.texVerts
    proxy.texFacesLayers[layer] = proxy.texFaces
    theGroup = None
    # Minimal .obj parser: only vt / f / g records are relevant here.
    for line in tmpl:
        words= line.split()
        if len(words) == 0:
            pass
        elif words[0] == 'vt':
            newTexVert(1, words, proxy)
        elif words[0] == 'f':
            newFace(1, words, theGroup, proxy)
        elif words[0] == 'g':
            theGroup = words[1]
    tmpl.close()
    return True
def getScaleData(words):
    """Parse an x/y/z_scale header line: two vertex indices and a denominator.

    `words` is the split "# <axis>_scale v1 v2 den" line.
    """
    return (int(words[2]), int(words[3]), float(words[4]))
def getScale(data, obj, index):
    """Scale factor along axis `index`: mesh distance between two calibration
    vertices divided by the reference denominator.  Returns 1.0 when no
    calibration data is available.
    """
    if not data:
        return 1.0
    vn1, vn2, den = data
    span = abs(obj.coord[vn1][index] - obj.coord[vn2][index])
    return span / den
def readMaterial(line, mat, proxy, multiTex):
    """Parse one material definition line into CMaterial `mat`.

    Values are appended as (key, value) pairs to mat.settings /
    mat.mtexSettings / mat.textureSettings depending on the key.  With
    multiTex, 'texture' lines create CTexture entries with (channel, value)
    pairs; otherwise the texture path is stored on the proxy.  Raises
    NameError for unknown keys.
    """
    words= line.split()
    key = words[0]
    if key in ['diffuse_color', 'specular_color', 'ambient', 'emit']:
        mat.settings.append( (key, [float(words[1]), float(words[2]), float(words[3])]) )
    elif key in ['diffuse_shader', 'specular_shader']:
        mat.settings.append( (key, words[1]) )
    elif key in ['use_shadows', 'use_transparent_shadows', 'use_transparency', 'use_raytrace']:
        mat.settings.append( (key, int(words[1])) )
    elif key in ['diffuse_intensity', 'specular_intensity', 'specular_hardness', 'translucency',
                 'alpha', 'specular_alpha']:
        mat.settings.append( (key, float(words[1])) )
    elif key in ['diffuse_color_factor', 'alpha_factor', 'translucency_factor']:
        mat.mtexSettings.append( (key, float(words[1])) )
    elif key in ['use_map_color_diffuse', 'use_map_alpha']:
        mat.mtexSettings.append( (key, int(words[1])) )
    elif key in ['use_alpha']:
        mat.textureSettings.append( (key, int(words[1])) )
    elif key == 'texture':
        fname = os.path.realpath(os.path.expanduser(words[1]))
        if multiTex:
            # Remaining words come in (channel, value) pairs.
            tex = CTexture(fname)
            nmax = len(words)
            n = 2
            while n < nmax:
                tex.types.append((words[n], words[n+1]))
                n += 2
            mat.textures.append(tex)
        else:
            proxy.texture = os.path.split(fname)
    else:
        raise NameError("Material %s?" % key)
    # 'alpha' is additionally mirrored onto the material itself.
    if key == 'alpha':
        mat.alpha = float(words[1])
        mat.use_transparency = True
class CUvSet:
    """An alternative UV layout loaded from a .mhuv-style file.

    Holds per-face material assignments, texture coordinates and texture
    faces; populated by read().
    """
    def __init__(self, name):
        self.name = name
        self.type = "UvSet"
        self.filename = None
        self.faceMaterials = None   # numpy array: material index per mesh face
        self.materials = []
        self.faceNumbers = []
        self.texVerts = []
        self.texFaces = []
    def read(self, human, filename):
        """Parse `filename` and resolve face materials against `human`'s mesh.

        Raises NameError when the file cannot be opened.
        """
        try:
            fp = open(filename, "r")
        except:
            raise NameError("Cannot open %s" % filename)
        status = 0
        # Same section-header driven state machine as readProxyFile().
        for line in fp:
            words = line.split()
            if words == []:
                continue
            elif words[0] == '#':
                if words[1] == "name":
                    self.name = words[2]
                elif words[1] == "material":
                    mat = CMaterial()
                    mat.name = words[2]
                    self.materials.append(mat)
                    status = doMaterial
                elif words[1] == "faceNumbers":
                    status = doFaceNumbers
                elif words[1] == "texVerts":
                    status = doTexVerts
                elif words[1] == "texFaces":
                    status = doTexFaces
            elif status == doMaterial:
                readMaterial(line, mat, self, True)
            elif status == doFaceNumbers:
                self.faceNumbers.append(line)
            elif status == doTexVerts:
                self.texVerts.append([float(words[0]), float(words[1])])
            elif status == doTexFaces:
                newTexFace(words, self)
        fp.close()
        self.filename = filename
        nFaces = len(human.meshData.fvert)
        self.faceMaterials = numpy.zeros(nFaces, int)
        fn = 0
        for line in self.faceNumbers:
            words = line.split()
            if len(words) < 2:
                log.debug(line)
                # NOTE(review): `halt` is undefined -- this deliberately raises
                # NameError to abort on a malformed faceNumbers line (legacy
                # debugging aid).
                halt
            elif words[0] == "ft":
                # "ft <mat>": material for one face.
                self.faceMaterials[fn] = int(words[1])
                fn += 1
            elif words[0] == "ftn":
                # "ftn <count> <mat>": same material for a run of faces.
                nfaces = int(words[1])
                mn = int(words[2])
                for n in range(nfaces):
                    self.faceMaterials[fn] = mn
                    fn += 1
        # Pad remaining faces with the last material seen.
        while fn < nFaces:
            self.faceMaterials[fn] = mn
            fn += 1
def getJoint(joint, obj, locations):
    """Return the cached position of `joint`, computing and caching it on first request."""
    if joint not in locations:
        locations[joint] = calcJointPos(obj, joint)
    return locations[joint]
def calcJointPos(obj, joint):
    """Return the centroid of the vertices in face group "joint-<joint>".

    Bug fix: an unreachable duplicate return statement (and a dead
    commented-out implementation) followed the first return; removed.
    """
    verts = obj.getVerticesForGroups(["joint-" + joint])
    coords = obj.coord[verts]
    return coords.mean(axis=0)
def newFace(first, words, group, proxy):
    """Parse one face line ("v/vt ..." corners, starting at words[first]).

    Appends (vertex_indices, group) to proxy.faces and, when UV indices are
    present, the matching texface to proxy.texFaces.  Indices are converted
    from the 1-based .obj convention to 0-based.  Raises NameError when only
    some corners carry UV indices.
    """
    face = []
    texface = []
    nCorners = len(words)
    for n in range(first, nCorners):
        numbers = words[n].split('/')
        face.append(int(numbers[0]) - 1)
        if len(numbers) > 1:
            texface.append(int(numbers[1]) - 1)
    proxy.faces.append((face, group))
    if texface:
        proxy.texFaces.append(texface)
        if len(face) != len(texface):
            # Bug fix: the message used printf-style arguments that NameError
            # does not format; interpolate them explicitly.
            raise NameError("texface %s %s" % (face, texface))
    return
def newTexFace(words, proxy):
    """Append one texture face (a list of UV-vertex indices) parsed from `words`."""
    proxy.texFaces.append(list(map(int, words)))
def newTexVert(first, words, proxy):
    """Append one UV coordinate parsed from words[first:] as floats."""
    proxy.texVerts.append(list(map(float, words[first:])))
def getMeshInfo(obj, proxy, config, rawWeights, rawShapes, rigname):
    """Build a CMeshInfo for export, either from the proxy or directly from `obj`.

    With a proxy: scales proxy vertex coordinates by config.scale, picks UV
    data from the obj-file layer, and remaps weights/shapes onto the proxy.
    Without: wraps obj with the raw weights and shapes unchanged.
    NOTE(review): `rigname` is accepted but unused in this code path.
    """
    if proxy:
        coords = [config.scale*refVert.getCoord() for refVert in proxy.refVerts]
        faceVerts = [[v for v in f] for (f,g) in proxy.faces]
        if proxy.texVerts:
            texVerts = proxy.texVertsLayers[proxy.objFileLayer]
            texFaces = proxy.texFacesLayers[proxy.objFileLayer]
            # Truncate faces to the number of texture faces available.
            fnmax = len(texFaces)
            faceVerts = faceVerts[:fnmax]
        else:
            texVerts = []
            texFaces = []
        weights = getProxyWeights(rawWeights, proxy)
        shapes = getProxyShapes(rawShapes, proxy, config.scale)
        meshInfo = CMeshInfo(proxy.name).fromProxy(coords, texVerts, faceVerts, texFaces, weights, shapes)
    else:
        meshInfo = CMeshInfo(obj.name).fromObject(obj, rawWeights, rawShapes)
    return meshInfo
def getProxyWeights(rawWeights, proxy):
    """Remap human vertex-group weights onto proxy vertices.

    For each group, every (human_vert, weight) entry is distributed over
    the proxy vertices bound to that human vertex; products below 1e-4 are
    dropped, and groups that end up empty are omitted.  Duplicate proxy
    vertices are merged by fixProxyVGroup().

    Idiom cleanup: iterate rawWeights.items() instead of .keys() plus
    indexing, and drop the manual `empty` flag in favor of a truthiness
    check on the accumulated group.
    """
    if not rawWeights:
        return {}
    weights = {}
    for key, groupWeights in rawWeights.items():
        vgroup = []
        for (v, wt) in groupWeights:
            for (pv, w) in proxy.vertWeights.get(v, []):
                pw = w * wt
                if pw > 1e-4:
                    vgroup.append((pv, pw))
        if vgroup:
            weights[key] = fixProxyVGroup(vgroup)
    return weights
def fixProxyVGroup(vgroup):
    """Collapse repeated proxy-vertex indices by summing their weights.

    Entries with a merged weight <= 1e-4 are dropped.  Note: consumes the
    input list (it is sorted in place and emptied), and emits entries in
    descending vertex-index order.
    """
    merged = []
    vgroup.sort()
    currentVert, currentWeight = -1, 0.0
    while vgroup:
        vert, weight = vgroup.pop()
        if vert == currentVert:
            currentWeight += weight
        else:
            if currentVert >= 0 and currentWeight > 1e-4:
                merged.append((currentVert, currentWeight))
            currentVert, currentWeight = vert, weight
    if currentVert >= 0 and currentWeight > 1e-4:
        merged.append((currentVert, currentWeight))
    return merged
def getProxyShapes(rawShapes, proxy, scale):
    """Remap human shapekey deltas onto proxy vertices, scaled by `scale`.

    Only applies to 'Proxy' and 'Clothes' proxies; shapes that produce no
    proxy entries are skipped.  Duplicate proxy vertices are merged by
    fixProxyShape().
    """
    if not rawShapes or proxy.type not in ('Proxy', 'Clothes'):
        return []
    shapes = []
    for key, rawShape in rawShapes:
        entries = []
        for v, (dx, dy, dz) in rawShape.items():
            for pv, w in proxy.vertWeights.get(v, []):
                entries.append((pv, scale * w * dx, scale * w * dy, scale * w * dz))
        if entries:
            shapes.append((key, fixProxyShape(entries)))
    return shapes
def fixProxyShape(shape):
    """Merge duplicate proxy-vertex shape deltas by summing them.

    Returns {pv: (dx, dy, dz)} keeping only deltas whose squared length
    exceeds 1e-8.  Note: sorts the input list in place.
    """
    merged = {}
    shape.sort()
    currVert = -1
    currDx = currDy = currDz = 0.0
    for vert, dx0, dy0, dz0 in shape:
        if vert == currVert:
            currDx += dx0
            currDy += dy0
            currDz += dz0
        else:
            if currVert >= 0 and (currDx*currDx + currDy*currDy + currDz*currDz) > 1e-8:
                merged[currVert] = (currDx, currDy, currDz)
            currVert, currDx, currDy, currDz = vert, dx0, dy0, dz0
    if currVert >= 0 and (currDx*currDx + currDy*currDy + currDz*currDz) > 1e-8:
        merged[currVert] = (currDx, currDy, currDz)
    return merged
| KoenBuys/makehuman_datagen | apps/mh2proxy.py | mh2proxy.py | py | 25,934 | python | en | code | 5 | github-code | 13 |
29030601269 | from sympy import *
import numpy as np
# Solve a homogeneous linear system A.x = 0.
# Matrix dimensions come from stdin; the coefficient matrix is read from
# matrix.txt (one whitespace-separated row per line).
rows,cols=map(int,input().split())
A,p,k,tvf,fgh=[],[],[],[],[]
blank_list=[]
for i in range(cols):
    blank_list.append(0)
with open('matrix.txt') as f:
    pfg = f.read().strip().split('\n')
    A = []
    for pf in pfg[0:rows]:
        e = pf.split()
        elements=e[0:cols]
        int_elements = []
        for element in elements:
            int_elements.append(int(element))
        A.append(int_elements)
# Bug fix: output label was misspelled "MARTRIX".
print("MATRIX=",A)
E=Matrix(A)
R=E.rref()
print("RREF",R[0])
# Null-space basis vectors span the solution set; columns with no pivot
# (i.e. not listed in R[1]) are the free variables x_p.
null_space=E.nullspace()
n_space=np.array(null_space)
fgh=n_space.tolist()
for i in range(len(A[0])):
    if i not in R[1]:
        p.append(i+1)
# Flatten each basis vector (sympy column matrices) into a plain list.
for i in range(len(p)):
    k=[]
    for j in range(len(fgh[i])):
        k.append(fgh[i][j][0])
    tvf.append(k)
# Print the general solution as free variables times basis vectors.
for i in range(len(p)):
    print("x{}".format(p[i]),"*",tvf[i],"+ ",end='')
print(blank_list,end="") | Namitjain07/homogenous-system-solver | Solver.py | Solver.py | py | 843 | python | en | code | 0 | github-code | 13 |
12775791879 | import psycopg2
import sys
import time
#import pprint
import datetime
# pip install geotext
from geotext import GeoText
debug = False;
def log(msg, obj="", res='y'):
    """Print a debug line when the global `debug` flag is set.

    res 'y' marks a success line ("[+]"), 'e' an error line ("[-]").

    Bug fix: the original called pprint.pprint although the pprint import
    at the top of the file is commented out, so enabling debug raised
    NameError; import it locally instead.
    """
    if debug:
        import pprint
        if res == "y":
            pprint.pprint("[+] " + str(msg) + str(obj))
        if res == "e":
            pprint.pprint("[-] " + str(msg) + str(obj))
#connection parameters (filled in by the ${...} template substitution)
host = '${hostName}'
port = '${port}'
dbname = '${dbName}'
user = '${userName}'
password = '${password}'
#table and column names used
records_table = '${dblp_historical_records_table}'
proceedings_view = '${dblp_conference_proceedings_view}'
stream_key_col = '${stream_key_col}'
record_key_col = '${record_key_col}'
record_title_col = '${record_title_col}'
cite_key_col = '${cite_key_col}'
event_country_col='${event_country_col}'
conn = psycopg2.connect(user=user, password=password, host=host, port=port, dbname=dbname)
cur = conn.cursor()
print("[+] Running..")
t0 = time.time()
#get all conference proceedings
select_stmt = '''
select distinct procs.{}, hist.{}, hist.{} from {} procs join {} hist on hist.{} = procs.{};
'''.format(stream_key_col, record_key_col, record_title_col, proceedings_view, records_table, record_key_col, cite_key_col)
print(select_stmt)
cur.execute(select_stmt)
res = cur.fetchall()
# For every proceedings record, detect the event country from its title and
# write it back when exactly one country was identified.
for record in res:
    key = record[1]
    title = record[2]
    #find geolocations in text
    place = GeoText(title)
    result = place.countries
    found_countries = []
    if len(result) < 1:
        # GeoText found nothing: fall back to a few hand-checked markers.
        log("No location found -> Trying to identify manually...", obj="", res='e')
        log("Title: ", obj=title, res='e')
        if "USA" in title:
            log("Found 'USA'", obj="", res='y')
            if "USA" not in found_countries:
                found_countries.append("USA")
        if "UK" in title:
            log("Found 'UK'", obj="", res='y')
            if "UK" not in found_countries:
                found_countries.append("UK")
        if "Netherlands" in title:
            log("Found 'Netherlands'", obj="", res='y')
            if "Netherlands" not in found_countries:
                found_countries.append("Netherlands")
    else:
        # Deduplicate GeoText's hits.
        for country in result:
            if country not in found_countries:
                found_countries.append(country)
    if len(found_countries) == 1:
        # NOTE(review): the UPDATE interpolates values straight into SQL;
        # country names come from GeoText and keys from the DB, but a
        # parameterized cur.execute(stmt, params) would be safer.
        stmt_template = "update {} set {}=\'{}\' where {}=\'{}\';"
        stmt = stmt_template.format(records_table, event_country_col, found_countries[0], record_key_col, key)
        cur.execute(stmt)
        conn.commit()
    else:
        log("Found none or more than 1 country in title")
t1 = time.time()
print("[+] Done - Total Time: " + str(t1-t0))
| Smart-Harvesting/sh2-dblp-aggregation | src/main/resources/script/geolocationfinder.pyt | geolocationfinder.pyt | pyt | 2,772 | python | en | code | 0 | github-code | 13 |
2534337803 | """
Clone a linked list in which each node has a `next` pointer and an additional arbitrary node pointer.
"""
def clone(node):
Tclone = {}
curr = node
head_clone = None
# creat a copy of each element mapped to his clone
while curr.next:
Tclone[curr] = curr
curr = curr.next
# iterate through the clone and connect the next and arb pointer
for i in Tclone.keys():
Tclone[i].next = Tclone[i.next]
Tclone[i].abtr = Tclone[i.abtr]
if head_clone is None:
head_clone = Tclone[i]
return head_clone | fizzywonda/CodingInterview | linkedlist/CloneLinkedList.py | CloneLinkedList.py | py | 549 | python | en | code | 0 | github-code | 13 |
11322532936 | from rdopkg.action import Action, Arg
# Action tree for rdopkg's "review" commands: each Action maps a CLI
# subcommand to its arguments and (optionally) the action steps executed.
ACTIONS = [
    Action('review_patch',
           help="send patch(es) for review",
           optional_args=[
               Arg('local_patches_branch', metavar='PATCHES_BRANCH',
                   positional=True, nargs='?',
                   help="local patches branch with changes to review"),
           ]),
    Action('review_spec',
           help="send distgit (.spec file) change for review",
           optional_args=[
               Arg('branch', metavar='DISTGIT_BRANCH',
                   positional=True, nargs='?',
                   help="local distgit branch with changes to review"),
           ],
           # Resolve the package environment before sending the review.
           steps=[
               Action('get_package_env', module='distgit'),
               Action('review_spec'),
           ]),
]
| softwarefactory-project/rdopkg | rdopkg/actions/review/__init__.py | __init__.py | py | 792 | python | en | code | 28 | github-code | 13 |
# decimaltobinary converts a decimal integer into an 8-bit binary string.
def decimaltobinary( decNumber):
    """Convert an integer to its 8-bit binary string representation.

    Repeated division by two yields the bits least-significant first; the
    result is the low eight bits of `decNumber`, so values outside 0-255
    wrap modulo 256 (matching the original fixed 8-iteration loop).

    Cleanup: removed the unused `actualBinary` list and replaced the
    C-style while-counter with a for loop.
    """
    bits = []
    value = decNumber
    for _ in range(8):
        bits.append(value % 2)
        value //= 2
    # Emit the collected bits most-significant first.
    return "".join(str(bit) for bit in reversed(bits))
| BarshaDstudent/Mypython-project | DecimalNumintoBinary.py | DecimalNumintoBinary.py | py | 551 | python | en | code | 0 | github-code | 13 |
17531130949 |
from __future__ import print_function
import logging
import sys
import gc
import inspect
import unittest
import time
import os
import tempfile
import fnmatch
import weakref
from functools import wraps
from .. import listRefs
from .._p4p import _forceLazy
# Module-level logger for this test-utility module.
_log = logging.getLogger(__name__)
_forceLazy()
# Python 2 compatibility shims: alias the modern assertRegex /
# assertRaisesRegex names onto the deprecated spellings when missing.
if not hasattr(unittest.TestCase, 'assertRegex'):
    unittest.TestCase.assertRegex = unittest.TestCase.assertRegexpMatches
if not hasattr(unittest.TestCase, 'assertRaisesRegex'):
    unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
class RefTestMixin(object):
    """Ensure that each test does not result in a net change in extension object counts
    """
    # set to list of fnmatch patterns of counter names to compare.
    # Set to None to disable the reference check entirely.
    ref_check = ('*',)
    def __refs(self, refs=None):
        """Snapshot the extension reference counters, filtered to ref_check
        patterns and to counters with a positive value."""
        refs = refs or listRefs()
        _log.debug("REFS %s", refs)
        names = set()
        for pat in self.ref_check:
            names |= set(fnmatch.filter(refs, pat))
        return dict([(K, V) for K, V in refs.items() if K in names and V>0])
    def setUp(self):
        # Take the "before" snapshot and fail fast if a previous test
        # leaked client contexts.
        self.__traceme = set()
        if self.ref_check is not None:
            self.__before = self.__refs()
            for mustzero in ('ClientContextImpl',):
                if self.__before.get(mustzero, 0)!=0:
                    self.fail('Leftovers from previous test: %s = %d'%(mustzero, self.__before[mustzero]))
        super(RefTestMixin, self).setUp()
    def traceme(self, obj):
        """Register `obj` (weakly) to be reported if it is still alive at tearDown."""
        self.__traceme.add(weakref.ref(obj))
    def _sleep(self, delay):
        time.sleep(delay)
    def tearDown(self):
        super(RefTestMixin, self).tearDown()
        if self.ref_check is not None:
            traceme = list(self.__traceme)
            del self.__traceme
            # Collect garbage so only genuinely-leaked objects remain.
            gc.collect()
            after = self.__refs()
            test = self.__before == after
            for mustzero in ('ClientContextImpl',):
                test &= after.get(mustzero, 0)==0
            frame = inspect.currentframe()
            # Report any traced objects that survived, minus the references
            # we are holding ourselves.
            for T in traceme:
                O = T()
                if O is None:
                    continue
                nrefs = sys.getrefcount(O)
                refs = gc.get_referrers(O)
                nrefs -= len(refs) # exclude tracked refs
                refs = filter(lambda o:o not in (frame, traceme), refs)
                _log.debug("ALIVE %s -> %s + %d ext refs", O, refs, nrefs)
            self.assertDictEqual(self.__before, after)
            # check for any obviously corrupt counters, even those not being compared
            # self.assertFalse(any([V>1000000 for V in refs.values()]), "before %s after %s"%(self.__raw_before, refs))
            if not test:
                for mustzero in ('ClientContextImpl', 'ServerPvt'):
                    self.assertEqual(0, after.get(mustzero, 0), mustzero)
                self.assertDictEqual(self.__before, after)
class RefTestCase(RefTestMixin, unittest.TestCase):
    """TestCase with reference-leak checking; skips the check for tests that fail."""
    def __init__(self, methodName='runTest'):
        # skip reference check for tests which have already failed:
        # wrap the test method so any exception disables ref_check first.
        meth = getattr(self, methodName)
        @wraps(meth)
        def wrapper(*args, **kws):
            try:
                return meth(*args, **kws)
            except:
                self.ref_check = None
                raise
        setattr(self, methodName, wrapper)
        super(RefTestCase, self).__init__(methodName=methodName)
    def setUp(self):
        super(RefTestCase, self).setUp()
    def tearDown(self):
        super(RefTestCase, self).tearDown()
    # Python 2 fallback: provide assertRegex when unittest lacks it.
    if not hasattr(unittest.TestCase, 'assertRegex'):
        def assertRegex(self, text, regex):
            import re
            self.assertTrue(re.search(regex, text),
                            """Regex didn't match: %r not found in %r"""%(regex, text))
def gctrace(obj, maxdepth=8):
    """Print the graph of objects referring to `obj`, depth-first, up to `maxdepth`.

    Debugging aid: uses gc.get_referrers(), marking already-visited objects
    and recursion back-edges, and excluding this function's own frames.
    """
    # depth first traversal; `pop` is a sentinel marking scope exit.
    pop = object()
    top = inspect.currentframe()
    next = top.f_back
    stack, todo = [], [obj]
    visited = set()
    while len(todo):
        obj = todo.pop(0)
        I = id(obj)
        if inspect.isframe(obj):
            S = 'Frame %s:%d' % (obj.f_code.co_filename, obj.f_lineno)
        else:
            S = str(obj)
        if obj is pop:
            stack.pop()
            # break
            continue
        print('-' * len(stack), S, end='')
        if I in stack:
            print(' Recurse')
            continue
        elif I in visited:
            print(' Visited')
            continue
        elif len(stack) >= maxdepth:
            print(' Depth limit')
            continue
        else:
            print(' ->')
        stack.append(I)
        visited.add(I)
        # Push the sentinel first so referrers are processed before popping.
        todo.insert(0, pop)
        for R in gc.get_referrers(obj):
            if R is top or R is next or R is todo:
                continue
            todo.insert(0, R)
class RegularNamedTemporaryFile(object):
    """Like tempfile.NamedTemporaryFile which doesn't use O_TEMPORARY on windows

    The file is created via tempfile.mkstemp() and unlinked on close()
    (triggered by __del__ and by context-manager exit as well).
    """
    def __init__(self, *args, **kws):
        # Bug fix: set self.file before anything can raise, so __del__ ->
        # close() is safe even when os.fdopen below fails (the original
        # left the attribute unset on that path, causing AttributeError).
        self.file = None
        fd, self.name = tempfile.mkstemp()
        try:
            self.file = os.fdopen(fd, *args, **kws)
            self.read = self.file.read
            self.write = self.file.write
            self.flush = self.file.flush
            self.seek = self.file.seek
        except BaseException:
            os.unlink(self.name)
            raise
    def __del__(self):
        self.close()
    def __enter__(self):
        return self
    def __exit__(self, A, B, C):
        self.close()
    def close(self):
        """Close the underlying file and remove it from disk (idempotent)."""
        if self.file is not None:
            self.file.close()
            os.unlink(self.name)
            self.file = None
            self.read = self.write = self.flush = self.seek = None
| mdavidsaver/p4p | src/p4p/test/utils.py | utils.py | py | 5,717 | python | en | code | 20 | github-code | 13 |
8090387645 | #!/usr/bin/env python3
import os
from parseResults import ParseResults_SubSubfolder, Update_Global_Variables
from readParams import Read_Required_Params
def find_files(root_dir, target_file, target_folder,test_parameter, only_digit_folders=False):
    """Walk root_dir for the first directory containing target_file whose path
    also contains target_folder and test_parameter; stop at the first hit.

    Returns (target_dir, relative_dir).
    NOTE(review): if no match is found, target_dir/relative_dir are never
    bound and the final return raises UnboundLocalError -- confirm callers
    always have a matching layout.
    """
    for dirpath, dirnames, filenames in os.walk(root_dir):
        if target_file in filenames and target_folder in dirpath and test_parameter in dirpath:
            # if only_digit_folders is True, check if the directory name starts with a digit
            if only_digit_folders and not os.path.basename(dirpath)[0].isdigit():
                continue
            target_dir = dirpath
            # process the files in target_dir (currently a no-op stub)
            process_files(target_dir)
            # NOTE(review): the slice [9:-2] hard-codes an absolute-path
            # depth and drops the last two components -- this only works
            # for the expected directory layout; confirm before reuse.
            parts = target_dir.split(os.sep)[9:-2]
            relative_dir = os.sep.join(parts)
            # All sibling subfolders share the same structure, so the first
            # match is enough; stop searching.
            break
    return target_dir, relative_dir
def process_files(target_dir):
    """Placeholder hook invoked for each matched directory; currently a no-op.

    Intended extension point: iterate the subfolders of target_dir and run
    any per-directory processing.
    """
    pass
if __name__ == "__main__":
    # Load run configuration (root dir, folder names, test parameter).
    parameters = Update_Global_Variables('parseResults_Generic_EVO_Parameters.txt')
    # usage:
    # full_path, relative_path = find_files("carlaDatasets/testParametersEffect", "FrameTrajectory_TUM_Format.txt", "stereo")
    full_path, relative_path = find_files(parameters['ROOT_DIR'], "FrameTrajectory_TUM_Format.txt", parameters['STEREO_FOLDER_NAME'],parameters['test_parameter'])
    print("Full path:", full_path)
    print("Relative path:", relative_path)
    # Dataset folder under "carlaDatasets/" that holds the ground-truth file.
    dataset_ground_truth_folder = parameters['ROOT_DIR'].split(os.sep)[1]
    testParameterIndexFolder_Path = os.path.join(parameters['ROOT_DIR'],relative_path)
    testParameterIndexFolder_list = [f for f in os.listdir(testParameterIndexFolder_Path) if os.path.isdir(os.path.join(testParameterIndexFolder_Path, f))]
    # Walk: <parameter index folder> / <test index> / <run subfolder> and
    # parse the trajectory files found in each run subfolder.
    for testIndexFodler in sorted(os.listdir(testParameterIndexFolder_Path)):
        testParameterFolder_Path = os.path.join(testParameterIndexFolder_Path,testIndexFodler)
        testParameterFolder_list = [f for f in sorted(os.listdir(testParameterFolder_Path)) if os.path.isdir(os.path.join(testParameterFolder_Path, f))]
        print(testParameterFolder_Path)
        for subfolder in testParameterFolder_list:
            subfolder_path = os.path.join(testParameterFolder_Path,subfolder)
            subfolder_list = os.listdir(subfolder_path)
            ParseResults_SubSubfolder(subfolder,subfolder_list, subfolder_path)
    # subfolder: subfolder name under stereo(/1/, /2/, /3/)
    # subfolder_path: path from ROOT till the subfolder (under stereo: /1/, /2/, /3/)
    # subfolder_list: list of files (and possibly folders) under the subfolder path which is the trajectory txt files
    # dataset_ground_truth_folder: renamed from timestamped_folder, the folder inside "carlaDatasets/" which contains the groundtruth file,
    # stereo/stereo_inertial folder
| Johnemad96/masters | orbslam3_docker/orbslam_modifiedFork/Datasets/parseResults_Generic.py | parseResults_Generic.py | py | 3,879 | python | en | code | 1 | github-code | 13 |
11417331311 | import copy # Import copy for deepcopy
# Cell markers: the two players, and EMPTY for an unoccupied square.
X = "X"
O = "O"
EMPTY = None
def initial_state():
    """Return the starting 3x3 board with every cell EMPTY."""
    return [[EMPTY] * 3 for _ in range(3)]
def player(board):
    """Return whose turn it is on *board*.

    X moves first, so X plays whenever both players have made the same
    number of moves on a live board; O plays when X is one move ahead.
    Returns None for a finished game.
    """
    x_count = sum(cell == X for row in board for cell in row)
    o_count = sum(cell == O for row in board for cell in row)
    if x_count > o_count:
        return O
    if not terminal(board) and x_count == o_count:
        return X
    return None
def actions(board):
    """Return the set of (row, col) coordinates that are still EMPTY."""
    return {
        (row_idx, col_idx)
        for row_idx, row in enumerate(board)
        for col_idx, cell in enumerate(row)
        if cell == EMPTY
    }
def result(board, action):
    """Return the successor board produced by playing *action* on *board*.

    Raises for an occupied/out-of-range cell or a finished game.  The
    input board is never mutated; a deep copy is modified and returned.
    """
    if action not in actions(board):
        raise Exception("Not a valid action.")
    if terminal(board):
        raise Exception("Game Over")
    row, col = action
    successor = copy.deepcopy(board)
    successor[row][col] = player(board)
    return successor
def winner(board):
    """Return the player (X or O) owning a completed line, else None."""
    for i in range(3):
        row_mark = board[i][0]
        if row_mark != EMPTY and row_mark == board[i][1] == board[i][2]:
            return row_mark
        col_mark = board[0][i]
        if col_mark != EMPTY and col_mark == board[1][i] == board[2][i]:
            return col_mark
        # Both diagonals pass through the centre cell (re-checked each
        # iteration, exactly as in the original loop).
        centre = board[1][1]
        if centre != EMPTY and (board[0][0] == centre == board[2][2] or board[0][2] == centre == board[2][0]):
            return centre
    return None
def terminal(board):
    """Return True when the game is over: a player has won, or no EMPTY
    cell remains (draw)."""
    # Idiom fix: return the boolean expression directly instead of the
    # redundant `if ...: return True / else: return False` construct.
    return winner(board) is not None or all(
        cell != EMPTY for row in board for cell in row
    )
def utility(board):
    """Score a finished game: 1 if X won, -1 if O won, 0 for a draw.

    Only meaningful for terminal boards; implicitly returns None when the
    game is still in progress, so callers are expected to check
    terminal() first.
    """
    if terminal(board):
        if winner(board) == X:
            return 1
        elif winner(board) == O:
            return -1
        else:
            return 0
def minimax(board):
    """Return an optimal action for the player whose turn it is, or None
    if the game is already over.

    X maximises the utility (X win = 1), O minimises it (O win = -1).
    The search is full depth via the mutually recursive min_value /
    max_value helpers.
    """
    if terminal(board):
        return None
    # The original duplicated the whole loop for the X and O branches;
    # they differ only in the opponent evaluator and comparison direction.
    maximizing = player(board) == X
    evaluate = min_value if maximizing else max_value
    best_value = float("-inf") if maximizing else float("inf")
    optimal_action = None
    for action in actions(board):
        value = evaluate(result(board, action))
        if (maximizing and value > best_value) or (not maximizing and value < best_value):
            best_value = value
            optimal_action = action
    return optimal_action
def max_value(board):
    """Best achievable utility for X from *board*, assuming optimal play."""
    if terminal(board):
        return utility(board)
    # A non-terminal board always has at least one action, so max() is safe.
    return max(min_value(result(board, move)) for move in actions(board))
def min_value(board):
    """Worst-case (for X) utility from *board*, i.e. O's best reply value."""
    if terminal(board):
        return utility(board)
    # A non-terminal board always has at least one action, so min() is safe.
    return min(max_value(result(board, move)) for move in actions(board))
43083814752 | #
# @lc app=leetcode.cn id=1996 lang=python3
#
# [1996] 游戏中弱角色的数量
#
# @lc code=start
class Solution:
    def numberOfWeakCharacters(self, properties: List[List[int]]) -> int:
        """Count characters strictly weaker than some other character in
        BOTH attack and defense.

        Sort by attack descending and, within equal attack, by defense
        ascending.  Every earlier entry then has attack >= the current
        one, and a strictly larger recorded defense can only have come
        from a strictly larger attack (equal-attack groups are scanned in
        ascending defense order), so "best defense seen so far > mine"
        means the current character is strictly weaker.
        """
        # Sort in place, matching the original implementation's side effect.
        properties.sort(key=lambda p: (-p[0], p[1]))
        weak_count = 0
        best_defense_so_far = 0
        for _, defense in properties:
            if defense < best_defense_so_far:
                weak_count += 1
            else:
                # defense >= best seen, so it becomes the new maximum.
                best_defense_so_far = defense
        return weak_count
# @lc code=end
| Guo-xuejian/leetcode-practice | 1996.游戏中弱角色的数量.py | 1996.游戏中弱角色的数量.py | py | 720 | python | zh | code | 1 | github-code | 13 |
21747288994 | from bookmarks.models import Bookmark
from django.contrib.auth.models import User
from bookmarks.serializers import BookmarkSerializer, UserSerializer
from rest_framework import generics
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework import permissions
from rest_framework.response import Response
class BookmarkList(generics.ListCreateAPIView):
    """List bookmarks and let authenticated users create new ones.

    Read access returns every public bookmark, plus (for authenticated
    callers) the caller's own private bookmarks.
    """
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticatedOrReadOnly]
    queryset = Bookmark.objects.all()
    serializer_class = BookmarkSerializer
    def perform_create(self, serializer):
        # Stamp the new bookmark with the requesting user as its owner.
        serializer.save(owner=self.request.user)
    def list(self, request):
        public_qs = Bookmark.objects.filter(is_public=True)
        public_data = BookmarkSerializer(public_qs, many=True).data
        if not request.user.is_authenticated:
            return Response(public_data)
        # Bug fix: restrict the "owned" queryset to private bookmarks so a
        # bookmark that is both public and owned by the caller is no
        # longer serialized twice (the original concatenated overlapping
        # querysets, producing duplicates).
        own_private_qs = Bookmark.objects.filter(owner=request.user.id, is_public=False)
        own_data = BookmarkSerializer(own_private_qs, many=True).data
        return Response(public_data + own_data)
class BookmarkDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve / update / delete a single bookmark; authenticated users only."""
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]
    queryset = Bookmark.objects.all()
    serializer_class = BookmarkSerializer
    def perform_create(self, serializer):
        # NOTE(review): dead code -- RetrieveUpdateDestroyAPIView never
        # invokes perform_create(); this appears to be copied from
        # BookmarkList and can likely be removed.
        serializer.save(owner=self.request.user)
class UserList(generics.ListCreateAPIView):
    """List all users (read-only for anonymous callers) and create new ones."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class UserDetail(generics.RetrieveAPIView):
    """Retrieve a single user; read-only endpoint."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
| msvalina/ksolutions | bookmarks/views.py | views.py | py | 2,117 | python | en | code | 1 | github-code | 13 |
34525423735 |
import time
import json
from pymemcache.client.base import Client
'''
memcached是一款开源、高性能、分布式内存对象缓存系统,
可应用各种需要缓存的场景,
其主要目的是通过降低对Database的访问来加速web应用程序。
'''
data = {'iphone': ['iphone6', 'iphone7', 'iphone8'], 'Android': ['oppo', 'vivo']}
client = Client(('127.0.0.1', 1121))
key = 'Phone_menu'
# client.set(key, json.dumps(data))
print(client)
def get_data():
    """Fetch the phone catalogue from the slow backing store.

    Stands in for the real data source (MySQL or some other API); the
    2-second sleep simulates that source's latency.
    """
    catalogue = {'iphone': ['iphone6', 'iphone7', 'iphone8'], 'Android': ['oppo', 'vivo']}
    time.sleep(2)
    return catalogue
def show_data(data):
    """Print each category and its item list, one mapping per line."""
    for category, items in data.items():
        print(f'{category} : {items}')
def set_memcache(k, data):
    """Serialise *data* to JSON and store it in memcached under key *k*.

    Returns True on success, False on any failure (unreachable server,
    unserialisable data, ...).
    """
    try:
        client = Client(('127.0.0.1', 1121))
        # Bug fix: json.dump() writes to a file object and raised here,
        # sending every call into the except branch; json.dumps() returns
        # the JSON string we actually want to store.
        client.set(k, json.dumps(data))
        return True
    except Exception as e:
        print(e)
        return False
def get_memcached(k):
    """Fetch key *k* from memcached and JSON-decode it.

    Returns the decoded value, or False when the key is missing or the
    server is unreachable (a cache miss returns None from client.get(),
    which makes the decode raise and lands us in the except branch).
    """
    try:
        # NOTE(review): this host/port differs from the ('127.0.0.1', 1121)
        # used in set_memcache() -- confirm which address is correct.
        client = Client(('172.20.10.7', 11211))
        # Bug fix: json.load() expects a file object; json.loads() parses
        # the bytes/str payload returned by the client.
        return json.loads(client.get(k))
    except Exception as e:
        print(e)
        return False
def main():
    """Entry point: serve the catalogue from memcached when possible,
    otherwise fall back to the slow source and warm the cache."""
    k = 'Phone_menu'
    cached = get_memcached(k)
    if cached:
        print('这是从缓存中取数据')
        # Bug fix: display the cached payload.  The original called
        # show_data(get_data()) here, re-querying the slow source and
        # defeating the purpose of the cache.
        show_data(cached)
    else:
        print('这是从数据库取数据')
        data = get_data()
        show_data(data)
        set_memcache(k, data)
# main()
| sczaixian/learn_pratice | learn_pratice/learn_pratice/apps/learn_python/python_memcached.py | python_memcached.py | py | 1,631 | python | zh | code | 0 | github-code | 13 |
25542620752 |
from silverback import *;
import glass;
from ringbuffer import RingBuffer;
class ChatBox( glass.GlassContainer ):
    """Transparent in-game chat overlay: a scrolling message buffer with a
    one-line text input underneath.

    The input line is hidden while idle unless alwaysShowInput() pinned it
    visible; activate()/deactivate() toggle it and manage keyboard focus.
    """
    def __init__(self, scopeList):
        # scopeList: event scopes the MessageBuffer subscribes to.
        self.scopeList = scopeList;
        glass.GlassContainer.__init__(self);
        self.setOpaque(False);
        self.buffer = MessageBuffer( self.scopeList );
        # Pre-fill with blank rows so the buffer has height before any
        # real messages arrive.
        for i in range(10):
            self.buffer.addRow(" ");
        self.buffer.setEditable(False);
        self.buffer.showTime(True);
        self.buffer.addListener(self)
        self.scroll = glass.GlassScrollArea(self.buffer);
        self.scroll.setScrollPolicy( glass.GlassScrollArea.SHOW_NEVER , glass.GlassScrollArea.SHOW_ALWAYS );
        self.scroll.setAutoscroll(True);
        self.add(self.scroll);
        self.buffer.parentScroll = self.scroll;
        self.input = glass.GlassTextField();
        self.input.setForegroundColor( white );
        self.input.setBackgroundColor( glass.Color(0,0,0,128) );
        self.add( self.input);
        self.input.addKeyListener( self);
        # Whether the input line stays visible when not actively typing.
        self.bShowInput = False;
    def onKeyPress(self, e):
        # ESC cancels typing and hides the input line.
        if e.key == glass.Key.ESCAPE: #escape
            self.deactivate();
    def onKeyReleased(self, e):
        # ENTER submits the current line; an empty line just closes input.
        if e.key == glass.Key.ENTER:
            content = self.input.getText().rstrip();
            if content == "":
                self.deactivate();
                return;
            self.buffer.addRow(content);
    def alwaysShowInput( self, x ):
        # Accepts 1/True interchangeably (legacy callers pass 1).
        if x == 1:
            x=True
        if x == True:
            self.input.setVisible(True);
        self.bShowInput = x;
    def resize( self ):
        # Lay out the scroll area above the fixed-height input line.
        self.scroll.setSize( self.getWidth() , int(self.getHeight() - 0.4*inputLineHeight) );
        self.buffer.setSize( self.scroll.getWidth(), self.scroll.getHeight() );
        self.input.setSize(self.getWidth(), inputLineHeight);
        self.input.setPosition(0, self.getHeight() - inputLineHeight );
    def deactivate( self ):
        # Clear the line and, unless pinned visible, hide it.
        self.input.setText("");
        if not self.bShowInput:
            self.input.setVisible(False); #this also REMOVES the focus
    def activate( self ):
        # Show the (cleared) input line and grab keyboard focus.
        self.input.setText("");
        self.input.setVisible(True);
        self.input.requestFocus();
class MenuChatBox(ChatBox):
    """ChatBox variant used in the menu, bound to a single XMPP
    conversation: a one-to-one chat or a MUC room identified by *name*.

    NOTE(review): deliberately skips ChatBox.__init__ and calls
    glass.GlassContainer.__init__ directly, rebuilding its own widgets.
    """
    def __init__(self, name, scopeList, isConference=False):
        glass.GlassContainer.__init__(self);
        self.setOpaque(False);
        # jid: the peer JID or the MUC room name this box belongs to.
        self.jid = name;
        self.isConference = isConference;
        self.scopeList = scopeList;
        self.buffer = MessageBuffer( self.scopeList );
        self.buffer.setEditable(False);
        self.buffer.showTime(True);
        self.scroll = glass.GlassScrollArea(self.buffer);
        self.scroll.setScrollPolicy( glass.GlassScrollArea.SHOW_NEVER , glass.GlassScrollArea.SHOW_ALWAYS );
        self.scroll.setAutoscroll(False);
        self.add(self.scroll);
        self.buffer.parentScroll = self.scroll;
        # extending the input box to make it look a bit better:
        self.inputContainer = DefaultContainer();
        self.inputContainer.setBackgroundColor(glass.Color(23,14,13));
        self.add(self.inputContainer);
        self.div = DefaultContainer();
        self.div.setBackgroundColor(glass.Color(30,26,25));
        self.inputContainer.add(self.div);
        self.input = glass.GlassTextField();
        self.input.setForegroundColor( white );
        self.input.setBackgroundColor( glass.Color(0,0,0,128) );
        self.inputContainer.add( self.input);
        self.input.addKeyListener( self);
        self.bShowInput = False;
        # Subscribe to incoming XMPP events (see onChatEvent below).
        gblXMPPHandler.addListener(self);
    def resize( self ):
        # The input container is a two-line strip pinned to the bottom
        # with a 1px divider on top; the scroll area fills the rest.
        self.inputContainer.setSize(self.getWidth(), 2 * inputLineHeight);
        self.inputContainer.setPosition(0, self.getHeight() - self.inputContainer.getHeight())
        self.div.setSize(self.inputContainer.getWidth(), 1);
        self.div.setPosition(0, 0);
        self.scroll.setSize( self.getWidth() - 5 , self.getHeight() - self.inputContainer.getHeight() );
        self.buffer.setSize( self.scroll.getWidth() - 10, self.scroll.getHeight() );
        self.input.setSize(self.inputContainer.getWidth() - 25, inputLineHeight);
        self.input.setPosition(8, self.inputContainer.getHeight() - int(1.5*inputLineHeight) );
    def onKeyReleased(self, e):
        # ENTER sends the typed line over XMPP (group or direct message).
        if e.key == glass.Key.ENTER:
            content = self.input.getText().rstrip();
            if content == "":
                self.deactivate();
                return;
            #self.buffer.addRow(content);
            self.input.setText("");
            #self.scroll.setVerticalScrollAmount(99999999); #Since autoscroll is somehow broken, this makes sure it scrolls automatically to the bottom.
            if self.isConference:
                gblXMPPHandler.chatEvent("muc_send_msg", cvar_get('username'), content, room=self.jid);
            else:
                gblXMPPHandler.chatEvent("chat_send_msg", self.jid, content);
    # messagebuffer is still an gblEventHandler-Listener and no XMPP one, so I have to
    # pass fake events. kind of hacky, but better than changing the whole messagebuffer.
    def onChatEvent(self, e):
        # Forward XMPP events addressed to this conversation into the
        # message buffer, then force-scroll to the newest line.
        # check if the chat_event got delivered to the right place:
        #if e.scope == "chat_msg" or e.scope == "chat_send_msg" or e.scope == "chat_create":
        if self.isConference:
            if e.scope.startswith("muc_"):
                if str(e.room) != self.jid:
                    return;
                else:
                    self.buffer.onEvent(e);
                    self.scroll.setVerticalScrollAmount(99999999);
        else:
            if str(e.fromstr) != self.jid:
                return;
            elif e.scope.startswith("chat_"):
                # pass the event to the messagebuffer:
                self.buffer.onEvent(e);
                self.scroll.setVerticalScrollAmount(99999999);
class ConversationTab(DefaultContainer):
    """One tab of the tabbed chat UI: contact picture, name/status labels
    and an embedded MenuChatBox for the conversation with *jid*.

    The special jid "System" produces a read-only log tab (input hidden).
    """
    def __init__(self, jid, isConference=False):
        DefaultContainer.__init__(self);
        self.jid = jid
        #self.setBackgroundColor(tangoGrey5);
        # A conversation tab consists of:
        # 1. profile pic:
        self.setVisible(True);
        self.picContainer = DefaultContainer();
        self.picContainer.setBackgroundColor(tangoOrangeDark);
        self.add(self.picContainer);
        self.pic = DefaultImage();
        self.pic.setImage("nopic.png");
        self.picContainer.add(self.pic, 2, 2);
        # 2. The name and status of the contact
        self.contactName = DefaultLabel(self.jid);
        self.add(self.contactName);
        self.contactStatus = DefaultLabel(""); #TODO
        self.contactStatus.setForegroundColor(tangoGreen);
        self.add(self.contactStatus);
        # 3. various option buttons, e.g. Invite to game, Invite to chat, View Stats...
        # TODO
        # 3.1: Conference stuff:
        if isConference:
            self.pic.setImage("icons/clans.png")
            self.contactName.setCaption(self.jid + " conference");
            self.contactStatus.setForegroundColor(tangoOrange);
        # 4. The chatbox itself:
        self.div = DefaultContainer();
        self.div.setBackgroundColor(glass.Color(30,26,25));
        self.add(self.div);
        self.chatboxContainer = DefaultContainer();
        self.chatboxContainer.setBackgroundColor(glass.Color(23,14,13));
        self.div.add(self.chatboxContainer, 1, 1)
        # The System tab listens to connection/history scopes only; normal
        # tabs also get per-message chat/MUC scopes.
        scopes = ["chat_msg", "chat_send_msg", "chat_join","chat_connect", "chat_history_update",
        "muc_msg", "muc_presence", "chat_quit", "chat_disconnect"] if self.jid != "System" else ["chat_join","chat_connect",
        "chat_history_update", "chat_establish", "chat_quit", "chat_disconnect"];
        self.chatBox = MenuChatBox(self.jid, scopes, isConference);
        self.chatBox.alwaysShowInput(True);
        self.chatboxContainer.add(self.chatBox);
        #self.oldBufferEvent = self.chatBox.buffer.onEvent;
        #self.chatBox.buffer.onEvent = self.bufferEvent;
        if self.jid == "System":
            self.chatBox.alwaysShowInput(False);
            self.chatBox.input.setVisible(False);
            self.pic.setImage("/icons/options.png");
            self.contactStatus.setCaption("");
    def resize(self):
        # Manual layout: picture top-left, labels beside it, chat box
        # filling the remainder of the tab.
        # Place the picture at the top left, next to it username and status
        w = self.getWidth();
        h = self.getHeight();
        self.picContainer.setPosition(10, 10); # fixed position, if we're going to support resizing one day...
        self.picContainer.setSize(40, 40);
        self.pic.setSize(self.picContainer.getWidth() - 4, self.picContainer.getHeight() - 4);
        self.contactName.setPosition(self.picContainer.getWidth() + 20, self.picContainer.getY() - 2);
        self.contactStatus.setPosition(self.contactName.getX(), self.contactName.getHeight() + 12);
        self.div.setPosition(10, self.picContainer.getHeight() + 20);
        self.div.setSize( w - 25, h - self.div.getY() - 10);
        self.chatboxContainer.setSize( self.div.getWidth() - 2, self.div.getHeight() - 2);
        self.chatBox.setSize(self.chatboxContainer.getWidth(), self.chatboxContainer.getHeight());
        # Most important line: propagate the new size into the chat box.
        self.chatBox.resize();
class TabbedChatBox(glass.GlassTabbedArea):
    """Tabbed container holding one ConversationTab per open conversation,
    keyed by JID; a "System" tab is always opened first."""
    def __init__(self, w, h):
        glass.GlassTabbedArea.__init__(self);
        self.currentTab = None;
        # jid -> ConversationTab for every open conversation.
        self.chatTabs = {};
        self.setSize(w, h);
        self.openConversation("System");
        #self.setBackgroundColor(white);
    def openConversation(self, jid, conference=False):
        # Create a tab for *jid*, register it and focus it.
        # 1. Create a new tab that contains all the information
        tab = ConversationTab(jid, isConference=conference);
        tab.setSize(self.getWidth(), self.getHeight());
        tab.setSize(self.getWidth(), self.getHeight() - 50);
        tab.resize();
        self.chatTabs[tab.jid] = tab;
        self.addTab(tab.jid, tab);
        self.setSelectedTab(len(self.chatTabs) - 1);
        #tab.chatBox.activate();
    def joinMUC(self, room, password=False):
        # NOTE(review): not implemented yet -- joining a MUC is a no-op.
        pass;
    def deleteTab(self, jid):
        """
        self.setSelectedTab(0)
        if jid not in self.chatTabs:
            logger.error("Trying to delete a tab that doesn't exist.");
        else:
            self.removeTab(self.chatTabs.pop(jid));
        """
        # NOTE(review): disabled -- the implementation kept in the string
        # above was buggy, so closing a tab is currently a no-op.
        pass; #buggy
    def resize(self):
        pass;
| biggeruniverse/srdata | client/game/gui/main/chatbox.py | chatbox.py | py | 9,092 | python | en | code | 1 | github-code | 13 |
33198554329 | from contextlib import contextmanager
from collections import namedtuple
from .compat import is_type, type_name
from .config import Section, Compose
Context = namedtuple('Context', 'section key')
Error = namedtuple('Error', 'context message')
def type_repr(t):
    """Human-readable name for *t*: repr() for typing constructs
    (per is_type), the class __name__ otherwise."""
    return repr(t) if is_type(t) else t.__name__
class ValidationError(TypeError):
    """Raised when config validation fails.

    The exception message is the caller-supplied summary followed by one
    " - <location> => <message>" line per collected Error record.
    """
    def __init__(self, message, errors):
        details = '\n'.join(self._iter_errors(errors))
        super(ValidationError, self).__init__(message + '\n' + details)
    def _iter_errors(self, errors):
        for err in errors:
            if err.context.key:
                location = '{}[{}]'.format(err.context.section, err.context.key)
            else:
                # A missing section name means the error is about the
                # top-level config mapping itself.
                location = err.context.section or '<config>'
            yield ' - {} => {}'.format(location, err.message)
class TypeChecker(object):
    """Recursively checks a value against a typing annotation.

    The value under inspection sits on top of *stack*; *path* records the
    container path (e.g. "[0]['name']") for error messages, and every
    mismatch is appended to *errors* as an Error bound to *ctx*.
    """
    def __init__(self, ctx, value, errors):
        self.ctx = ctx
        self.stack = [value]
        self.errors = errors
        self.path = []
    def visit(self, type_):
        """Check the top-of-stack value against *type_*."""
        value = self.stack[-1]
        if is_type(type_):
            # typing construct (List[...], Dict[...], Union[...]) ->
            # dispatch to the matching visit_<Name> method; unsupported
            # constructs fall through to not_implemented.
            method_name = 'visit_{}'.format(type_name(type_))
            visit_method = getattr(self, method_name, self.not_implemented)
            visit_method(type_, value)
        elif not isinstance(value, type_):
            # Plain class: a direct isinstance check suffices.
            self.fail(type_, value)
    def not_implemented(self, type_, value):
        raise NotImplementedError('Type check is not implemented for this '
                                  'type: {!r}'.format(type_))
    @contextmanager
    def push(self, value, path_element):
        # Temporarily descend into a child value for nested checks.
        self.path.append(path_element)
        self.stack.append(value)
        try:
            yield
        finally:
            self.path.pop()
            self.stack.pop()
    def fail(self, type_, value):
        # Record a '"got" instead of "expected"' error at the current path.
        provided = type_repr(type(value))
        expected = type_repr(type_)
        msg = '"{}" instead of "{}"'.format(provided, expected)
        if self.path:
            msg = '{} - {}'.format(''.join(self.path), msg)
        self.errors.append(Error(self.ctx, msg))
    def visit_Union(self, type_, value):
        # Only Optional[T] (i.e. Union[T, None]) is supported.
        args = set(type_.__args__)
        if not len(args) == 2 or not type(None) in args:
            raise NotImplementedError('Union types are supported '
                                      'only as Optional type')
        if value is not None:
            arg = (args - {type(None)}).pop()
            self.visit(arg)
    def visit_List(self, type_, value):
        # Each element is checked against the single item-type argument.
        if isinstance(value, list):
            item_type, = type_.__args__
            for i, item in enumerate(value):
                with self.push(item, '[{!r}]'.format(i)):
                    self.visit(item_type)
        else:
            self.fail(type_, value)
    def visit_Dict(self, type_, value):
        # Keys and values are checked independently, both reported under
        # the same "[key]" path element.
        if isinstance(value, dict):
            key_type, val_type = type_.__args__
            for key, val in value.items():
                with self.push(key, '[{!r}]'.format(key)):
                    self.visit(key_type)
                with self.push(val, '[{!r}]'.format(key)):
                    self.visit(val_type)
        else:
            self.fail(type_, value)
def validate_type(ctx, value, type_, errors):
    """Check *value* against *type_*, appending any mismatch to *errors*."""
    if not (isinstance(type_, type) or is_type(type_)):
        # *type_* is not a usable class or typing construct at all.
        message = ('"{}" instead of "{}"'
                   .format(type_repr(type(value)), type_repr(type_)))
        errors.append(Error(ctx, message))
        return
    TypeChecker(ctx, value, errors).visit(type_)
def validate_section(obj, value, name, errors):
    """Validate one config section *value* against Section declaration *obj*.

    *name* is the fully-qualified section name used in error contexts;
    problems are appended to *errors* in place.
    """
    assert isinstance(obj, Section), repr(type(obj))
    ctx = Context(name, None)
    # The section itself must be a mapping before its keys are inspected.
    validate_type(ctx, value, dict, errors)
    if not errors:
        for key in obj.__keys__.values():
            ctx = Context(name, key.name)
            if key.name not in value:
                errors.append(Error(ctx, 'missing key'))
            else:
                key_value = value[key.name]
                validate_type(ctx, key_value, key.type, errors)
def validate_config(obj, value, variant, sep, errors):
    """Validate a whole composed config *value* against Compose *obj*.

    Looks up the "compose<sep><variant>" mapping, then for every declared
    section resolves its selected variant ("<section><sep><variant-name>")
    and validates that section's contents.  All problems are appended to
    *errors*; validation stops early if the top-level shape is wrong.
    """
    assert isinstance(obj, Compose), repr(type(obj))
    ctx = Context(None, None)
    validate_type(ctx, value, dict, errors)
    if errors:
        return
    key = sep.join(('compose', variant))
    if key not in value:
        errors.append(Error(Context(key, None), 'missing section'))
        return
    ctx = Context(key, None)
    compose_section = value[key]
    validate_type(ctx, compose_section, dict, errors)
    if errors:
        return
    for section in obj.__sections__.values():
        ctx = Context(key, section.__section_name__)
        if section.__section_name__ not in compose_section:
            errors.append(Error(ctx, 'missing key'))
            continue
        # The compose entry names which variant of this section to use.
        section_variant = compose_section[section.__section_name__]
        validate_type(ctx, section_variant, str, errors)
        if errors:
            continue
        full_section_name = sep.join((section.__section_name__,
                                      section_variant))
        if full_section_name not in value:
            msg = '"{}" not found'.format(full_section_name)
            errors.append(Error(ctx, msg))
        else:
            section_value = value[full_section_name]
            validate_section(section, section_value, full_section_name, errors)
def validate(conf, data, variant, sep):
    """Validate *data* against compose-config *conf* for *variant* and
    return the collected list of Error records (empty means valid)."""
    found = []
    validate_config(conf, data, variant, sep, found)
    return found
| vmagamedov/strictconf | strictconf/checker.py | checker.py | py | 5,480 | python | en | code | 1 | github-code | 13 |
33361974713 | #!/home/zsiegel/anaconda3/bin/python
import zauxpy
def main():
    """Ad-hoc smoke test for the zauxpy package.

    The disabled `if False:` branch dumps the package namespace for
    debugging; the active branch loads a sample Intan RHS recording from
    a hard-coded, machine-specific path.
    """
    # print(...)
    if False:
        # print(zauxpy.__dict__)
        for key, value in zauxpy.__dict__.items():
            if key not in ['__builtins__']:
                print(key)
                print(value, '\n')
        print(zauxpy)
        print(zauxpy.intan.RHSData)
        # print(zauxpy.RHSData)
        print(zauxpy.formatting.sinum(12435234523))
        zauxpy.formatting.msg('hi')
    if True:
        import pathlib
        # NOTE(review): absolute path only exists on the author's machine.
        trial_rhs = pathlib.Path('/home/zsiegel/projects/stim/zarek_20200302/1_200302_163336.rhs')
        trial = zauxpy.intan.RHSData(trial_rhs, do_load=True)
        print(trial)
if __name__ == "__main__":
    main()
| kerazarek/zauxpy | zauxpy_testing_20200306.py | zauxpy_testing_20200306.py | py | 726 | python | en | code | 0 | github-code | 13 |
2355703163 | #!/usr/bin/env python
import pandas as pd
import numpy as np
import fmriprep_singularity as fs
# Define your path names
project_dir = '/sc/arion/projects/k23'
bids_root = f'{project_dir}/BIDS_new/'
output_dir = f'{project_dir}/derivatives/'
# NOTE(review): fs_license is built but never passed to the pipeline call
# below -- confirm FmriprepSingularityPipeline reads it some other way.
fs_license = f'{project_dir}/software/license.txt'
# Define your list of subjects (if 'sub-' is at the start of the subject string, it will be removed)
pd_participants = pd.read_csv(f'{project_dir}/code/fmriprep/participants.tsv', delimiter='\t')  # TSV must provide an 'id' column
participants = { 'participant_id': list(pd_participants['id']) }
# Define the minerva options
image_location = f'{project_dir}/software/fmriprep-20.2.0.simg' # where is the fmriprep-20.2.0.simg file located?
batch_dir = f'{project_dir}/code/batch_dir' # output directory for all batch scripts
minerva_options = {'image_location': image_location,
                   'batch_dir': batch_dir,
                   'project_dir': project_dir}
# Run fmriprep via Singularity on the BIDS directory: build the per-subject
# batch scripts, then submit them to the cluster queue.
fp_singularity = fs.FmriprepSingularityPipeline(participants, bids_root, output_dir, minerva_options,
                                                queue='Gu', freesurfer=False, cifti_output=False)
fp_singularity.create_singularity_batch()
fp_singularity.run_singularity_batch(participants) # to submit jobs
| matty-gee/fmri_tools | preprocessing/fmriprep/run_fmriprep.py | run_fmriprep.py | py | 1,331 | python | en | code | 0 | github-code | 13 |
72999631698 | #Front end for solar forecasting
import backend_solar as backend_solar
from tkinter import *
import tkinter.messagebox
import datetime
from datetime import date
import csv
from tkinter import Menu
import os
from tkinter import filedialog, messagebox, ttk
import tkinter.font as font
import random
def main():
    """Build the root window, show the welcome screen and start the Tk
    event loop."""
    root = Tk()
    app = welcome(root)
    # Bug fix: without mainloop() the script falls off the end of main()
    # immediately, so the window is never displayed or able to process
    # events when run as a normal (non-interactive) script.
    root.mainloop()
class welcome:
    """Home window: shows the app title plus navigation buttons to the
    Forecast, Saved and Settings screens.  The navigation methods are
    inherited by every other screen class."""
    def __init__(self, master):
        #Build the home-screen layout: frame, header, navigation buttons.
        self.energyForecast = backend_solar.backend()
        self.Location = self.energyForecast.getLocation()
        self.master = master
        self.master.title("TTS Solar Forecast")
        self.master.geometry('480x200+0+0')
        self.frame = Frame(self.master)
        self.frame.pack()
        self.master.configure(background='gray40')
        self.frame.configure(background='gray40')
        self.header = Label(self.frame, text = "Solar Forecast", bg = "gray40", fg = "#34c9eb", font = ("helvetica new", 25), pady = 15)
        self.header.grid(row = 0, column = 0, columnspan = 2)
        self.genButton = Button(self.frame, text = "Forecast", width = 15, bg = "gray60", fg = "#34c9eb", command = self.forecastWindow)
        self.genButton.grid(row = 1, column = 0)
        self.viewButton = Button(self.frame, text = "Saved", width = 15, bg = "black", fg = "#34c9eb", command = self.viewWindow)
        self.viewButton.grid(row = 1, column = 1)
        self.settingsButton = Button(self.frame, text = "Settings", width = 15, bg = "gray25", fg = "#34c9eb", command = self.settingsWindow)
        self.settingsButton.grid(row = 2, column = 0, columnspan = 2)
        self.infoLabel = Label(self.frame, text = "Tysco Technology Solutions (TTS)", bg = "gray40", fg = "grey", font = ("helvetica new", 15))
        self.infoLabel.grid(row = 3, column = 0, columnspan = 2, pady = 15)
    #Navigation helpers: each hides the current window and opens the
    #target screen in a new Toplevel.
    def forecastWindow(self):
        self.master.withdraw()
        self.newWindow = Toplevel(self.master)
        self.app = forecast(self.newWindow)
    def viewWindow(self):
        self.master.withdraw()
        self.newWindow = Toplevel(self.master)
        self.app = viewPrevious(self.newWindow)
    def settingsWindow(self):
        self.master.withdraw()
        self.newWindow = Toplevel(self.master)
        self.app = settings(self.newWindow)
    def goHome(self):
        self.master.withdraw()
        self.newWindow = Toplevel(self.master)
        self.app = welcome(self.newWindow)
#===============================================================================================
class settings(welcome):
    """Settings screen: choose the wattage unit (kW vs W) and whether the
    temperature row is shown; preferences persist in preferences.csv as
    two values: wattPref ("0" = kW, "1" = W) and tempPref ("1" = show)."""
    def __init__(self, master):
        self.master = master
        self.master.title("TTS Solar Forecast")
        self.master.geometry('480x480+0+0')
        self.frame = Frame(self.master)
        self.frame.pack()
        self.master.configure(background='gray40')
        self.frame.configure(background='gray40')
        self.header = Label(self.frame, text = "Settings", bg = "gray40", fg = "#34c9eb", font = ("helvetica new", 25), pady = 15)
        self.header.grid(row = 0, column = 0, columnspan = 3)
        self.metricsKwLabel = Label(self.frame, text = "Wattage", bg = "gray40", fg = "#34c9eb")
        self.metricsKwLabel.grid(row = 1, column = 0, pady = 15, padx = 30)
        self.kWhYesBut = Button(self.frame, text = "KW", width = 10, bg = "gray25", fg = "#34c9eb", command = self.showKwatt)
        self.kWhYesBut.grid(row =1, column = 1, pady = 15, padx = 5)
        self.kWhNoBut = Button(self.frame, text = "W", width = 10, bg = "gray25", fg = "#34c9eb", command = self.showNwatt)
        self.kWhNoBut.grid(row =1, column = 2, pady = 15, padx = 5)
        self.showTempLabel = Label(self.frame, text = "Show temperature", bg = "gray40", fg = "#34c9eb")
        self.showTempLabel.grid(row = 2, column = 0, pady = 15, padx = 30)
        self.tempYesBut = Button(self.frame, text = "Show", width = 10, bg = "gray25", fg = "#34c9eb", command = self.showTemp)
        self.tempYesBut.grid(row = 2, column = 1, pady = 15, padx = 5)
        self.tempNoBut = Button(self.frame, text = "Hide", width = 10, bg = "gray25", fg = "#34c9eb", command = self.hideTemp)
        self.tempNoBut.grid(row = 2, column = 2, pady = 15, padx = 5)
        self.saveBut = Button(self.frame, text = "SAVE", command = self.save, width = 12, fg = "#34c9eb")
        self.saveBut.grid(row = 3, column = 1, columnspan = 2, pady = 5)
        self.homeButton = Button(self.frame, text = "HOME", command = self.goHome, width = 12, fg = "#34c9eb")
        self.homeButton.grid(row = 3, column = 0, columnspan = 2, pady = 5)
        self.infoLabel = Label(self.frame, text = "Tysco Technology Solutions (TTS)", bg = "gray40", fg = "grey", font = ("helvetica new", 15))
        self.infoLabel.grid(row = 4, column = 0, columnspan = 3, pady = 10)
        # NOTE(review): this file handle is never closed for the lifetime
        # of the settings window.
        self.file = open('preferences.csv')
        self.reader = csv.reader(self.file)
        self.fileData = list(self.reader)
        #Incase the user needs to change one entity load them both
        self.wattPref = self.fileData[0][0]
        self.tempPref = self.fileData[0][1]
    #Button callbacks: record the chosen preference (as int; save() converts
    #back to the string form read at startup) and confirm via message box.
    def showKwatt(self):
        self.wattPref = 0
        messagebox.showinfo("showinfo", "Changed to kW")
        return self.wattPref
    def showNwatt(self):
        self.wattPref = 1
        messagebox.showinfo("showinfo", "Changed to W")
        return self.wattPref
    def showTemp(self):
        self.tempPref = 1
        messagebox.showinfo("showinfo", "Showing")
        return self.tempPref
    def hideTemp(self):
        self.tempPref = 0
        messagebox.showinfo("showinfo", "Hidden")
        return self.tempPref
    def save(self):
        #Overwrite what is in csv and write preference
        data = [str(self.wattPref), str(self.tempPref)]
        with open('preferences.csv', 'w') as self.savedFile:
            self.writer = csv.writer(self.savedFile)
            self.writer.writerow(data)
            self.savedFile.close()
        messagebox.showinfo("showinfo", "Saved")
#===============================================================================================
class forecast(welcome):
    """Forecast screen: lets the user enter a system/panel wattage and
    shows the predicted daily output and efficiency for the configured
    location right now, honouring the kW/W and temperature preferences."""
    def __init__(self, master):
        self.energyForecast = backend_solar.backend()
        self.location = self.energyForecast.getLocation()
        # "0" -> display in kWh, "1" -> display in Wh (see settings.save()).
        self.kWhText = self.energyForecast.kWhTextPref()
        # "1" -> show the temperature row, "0" -> hide it.
        self.tempShow = self.energyForecast.tempPrefShow()
        self.temperatureShow = self.energyForecast.getTemperature()
        if self.kWhText == "0":
            self.kWhTextStart = "kWh per day: "
        elif self.kWhText == "1":
            self.kWhTextStart = "Wh per day: "
        else:
            self.kWhTextStart = "error"
        self.master = master
        self.master.title("TTS Solar Forecast")
        self.master.geometry('480x480+0+0')
        self.frame = Frame(self.master)
        self.frame.pack()
        self.master.configure(background='gray40')
        self.frame.configure(background='gray40')
        self.header = Label(self.frame, text = "Solar Forecast (Now)", bg = "gray40", fg = "#34c9eb", font = ("helvetica new", 25), pady = 15)
        self.header.grid(row = 0, column = 0, columnspan = 2)
        self.locationLabel = Label(self.frame, text = "Location: ", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.locationLabel.grid(row = 1, column = 0)
        self.locationDisplay = Label(self.frame, text = self.location, pady = 10, bg = "gray40", fg = "#34c9eb")
        self.locationDisplay.grid(row = 1, column = 1)
        self.panelWattStr = StringVar()
        self.panelWattLabel = Label(self.frame, text = "Enter System/Panel Wattage: ", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.panelWattLabel.grid(row = 2, column = 0)
        self.panelWattEntry = Entry(self.frame, textvariable = self.panelWattStr, bg = "gray40", fg = "white")
        self.panelWattEntry.grid(row = 2, column = 1)
        self.kWhPerDayLabel = Label(self.frame, text = self.kWhTextStart, pady = 10, bg = "gray40", fg = "#34c9eb")
        self.kWhPerDayLabel.grid(row = 3, column = 0)
        self.kWhPerDayDisplay = Label(self.frame, text = "----", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.kWhPerDayDisplay.grid(row = 3, column = 1)
        self.efficiencyLabel = Label(self.frame, text = "Efficiency: ", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.efficiencyLabel.grid(row = 4, column = 0)
        self.efficiencyDisplay = Label(self.frame, text = "----", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.efficiencyDisplay.grid(row = 4, column = 1)
        self.panelPowerLabel = Label(self.frame, text = "Panel after Efficiency: ", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.panelPowerLabel.grid(row = 5, column = 0)
        self.panelPowerDisplay = Label(self.frame, text = "----", pady = 10, bg = "gray40", fg = "#34c9eb")
        self.panelPowerDisplay.grid(row = 5, column = 1)
        # NOTE(review): self.genButton is assigned twice below, so the
        # GENERATE Button object is no longer referenced after the HOME
        # button is created (both widgets still work).
        self.genButton = Button(self.frame, text = "GENERATE", command = self.update, width = 15)
        self.genButton.grid(row = 6, column = 0)
        self.saveButton = Button(self.frame, text = "SAVE", width = 15, command = self.saveResult)
        self.saveButton.grid(row = 6, column = 1)
        self.viewButton = Button(self.frame, text = "VIEW OTHER", width = 15, command = self.viewWindow)
        self.viewButton.grid(row = 7, column = 1)
        self.genButton = Button(self.frame, text = "HOME", command = self.goHome, width = 15)
        self.genButton.grid(row = 7, column = 0)
        self.tempNowLabel = Label(self.frame, text = "Temperature: ", bg = "gray40", fg = "#34c9eb")
        self.tempNowLabel.grid(row = 8, column = 0)
        self.tempNowDisplay = Label(self.frame, text = self.temperatureShow, bg = "gray40", fg = "#34c9eb")
        self.tempNowDisplay.grid(row = 8, column = 1)
        if self.tempShow == "1":
            self.tempNowLabel.grid(row = 8, column = 0)
        elif self.tempShow == "0":
            self.tempNowLabel.grid_forget()
            self.tempNowDisplay.configure(text = "")
        else:
            print("error")
    def update(self):
        #GENERATE callback: validate the entered wattage, compute the
        #prediction and refresh the result labels.
        self.panelWatt = self.panelWattStr.get()
        self.efficiency = self.energyForecast.calculateScore()
        # NOTE(review): on empty/non-numeric input the wattage is zeroed
        # but no results are computed or displayed, and self.kWhPerDay is
        # left unset -- a subsequent saveResult() would then raise
        # AttributeError.
        if self.panelWatt == '':
            self.panelWatt = 0
        elif self.panelWatt.isnumeric() == False:
            self.panelWatt = 0
        else:
            self.kWhTextEnd = "error"
            self.kWhPerDay = float(self.energyForecast.predictedEnergy2(self.panelWatt))
            if self.kWhText == "0":
                self.kWhTextEnd = " kWh"
            elif self.kWhText == "1":
                self.kWhTextEnd = " Wh"
                # Wh preference: convert the kWh prediction to watt-hours.
                self.kWhPerDay = self.kWhPerDay * 1000
            else:
                self.kWhTextEnd = "error"
            self.kWhPerDayDisplay.configure(text = str(round(self.kWhPerDay, 2)) + self.kWhTextEnd)
            self.efficiencyDisplay.configure(text = str(self.efficiency) + "%")
            self.panelPowerDisplay.configure(text = str(int(float(self.panelWatt) * (self.efficiency / 100))) + "W" )
    def saveResult(self):
        #Persist the latest generated forecast (date, kWh, efficiency,
        #wattage) to saved.csv.  Requires update() to have run with a
        #valid wattage first.
        self.today = date.today()
        # NOTE(review): mode 'w' truncates the file, so only the most
        # recent forecast is ever kept -- confirm whether appending was
        # intended, since viewPrevious lists all rows.
        data = [self.today, str(round(self.kWhPerDay, 2)), str(self.efficiency), str(self.panelWatt)]
        with open('saved.csv', 'w') as self.savedFile:
            self.writer = csv.writer(self.savedFile)
            self.writer.writerow(data)
            self.savedFile.close()
        messagebox.showinfo("showinfo", "Saved")
    def goHome(self):
        #Same as the inherited welcome.goHome (kept for compatibility).
        self.master.withdraw()
        self.newWindow = Toplevel(self.master)
        self.app = welcome(self.newWindow)
#===============================================================================================
class viewPrevious(welcome):
    # viewPrevious gives the user the ability to view saved forecasts from another time.
    def __init__(self, master):
        """Build the 'Previous Forecasts' window and populate the list box from saved.csv."""
        self.master = master
        self.master.title("TTS Solar Forecast")
        self.master.geometry('480x480+0+0')
        self.frame = Frame(self.master)
        self.frame.pack()
        self.master.configure(background='gray40')
        self.frame.configure(background='gray40')
        self.header = Label(self.frame, text = "Previous Forecasts", bg = "gray40", fg = "#34c9eb", font = ("helvetica new", 25), pady = 15)
        self.header.grid(row = 0, column = 0, columnspan = 2)
        # Load every saved row; column 0 (the date) is what the list box shows.
        self.file = open('saved.csv')
        self.reader = csv.reader(self.file)
        self.fileData = list(self.reader)
        self.listOfData = []
        for x in list(range(0, len(self.fileData))):
            self.listOfData.append(self.fileData[x][0])
        self.userStockChoice = Listbox(self.frame)
        self.userStockChoice.grid(row = 1, column = 0)
        # Inserting the data to the list box for the first time of loading the window
        for x, y in enumerate(self.listOfData):
            self.userStockChoice.insert(x, y)
        self.dateLabel = Label(self.frame, text = "Date: ", bg = "gray40", fg = "#34c9eb")
        self.dateLabel.grid(row = 2, column = 0)
        self.dateDisplay = Label(self.frame, text = "----", bg = "gray40", fg = "#34c9eb")
        self.dateDisplay.grid(row = 2, column = 1)
        self.kWhLabel = Label(self.frame, text = "kWh:", bg = "gray40", fg = "#34c9eb")
        self.kWhLabel.grid(row = 3, column = 0)
        self.kWhDisplay = Label(self.frame, text = "----", bg = "gray40", fg = "#34c9eb")
        self.kWhDisplay.grid(row = 3, column = 1)
        self.effLabel = Label(self.frame, text = "Efficiency: ", bg = "gray40", fg = "#34c9eb")
        self.effLabel.grid(row = 4, column = 0)
        self.effDisplay = Label(self.frame, text = "----", bg = "gray40", fg = "#34c9eb")
        self.effDisplay.grid(row = 4, column = 1)
        self.systemLabel = Label(self.frame, text = "System: ", bg = "gray40", fg = "#34c9eb")
        self.systemLabel.grid(row = 5, column = 0)
        self.systemDisplay = Label(self.frame, text = "----", bg = "gray40", fg = "#34c9eb")
        self.systemDisplay.grid(row = 5, column = 1)
        self.updateButton = Button(self.frame, text = "VIEW", command = self.updatePrev, width = 15, bg = "black", fg = "#34c9eb")
        self.updateButton.grid(row = 6, column = 0, padx = 5)
        self.DeleteButton = Button(self.frame, text = "DELETE", command = self.deletePrev, width = 15, bg = "black", fg = "#34c9eb")
        self.DeleteButton.grid(row = 6, column = 1, padx = 5)
        self.homeButton = Button(self.frame, text = "HOME", command = self.goHome, width = 15, bg = "black", fg = "#34c9eb")
        self.homeButton.grid(row = 7, column = 0, padx = 5, columnspan = 2)
    def updatePrev(self):
        """Show the fields of the forecast currently selected in the list box."""
        try:
            self.index = self.userStockChoice.curselection()[0]
            self.dateDisplay.config(text = self.fileData[self.index][0])
            self.kWhDisplay.config(text = self.fileData[self.index][1])
            self.effDisplay.config(text = self.fileData[self.index][2])
            self.systemDisplay.config(text = self.fileData[self.index][3])
        # NOTE(review): an empty selection raises IndexError, not ValueError;
        # this handler likely never fires -- confirm the intended exception.
        except ValueError:
            print("Error Execpetion caught!")
    def deletePrev(self):
        # Creating an index of cursor selection to know what to delete.
        self.index = self.userStockChoice.curselection()[0]
        # NOTE(review): rows are matched by the system-wattage field (column 3),
        # so every row sharing that wattage is removed, not just the selected
        # one -- confirm whether matching on the full row was intended.
        self.indexCode = str(self.fileData[self.index][3])
        tempList = list()
        with open('saved.csv', 'r') as readFile:
            reader = csv.reader(readFile)
            for row in reader:
                tempList.append(row)
                for field in row:
                    if field == self.indexCode:
                        tempList.remove(row)
        with open('saved.csv', 'w') as writeFile:
            writer = csv.writer(writeFile)
            writer.writerows(tempList)
        # Once the entity is deleted, the list box with updated info is displayed on top of the old.
        file = open('saved.csv')
        reader = csv.reader(file)
        self.fileData = list(reader)
        self.listOfData = []
        for x in list(range(0, len(self.fileData))):
            self.listOfData.append(self.fileData[x][0])
        self.userStockChoice = Listbox(self.frame)
        self.userStockChoice.grid(row = 1, column = 0)
        # Inserting the data from listOfData in to the list box
        for x, y in enumerate(self.listOfData):
            self.userStockChoice.insert(x, y)
main()
| TylerTobin-CS/Solar-Energy-Forecast | frontend_solar.py | frontend_solar.py | py | 17,213 | python | en | code | 0 | github-code | 13 |
74861058257 | import logging
import os
import sys
# Module-level test configuration: shared flags, lazily-initialized singletons,
# and the per-toolbox test-data paths and download URLs used by the test suites.
DEBUG = True # this guy is a flag for extra messaging while debugging tests
#NOTE: Logger and Platform are initialized in TestRunner's main() or Configuration.GetLogger/Platform
Logger = None
LoggerFile = None
Platform = None
PLATFORM_PRO = "PRO"
PLATFORM_DESKTOP = "DESKTOP"
''' Testing paths '''
currentPath = os.path.dirname(__file__) # should go to .\solutions-geoprocessing-toolbox\utils\test
repoPath = os.path.dirname(os.path.dirname(currentPath))
''' Download path '''
testDataPath = os.path.normpath(os.path.join(currentPath, r"test_data")) # should go to .\solutions-geoprocessing-toolbox\utils\test\test_data
''' Log Path: the folder where the log files go wild and multiply '''
logPath = os.path.normpath(os.path.join(currentPath, r"log")) # should go to .\solutions-geoprocessing-toolbox\utils\test\log
'''Distance To Assets paths'''
distancetoAssetsToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../distance_to_assets/Distance To Assets"))
distanceToAssetsDataPath = os.path.normpath(os.path.join(testDataPath, r"DistanceToAssets"))
distanceToAssetsInputGDB = os.path.join(distanceToAssetsDataPath, "DistancetoAssetsTestData/DistanceToAssets.gdb")
distanceToAssetsOutputGDB = os.path.join(currentPath, r"../../distance_to_assets/DistanceToAssets.gdb")
distanceToAssetsInputNDGDB=os.path.join(distanceToAssetsDataPath, "DistancetoAssetsTestData/SanFrancisco.gdb")
distanceToAssetsURL = r"http://www.arcgis.com/sharing/content/items/700e44eb3e114c098818ea71f7ad72b6/data"
''' Clearing Operations - Test Data/Paths '''
clearingOperationsToolboxPath = os.path.normpath(os.path.join(currentPath,
                                                 r"../../clearing_operations/ClearingOperationsTools.pyt"))
clearingOperationsPath = os.path.normpath(os.path.join(testDataPath, r"clearing_operations"))
clearingOperationsURL = r"http://www.arcgis.com/sharing/content/items/198f01e263474c209198c9c3c3586287/data"
clearingOperationsInputGDB = os.path.join(clearingOperationsPath, "test_clearing_operations.gdb")
''' GriddedReferenceGraphic = Test Data/Paths '''
grgToolboxPath = os.path.normpath(os.path.join(currentPath,
                                               r"../../clearing_operations/GriddedReferenceGraphicTools.pyt"))
grgPath = os.path.normpath(os.path.join(testDataPath,r"clearing_operations"))
grgURL = r"http://www.arcgis.com/sharing/content/items/bb592332393b4443817f5986af611e3d/data"
grgInputGDB = os.path.join(grgPath, r"grg_test_data.gdb")
''' Geonames - Test Data/Paths '''
geonamesToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../geonames/Geonames Tools"))
geonamesDataPath = os.path.normpath(os.path.join(testDataPath, r"geonames"))
geonamesInputGDB = os.path.join(geonamesDataPath, "Geonames.gdb")
geonamesURL = r"http://www.arcgis.com/sharing/content/items/afc766d5276648ab80aa85b819af1ffc/data"
''' Military Features - Test Data/Paths '''
militaryFeaturesToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../military_features/Military Features Tools"))
militaryFeaturesDataPath = os.path.normpath(os.path.join(testDataPath, r"military_features"))
militaryFeaturesGeodatabasesPath = os.path.normpath(os.path.join(militaryFeaturesDataPath, r"data/mil2525c/testdata/geodatabases"))
militaryFeaturesMessagesPath = os.path.join(militaryFeaturesDataPath, r"data/mil2525c/testdata/messagefiles")
militaryFeaturesInputGDB = os.path.join(militaryFeaturesGeodatabasesPath, r"test_inputs.gdb")
militaryFeaturesInputGDBNonMilitaryFeatures = os.path.join(militaryFeaturesGeodatabasesPath, "test_inputs_non_military_features.gdb")
militaryFeaturesBlankMilFeaturesGDB = os.path.join(militaryFeaturesGeodatabasesPath, "MilitaryOverlay10.1.1-Blank.gdb")
militaryFeaturesURL = r"http://www.arcgis.com/sharing/content/items/3a18f91b34d14a5aa72aa67f32c97497/data"
''' Incident Analysis - Test Data/Paths '''
incidentToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../incident_analysis/Incident Analysis Tools"))
incidentAnalysisDataPath = os.path.normpath(os.path.join(testDataPath, r"incident_analysis"))
incidentURL = "http://www.arcgis.com/sharing/content/items/528faf6b23154b04a8268b33196fa9ad/data"
incidentInputGDB = os.path.join(incidentAnalysisDataPath, "test_incident_analysis_tools.gdb")
incidentResultGDB = os.path.join(incidentAnalysisDataPath, "test_incident_analysis_results.gdb")
''' Sun Position Analysis - Test Data/Paths '''
sunPositionAnalysisToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../sun_position_analysis/Sun Position Analysis Tools"))
sunPositionAnalysisDataPath = os.path.normpath(os.path.join(testDataPath, r"sun_position_analysis"))
sunPositionAnalysisURL = r"http://www.arcgis.com/sharing/content/items/bf6a04b4c9a3447b91e9c0b4074ca1e4/data"
sunPositionInputGDB = os.path.join(sunPositionAnalysisDataPath, "test_sun_position.gdb")
''' MAoT - Test Data/Paths '''
maotToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../military_aspects_of_terrain/Military Aspects of Terrain Tools"))
maotPath = os.path.normpath(os.path.join(testDataPath, r"maot"))
maotURL = r"http://www.arcgis.com/sharing/content/items/127bff2341694342a6df884aaa51237e/data"
''' MAoW - Test Data/Paths '''
maowToolboxPath = os.path.normpath(os.path.join(currentPath, r"../../military_aspects_of_weather/Military Aspects of Weather Tools"))
maowPath = os.path.normpath(os.path.join(testDataPath, r"maow"))
maowURL = "http://www.arcgis.com/sharing/content/items/74eeb356c7dd4422bf52f36f38bb8a9b/data"
def checkTokenizeWorkaround() :
    """Patch tokenize.detect_encoding on Python 3.0 - 3.5.2.

    Those versions choke when ArcPy loads toolbox files containing nulls
    ("SyntaxError: invalid or missing encoding declaration for '...XXXX.tbx'").
    The replacement falls back to latin-1 instead of raising.
    Workaround borrowed/used from:
    https://github.com/habnabit/passacre/commit/2ea05ba94eab2d26951ae7b4b51abf53132b20f0
    """
    # Only the affected interpreter range needs the patch; elsewhere this is a no-op.
    if not ((3, 0) <= sys.version_info < (3, 5, 3)):
        return
    import tokenize
    original = getattr(tokenize, "detect_encoding", None)
    if original is None:
        return
    def detect_encoding(readline):
        try:
            return original(readline)
        except SyntaxError:
            return 'latin-1', []
    tokenize.detect_encoding = detect_encoding
def GetLogger(logLevel = logging.DEBUG) :
    """Return the shared module Logger, creating it at logLevel on first use."""
    global Logger
    if Logger is not None:
        return Logger
    # Imported lazily so merely importing Configuration stays cheap.
    import UnitTestUtilities
    Logger = UnitTestUtilities.initializeLogger(UnitTestUtilities.getLoggerName(), logLevel)
    return Logger
def GetPlatform() :
    """Detect once whether we run under ArcGIS Pro or Desktop; cache in Platform."""
    global Platform
    if Platform is not None:
        return Platform
    import arcpy
    if arcpy.GetInstallInfo()['ProductName'] == 'ArcGISPro':
        Platform = PLATFORM_PRO
        # Pro runs on Python 3, which may need the tokenize patch.
        checkTokenizeWorkaround()
    else:
        Platform = PLATFORM_DESKTOP
    return Platform
def GetToolboxSuffix() :
    """Return the toolbox filename suffix for the current platform.

    ArcGIS Pro toolboxes end in "_pro.tbx"; ArcMap (the default) in "_arcmap.tbx".
    """
    platform = GetPlatform()
    # Use the value GetPlatform() returns; the original compared the module
    # global instead and left this local unused, working only through
    # GetPlatform()'s side effect on the global.
    if platform == PLATFORM_PRO :
        return "_pro.tbx"
    return "_arcmap.tbx"
| Esri/solutions-geoprocessing-toolbox | utils/test/Configuration.py | Configuration.py | py | 7,568 | python | en | code | 129 | github-code | 13 |
31151668773 | import math
from itertools import chain
from itertools import accumulate
from functools import reduce
from collections import Counter
from collections import defaultdict
from copy import deepcopy
import numpy as np
import heapq
import sys
sys.setrecursionlimit(10000)
# Read puzzle input (one "x,y,z" lava cube per line) from stdin (fd 0).
f = open(0).read().strip().split('\n')
field = {}
# Cell states used by check() while flood-exploring air pockets.
LAVA = 1
TRAPPED = 2
CHECKING = 3
FREE = 4
minx = +1000
maxx = -1000
miny = +1000
maxy = -1000
minz = +1000
maxz = -1000
# Record every cube and track the bounding box of the droplet.
for l in f:
    x, y, z = map(int, l.split(','))
    field[(x, y, z)] = LAVA
    minx = min(minx, x)
    maxx = max(maxx, x)
    miny = min(miny, y)
    maxy = max(maxy, y)
    minz = min(minz, z)
    maxz = max(maxz, z)
print(minx, miny, minz)
print(maxx, maxy, maxz)
faces = 0
PART = 1
def check(x, y, z):
    """Return truthy when the face toward (x, y, z) counts as exposed surface."""
    i = (x, y, z) in field
    if PART == 1:
        # Part 1: any non-lava neighbour counts (True sums as 1 below).
        return not i
    if i and field[(x, y, z)] == CHECKING:
        return 2
    if i and field[(x, y, z)] == FREE:
        return 1
    # make sure this isn't a trapped air pocket
    if i and field[(x, y, z)] in (TRAPPED, LAVA):
        return 0
    # Outside the bounding box the air is certainly free.
    if x < minx or x > maxx or y < miny or y > maxy or z < minz or z > maxz:
        return 1
    trapped = []
    # Mark this cell while recursing so revisits report state 2 (CHECKING).
    field[(x, y, z)] = CHECKING
    trapped.append(check(x + 1, y, z))
    trapped.append(check(x - 1, y, z))
    trapped.append(check(x, y + 1, z))
    trapped.append(check(x, y - 1, z))
    trapped.append(check(x, y, z + 1))
    trapped.append(check(x, y, z - 1))
    del field[(x, y, z)]
    # All non-CHECKING neighbours blocked => this air cell is trapped.
    if trapped.count(0) == 6 - trapped.count(2):
        field[(x, y, z)] = TRAPPED
        return 0
    field[(x, y, z)] = FREE
    return 1
# Sum exposed faces over every lava cube's six neighbours.
for cube in frozenset(field.keys()):
    x, y, z = cube
    faces += check(x + 1, y, z)
    faces += check(x - 1, y, z)
    faces += check(x, y + 1, z)
    faces += check(x, y - 1, z)
    faces += check(x, y, z + 1)
    faces += check(x, y, z - 1)
print(faces)
| obiwac/advent-of-code | 2022/18/main.py | main.py | py | 1,729 | python | en | code | 2 | github-code | 13 |
16886380988 | import os
import jwt
import functools
from datetime import datetime, timedelta
from dateutil import parser
from database import Database
# Create the backing tables at import time so request handlers can assume
# the schema already exists.
db = Database()
db.init_users_table()
db.init_urns_table()
def remove_key(d, key):
    """Return a shallow copy of mapping *d* without the entry for *key*.

    Raises KeyError if *key* is absent; the original mapping is untouched.
    """
    copy = dict(d)
    del copy[key]
    return copy
def check_dn(dn):
    """Look up a user by certificate DN.

    Returns {'id', 'username'} for a known DN, otherwise an error dict.
    """
    rows = Database().execute("SELECT id, username from users WHERE dn = ?", (dn,))
    if not rows:
        return {"error": "Invalid credentials"}
    first = rows[0]
    return {"id": first[0], "username": first[1]}
def update_jwt(_id, token):
    """Store the JWT for user *_id* in the users table.

    *token* arrives as bytes (from jwt.encode) and is persisted as text.
    """
    db = Database()
    db.execute("UPDATE users SET jwt=? WHERE id=?", (token.decode("utf-8"), _id))
# def find_user_by_dn(dn):
# conn = sqlite3.connect('urn.db')
# c = conn.cursor()
# query = "SELECT * from users WHERE dn=?"
# result = c.execute(query, (dn,))
# row = result.fetchone()
# if row:
# user(row[0], row[1], row[2], row[3])
# else:
# user = None
# conn.close()
# return user
def jwt_required(key, request):
    """Decorator factory: require a valid, unexpired JWT in the Authorization header.

    The presented token must also match the one stored for the user in the
    database; on success the decoded payload is handed to the wrapped view
    as kwargs['decoded'].
    """
    def jwt_req(func):
        @functools.wraps(func)
        def function_that_runs_func(*args, **kwargs):
            headers = request.headers
            if "Authorization" not in headers:
                return {"error": "Invalid request!"}
            encoded = headers['Authorization']
            db = Database()
            row = db.execute("SELECT id FROM users WHERE jwt=?", (encoded,))
            if len(row) <= 0:
                return {"error": "Invalid token!"}
            # PyJWT expects a list of accepted algorithms here; the original
            # passed the bare string 'HS256'.
            decoded = jwt.decode(encoded, key, algorithms=['HS256'])
            if row[0][0] != decoded['id']:
                return {"error": "Invalid token!"}
            # 'expiration' is stored as an ISO-ish string in the payload.
            if parser.parse(decoded['expiration']) <= datetime.now():
                return {"error": "Your token has expired"}
            kwargs['decoded'] = decoded
            return func(*args, **kwargs)
        return function_that_runs_func
    return jwt_req
def cert_required(request):
    """Decorator factory: reject the call unless a client certificate DN is present.

    The DN is read from the SSL environment variable set by the reverse proxy.
    """
    def cert_req(func):
        @functools.wraps(func)
        def func_that_runs_func(*args, **kwargs):
            if request.environ.get('HTTP_SSL_CLIENT_S_DN'):
                return func(*args, **kwargs)
            return {"error": "You need a valid certificate"}
        return func_that_runs_func
    return cert_req
17059832274 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.Signer import Signer
class SignField(object):
    """Value object for Alipay's SignField, following the generated-SDK pattern."""

    def __init__(self):
        # Backing fields, exposed through the properties below.
        self._auto_execute = None
        self._signer = None
        self._struct_key = None

    @property
    def auto_execute(self):
        return self._auto_execute

    @auto_execute.setter
    def auto_execute(self, value):
        self._auto_execute = value

    @property
    def signer(self):
        return self._signer

    @signer.setter
    def signer(self, value):
        # Accept either a ready Signer or a plain dict describing one.
        self._signer = value if isinstance(value, Signer) else Signer.from_alipay_dict(value)

    @property
    def struct_key(self):
        return self._struct_key

    @struct_key.setter
    def struct_key(self, value):
        self._struct_key = value

    def to_alipay_dict(self):
        """Serialize truthy fields into the dict shape the Alipay gateway expects."""
        params = dict()
        for name, attr in (('auto_execute', self.auto_execute),
                           ('signer', self.signer),
                           ('struct_key', self.struct_key)):
            if attr:
                params[name] = attr.to_alipay_dict() if hasattr(attr, 'to_alipay_dict') else attr
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a SignField from a gateway dict; returns None for a falsy input."""
        if not d:
            return None
        o = SignField()
        if 'auto_execute' in d:
            o.auto_execute = d['auto_execute']
        if 'signer' in d:
            o.signer = d['signer']
        if 'struct_key' in d:
            o.struct_key = d['struct_key']
        return o
| alipay/alipay-sdk-python-all | alipay/aop/api/domain/SignField.py | SignField.py | py | 2,005 | python | en | code | 241 | github-code | 13 |
11261647475 | from functools import lru_cache
import sys
from typing import List
class Voxel:
    """An integer lattice point; hashable so it works in sets and dict keys."""

    def __init__(self, x, y, z) -> None:
        self.x: int = x
        self.y: int = y
        self.z: int = z

    def sides(self):
        """Yield the six axis-aligned unit neighbours of this voxel."""
        for delta in (-1, 1):
            yield Voxel(self.x + delta, self.y, self.z)
            yield Voxel(self.x, self.y + delta, self.z)
            yield Voxel(self.x, self.y, self.z + delta)

    def __eq__(self, __o: object) -> bool:
        return (self.x, self.y, self.z) == (__o.x, __o.y, __o.z)

    def __str__(self) -> str:
        return str((self.x, self.y, self.z))

    def __repr__(self) -> str:
        return str(self)

    def __sub__(self, o):
        return Voxel(self.x - o.x, self.y - o.y, self.z - o.z)

    def __add__(self, o):
        return Voxel(self.x + o.x, self.y + o.y, self.z + o.z)

    def __hash__(self) -> int:
        return hash((self.x, self.y, self.z))
# Read the lava cubes (one "x,y,z" per line) from stdin.
blocks = set()
for line in sys.stdin:
    v = Voxel(*[int(c) for c in line.strip().split(",")])
    blocks.add(v)
# Part 1: every empty cell adjacent to lava is a surface cell.
air = set()
for voxel in blocks:
    for side in voxel.sides():
        if side not in blocks:
            air.add(side)
print(len(air))
bubbles = []
covered = set()
# Group the surface air cells into connected "bubbles" (pockets).
while len(air) > 0:
    voxel = air.pop()
    bubble = set()
    bubble.add(voxel)
    # Explore each pocket
    q = [(1, voxel)]
    while q:
        dc, vx = q.pop()
        sides = list(vx.sides())
        # NOTE(review): `side` here is a leftover binding from a previous
        # loop iteration, not derived from `sides` above -- this looks like
        # a bug; confirm intent.
        if side in air:
            air.remove(side)
        # dc counts steps since the last lava contact; give up past 10.
        if dc > 10:
            continue
        if any(s in blocks for s in sides):
            dc = 1
        else:
            dc += 1
        for side in sides:
            if side in blocks:
                continue
            if side not in bubble:
                bubble.add(side)
                q.append((dc, side))
            if side in air:
                air.remove(side)
    bubbles.append(bubble)
print("# Joining bubbles..")
# Merge bubbles that share any cell (transitively, left to right).
for i in range(len(bubbles)):
    joined = []
    for j in range(i+1, len(bubbles)):
        if any(v in bubbles[j] for v in bubbles[i]):
            bubbles[i] = bubbles[i] | bubbles[j]
            joined.append(j)
            continue
    for j in joined[::-1]:
        del bubbles[j]
surfaces = []
print("# Calculating surfaces..")
# Count, per bubble, how many of its cells' faces touch lava.
for bubble in bubbles:
    print(len(bubble))
    exposed = 0
    for voxel in bubble:
        # Sanity check: a cell must belong to exactly one bubble.
        for b2 in bubbles:
            if b2 == bubble:
                continue
            if voxel in b2:
                print("wat", voxel, len(b2), len(bubble))
                sys.exit(1)
        for side in voxel.sides():
            if side in blocks:
                exposed += 1
    surfaces.append(exposed)
print(sorted(surfaces))
| Tethik/advent-of-code | 18/b.py | b.py | py | 2,700 | python | en | code | 0 | github-code | 13 |
74905184017 | import itertools
print('Advent of Code 2015 - Day 09')
with open('day09.txt') as f:
    paths = {}
    # format of paths:
    # {
    #     start1: {
    #         dest1: length of start1 to dest1,
    #         dest2: length of start1 to dest2,
    #         ...
    #     },
    #     start2: {
    #         dest1: length of start2 to dest1,
    #         dest2: length of start2 to dest2,
    #         ...
    #     },
    #     ...
    # }
    for line in f.read().splitlines():
        way, distance = line.split(' = ')
        place1, place2 = way.split(' to ')
        # Record the distance in both directions (the graph is undirected).
        if place1 not in paths:
            paths[place1] = {place2: int(distance)}
        else:
            paths[place1][place2] = int(distance)
        if place2 not in paths:
            paths[place2] = {place1: int(distance)}
        else:
            paths[place2][place1] = int(distance)
def get_total_distance(path: list) -> int:
    """Sum the leg distances of visiting `path` in order (consumes the list)."""
    starting = path.pop(0)
    distance = 0
    while path:
        destination = path.pop(0)
        distance += paths[starting][destination]
        starting = destination
    return distance
min_total = 100000
max_total = 0
# Brute-force every visiting order to find the shortest and longest routes.
for path in itertools.permutations(paths.keys()):
    total = get_total_distance(list(path))
    min_total = min(min_total, total)
    max_total = max(max_total, total)
print(f'Part 1: {min_total}') # 207 part 1
print(f'Part 2: {max_total}') # 804 part 2
| kdmontero/aoc | 2015/day09.py | day09.py | py | 1,383 | python | en | code | 0 | github-code | 13 |
6163680529 | import socket
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
import time, pickle, os, sys
import json
from encrypt_decrypt import encrypt, decrypt
from cryptography.hazmat.primitives import serialization
HOST = '127.0.0.1'
PORT = 65433
BOB_MESSAGE = b""
digest = hashes.Hash(hashes.SHA256())
# Bob's ephemeral ECDH key pair on the P-384 curve.
bob_priv = ec.generate_private_key(ec.SECP384R1())
# Hash the file named on the command line; the peers compare hashes securely.
filename = sys.argv[1]
fileContents = open(filename, 'rb')
digest.update(fileContents.read())
fileHash = digest.finalize()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 )
    s.bind((HOST, PORT))
    s.listen()
    conn, addr = s.accept()
    with conn:
        # Receive Alice's PEM public key; abort if the peer sent nothing.
        alice_public = conn.recv(102400)
        loaded_public_key = serialization.load_pem_public_key(alice_public)
        if not alice_public:
            exit()
        conn.sendall(bob_priv.public_key().public_bytes(encoding=serialization.Encoding.PEM,format=serialization.PublicFormat.SubjectPublicKeyInfo)) #Send over bob's public key
        # Derive the shared symmetric key via ECDH + HKDF-SHA256.
        bob_shared = bob_priv.exchange(ec.ECDH(), loaded_public_key)
        bob_hkdf = HKDF(algorithm=hashes.SHA256(),length=32,salt=None,info=b'',).derive(bob_shared)
        # Exchange encrypted file hashes (pickled (iv, ct, tag, aad) tuples).
        # NOTE(review): pickle over the wire is only acceptable because both
        # ends are trusted test peers.
        iv, ciphertext, tag, associated_data = encrypt(bob_hkdf,fileHash,b"Bob's Hash")
        myCiphertext = ciphertext
        conn.send(pickle.dumps((iv,ciphertext,tag, associated_data)))
        (iv, ciphertext, tag, associated_data) = pickle.loads(conn.recv(102400))
        pText = decrypt(bob_hkdf, associated_data, iv, ciphertext,tag)
        # Compare Alice's hash with ours and report the verdict both ways.
        isSame = b""
        if pText == fileHash:
            isSame = b"Success!"
        else:
            isSame = b"Failed!"
        iv, ciphertext, tag, associated_data = encrypt(bob_hkdf, isSame ,b"Bob's Result")
        conn.sendall(pickle.dumps((iv, ciphertext, tag, associated_data)))
        print("Our result: ", isSame.decode('utf-8'))
        (iv, ciphertext, tag, associated_data) = pickle.loads(conn.recv(102400))
        alice_result = decrypt(bob_hkdf, associated_data, iv, ciphertext, tag)
        print("Alice result:", alice_result.decode('utf-8'))
| rajKarra69420/cs355project | bob.py | bob.py | py | 2,226 | python | en | code | 0 | github-code | 13 |
37474234200 | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 6 11:48:49 2019
@author: Joop
"""
import numpy as np
from scipy.optimize import linprog
# var names in all CAPS are supposed to be constants
# coordinates counting from up to down, left to right in the grid:
# 4x4 grid-world layout (row, column), counting from top-left.
GOAL = (0, 3)
SHIPWRECK = (2, 2)
CRACKS = [(1, 1), (1, 3), (2, 3), (3, 1), (3, 2), (3, 3)]
TERMINALS = set([GOAL] + CRACKS)
# S_PLUS -> {(0, 0), .., (3, 3)}
S_PLUS = set([(i, j) for i in range(4) for j in range(4)])
S = S_PLUS - TERMINALS
# NOTE(review): the example in the comment below shows only ['D', 'R'] but
# every state lists all four actions -- the comment looks stale.
A = { # only non-terminal states are listed. Example: A[(0, 0)] -> ['D', 'R']
    (0, 0): ['U', 'D', 'L', 'R'],
    (0, 1): ['U', 'D', 'L', 'R'],
    (0, 2): ['U', 'D', 'L', 'R'],
    (1, 0): ['U', 'D', 'L', 'R'],
    (1, 2): ['U', 'D', 'L', 'R'],
    (2, 0): ['U', 'D', 'L', 'R'],
    (2, 1): ['U', 'D', 'L', 'R'],
    (2, 2): ['U', 'D', 'L', 'R'],
    (3, 0): ['U', 'D', 'L', 'R']
}
ACTION_VEC = { # example: up is one row up (-1) and no column change (0)
    'U': (-1, 0),
    'D': (1, 0),
    'L': (0, -1),
    'R': (0, 1)
}
ACTION_TO_INT = { # integer representation of an action. Helper function
    'U': 0,
    'D': 1,
    'L': 2,
    'R': 3
}
GAMMA = .9
# reward doesn't depend on s or a, just on s'
def R(s, a, s_):
    """Reward for landing in state s_; independent of the origin s and action a."""
    if s_ == GOAL:
        return 100
    if s_ == SHIPWRECK:
        return 20
    if s_ in CRACKS:
        return -10
    return 0
# transition probabilities
def get_P():
    """Build P[s_row, s_col, action, s'_row, s'_col] transition probabilities.

    Intended move succeeds with 0.95; with 0.05 the agent slides all the way
    to the grid edge in the movement direction.  Moves that cannot slide
    (into terminals, or already at the relevant edge) are deterministic.
    """
    # initialize array with 4 * 4 * s, 4 * a, and 4 * 4 * s_ to all zeros
    P = np.zeros((4, 4, 4, 4, 4), dtype=np.float16)
    for s in S:
        for a in A[s]:
            # define state s_new that agent intends to go to
            s_new = (s[0] + ACTION_VEC[a][0], s[1] + ACTION_VEC[a][1])
            for s_ in S_PLUS:
                if s_new == s_:
                    if s_ in TERMINALS:
                        p = 1 # no sliding over terminals
                    elif a == 'U' and s_[0] == 0:
                        p = 1 # no sliding up over top row states
                    elif a == 'D' and s_[0] == 3:
                        p = 1 # no sliding down over bottom row states
                    elif a == 'L' and s_[1] == 0:
                        p = 1 # no sliding left over left column states
                    # NOTE(review): there is no matching "no sliding right over
                    # the last column" case; for a == 'R' into column 3 the
                    # 0.05 slip entry below coincides with s_ and is then
                    # overwritten by 0.95, losing probability mass -- verify.
                    else: # this s' has possibility of sliding over
                        p = .95 # '''0.95'''
                        # define sliding end states for assigning P = .05
                        if a == 'U':
                            s_slip = (0, s_[1]) # first row, column of s_
                        elif a == 'D':
                            s_slip = (3, s_[1]) # last row, column of s_
                        elif a == 'R':
                            s_slip = (s_[0], 3) # row of s_, last column
                        elif a == 'L':
                            s_slip = (s_[0], 0) # row of s_, first column
                        P[s[0], s[1], ACTION_TO_INT[a], s_slip[0], s_slip[1]]\
                            = .05 #'''0.05'''
                    P[s[0], s[1], ACTION_TO_INT[a], s_[0], s_[1]] = p
                else: # we leave p to 0
                    pass
    return P
def get_A_ub():
    """Assemble the LP inequality matrix encoding V(s) >= E[R] + GAMMA * E[V(s')].

    One row per (state, action) pair over the ordered state list S.
    Side effect: appends the matching right-hand sides (-E[R(s, a)]) to the
    module-level list b_ub.
    """
    A_ub = []
    for s in S:
        for a in A[s]:
            A_ub_el = np.empty(len(S), dtype=np.float16)
            # Expected immediate reward for taking a in s.
            E_R_sa = 0
            for s_ in S_PLUS:
                E_R_sa += P[(*s, ACTION_TO_INT[a], *s_)] * R(None, None, s_)
            b_ub.append(-E_R_sa)
            for j, s_ in enumerate(S):
                if s == s_:
                    # NOTE(review): the diagonal ignores any self-transition
                    # probability (GAMMA * P(s, a, s) - 1 would be exact) --
                    # confirm self-transitions are impossible here.
                    A_ub_el[j] = -1
                else:
                    A_ub_el[j] = GAMMA * P[(*s, ACTION_TO_INT[a], *s_)]
            A_ub.append(A_ub_el)
    return np.array(A_ub)
def get_V():
    """Solve the LP (min c.V s.t. A_ub V <= b_ub) and reshape V into a 4x4 grid.

    Uses the module-level c, A_ub, b_ub and bounds; terminal states keep 0.
    """
    res = linprog(c, A_ub=A_ub, b_ub=b_ub, bounds=bounds,
                  options={"disp": True})
    print(res, '\n')
    print(S, res.x)
    V = np.zeros((4, 4), dtype=np.float16)
    # Scatter the solution vector back onto the grid positions of S.
    for i, s in enumerate(S):
        V[s] = res.x[i]
    print(V, '\n')
    return V
def pol_improv(V):
    """One policy-improvement sweep: make the global policy pi greedy w.r.t. V."""
    for s in S:
        Q = {}
        for a in A[s]:
            Q[a] = 0
            # Q(s, a) = sum over s' of P * (R + GAMMA * V(s')).
            for s_ in S_PLUS:
                Q[a] += P[(*s, ACTION_TO_INT[a], *s_)] *\
                    (R(None, None, s_) + GAMMA * V[s_])
        pi[s] = max(Q, key=Q.get)
P = get_P()
# Fix an ordering of the non-terminal states for the LP variable vector.
S = list(S)
# first we get V from linear programming, then one round of policy improvement
c = np.ones(len(S))
bounds = (None, None)
b_ub = []
A_ub = get_A_ub()
V = get_V()
pi = { # start with arbitrary pi(s)
    (0, 0): 'D',
    (0, 1): 'D',
    (0, 2): 'D',
    (1, 0): 'U',
    (1, 2): 'U',
    (2, 0): 'U',
    (2, 1): 'U',
    (2, 2): 'U',
    (3, 0): 'U'
}
pol_improv(V)
print('Optimal policy:', '\n', pi)
| EdoardoGuerriero/Reinforcement-Learning-VU-2019- | Linear_programming_policy_iteration.py | Linear_programming_policy_iteration.py | py | 4,937 | python | en | code | 1 | github-code | 13 |
73492596816 | # Binary Tree to Doubly Linked List using Morris Traversal
from queue import Queue
class Node:
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None
def BuildTree(nodes):
    """Build a binary tree from a level-order token list; 'N' marks a missing child.

    Returns the root Node, or None for an empty / 'N'-rooted input.
    """
    if not nodes or nodes[0] == 'N':
        return None
    root = Node(int(nodes[0]))
    pending = Queue()
    pending.put(root)
    total = len(nodes)
    i = 1
    while i < total and not pending.empty():
        parent = pending.get()
        # Attach the left child, if present.
        if nodes[i] != 'N':
            parent.left = Node(int(nodes[i]))
            pending.put(parent.left)
        i += 1
        if i >= total:
            break
        # Attach the right child, if present.
        if nodes[i] != 'N':
            parent.right = Node(int(nodes[i]))
            pending.put(parent.right)
        i += 1
    return root
def Inorder(root):
    """Print the tree's inorder traversal on one line (left, node, right)."""
    if root is None:
        return
    Inorder(root.left)
    print(root.val, end=" ")
    Inorder(root.right)
def BTtoDLL(root):
    """Convert a binary tree to a doubly linked list in inorder sequence.

    Uses Morris traversal (temporary right-threads to inorder predecessors),
    so it needs only O(1) extra space.  Returns (head, tail); in the result
    .left/.right act as prev/next pointers.

    NOTE(review): a None root makes head.right None and the final
    head.left assignment raise AttributeError -- confirm callers never
    pass an empty tree.
    """
    # Dummy node so the first visited node can be linked uniformly.
    head=tail=Node(-1)
    temp=head
    curr=root
    while curr!=None:
        if curr.left==None:
            # No left subtree: visit curr, then advance right.
            temp.right=curr
            curr.left=temp
            temp=curr
            tail=temp
            curr=curr.right
        else:
            # Find curr's inorder predecessor (rightmost node of left subtree).
            t=curr.left
            while t.right!=None and t.right!=curr:
                t=t.right
            if t.right==None:
                # First visit: thread predecessor -> curr, descend left.
                t.right=curr
                curr=curr.left
            else:
                # Second visit: remove the thread, visit curr, go right.
                t.right=None
                temp.right=curr
                curr.left=temp
                temp=curr
                tail=temp
                curr=curr.right
    # Drop the dummy head.
    head=head.right
    head.left=None
    return head,tail
def printHead(head):
    """Print the list from head to tail by following .right (next) pointers."""
    node = head
    while node is not None:
        print(node.val, end=" ")
        node = node.right
    print()
def printTail(tail):
    """Print the list from tail back to head by following .left (prev) pointers.

    Fixes the original, which assigned an unused `curr` and then iterated by
    mutating the `tail` parameter itself.
    """
    node = tail
    while node is not None:
        print(node.val, end=" ")
        node = node.left
    print()
def main():
    """Read a level-order tree from stdin, convert to a DLL, print both directions."""
    nodes=input().split()
    root=BuildTree(nodes)
    # Inorder(root)
    head,tail=BTtoDLL(root)
    printHead(head)
    printTail(tail)
# Script entry point.
main()
| Ayush-Tiwari1/DSA | Days.31/Python/1.Binary-Tree-to-DLL-using-Morris-Traversal.py | 1.Binary-Tree-to-DLL-using-Morris-Traversal.py | py | 1,950 | python | en | code | 0 | github-code | 13 |
27885129242 | import dgl
import torch
import numpy as np
import itertools
import os
import time
import warnings
import argparse
import random
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from utils.criterions import NCESoftmaxLoss, NCESoftmaxLossNS
from models.pretrain.memory_moco import MemoryMoCo
from utils.graph_data_load import (RecDataset, LoadBalanceGraphDataset, worker_init_fn)
from utils.graph_data_util import batcher
from models.pretrain.graph_encoder import GraphEncoder
from models.pretrain.seq_encoder import SeqEncoder
from utils.utils import adjust_learning_rate
from models.attacker.attacker import Attacker
from utils.data_load import Data
def parse_args():
    """Build and parse the command-line arguments for attacker pre-training.

    Also converts --lr_decay_epochs from a comma-separated string into a
    list of ints before returning the namespace.
    """
    parser = argparse.ArgumentParser("argument for training")
    # dataset definition
    parser.add_argument("--dataset", type=str, default="filmtrust")
    parser.add_argument("--target-dataset", type=str, default="filmtrust")
    parser.add_argument("--target-item", type=int, default=5)
    parser.add_argument("--target-restart-prob", type=float, default=0.8)
    parser.add_argument("--target-rw-hops", type=int, default=64)
    #
    parser.add_argument("--is-load", type=int, default=0)
    parser.add_argument("--path_load_model", type=str, help="path to load model")
    # specify folder
    parser.add_argument("--model-path", type=str, default='saved', help="path to save model")
    parser.add_argument("--result-dir", type=str, default='results', help="path to save result")
    parser.add_argument("--model-type", type=str, default='attacker')
    parser.add_argument("--gpu", default=0, type=int, help="GPU id to use.")
    parser.add_argument("--seed", type=int, default=1234, help="random seed.")
    parser.add_argument("--save-freq", type=int, default=1, help="save frequency")
    # optimization
    parser.add_argument("--batch-size", type=int, default=32, help="batch_size")
    parser.add_argument("--epochs", type=int, default=200, help="number of training epochs")
    parser.add_argument("--optimizer", type=str, default='adam', choices=['sgd', 'adam', 'adagrad'], help="optimizer")
    parser.add_argument("--learning-rate", type=float, default=0.005, help="learning rate")
    parser.add_argument("--lr_decay_epochs", type=str, default="120,160,200", help="where to decay lr, can be a list")
    # random walk, get 2 data_augmentation(sub_graph).
    parser.add_argument("--restart-prob", type=float, default=0.8)
    parser.add_argument("--rw-hops", type=int, default=64)
    parser.add_argument("--positional-embedding-size", type=int, default=32)
    parser.add_argument("--num-workers", type=int, default=12, help="num of workers to use")
    parser.add_argument("--num-copies", type=int, default=6, help="num of dataset copies that fit in memory")
    parser.add_argument("--num-samples", type=int, default=2000, help="num of samples per batch per worker")
    # graph encoder model
    parser.add_argument("--num-layer", type=int, default=5, help="gnn layers")
    parser.add_argument("--hidden-size", type=int, default=64)
    parser.add_argument("--max-degree", type=int, default=512)
    parser.add_argument("--degree-embedding-size", type=int, default=16)
    parser.add_argument("--norm", action="store_true", default=True, help="apply 2-norm on output feats")
    parser.add_argument("--moco", type=int, default=1, help="using MoCo (otherwise Instance Discrimination)")
    # loss function
    parser.add_argument("--nce-k", type=int, default=256)
    parser.add_argument("--nce-t", type=float, default=0.07)
    parser.add_argument("--alpha", type=float, default=0.999, help="exponential moving average weight")
    parser.add_argument("--lambda_g", type=float, default=0.5)
    parser.add_argument("--lambda_s", type=float, default=0.5)
    parser.add_argument("--lambda_1", type=float, default=0.5)
    parser.add_argument("--lambda_2", type=float, default=0.5)
    parser.add_argument("--lambda_user", type=float, default=0.5)
    parser.add_argument("--lambda_item", type=float, default=0.5)
    args = parser.parse_args()
    args.lr_decay_epochs = [int(x) for x in args.lr_decay_epochs.split(",")]
    return args
def build_result_graph(path_load, path_save):
    """Plot the loss columns of a tab-separated training log and save to <path_save>/loss."""
    data = np.loadtxt(path_load, delimiter='\t')
    fig = plt.figure()
    plt.subplot(1, 1, 1)
    # Column 0 is the epoch index; columns 1 and 2 are the two loss curves.
    plt.plot(data[:, 0], data[:, 1], label='loss1')
    plt.plot(data[:, 0], data[:, 2], label='loss2')
    # plt.title('Training Loss')
    plt.legend()
    plt.xlabel(u'epoch')
    plt.ylabel(u'loss')
    plt.savefig(os.path.join(path_save, 'loss'))
def args_update(args):
    """Derive run-specific names, paths and dataset counts from the CLI args.

    Side effects: reads the target dataset's train/test files to count users
    and items, and creates the model-save and result-diagram directories.

    Args:
        args: argparse namespace from parse_args().
    Returns:
        The same namespace, mutated in place.
    """
    # e.g. "ml_sub" -> sub_target_dataset keeps the full name, target_dataset
    # keeps only the prefix before the first underscore.
    args.sub_target_dataset = args.target_dataset
    args.target_dataset = args.target_dataset.split('_')[0]
    path_train = './data/' + args.target_dataset + '/preprocess/train.data'
    path_test = './data/' + args.target_dataset + '/preprocess/test.data'
    sep = '\t'
    header = ['user_id', 'item_id', 'rating', 'timestamp']
    dataset_class = Data(path_train, path_test, test_bool=True, header=header, sep=sep, type='pretrain')
    # Only the original user/item counts are needed here; frames are discarded.
    _, _, args.ori_n_users, args.ori_n_items = dataset_class.load_file_as_dataFrame()
    args.model_name = "{}_to_{}_{}".format(args.dataset, args.sub_target_dataset, args.target_item)
    args.model_folder = os.path.join(args.model_path, args.model_type)
    args.model_save_dir = os.path.join(args.model_folder, args.model_name)
    # exist_ok avoids the check-then-create race of `if not exists: makedirs`.
    os.makedirs(args.model_save_dir, exist_ok=True)
    result_save_folder = os.path.join(args.result_dir, args.model_type, args.model_name)
    args.result_graph_save_path = os.path.join(result_save_folder, 'diagram')
    os.makedirs(args.result_graph_save_path, exist_ok=True)
    return args
def moment_update(model, model_ema, m):
    """model_ema = m * model_ema + (1 - m) * model (in-place EMA update).

    Args:
        model: online (query) encoder supplying the new weights.
        model_ema: momentum (key) encoder, updated in place.
        m: EMA coefficient in [0, 1]; m=0 copies model outright, m=1 keeps
           model_ema unchanged.
    """
    for p_online, p_ema in zip(model.parameters(), model_ema.parameters()):
        # `add_(tensor, alpha=...)` replaces the deprecated positional-scalar
        # form `add_(1 - m, tensor)`, which errors on modern PyTorch.
        p_ema.data.mul_(m).add_(p_online.detach().data, alpha=1 - m)
def train_moco(epoch, train_loader, target_train_loader, model_graph, model_graph_ema, model_seq, model_seq_ema, contrast, atk, criterion, optimizer, args):
    """one epoch training for moco

    Phase A: MoCo-style contrastive training of the graph and sequence
    encoders on the source-domain loader (loss1).  Phase B: one attacker
    optimization step (loss2) over embeddings of the entire target dataset.
    Returns (mean loss1 over batches, loss2).
    """
    model_graph.train()
    model_graph_ema.eval()
    model_seq.train()
    model_seq_ema.eval()

    def set_bn_train(m):
        # Keep BatchNorm layers of the EMA (key) encoders in train mode so
        # their running statistics keep updating, matching MoCo practice.
        classname = m.__class__.__name__
        if classname.find("BatchNorm") != -1:
            m.train()

    model_graph_ema.apply(set_bn_train)
    model_seq_ema.apply(set_bn_train)
    loss1_total = 0.0
    for idx, batch in enumerate(train_loader):
        graph_q, graph_k = batch
        graph_q.to(torch.device(args.gpu))
        graph_k.to(torch.device(args.gpu))
        # ===================Moco forward=====================
        graph_view_q = model_graph(graph_q)
        seq_view_q = model_seq(graph_q)
        with torch.no_grad():
            # Key views come from the momentum encoders; no gradients tracked.
            graph_view_k = model_graph_ema(graph_k)
            seq_view_k = model_seq_ema(graph_k)
        out_g, out_s = contrast(graph_view_q, graph_view_k, seq_view_q, seq_view_k)
        # Weighted sum of the graph-view and sequence-view contrastive losses.
        loss1 = args.lambda_g * criterion(out_g) + args.lambda_s * criterion(out_s)
        # NOTE(review): accumulates the loss tensor rather than .item(); the
        # caller relies on the returned value being a tensor (.item() later).
        loss1_total += loss1
        # ===================backward=====================
        optimizer.zero_grad()
        loss1.backward()
        optimizer.step()
        if args.moco:
            # Momentum-update the key encoders after every optimizer step.
            moment_update(model_graph, model_graph_ema, args.alpha)
            moment_update(model_seq, model_seq_ema, args.alpha)
        print("[train-in-A] [{}]\t loss1: {:.2f}".format(epoch, loss1))
    loss1_total /= len(train_loader)

    # Phase B: embed the full target dataset and run one attack-loss step on
    # the resulting user/item embeddings.
    emb_list = []
    for idx, batch in enumerate(target_train_loader):
        subgraph, _ = batch
        subgraph.to(torch.device(args.gpu))
        # Combine both encoders' views into a single embedding per node.
        feat = args.lambda_1 * model_graph(subgraph) + args.lambda_2 * model_seq(subgraph)
        emb_list.append(feat)
    emb = torch.cat(emb_list)
    # First ori_n_users rows are user embeddings; the remainder are items.
    user_emb, item_emb = emb[0:args.ori_n_users], emb[args.ori_n_users:]
    loss2 = atk.run(item_emb, user_emb)
    optimizer.zero_grad()
    loss2.backward()
    optimizer.step()
    if args.moco:
        moment_update(model_graph, model_graph_ema, args.alpha)
        moment_update(model_seq, model_seq_ema, args.alpha)
    print("[train-in-B] [{}]\t loss2: {:.2f}".format(epoch, loss2))
    print("[pre-train][{}]\t loss1: {:.2f}\t loss2: {:.2f}".format(epoch, loss1_total, loss2))
    return loss1_total, loss2
def main(args):
    """Pre-training driver: seed RNGs, build loaders and encoder pairs, run
    MoCo pre-training with attack steps, and checkpoint models periodically."""
    # Seed every RNG in play for reproducibility.
    random.seed(args.seed)
    dgl.random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    if args.is_load:
        if os.path.isfile(args.path_load_model):
            print("[pre-train]=> loading checkpoint '{}'".format(args.path_load_model))
            checkpoint = torch.load(args.path_load_model)
            # Resume with the checkpoint's saved args, but override run-specific
            # fields with the values from the current command line.
            pretrain_args = checkpoint["opt"]
            pretrain_args.gpu = args.gpu
            pretrain_args.dataset = args.dataset
            pretrain_args.target_dataset = args.target_dataset
            pretrain_args.target_item = args.target_item
            pretrain_args.model_type = args.model_type
            pretrain_args.epochs = args.epochs
            pretrain_args.batch_size = args.batch_size
            pretrain_args.rw_hops = args.rw_hops
            pretrain_args.restart_prob = args.restart_prob
            pretrain_args.target_rw_hops = args.target_rw_hops
            pretrain_args.target_restart_prob = args.target_restart_prob
            pretrain_args.nce_k = args.nce_k
            pretrain_args.is_load = args.is_load
            pretrain_args.path_load_model = args.path_load_model
            args = pretrain_args
        else:
            # NOTE(review): args.resume is referenced here but the visible
            # parse_args only defines path_load_model -- confirm it exists.
            print("=> no checkpoint found at '{}'".format(args.resume))
    args = args_update(args)
    print('[PRETRIAN ARGS] ', args)
    # Source-domain training dataset: either the generic DGL graph collection
    # or a recommendation dataset sampled via random walks with restart.
    if args.dataset == "dgl":
        train_dataset = LoadBalanceGraphDataset(
            rw_hops=256,
            restart_prob=args.restart_prob,
            positional_embedding_size=args.positional_embedding_size,
            num_workers=args.num_workers,
            num_copies=args.num_copies,
            num_samples=args.num_samples,
            dgl_graphs_file="data/dgl_graph.bin",
        )
    else:
        train_dataset = RecDataset(
            rw_hops=args.rw_hops,
            dataset=args.dataset,
            restart_prob=args.restart_prob,
            positional_embedding_size=args.positional_embedding_size,
        )
    train_loader = torch.utils.data.DataLoader(
        dataset=train_dataset,
        batch_size=args.batch_size,
        collate_fn=batcher(),
        shuffle=True,
        num_workers=args.num_workers,
        worker_init_fn=None if args.dataset != "dgl" else worker_init_fn,
    )
    # Target dataset is loaded as ONE batch (batch_size=len(dataset)) so
    # train_moco can embed every node in a single pass.
    target_train_dataset = RecDataset(
        rw_hops=args.target_rw_hops,
        dataset=args.target_dataset,
        restart_prob=args.target_restart_prob,
        positional_embedding_size=args.positional_embedding_size,
    )
    target_train_loader = torch.utils.data.DataLoader(
        dataset=target_train_dataset,
        batch_size=len(target_train_dataset),
        collate_fn=batcher(),
        shuffle=False,
        num_workers=args.num_workers,
    )
    # create model and optimizer
    # Each encoder comes in an (online, EMA) pair for MoCo.
    model_graph, model_graph_ema = [
        GraphEncoder(
            positional_embedding_size=args.positional_embedding_size,
            max_degree=args.max_degree,
            degree_embedding_size=args.degree_embedding_size,
            output_dim=args.hidden_size,
            node_hidden_dim=args.hidden_size,
            num_layers=args.num_layer,
            norm=args.norm,
            gnn_model='gin',
            degree_input=True,
        ).cuda(args.gpu)
        for _ in range(2)
    ]
    model_seq, model_seq_ema = [
        SeqEncoder(
            positional_embedding_size=args.positional_embedding_size,
            max_degree=args.max_degree,
            degree_embedding_size=args.degree_embedding_size,
            hidden_size=args.hidden_size,
            num_layers=2,
            degree_input=True,
        ).cuda(args.gpu)
        for _ in range(2)
    ]
    if args.moco:
        # m=0 initializes the EMA encoders as exact copies of the online ones.
        moment_update(model_graph, model_graph_ema, 0)
        moment_update(model_seq, model_seq_ema, 0)
    contrast = MemoryMoCo(args.hidden_size, args.nce_k, args.nce_t, use_softmax=True).cuda(args.gpu)
    atk = Attacker(args.sub_target_dataset, args.target_item, args.gpu, args.hidden_size,
                   lambda_item=args.lambda_item, lambda_user=args.lambda_user)
    criterion = NCESoftmaxLoss() if args.moco else NCESoftmaxLossNS()
    criterion = criterion.cuda(args.gpu)
    # One optimizer over both encoders' parameters.
    if args.optimizer == "sgd":
        optimizer = torch.optim.SGD(
            itertools.chain(model_graph.parameters(), model_seq.parameters()),
            lr=args.learning_rate,
            momentum=0.9,
            weight_decay=1e-5,
        )
    elif args.optimizer == "adam":
        optimizer = torch.optim.Adam(
            itertools.chain(model_graph.parameters(), model_seq.parameters()),
            lr=args.learning_rate,
            betas=(0.9, 0.999),
            weight_decay=1e-5,
        )
    elif args.optimizer == "adagrad":
        optimizer = torch.optim.Adagrad(
            itertools.chain(model_graph.parameters(), model_seq.parameters()),
            lr=args.learning_rate,
            lr_decay=0.0,
            weight_decay=1e-5,
        )
    else:
        raise NotImplementedError
    if args.is_load:
        # Restore weights only after models exist on the right device.
        model_graph.load_state_dict(checkpoint["model_graph"])
        model_seq.load_state_dict(checkpoint["model_seq"])
        contrast.load_state_dict(checkpoint["contrast"])
        if args.moco:
            model_graph_ema.load_state_dict(checkpoint["model_graph_ema"])
            model_seq_ema.load_state_dict(checkpoint["model_seq_ema"])
        del checkpoint
        torch.cuda.empty_cache()
    loss_save_path = os.path.join(args.result_graph_save_path, 'model_loss.txt')
    f = open(loss_save_path, mode='w')
    print("==> pre-training...")
    for epoch in range(1, args.epochs + 1):
        adjust_learning_rate(epoch, args, optimizer)
        time1 = time.time()
        loss_list = train_moco(
            epoch,
            train_loader,
            target_train_loader,
            model_graph,
            model_graph_ema,
            model_seq,
            model_seq_ema,
            contrast,
            atk,
            criterion,
            optimizer,
            args,
        )
        # Log "epoch\tloss1\tloss2" so build_result_graph can plot it later.
        line = '\t'.join(
            [str(epoch), str(loss_list[0].item()), str(loss_list[1].item())]) + '\n'
        f.write(line)
        time2 = time.time()
        print("pre-train epoch {}, total time {:.2f}".format(epoch, time2 - time1))
        # save model
        if epoch % args.save_freq == 0:
            print("==> model Saving(epoch=%d)..." % epoch)
            state = {
                "opt": args,
                "model_graph": model_graph.state_dict(),
                "model_seq": model_seq.state_dict(),
                "contrast": contrast.state_dict(),
                "optimizer": optimizer.state_dict(),
                "epoch": epoch,
            }
            if args.moco:
                state["model_graph_ema"] = model_graph_ema.state_dict()
                state["model_seq_ema"] = model_seq_ema.state_dict()
            save_file = os.path.join(args.model_save_dir, "ckpt_epoch_{epoch}.pth".format(epoch=epoch))
            torch.save(state, save_file)
            # help release GPU memory
            del state
            torch.cuda.empty_cache()
    # saving the final model
    print("==> model saving(final model)...")
    state = {
        "opt": args,
        "model_graph": model_graph.state_dict(),
        "model_seq": model_seq.state_dict(),
        "contrast": contrast.state_dict(),
        "optimizer": optimizer.state_dict(),
        "epoch": args.epochs+1,
    }
    if args.moco:
        state["model_graph_ema"] = model_graph_ema.state_dict()
        state["model_seq_ema"] = model_seq_ema.state_dict()
    save_file = os.path.join(args.model_save_dir, "model.pth")
    torch.save(state, save_file)
    f.close()
    build_result_graph(loss_save_path, args.result_graph_save_path)
if __name__ == "__main__":
    # Show each distinct UserWarning only once during the run.
    warnings.simplefilter("once", UserWarning)
    args = parse_args()
    main(args)
| KDEGroup/PC-Attack | train.py | train.py | py | 16,692 | python | en | code | 2 | github-code | 13 |
#!/usr/bin/python3
"""Advent of Code 2022, day 3 part 2.

For each group of three rucksack lines, find the single item type ("badge")
common to all three and sum its priority (a-z -> 1..26, A-Z -> 27..52).
"""

LOWERCASE = "abcdefghijklmnopqrstuvwxyz"
UPPERCASE = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"


def priority_of(char):
    """Return the priority of an item type: a-z -> 1..26, A-Z -> 27..52."""
    if char in LOWERCASE:
        return LOWERCASE.index(char) + 1
    return UPPERCASE.index(char) + 27


def badge_of(group):
    """Return the single item type common to all rucksacks in *group*.

    Set intersection replaces the original triple-nested character scans.
    """
    common = set(group[0])
    for sack in group[1:]:
        common &= set(sack)
    # Puzzle input guarantees exactly one common item per group of three.
    return common.pop()


def main():
    file_name = "input.txt"
    lines = [line.strip() for line in open(file_name)]
    total = 0
    # Process rucksacks in groups of three.
    for i in range(0, len(lines), 3):
        badge = badge_of(lines[i:i + 3])
        value = priority_of(badge)
        print("Char: " + badge + " Value: " + str(value))
        total += value
    print(total)


if __name__ == "__main__":
    main()
| thepcn3rd/AdventofCode2022 | d3part2.py | d3part2.py | py | 1,821 | python | en | code | 0 | github-code | 13 |
20215277503 | from selenium import webdriver
from selenium.webdriver.common.by import By
from Pages.BasePage import BasePage
class Checkboxes(BasePage):
    """Page object for a checkboxes page: read and toggle the two checkbox
    inputs using the helpers inherited from BasePage."""

    def __init__(self,driver):
        super().__init__(driver)

    # locators for the page
    # locator_checkbox_div=(By.XPATH,"//div[@class='example']")
    locator_checkbox=(By.XPATH,"//input[@type='checkbox']")  # matches every checkbox
    locator_checkbox_one=(By.XPATH,"(//input[@type='checkbox'])[1]")  # first checkbox
    locator_checkbox_two=(By.XPATH,"(//input[@type='checkbox'])[2]")  # second checkbox

    # actions methods
    def get_status_of_checkboxes(self):
        """Return the checked-state list of all checkboxes before any click."""
        # initial status before click
        before_click_status_list=self.status_of_check_box(self.locator_checkbox)
        return before_click_status_list

    def do_click_on_checkbox(self):
        """Click both checkboxes, then return their checked-state list."""
        # all_checkboxes=self.get_all_checkboxes(self.locator_checkbox)
        self.perform_click(self.locator_checkbox_one)
        self.perform_click(self.locator_checkbox_two)
        after_click_status_list=self.status_of_check_box(self.locator_checkbox)
        return after_click_status_list
| kakamband/HerokuPracticeSelenium | Pages/Checkboxes.py | Checkboxes.py | py | 1,086 | python | en | code | 1 | github-code | 13 |
22497462831 | import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import py
from jinja2 import Environment
from jinja2.loaders import BaseLoader
from jinja2.exceptions import TemplateNotFound
try:
    # This code adds support for coverage.py (see
    # http://nedbatchelder.com/code/modules/coverage.html).
    # It prints a coverage report for the modules specified in all
    # module globals (of the test modules) named "coverage_modules".
    # NOTE: Python 2 era code (iteritems below); kept as-is.
    import coverage, atexit

    # Modules excluded from the coverage report.
    IGNORED_MODULES = ['jinja2._speedups', 'jinja2.defaults',
                       'jinja2.translators']

    def report_coverage():
        # Stop recording and report over every loaded jinja2 submodule.
        coverage.stop()
        module_list = [
            mod for name, mod in sys.modules.copy().iteritems() if
            getattr(mod, '__file__', None) and
            name.startswith('jinja2.') and
            name not in IGNORED_MODULES
        ]
        module_list.sort()
        coverage.report(module_list)

    def callback(option, opt_str, value, parser):
        # Coverage (and the atexit report) is started only when the
        # -C/--coverage option is actually passed on the command line.
        atexit.register(report_coverage)
        coverage.erase()
        coverage.start()

    py.test.config.addoptions('Test options', py.test.config.Option('-C',
        '--coverage', action='callback', callback=callback,
        help='Output information about code coverage (slow!)'))
except ImportError:
    # coverage.py not installed -- the option is silently unavailable.
    coverage = None
class GlobalLoader(BaseLoader):
    """Template loader backed by a module's global namespace.

    A template named ``foo`` resolves to the global ``FOOTEMPLATE`` in
    ``scope``; the test runner re-points ``scope`` at each test module's
    globals before executing it.
    """

    scope = globals()

    def get_source(self, environment, name):
        key = name.upper() + 'TEMPLATE'
        if key not in self.scope:
            raise TemplateNotFound(name)
        return self.scope[key], None, None
# Shared loader/environment used by every test module collected below.
loader = GlobalLoader()
simple_env = Environment(trim_blocks=True, loader=loader, cache_size=0)
class Directory(py.test.collect.Directory):
    """Directory collector that injects a synthetic 'doctests' item into the
    'tests' directory."""

    def run(self):
        rv = super(Directory, self).run()
        if self.fspath.basename == 'tests':
            rv.append('doctests')
        return rv

    def join(self, name):
        # Route the synthetic name to the doctest module collector.
        if name == 'doctests':
            return JinjaDocTestModule(name, parent=self)
        return super(Directory, self).join(name)
class Module(py.test.collect.Module):
    """Test-module collector that attaches the shared Jinja environment and
    turns test_* globals into Jinja-aware test items."""

    def __init__(self, *args, **kwargs):
        self.env = simple_env
        super(Module, self).__init__(*args, **kwargs)

    def makeitem(self, name, obj, usefilters=True):
        # Python 2 era checks: func_code marks a function, basestring marks a
        # doctest written as a string global.  Other objects are ignored.
        if name.startswith('test_'):
            if hasattr(obj, 'func_code'):
                return JinjaTestFunction(name, parent=self)
            elif isinstance(obj, basestring):
                return JinjaDocTest(name, parent=self)
class JinjaTestFunction(py.test.collect.Function):
    """Function item that passes the module's Jinja environment to any test
    declaring an 'env' argument."""

    def execute(self, target, *args):
        # Point the global loader at the test module's globals so templates
        # defined there (e.g. FOOTEMPLATE) resolve by name during the test.
        loader.scope = target.func_globals
        co = target.func_code
        if 'env' in co.co_varnames[:co.co_argcount]:
            target(self.parent.env, *args)
        else:
            target(*args)
class JinjaDocTest(py.test.collect.Item):
    """Doctest item: runs either a real jinja2 submodule's doctests
    (realmod=True) or a synthetic module built from a string global."""

    def __init__(self, *args, **kwargs):
        realmod = kwargs.pop('realmod', False)
        super(JinjaDocTest, self).__init__(*args, **kwargs)
        self.realmod = realmod

    def run(self):
        if self.realmod:
            # Import the actual submodule and doctest its docstrings.
            mod = __import__(self.name, None, None, [''])
        else:
            # Build a throwaway module whose docstring is the test string;
            # expose the environment and the real module under MODULE.
            mod = py.std.types.ModuleType(self.name)
            mod.__doc__ = self.obj
            mod.env = self.parent.env
            mod.MODULE = self.parent.obj
        self.execute(mod)

    def execute(self, mod):
        failed, tot = py.compat.doctest.testmod(mod, verbose=True)
        if failed:
            py.test.fail('doctest %s: %s failed out of %s' % (
                self.fspath, failed, tot))
class JinjaDocTestModule(py.test.collect.Module):
    """Collector behind the synthetic 'doctests' item: yields one JinjaDocTest
    per listed jinja2 submodule."""

    def __init__(self, *args, **kwargs):
        super(JinjaDocTestModule, self).__init__(*args, **kwargs)
        # Submodules whose docstrings are executed as doctests.
        self.doctest_modules = [
            'jinja2.environment', 'jinja2.compiler', 'jinja2.parser',
            'jinja2.lexer', 'jinja2.ext', 'jinja2.sandbox',
            'jinja2.filters', 'jinja2.tests', 'jinja2.utils',
            'jinja2.runtime'
        ]

    def run(self):
        return self.doctest_modules

    def join(self, name):
        return JinjaDocTest(name, parent=self, realmod=True)
| minixalpha/SourceLearning | jinja2/jinja2-2.0/tests/conftest.py | conftest.py | py | 4,141 | python | en | code | 107 | github-code | 13 |
18361326709 | import pygame
import pygame_gui
import math
from pygame import Vector2
import pygameoflife.renderer
from pygameoflife.renderer import MenuBar, Renderer, Camera
from pygameoflife.game import Game
# Minimum window size in pixels, and the event-loop tick rate (frames/s).
MIN_SIZE = (800, 600)
FRAMERATE = 60
class App:
    """Top-level Game-of-Life application: owns the pygame window, game state,
    camera/renderer pair, menubar, and runs the main event loop."""

    def __init__(self):
        pygame.init()
        print("Init pygame")
        pygame.font.init()
        print("Init pygame font")
        pygame.display.set_caption('PyGameOfLife')
        self.win_surf = pygame.display.set_mode(MIN_SIZE, pygame.RESIZABLE)
        self.is_running = True
        self.game_paused = True
        self.game = Game()
        # testing only -- seed a starting pattern so the board is not empty
        self.game.activate_cell((5,-5))
        self.game.activate_cell((6,-5))
        self.game.activate_cell((6,-6))
        self.game.activate_cell((6,-4))
        self.game.activate_cell((7,-4))
        self.renderer = Renderer(self.win_surf)
        self.camera = Camera(Vector2(-9,5), 25)
        print("Made everything else")
        self.menubar = MenuBar(self)
        print("Made menubar")
        # Initial full draw: menubar state, grid, and live cells.
        self.menubar.update(self.game)
        self.renderer.render_grid(self.camera)
        self.renderer.render_cells(self.camera, self.game)
        self.renderer.render_menubar(self.menubar)
        pygame.display.update()
        # Interaction state shared by the event handlers below.
        self.dragging = False
        self.shift_pressed = False
        self.btn_active = None
        self.prev_mouse_loc = None

    def get_cell_at(self, pos: Vector2):
        """Map a window coordinate to the (x, y) grid cell under it."""
        x = pos.x/self.camera.get_scale()
        y = (pos.y-pygameoflife.renderer.HDR_HEIGHT)/self.camera.get_scale()
        # Screen y grows downwards; world y is subtracted, so world y grows up.
        cx = math.floor(self.camera.pos.x+x)
        cy = math.floor(self.camera.pos.y-y)
        return (cx, cy)

    def toggle_cell_at(self, pos: Vector2):
        """Flip the alive/dead state of the cell under window position *pos*."""
        self.game.toggle_cell(self.get_cell_at(pos))

    def activate_cell_at(self, pos: Vector2):
        """Make the cell under *pos* alive (no-op if already alive)."""
        cell = self.get_cell_at(pos)
        if not self.game.is_alive(cell):
            self.game.activate_cell(cell)

    def deactivate_cell_at(self, pos: Vector2):
        """Make the cell under *pos* dead (no-op if already dead)."""
        cell = self.get_cell_at(pos)
        if self.game.is_alive(cell):
            self.game.deactivate_cell(cell)

    def play_pause(self):
        """Toggle simulation pause (menubar button callback)."""
        self.game_paused = not self.game_paused

    def reset(self):
        """Clear the board and redraw everything (menubar button callback)."""
        self.game.reset()
        self.menubar.update(self.game)
        self.renderer.render_menubar(self.menubar)
        self.renderer.render_grid(self.camera)
        self.renderer.render_cells(self.camera, self.game)

    def inc_freq(self):
        """Double the simulation frequency (doubling stops once freq >= 32)."""
        if self.game.freq < 32:
            self.game.freq = self.game.freq * 2
        self.menubar.update(self.game)
        self.renderer.render_menubar(self.menubar)

    def dec_freq(self):
        """Halve the simulation frequency, never going below 1 step/s."""
        if self.game.freq > 1:
            self.game.freq = int(self.game.freq / 2)
        self.menubar.update(self.game)
        self.renderer.render_menubar(self.menubar)

    def handle_mouse_down_event(self, evt):
        """Left press: remember the position; fire the hovered menubar button."""
        if evt.button == pygame.BUTTON_LEFT:
            self.prev_mouse_loc = Vector2(evt.pos)
            self.drag_start_loc = Vector2(evt.pos)
            if self.btn_active:
                self.btn_active.color = pygameoflife.renderer.BTN_CLICK_COLOR
                self.renderer.render_menubar(self.menubar)
                self.btn_active.onclick()

    def handle_mouse_up_event(self, evt):
        """Left release: a short press over the grid area toggles one cell."""
        if evt.button == pygame.BUTTON_LEFT:
            if not self.dragging and evt.pos[1] > pygameoflife.renderer.HDR_HEIGHT:
                # just a click
                self.toggle_cell_at(self.prev_mouse_loc)
                self.menubar.update(self.game)
                self.renderer.render_menubar(self.menubar)
                self.renderer.render_grid(self.camera)
                self.renderer.render_cells(self.camera, self.game)
            elif self.btn_active:
                self.btn_active.color = pygameoflife.renderer.ACTIVE_BTN_COLOR
                self.renderer.render_menubar(self.menubar)
            self.dragging = False

    def handle_mouse_motion_event(self, evt):
        """Mouse move: shift-drag paints cells, ctrl-drag erases, plain drag
        pans the camera; without buttons, updates menubar hover state."""
        curr_pos = Vector2(evt.pos)
        if evt.buttons[0]:
            # Count as a drag only after moving a few pixels past the press,
            # so a slightly wobbly click still toggles a cell on release.
            if (self.drag_start_loc - curr_pos).length() > 5:
                self.dragging = True
            if pygame.key.get_pressed()[pygame.K_LSHIFT] or pygame.key.get_pressed()[pygame.K_RSHIFT]:
                self.activate_cell_at(curr_pos)
            elif pygame.key.get_pressed()[pygame.K_LCTRL] or pygame.key.get_pressed()[pygame.K_RCTRL]:
                self.deactivate_cell_at(curr_pos)
            elif self.dragging:
                delta = Vector2(curr_pos - self.prev_mouse_loc)
                # x is negated before panning; y is not (the world y axis is
                # already flipped relative to the screen in get_cell_at).
                delta.x *= -1
                self.camera.pos += delta/self.camera.get_scale()
                self.menubar.update(self.game)
                self.renderer.render_menubar(self.menubar)
                self.renderer.render_grid(self.camera)
                self.renderer.render_cells(self.camera, self.game)
            self.prev_mouse_loc = curr_pos
        elif curr_pos.y < pygameoflife.renderer.HDR_HEIGHT and not self.btn_active:
            # Hover entered the menubar: highlight the button under the cursor.
            for button in self.menubar.buttons:
                if button.has_coord(curr_pos):
                    button.color = pygameoflife.renderer.ACTIVE_BTN_COLOR
                    self.renderer.render_menubar(self.menubar)
                    self.btn_active = button
                    break
        elif self.btn_active and not self.btn_active.has_coord(curr_pos):
            # Hover left the highlighted button: restore its idle colour.
            self.btn_active.color = pygameoflife.renderer.BUTTON_COLOR
            self.renderer.render_menubar(self.menubar)
            self.btn_active = None

    def handle_mouse_wheel_event(self, evt):
        """Zoom the camera by the wheel delta and redraw the board."""
        self.camera.add_to_scale(evt.y)
        self.renderer.render_grid(self.camera)
        self.renderer.render_cells(self.camera, self.game)

    def handle_quit_event(self, evt):
        """Window close: stop the main loop."""
        self.is_running = False

    def handle_video_resize_event(self, evt):
        """Clamp the new window size to MIN_SIZE and rebuild the display."""
        # video resize events are only fired after resizing is complete,
        # so the picture on screen is linearly scaled while the frame is
        # being resized. This is a known bug, and for now I'll stick
        # with it because there's no way of changing it in SDL atleast.
        # bug @ https://github.com/libsdl-org/SDL/issues/1059
        w, h = evt.size
        if w < MIN_SIZE[0]:
            w = MIN_SIZE[0]
        if h < MIN_SIZE[1]:
            h = MIN_SIZE[1]
        self.win_surf = pygame.display.set_mode((w,h), pygame.RESIZABLE)
        self.renderer.surface = self.win_surf
        self.renderer.render_grid(self.camera)
        self.renderer.render_menubar(self.menubar)
        self.renderer.render_cells(self.camera, self.game)

    def run(self):
        """Main loop: dispatch events, step the simulation at game.freq
        steps per second, and flip the display when the surface changed."""
        evt_dict = {
            pygame.MOUSEBUTTONDOWN: self.handle_mouse_down_event,
            pygame.MOUSEBUTTONUP: self.handle_mouse_up_event,
            pygame.MOUSEMOTION: self.handle_mouse_motion_event,
            pygame.MOUSEWHEEL: self.handle_mouse_wheel_event,
            pygame.QUIT: self.handle_quit_event,
            pygame.VIDEORESIZE: self.handle_video_resize_event,
        }
        clock = pygame.time.Clock()
        nticks = 0
        while self.is_running:
            time_elapsed = clock.tick(FRAMERATE)
            nticks += 1
            evts = [e for e in pygame.event.get() if e.type in evt_dict]
            for evt in evts:
                evt_dict[evt.type](evt)  # run method corresponding to event
            # NOTE(review): if game.freq ever exceeds FRAMERATE (inc_freq can
            # reach 64 > 60) int(FRAMERATE/freq) is 0 and this modulo raises.
            nticks %= int(FRAMERATE/self.game.freq)
            if nticks == 0 and not self.game_paused:
                self.game.update()
                self.menubar.update(self.game)
                self.renderer.render_menubar(self.menubar)
                self.renderer.render_grid(self.camera)
                self.renderer.render_cells(self.camera, self.game)
            if self.renderer.surface_changed:
                self.renderer.surface_changed = False
                pygame.display.update()
| Aniruddha-Deb/PyGameOfLife | pygameoflife/app.py | app.py | py | 6,606 | python | en | code | 0 | github-code | 13 |
28014034129 | # coding: utf-8
import numpy as np
def softmax(x):
    """Softmax along the last axis, numerically stabilised.

    Uses softmax(x) = softmax(x - max(x)) so large inputs do not overflow.
    Unlike the previous version -- where `np.transpose` returned a view and
    the in-place `-=` silently mutated the caller's array -- the input is
    left untouched.

    Args:
        x: array-like of logits, 1-D or 2-D (samples x classes).
    Returns:
        ndarray of the same shape whose last axis sums to 1.
    """
    z = np.asarray(x, dtype=float)
    # Subtraction creates a new array; the caller's data is never modified.
    z = z - np.max(z, axis=-1, keepdims=True)
    e = np.exp(z)
    return e / np.sum(e, axis=-1, keepdims=True)
def sigmoid(z):
    """Logistic function: map z (scalar or ndarray) into (0, 1)."""
    exp_neg = np.exp(-z)
    return 1.0 / (1.0 + exp_neg)
import scipy as sp
def logloss(real, pred):
    """Mean log-likelihood of binary labels under predicted probabilities.

    Note: this returns the (negative-valued) mean log-likelihood, i.e. it
    keeps the original convention of NOT negating into a loss.

    The previous implementation used `sp.maximum` / `sp.log` etc.; those
    NumPy aliases were deprecated and removed from the `scipy` namespace,
    so plain NumPy is used instead.

    Args:
        real: array-like of 0/1 labels.
        pred: array-like of probabilities in [0, 1].
    Returns:
        Mean of y*log(p) + (1-y)*log(1-p), with p clipped to
        [1e-15, 1 - 1e-15] to avoid log(0).
    """
    epsilon = 1e-15
    y = np.asarray(real, dtype=float)
    p = np.clip(np.asarray(pred, dtype=float), epsilon, 1 - epsilon)
    return np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))
def auc(preds, labels):
    """Area under the ROC curve via the tied-rank (Mann-Whitney) formula.

    Args:
        preds: list of scores, one per example.
        labels: list of 0/1 labels, same length as preds.
    Returns:
        AUC as a float.
    """
    n = len(preds)
    # Indices ordered by ascending score.
    order = sorted(range(n), key=lambda i: preds[i])
    # Assign 1-based ranks; equal scores share the average of their ranks.
    ranks = [0.0] * n
    i = 0
    while i < n:
        j = i
        while j + 1 < n and preds[order[j + 1]] == preds[order[i]]:
            j += 1
        avg_rank = (i + j + 2) / 2.0  # average of 1-based ranks i+1 .. j+1
        for k in range(i, j + 1):
            ranks[order[k]] = avg_rank
        i = j + 1
    num_pos = sum(1 for lab in labels if lab == 1)
    num_neg = len(labels) - num_pos
    sum_pos = sum(ranks[i] for i in range(n) if labels[i] == 1)
    return (sum_pos - num_pos * (num_pos + 1) / 2.0) / (num_neg * num_pos)
if __name__ == "__main__":
    # Smoke test: with these inputs the expected AUC is 0.75.
    preds = [0.4, 0.6, 0.1, 0.8]
    labels = [1, 0, 0, 1]
    print(logloss(labels, preds))
    print(auc(preds, labels))
| ShangruZhong/Toy-Machine-Learning | formulas.py | formulas.py | py | 1,616 | python | en | code | 0 | github-code | 13 |
29206911756 | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
import albumentations
import torch
from torch.utils.tensorboard import SummaryWriter
from .cloud_dataset import CloudDataset
from .constants import DATA_DIR, MODEL_DIR, VALIDATION_SPLIT, IMAGE_SIZE_SMALL, IDX2LABELS
from .helpers import BCEDiceLoss, DiceCoefficient, DiceLoss
class Train:
    def __init__(self, model, preprocessing, shuffle: bool=True,
                 print_every=1000, log_every=100, batch_size = 1,\
                 use_tensorboard: bool = True, resume: bool=False,
                 device=torch.device('cpu')):
        """
        Train the model with the specified train and validation loaders.
        Args:
            model: The model to be used for training
            preprocessing: The preprocessing steps to be used as per the encoder and the weights.
            shuffle: Whether to shuffle the train and val dataloaders. Default=True
            print_every: Number of batches to process before printing the stats Default=1000
            log_every: Number of batches after which to log stats into Tensorboard
            batch_size: The number of data points processed before weights are updated.
            use_tensorboard: Use Tensorboard to monitor the training. Default=True
            resume: Resume training from the latest loaded model? Default=False
            device: The PyTorch device on which to train. Default=CPU
        """
        # NOTE(review): log_every is accepted but never stored or used here.
        self.model = model.to(device)
        self.shuffle = shuffle
        self.print_every = print_every
        self.use_tensorboard = use_tensorboard
        self.resume = resume
        self.device = device
        self.batch_size = batch_size
        # Keep a track of all the losses and accuracies
        self.train_loss = list()
        self.class_loss = list()
        self.mask_loss = list()
        self.val_class_loss = list()
        self.val_mask_loss = list()
        self.val_class_acc = list()
        self.val_mask_acc = list()
        # Create the dataloaders
        # Augmentations applied to training images/masks.
        transforms = albumentations.Compose([
            # albumentations.VerticalFlip(p=0.2),
            albumentations.ElasticTransform(p=0.2),
            albumentations.GridDistortion(p=0.2),
            albumentations.HorizontalFlip(p=0.2),
            albumentations.ShiftScaleRotate(p=0.2),
            # albumentations.Normalize(p=1.0)
        ])
        # Read the dataframe and instantiate the dataset
        df_train = pd.read_csv(os.path.join(DATA_DIR, 'train.csv'))
        # NOTE(review): get_preprocessing is not among the visible imports --
        # presumably provided elsewhere in this module; confirm.
        cloud_dataset = CloudDataset(df_train, transforms, get_preprocessing(preprocessing),
                                     output_img_shape=IMAGE_SIZE_SMALL)
        # Creating indices for train and validation set
        dataset_size = len(cloud_dataset)
        indices = list(range(dataset_size))
        split = int(np.floor(VALIDATION_SPLIT * dataset_size))
        if self.shuffle:
            # Fixed seed so the train/val split is reproducible across runs.
            np.random.seed(42)
            np.random.shuffle(indices)
        train_indices, val_indices = indices[split:], indices[:split]
        train_sampler = torch.utils.data.sampler.SubsetRandomSampler(train_indices)
        val_sampler = torch.utils.data.sampler.SubsetRandomSampler(val_indices)
        self.train_loader = torch.utils.data.DataLoader(cloud_dataset, batch_size=batch_size, sampler=train_sampler)
        self.val_loader = torch.utils.data.DataLoader(cloud_dataset, batch_size=batch_size, sampler=val_sampler)
        # Create the criterions and the optimziers
        self.criterion_mask = BCEDiceLoss()
        self.criterion_class = torch.nn.CrossEntropyLoss()
        # self.optimizer = torch.optim.Adam(model.parameters(), lr=1e-2)
        # NOTE(review): RAdam is not among the visible imports -- confirm.
        self.optimizer = RAdam(self.model.parameters(), lr=1e-2)
        # Reduce LR on Plateau
        self.scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(self.optimizer,
                                                                    mode='min',
                                                                    factor=1e-1,
                                                                    verbose=True,
                                                                    patience=0,
                                                                    )
        # self.scheduler = torch.optim.lr_scheduler.StepLR(self.optimizer, step_size=1)
        self.dice_coefficient = DiceCoefficient()  # Used for validation
        #TODO work on tensorboard section
        if self.use_tensorboard:
            # Create a tensorboard SummaryWriter()
            self.tensorboard_writer = SummaryWriter()
            # Log the model graph once, using a sample batch of images.
            myiter = iter(self.train_loader)
            images, _, _ = next(myiter)
            self.tensorboard_writer.add_graph(self.model, images.to(self.device), verbose=True)
            del myiter, images
        if self.resume:
            self.read_saved_state()
    def save_model(self):
        """
        Save the model and stats. For resuming later.

        Bundles model/optimizer/scheduler state plus every metric history so
        read_saved_state() can restore training exactly where it stopped.
        """
        checkpoint = {
            'model': self.model.state_dict(),
            'optimizer': self.optimizer.state_dict(),
            'scheduler': self.scheduler.state_dict(),
            'train_loss': self.train_loss,
            'class_loss': self.class_loss,
            'mask_loss': self.mask_loss,
            'val_class_loss': self.val_class_loss,
            'val_class_acc': self.val_class_acc,
            'val_mask_loss': self.val_mask_loss,
            'val_mask_acc': self.val_mask_acc
        }
        # Fixed checkpoint path: each save overwrites the previous one.
        torch.save(checkpoint, os.path.join(os.getcwd(), MODEL_DIR, 'efficientnet-b2_unet.pth'))
    def read_saved_state(self):
        """
        Read the model's state and variables into the class variables.

        Counterpart of save_model(): restores weights, optimizer/scheduler
        state and the metric histories, then replays the metrics into
        TensorBoard when enabled.
        """
        print('Reading the saved state...')
        checkpoint = torch.load(os.path.join(os.getcwd(), MODEL_DIR, 'efficientnet-b2_unet.pth'))
        self.model.load_state_dict(checkpoint['model'])
        self.optimizer.load_state_dict(checkpoint['optimizer'])
        self.scheduler.load_state_dict(checkpoint['scheduler'])
        self.train_loss = checkpoint['train_loss']
        self.class_loss = checkpoint['class_loss']
        self.mask_loss = checkpoint['mask_loss']
        self.val_class_loss = checkpoint['val_class_loss']
        self.val_class_acc = checkpoint['val_class_acc']
        self.val_mask_loss = checkpoint['val_mask_loss']
        self.val_mask_acc = checkpoint['val_mask_acc']
        print('Loaded the model state and metrics!')
        if self.use_tensorboard:
            # for (train_loss, class_loss, mask_loss) in zip(self.train_loss, self.class_loss, self.mask_loss):
            #     self.tensorboard_writer.add_scalar('Loss/train', train_loss, 0)
            #     self.tensorboard_writer.add_scalar('Loss/class', class_loss, 0)
            #     self.tensorboard_writer.add_scalar('Loss/mask', mask_loss, 0)
            # Read the stuff into tensorboard too.
            # NOTE(review): every scalar is re-logged at step 0, so resumed
            # curves overwrite rather than extend -- confirm intended.
            for (train_loss, class_loss, mask_loss, val_class_loss, val_class_acc, val_mask_loss, val_mask_acc) in zip(self.train_loss,
                                                                                                                      self.class_loss,
                                                                                                                      self.mask_loss,
                                                                                                                      self.val_class_loss,
                                                                                                                      self.val_class_acc,
                                                                                                                      self.val_mask_loss,
                                                                                                                      self.val_mask_acc):
                self.tensorboard_writer.add_scalar('Loss/train', train_loss, 0)
                self.tensorboard_writer.add_scalar('Loss/class', class_loss, 0)
                self.tensorboard_writer.add_scalar('Loss/mask', mask_loss, 0)
                self.tensorboard_writer.add_scalar('Loss/val_mask', val_mask_loss, 0)
                self.tensorboard_writer.add_scalar('Acc/val_mask', val_mask_acc, 0)
                self.tensorboard_writer.add_scalar('Loss/val_class', val_class_loss, 0)
                self.tensorboard_writer.add_scalar('Acc/val_class', val_class_acc, 0)
            print('Logged previous data into tensorboard')
    def predict_sample(self):
        """
        Takes in a single batch from the vlaidation loader and performs inference.
        Plots the result with the original image, mask, label and predicted mask
        and label
        """
        val_iter = iter(self.val_loader)
        image, mask, label = next(val_iter)
        predicted_mask, _ = self.model(image.to(self.device))
        predicted_mask = predicted_mask.squeeze().detach().cpu().numpy()
        # One row per sample: [input image, ground-truth mask, predicted mask].
        f, axarr = plt.subplots(self.batch_size, 3, figsize=(10, self.batch_size*4))
        if self.batch_size == 1:
            # subplots returns a 1-D axes array when batch_size == 1.
            axarr[0].imshow(image.squeeze().permute(1, 2, 0).cpu().detach().numpy())
            axarr[0].set_title(f'Generated image: {IDX2LABELS[label.item()]}')
            # NOTE(review): this shows `image` where the batch>1 branch shows
            # `mask` -- likely meant to be mask.squeeze(); confirm.
            axarr[1].imshow(image.squeeze().numpy())
            axarr[1].set_title(f'Generated mask: {IDX2LABELS[label.item()]}')
            axarr[2].imshow(predicted_mask)
            # axarr[2].set_title(f'Predicted mask: {IDX2LABELS}')
        else:
            for i in range(self.batch_size):
                axarr[i, 0].imshow(image[i].permute(1, 2, 0).cpu().detach().numpy())
                axarr[i, 0].set_title(f'Generated image: {IDX2LABELS[label[i].item()]}')
                axarr[i, 1].imshow(mask[i].squeeze().numpy())
                axarr[i, 1].set_title(f'Generated mask: {IDX2LABELS[label[i].item()]}')
                axarr[i, 2].imshow(predicted_mask[i])
                # axarr[i, 2].set_title(f'Generated mask: {IDX2LABELS[label[i].item()]}')
    def validate(self):
        """
        Validate the model. Return the validation scores like:
        class loss, mask loss, class accuracy, mask accuracy.
        Class accuracy is simple measured by (correct/total)*100.
        Mask accuracy is measured by the dice coefficient.

        Returns (in this order): mean mask loss, mean mask accuracy,
        mean class loss, mean class accuracy -- each averaged over the
        number of validation batches.
        """
        # Switch to eval mode and disable gradients for the whole pass.
        self.model.eval()
        running_mask_loss = 0
        running_class_loss = 0
        running_mask_accuracy = 0
        running_class_accuracy = 0
        with torch.no_grad():
            for image, mask, label in self.val_loader:
                image, mask, label = image.to(self.device), mask.to(self.device), label.to(self.device)
                predicted_mask, predicted_class = self.model(image)
                running_mask_loss += self.criterion_mask(predicted_mask.squeeze().float(),
                                                         mask.squeeze().float())
                running_class_loss += self.criterion_class(predicted_class, label)
                # Count correct class predictions (argmax over class logits).
                _, indices = torch.max(predicted_class, dim=1)
                running_class_accuracy += (indices.squeeze() == label.squeeze()).sum().item()
                running_mask_accuracy += self.dice_coefficient(predicted_mask.squeeze().float(),
                                                               mask.squeeze().float())
        # Restore training mode before returning to the training loop.
        self.model.train()
        return running_mask_loss/ len(self.val_loader), running_mask_accuracy/ len(self.val_loader), running_class_loss/ len(self.val_loader), running_class_accuracy/ len(self.val_loader)
def train(self, n_epochs):
"""
Train the model. The model will be trained and the metrics will be saved in the 'training_logs' directory.
Args:
n_epochs: The number of epochs for which to train.
"""
print('Starting training...' if not self.resume else 'Resuming training...')
for epoch in range(n_epochs):
self.model.train()
print(f'Epoch: {epoch}')
running_loss = 0
class_running_loss = 0
mask_running_loss = 0
for i, (image, mask, label) in enumerate(self.train_loader):
image, mask, label = image.to(self.device), mask.to(self.device), label.to(self.device)
predicted_mask, predicted_class = self.model(image)
# Calculate the losses.
mask_loss = self.criterion_mask(predicted_mask.squeeze(), mask.squeeze())
class_loss = self.criterion_class(predicted_class, label)
total_loss = (mask_loss + class_loss) / 2 # Average the losses.
# Append to the lists to log them.
running_loss += total_loss.item()
class_running_loss += class_loss.item()
mask_running_loss += mask_loss.item()
self.optimizer.zero_grad()
total_loss.backward()
self.optimizer.step()
# At every "self.print_every" step, display the log TODO save the logs too.
if (i+1) % self.print_every == 0:
print(f'Epoch: {epoch}, Batch: {i+1}, Loss: {running_loss/(self.print_every)} ' \
f'Class loss: {class_running_loss/(self.print_every)}, Mask loss: {mask_running_loss/(self.print_every)}')
self.train_loss.append(running_loss/self.print_every)
self.class_loss.append(class_running_loss/self.print_every)
self.mask_loss.append(mask_running_loss/self.print_every)
if self.use_tensorboard:
self.tensorboard_writer.add_scalar('Loss/train', running_loss/self.print_every, 0)
self.tensorboard_writer.add_scalar('Loss/class', class_loss/self.print_every, 0)
self.tensorboard_writer.add_scalar('Loss/mask', mask_loss/self.print_every, 0)
running_loss = 0
mask_running_loss = 0
class_running_loss = 0
self.save_model()
# Validate after every epoch
val_mask_loss, val_mask_acc, val_class_loss, val_class_acc = self.validate()
print(f'Validation Stats:\nMask loss: {val_mask_loss}, Mask accuracy: {val_mask_acc}, ' \
f'Class loss: {val_class_loss}, Class_accuracy: {val_class_acc}\n')
# Call the learning rate scheduler
self.scheduler.step(val_mask_loss)
self.val_class_loss.append(val_class_loss)
self.val_mask_loss.append(val_mask_loss)
self.val_class_acc.append(val_class_acc)
self.val_mask_acc.append(val_mask_acc)
if self.use_tensorboard:
self.tensorboard_writer.add_scalar('Loss/val_mask', val_mask_loss, 0)
self.tensorboard_writer.add_scalar('Acc/val_mask', val_mask_acc, 0)
self.tensorboard_writer.add_scalar('Loss/val_class', val_class_loss, 0)
self.tensorboard_writer.add_scalar('Acc/val_class', val_class_acc, 0)
def current_lr(self):
print(f'The current learning rate: {self.scheduler.get_lr()}')
def current_lr(self):
for param_group in self.optimizer.param_groups:
return param_group['lr']
def plot_previous_metrics(self):
if self.resume:
print(f'Plotting training metrics...')
f, axarr = plt.subplots(1, 3, figsize = (15, 4))
axarr[0].plot(self.train_loss)
axarr[0].set_ylabel('Loss')
axarr[0].set_title('Training loss')
axarr[1].plot(self.class_loss)
axarr[1].set_ylabel('Loss')
axarr[1].set_title('Class loss')
axarr[2].plot(self.mask_loss)
axarr[2].set_ylabel('Loss')
axarr[2].set_title('Mask loss')
print(f'Plotting validation metrics...')
f, axarr = plt.subplots(1, 4, figsize = (20, 4))
axarr[0].plot(self.val_class_loss)
axarr[0].set_ylabel('Loss')
axarr[0].set_title('Validation Class Loss')
axarr[1].plot(self.val_class_acc)
axarr[1].set_ylabel('Accuracy')
axarr[1].set_title('Validation Class Accuracy')
axarr[2].plot(self.val_mask_loss)
axarr[2].set_ylabel('Loss')
axarr[2].set_title('Validation Mask Loss')
axarr[3].plot(self.val_mask_acc)
axarr[3].set_ylabel('Accuracy')
axarr[3].set_title('Validation Mask Accuracy')
else:
print(f'No metrics to plot yet.')
| myidispg/kaggle-cloud | utils/train_utils.py | train_utils.py | py | 16,304 | python | en | code | 0 | github-code | 13 |
33990148704 | from flask import Flask, render_template, request
from meal_plan import mp
# The Flask application instance used by all route handlers below.
app = Flask(__name__)
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/project/')
def project_history():
    """Render the project-history page."""
    return render_template('project_history.html')
@app.route('/register/')
def form():
    """Render the registration form."""
    return render_template('register.html')
@app.route('/endofreg/', methods=["POST", "GET"])
def regend():
    """Store the submitted registration form and show the confirmation page."""
    if request.method == "POST":
        mp.userdata = dict(request.form)
    # NOTE(review): on GET the confirmation page is rendered without any
    # registration data having been stored — confirm this is intended.
    return render_template('end_of_register.html')
@app.route('/options/', methods=["POST", "GET"])
def fill():
    """Render the meal-plan options form."""
    return render_template('options.html')
@app.route('/generator/', methods=["POST", "GET"])
def processing():
    """Store the chosen options, fetch recipes from the API and build the plan."""
    if request.method == "POST":
        mp.options = dict(request.form)
        # Query the recipe API with the calorie target, diet type and exclusions.
        rowdata = mp.get_api_data(request.form["calories"], request.form["diet type"], request.form["exc"])
        mp.dict_save(rowdata)
        mp.get_nutritions()
        mp.get_images()
    return render_template('plan_generator.html')
@app.route('/mealplan/')
def result():
    """Show the generated meal plan with its nutrition data and images."""
    return render_template('meal_plan.html', mealplan=mp.meals, nutritions=mp.nutritions, images=mp.images)
@app.route('/summary/', methods=["POST", "GET"])
def theend():
    """Store the user's meal selections and render the final summary page."""
    mp.selected = dict(request.form)
    # Form keys arrive as strings; convert them to the integer meal indices
    # the summary template expects.
    mp.selected = {int(k): v for k, v in mp.selected.items()}
    return render_template('final.html',
                           login=mp.userdata['login'],
                           ward=mp.userdata['ward'],
                           wing=mp.userdata['wing'],
                           cutlery=mp.options['cutlery'],
                           calories=mp.options['calories'],
                           mealsnr=mp.options['meals_nr'],
                           diet=mp.options['diet type'],
                           beverage=mp.options['beverage'],
                           exc=mp.options['exc'],
                           selection=mp.selected,
                           images=mp.images,
                           meals=mp.meals)
if __name__ == '__main__':
    # Run the development server (debug mode: auto-reload + error pages).
    app.run(debug=True)
| kajankowska/final_project | app.py | app.py | py | 2,084 | python | en | code | 0 | github-code | 13 |
def aWayTooLongString(string):
    """Abbreviate words longer than 10 characters as
    <first letter><count of middle letters><last letter> (e.g. "l10n").

    Shorter words (and, as a FIX, the empty string — which previously
    raised IndexError when indexing the last character) are returned
    unchanged.
    """
    if len(string) <= 10:
        return string
    # First char + number of characters between first and last + last char.
    return string[0] + str(len(string) - 2) + string[-1]
print(aWayTooLongString("pneumonoultramicroscopicsilicovolcanoconiosis")) | mhasan09/leetCode_M | aWayTooLongString.py | aWayTooLongString.py | py | 280 | python | en | code | 0 | github-code | 13 |
20507691167 | # -*- coding:UTF-8 -*-
from django.shortcuts import *
from django.template import RequestContext
from django.http import HttpResponse
import urllib
import urllib2
import cookielib
from Server.models import Profile
import BeautifulSoup
from env.env import *
from env.urlmap import *
from django.contrib import auth
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.contrib.auth import login as auth_login
import re
from account_handler import handle_course,dict_deanprofile
from django.views.decorators.http import require_http_methods
from utility.TimeUtil import getTermNumber
from Server.settings import MEDIA_ROOT
import MasterStu_handler
import Stu_elective_handler
"这是教务登陆后返回的字串里包含的一条信息,用于识别是否成功登陆"
plogin=re.compile(u'parent\.location\.href="student_index\.php\?PHPSESSID')
def _loginFromData(request, data):
    """Authenticate against the local user store and start a session.

    Returns '0' on success, '-1' for an inactive account and '-202' when
    the credentials do not match any user.
    """
    user = authenticate(username=data['sno'], password=data['passwd'])
    if user is None:
        return HttpResponse('-202')
    if not user.is_active:
        return HttpResponse('-1')
    auth_login(request, user)
    return HttpResponse('0')
def login_dean_with_data(**kwarg):
    """Authenticate a student against the dean's-office site and sync their data.

    Expected keyword arguments:
        data: dict with keys 'database', 'sno', 'passwd', 'number' (captcha text).
        PHPSESSID: session id issued by the dean's site with the captcha image.
        request: the incoming Django request (used for the local session login).

    Returns an HttpResponse whose body is a status code string
    ('0' success, negative codes on failure, or the raw upstream page in an
    HTML comment when the remote login fails).
    """
    data = kwarg['data']
    PHPSESSID = kwarg['PHPSESSID']
    request = kwarg['request']
    headers = {'User-Agent': user_agent, 'PHPSESSID': PHPSESSID}
    # No captcha supplied: fall back to a purely local login.
    if data['number'] == u'':
        return _loginFromData(request, data)
    url_values = urllib.urlencode(data)
    req = urllib2.Request(urlin + PHPSESSID, url_values, headers)
    response = urllib2.urlopen(req)
    logindata = response.read()
    response.close()
    if not re.search(plogin, logindata.decode('gb18030')):
        # Remote login failed: log out of the dean's site and surface the raw
        # page to the client inside an HTML comment for debugging.
        req = urllib2.Request(urlexit + PHPSESSID)
        urllib2.urlopen(req)
        response.close()
        error = logindata.decode('gb18030')
        return HttpResponse('<!--' + error + '-->')
    # Fetch the course-selection page.
    req = urllib2.Request(urlxkqk + PHPSESSID, None, headers)
    response = urllib2.urlopen(req)
    doc_xkqk = response.read().decode('GBK')
    # Fetch the student's profile page.
    req = urllib2.Request(urlprofile + PHPSESSID, None, headers)
    response = urllib2.urlopen(req)
    doc_profile = response.read().decode('GBK')
    # Log out of the dean's site.
    req = urllib2.Request(urlexit + PHPSESSID)
    urllib2.urlopen(req)
    response.close()
    register = dict_deanprofile(doc_profile)
    if User.objects.filter(username=data['sno']).count() == 0:
        # First login: create the local user and profile, then import courses.
        userprofile = Profile(realname=register.get('realname', ''),
                              school=register.get('school', ''),
                              grade=register.get('grade', ''),
                              major=register.get('major', ''),
                              mphone=register.get('mphone', ''),
                              phone=register.get('phone', ''),
                              user_type=0)
        user = User.objects.create_user(username=data['sno'], password=data['passwd'],
                                        email=register.get('email', 'example@example.com'))
        user.save()
        userprofile.user = user
        userprofile.save()
        # Import the course data; any parsing failure maps to error code -5.
        try:
            handle_course(doc_xkqk, user)
        except Exception:
            return HttpResponse('-5')
        return _loginFromData(request, data)
    else:
        # Returning user: refresh password, profile and this term's courses.
        cuser = User.objects.get(username=data['sno'])
        cuser.set_password(data['passwd'])
        try:
            profile = Profile.objects.get(user=cuser.id)
        except Exception:
            profile = Profile(realname=register.get('realname', ''),
                              school=register.get('school', ''),
                              grade=register.get('grade', ''),
                              major=register.get('major', ''),
                              mphone=register.get('mphone', ''),
                              phone=register.get('phone', ''),
                              user_type=0)
            # FIX: the freshly created profile was never linked to the user,
            # which would break the save below.
            profile.user = cuser
        profile.realname = register.get('realname', '')
        profile.school = register.get('school', '')
        profile.grade = register.get('grade', '')
        # FIX: this previously read "profile.grade = mphone = ..." and
        # clobbered the grade with the mobile-phone value.
        profile.mphone = register.get('mphone', '')
        profile.phone = register.get('phone', '')
        profile.user_type = 0
        profile.save()
        # Detach this term's old course links before re-importing.
        course_set = cuser.course_set.filter(termnumber=getTermNumber())
        for ccourse in course_set:
            ccourse.user.remove(cuser)
            ccourse.save()
        cuser.save()
        handle_course(doc_xkqk, cuser)
        return _loginFromData(request, data)
@require_http_methods(['POST'])
def login_dean(request):
    """POST endpoint: log in through the dean's office using the form fields
    `username`/`passwd`/`valid` plus the captcha session id (`sessionid`)."""
    PHPSESSID = request.POST.get('sessionid', '')
    data = {
        'database': '0',
        'sno': request.POST.get('username', ''),
        'passwd': request.POST.get('passwd', ''),
        'number': request.POST.get('valid', ''),
    }
    return login_dean_with_data(data=data, PHPSESSID=PHPSESSID, request=request)
def login(request):
    """Legacy login endpoint: same flow as `login_dean` but with the older
    form field names (`sid`/`sno`/`pwd`/`check`)."""
    PHPSESSID = request.POST.get('sid', '')
    data = {
        'database': '0',
        'sno': request.POST.get('sno', ''),
        'passwd': request.POST.get('pwd', ''),
        'number': request.POST.get('check', ''),
    }
    return login_dean_with_data(data=data, PHPSESSID=PHPSESSID, request=request)
@require_http_methods(['POST'])
def login_elective(request):
    """Log a student in via the elective (course selection) site.

    POST fields: username, passwd, valid (captcha text) and sessionid
    (the JSESSIONID issued with the captcha image). On success the local
    user is created/updated, this term's courses are re-imported, and a
    status-code string is returned ('0' on success, negative on failure).
    """
    def _login(request):
        # Authenticate against the local user store and open a session.
        user = authenticate(username = request.POST.get('username',''),\
                password = request.POST.get('passwd',''))
        if user is not None:
            if user.is_active:
                auth_login(request,user)
                return HttpResponse('0')
            else:
                return HttpResponse('-1')
        return HttpResponse('-202')
    # NOTE(review): urlCourseResults and p_notlogin are defined but unused here.
    urlCourseResults = 'http://elective.pku.edu.cn/elective2008/edu/pku/stu/elective/controller/electiveWork/showResults.do'
    user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7'
    urllogin_elective="http://elective.pku.edu.cn/elective2008/edu/pku/stu/elective/controller/loginServlet/login_webservicehandle.jsp"
    p_login = re.compile(u'([^ ]*) *([^ ]*) 您好')
    p_notlogin = re.compile(u'未登录')
    if request.method == 'POST':
        data = {}
        data['uid'] = request.POST.get('username',None)
        data['psd'] = request.POST.get('passwd',None)
        data['validCode'] = request.POST.get('valid',None)
        cookie_value = 'JSESSIONID='+request.POST.get('sessionid',None)
        url_values = urllib.urlencode(data)
        _req = urllib2.Request(urllogin_elective,url_values)
        _req.add_header('Cookie',cookie_value)
        response = urllib2.urlopen(_req)
        mlogin = response.read().decode('utf8')
        response.close()
        # A greeting "<school> <name> 您好" in the response means login succeeded.
        match_login = p_login.search(mlogin)
        # return HttpResponse(mlogin)
        if match_login:
            register = {}
            register['realname'] = match_login.group(2)
            register['school'] = match_login.group(1)
            '''Get Course Doc'''
            '''退出'''
            # no need
            '''if user does not exists yet'''
            if User.objects.filter(username = data['uid']).count() == 0:
                userprofile = Profile(realname=register.get('realname',''),
                                school = register.get('school',''),
                                grade = register.get('grade',''),
                                major = register.get('major',''),
                                mphone = register.get('mphone',''),
                                phone = register.get('phone',''),
                                user_type = 0,
                                )
                cuser = User.objects.create_user(username=data['uid'],password=data['psd'],email='example@example.com')
                cuser.save()
                userprofile.user = cuser
                userprofile.save()
                '''begin handle course data'''
                error = Stu_elective_handler.handleElectiveCourse(cookie_value,cuser)
                _login(request)
                return HttpResponse(error)
            else:
                # Returning user: refresh password, profile and courses.
                cuser = User.objects.get(username=data['uid'])
                cuser.set_password(data['psd'])
                try:
                    profile = Profile.objects.get(user=cuser.id)
                except:
                    # NOTE(review): this fallback profile is never linked to
                    # cuser (profile.user is unset) before save — confirm.
                    profile = Profile(realname=register.get('realname',''),
                                school = register.get('school',''),
                                grade = register.get('grade',''),
                                major = register.get('major',''),
                                mphone = register.get('mphone',''),
                                phone = register.get('phone',''),
                                user_type = 0,
                                )
                profile.realname = register.get('realname','')
                profile.school = register.get('school','')
                profile.grade = register.get('grade','')
                # NOTE(review): the next line overwrites profile.grade with the
                # mobile-phone value; it was presumably meant to be
                # "profile.mphone = register.get('mphone','')".
                profile.grade = mphone = register.get('mphone','')
                profile.user_type = 0
                profile.phone = register.get('phone','')
                profile.save()
                course_set = cuser.course_set.filter(termnumber = getTermNumber())
                for ccourse in course_set:
                    ccourse.user.remove(cuser)
                    ccourse.save()
                cuser.save()
                error = Stu_elective_handler.handleElectiveCourse(cookie_value,cuser)
                _login(request)
                return HttpResponse(error)
        else:
            # Remote login failed: echo the upstream page back to the client.
            return HttpResponse(mlogin)
    return HttpResponse('-1')
@require_http_methods(['POST'])
def login_portal(request):
    """Log a (graduate) student in via the campus information portal.

    POST fields: username, passwd, valid (captcha text) and sessionid
    (the JSESSIONID issued with the captcha image). On success the local
    user is created/updated and this term's courses are re-imported.
    """
    def _login(request):
        # Authenticate against the local user store and open a session.
        user = authenticate(username = request.POST.get('username',''),\
                password = request.POST.get('passwd',''))
        if user is not None:
            if user.is_active:
                auth_login(request,user)
                return HttpResponse('0')
            else:
                return HttpResponse('-1')
        return HttpResponse('-202')
    url_exit_portal = 'http://portal.pku.edu.cn//infoPortal/logout.do'
    url_login_portal = 'http://portal.pku.edu.cn:80/infoPortal/login.do'
    url_course_doc = 'http://portal.pku.edu.cn/infoPortal/appmanager/myPortal/myDesktop?_nfpb=true&_pageLabel=myPortal_page_17'
    p_login_portal = re.compile(u'<B>(.*)</B>,欢迎登录信息门户')
    data = {}
    JSESSIONID = request.POST.get('sessionid','')
    headers={'Cookie':'JSESSIONID='+JSESSIONID}
    data['{actionForm.userid}'] = request.POST.get('username','')
    data['{actionForm.password}'] = request.POST.get('passwd','')
    data['{actionForm.validCode}'] = request.POST.get('valid','')
    if data['{actionForm.validCode}']==u'':
        # NOTE(review): the local-login result is discarded here, so control
        # falls through to the final "return HttpResponse('-1')" — confirm
        # whether this should be "return _login(request)".
        _login(request)
    else:
        url_values = urllib.urlencode(data)
        cookieJar=cookielib.CookieJar()
        opener=urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
        req_login = urllib2.Request(url_login_portal,url_values,headers)
        response = opener.open(req_login)
        # A "welcome <name>" banner in the response means login succeeded.
        match_login = p_login_portal.search(response.read().decode('utf8'))
        if match_login:
            register = {}
            register['realname'] = match_login.group(1)
            '''Get Course Doc'''
            req = urllib2.Request(url_course_doc)
            response = opener.open(req)
            doc_xkqk = response.read()
            response.close()
            '''退出'''
            req = urllib2.Request(url_exit_portal)
            response = opener.open(req)
            response.close()
            '''if user does not exists yet'''
            if User.objects.filter(username = data['{actionForm.userid}']).count() == 0:
                userprofile = Profile(realname=register.get('realname',''),
                                school = register.get('school',''),
                                grade = register.get('grade',''),
                                major = register.get('major',''),
                                mphone = register.get('mphone',''),
                                phone = register.get('phone',''),
                                user_type = 1,
                                )
                cuser = User.objects.create_user(username=data['{actionForm.userid}'],password=data['{actionForm.password}'],email='example@example.com')
                cuser.save()
                userprofile.user = cuser
                userprofile.save()
                '''begin handle course data'''
                error = MasterStu_handler.parse_course_page(doc_xkqk,cuser)
                _login(request)
                return HttpResponse(error)
            else:
                # Returning user: refresh password, profile and courses.
                cuser = User.objects.get(username=data['{actionForm.userid}'])
                cuser.set_password(data['{actionForm.password}'])
                try:
                    profile = Profile.objects.get(user=cuser.id)
                except:
                    # NOTE(review): this fallback profile is never linked to
                    # cuser (profile.user is unset) before save — confirm.
                    profile = Profile(realname=register.get('realname',''),
                                school = register.get('school',''),
                                grade = register.get('grade',''),
                                major = register.get('major',''),
                                mphone = register.get('mphone',''),
                                phone = register.get('phone',''),
                                user_type = 1,
                                )
                profile.realname = register.get('realname','')
                profile.school = register.get('school','')
                profile.grade = register.get('grade','')
                # NOTE(review): the next line overwrites profile.grade with the
                # mobile-phone value; it was presumably meant to be
                # "profile.mphone = register.get('mphone','')".
                profile.grade = mphone = register.get('mphone','')
                profile.user_type = 1
                profile.phone = register.get('phone','')
                profile.save()
                course_set = cuser.course_set.filter(termnumber = getTermNumber())
                for ccourse in course_set:
                    ccourse.user.remove(cuser)
                    ccourse.save()
                cuser.save()
                error = MasterStu_handler.parse_course_page(doc_xkqk,cuser)
                _login(request)
                return HttpResponse(error)
        else:
            return HttpResponse('-3')
    return HttpResponse('-1')
def index(request):
    """Render the landing page, fetching a fresh captcha image and session id
    for each of the three upstream systems (dean's office, portal, elective).

    Each captcha is written under MEDIA_ROOT so the template can display it;
    the corresponding session ids are passed in the template context.
    """
    # Dean's-office captcha.
    cookie = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    response = opener.open(urlimg)
    t = response.read()
    PHPSESSID = ''
    for item in cookie:
        if item.name == 'PHPSESSID':
            PHPSESSID = item.value
    img = open(MEDIA_ROOT + "dean" + ".gif", "wb")
    img.write(t)
    img.close()
    context = {'sid': PHPSESSID}
    # Portal captcha. (Note: "protal.gif" spelling is kept — the template
    # presumably references the same name.)
    url_portal_img = 'http://portal.pku.edu.cn/infoPortal/DrawServlet?Rand=5052.215403411537'
    cookieJar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
    JSESSIONID = ''
    response = opener.open(url_portal_img)
    img = response.read()
    imgfile = open(MEDIA_ROOT + "protal.gif", "wb")
    imgfile.write(img)
    imgfile.close()
    for item in cookieJar:
        if item.name == 'JSESSIONID':
            JSESSIONID = item.value
    context['portal_sid'] = JSESSIONID
    # Elective-site captcha.
    url_elective_img = "http://elective.pku.edu.cn/elective2008/DrawServlet?Rand=1898.0822409503162"
    cookieJar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
    JSESSIONID = ''
    response = opener.open(url_elective_img)
    img = response.read()
    # FIX: the GIF must be written in binary mode ("wb"); text mode ("w")
    # corrupts the image on platforms that translate line endings.
    imgfile = open(MEDIA_ROOT + "elective.gif", "wb")
    imgfile.write(img)
    imgfile.close()
    for item in cookieJar:
        if item.name == 'JSESSIONID':
            JSESSIONID = item.value
    context['elective_id'] = JSESSIONID
    return render_to_response("index.html", context)
def login_required_message(request):
    """Fallback view returned when an endpoint requires an authenticated user."""
    return HttpResponse('-10')
| pkuapp/pkuapp_server | views.py | views.py | py | 21,034 | python | en | code | 3 | github-code | 13 |
21898052823 | #PAGE 1 - Overview of US Market
import yfinance as yf
import datetime
import pandas as pd
import requests
from bs4 import BeautifulSoup
end = datetime.datetime(2020,3,12)
days = datetime.timedelta(365*3)  # three years of history
start = end - days
# Ticker groups (ETF proxies) downloaded below.
metals = ['GLD','SLV','COPX', 'PALL', 'SLX', 'REMX']
major_indexes = ['SPY','INDA','MCHI', 'EWZ', 'IEUR', 'AFK']
energy = ['USO','UNG','KOL', 'TAN', 'FAN']
currency = ['UUP','FXE']
def scrape_yield_curve():
    """Scrape the US Treasury daily yield-curve table and save the last
    252 rows (roughly one trading year) to ./raw_data/yieldcurve.csv.
    """
    #https://www.treasury.gov/resource-center/data-chart-center/interest-rates/Pages/TextView.aspx?data=yieldAll
    url = 'https://www.treasury.gov/resource-center/data-chart-center/interest-rates/Pages/TextView.aspx?data=yieldAll'
    r = requests.get(url)
    html = r.text
    # FIX: name the parser explicitly — bare BeautifulSoup(html) emits a
    # warning and silently uses whichever parser happens to be installed.
    soup = BeautifulSoup(html, 'html.parser')
    table = soup.find('table', {"class": "t-chart"})
    rows = table.find_all('tr')
    data = []
    for row in rows[1:]:  # skip the header row
        cols = row.find_all('td')
        cols = [ele.text.strip() for ele in cols]
        data.append([ele for ele in cols if ele])
    result = pd.DataFrame(data, columns=['Date','1 Mo', '2 Mo', '3 Mo', '6 Mo', '1 Yr', '2 Yr', '3 Yr', '5 Yr', '7 Yr', '10 Yr', '20 Yr', '30 Yr'])
    result[-252:].to_csv(r'./raw_data/yieldcurve.csv', index = False)
def scrape_yahoo_prices(stocks, start, end, file_label, format):
    """Download OHLC data for `stocks` from Yahoo Finance and write one of
    three derived series to ./raw_data/<file_label>.csv.

    format: "daily-percent" (open-vs-close % move), "year-over-year"
    (252-trading-day relative change, last year only) or "close_price".
    Any other value silently writes nothing.
    """
    #download
    data = yf.download(stocks, start=start, end=end)
    #print the header
    #data['Close'].head()
    if format == "daily-percent":
        #output to a file
        # NOTE(review): (Open/Close - 1) measures close-relative-to-open in
        # the inverse direction of the usual daily move (Close/Open - 1) —
        # confirm this is intended.
        percent = round((data['Open']/data['Close']-1)*100,2)
        percent.to_csv(r"./raw_data/"+file_label+".csv")
    if format == "year-over-year":
        #https://stackoverflow.com/questions/28328636/calculating-year-over-year-growth-by-group-in-pandas
        data = (data['Close'] - data['Close'].shift(252))/ data['Close'].shift(252)
        data[-252:].to_csv(r"./raw_data/"+file_label+".csv")
    if format == "close_price":
        data['Close'].to_csv(r"./raw_data/"+file_label+".csv")
#http://www.worldgovernmentbonds.com/
# Fetch the year-over-year series for each ticker group.
scrape_yahoo_prices(major_indexes, start, end, "major_indexes", "year-over-year")
scrape_yahoo_prices(metals, start, end, "metals", "year-over-year")
scrape_yahoo_prices(energy, start, end, "energy", "year-over-year")
scrape_yahoo_prices(currency, start, end, "currency", "year-over-year")
scrape_yield_curve() | mcadhoc/Markets-Data-Scraping | data_scripts/scrape_data.py | scrape_data.py | py | 2,321 | python | en | code | 0 | github-code | 13 |
10191452581 | import ssl
from typing import Optional
from requests.adapters import HTTPAdapter
class SSLCiphers(HTTPAdapter):
    """HTTP adapter that pins a custom TLS cipher list and OpenSSL security level.

    The OpenSSL security level (0-5) sets the minimum key sizes, hash
    functions and protocol versions that will be accepted: 0 permits
    everything, and each step up tightens the requirements (2, roughly
    112 bits of security, is the usual default; 5 demands 256 bits).
    The level must be passed via `security_level` — it must never be
    embedded in the cipher list itself via @SECLEVEL.
    """

    def __init__(self, cipher_list: Optional[str] = None, security_level: int = 0, *args, **kwargs):
        """Validate the arguments, build the SSL context, then defer to HTTPAdapter.

        Raises TypeError for wrongly-typed arguments and ValueError for a
        cipher list containing @SECLEVEL or a level outside 0-5.
        """
        if cipher_list:
            if not isinstance(cipher_list, str):
                raise TypeError(f"Expected cipher_list to be a str, not {cipher_list!r}")
            if "@SECLEVEL" in cipher_list:
                raise ValueError("You must not specify the Security Level manually in the cipher list.")
        if not isinstance(security_level, int):
            raise TypeError(f"Expected security_level to be an int, not {security_level!r}")
        if security_level not in range(6):
            raise ValueError(f"The security_level must be a value between 0 and 5, not {security_level}")

        # Fall back to cpython's default cipher list (which differs from the
        # one python-requests ships) and append the requested security level.
        ciphers = cipher_list or "DEFAULT"
        ciphers = f"{ciphers}:@SECLEVEL={security_level}"

        context = ssl.create_default_context()
        # For some reason this is needed to avoid a verification error.
        context.check_hostname = False
        context.set_ciphers(ciphers)
        self._ssl_context = context
        super().__init__(*args, **kwargs)

    def init_poolmanager(self, *args, **kwargs):
        """Inject the custom SSL context into the connection pool manager."""
        kwargs["ssl_context"] = self._ssl_context
        return super().init_poolmanager(*args, **kwargs)

    def proxy_manager_for(self, *args, **kwargs):
        """Inject the custom SSL context into proxy connections as well."""
        kwargs["ssl_context"] = self._ssl_context
        return super().proxy_manager_for(*args, **kwargs)
| devine-dl/devine | devine/core/utils/sslciphers.py | sslciphers.py | py | 3,699 | python | en | code | 198 | github-code | 13 |
19435710129 | import pandas as pd
# save filepath to variable for easier access
melbourne_file_path = '~/Downloads/melb_data.csv'
# read the data and store data in DataFrame titled melbourne_data
melbourne_data = pd.read_csv(melbourne_file_path)
# print a summary of the data in Melbourne data
melbourne_data.describe()
# Hard-coded answers from the describe() output above.
avg_lot_size = 558
newest_home_age = 2023 - 2018
print("average lot size: ",avg_lot_size)
print("newest home age: ",newest_home_age)
melbourne_data.columns
# Prediction target.
y = melbourne_data.Price
print(y)
# Create the list of features below
# NOTE(review): "Price" is also the prediction target (y) — keeping it in the
# feature list leaks the answer into the model. Confirm it belongs here.
feature_names = ["Rooms", "Price", "Bathroom", "Car", "YearBuilt"]
# Select data corresponding to features in feature_names
X = melbourne_data[feature_names]
# print description or statistics from X
print(X)
# print the top few lines
X.head()
from sklearn.tree import DecisionTreeRegressor
#For model reproducibility, set a numeric value for random_state when specifying the model
iowa_model = DecisionTreeRegressor(random_state=1)
# Fit the model
iowa_model.fit(X, y)
print("Making predictions..")
# In-sample predictions (same rows the model was fit on).
predictions = iowa_model.predict(X)
print(predictions)
#compare the top few predictions to the actual home values (in y) for those same homes
y.head()
from sklearn.model_selection import train_test_split
train_X, val_X, train_y, val_y = train_test_split(X, y, random_state = 1)
# Specify the model
iowa_model = DecisionTreeRegressor(random_state=1)
iowa_model.fit(train_X, train_y) | hazalkntr/data-works | introML.py | introML.py | py | 1,472 | python | en | code | 0 | github-code | 13 |
def solution(scores):
    """Return Wanho's (scores[0]) incentive rank, or -1 if another employee
    strictly beats him on both scores (so he gets no incentive)."""
    wanho_att, wanho_peer = scores[0]
    wanho_total = wanho_att + wanho_peer
    # Sort in place: attitude descending, then peer review ascending.
    scores.sort(key=lambda s: (-s[0], s[1]))
    top_att = scores[0][0]
    top_peer = scores[0][1]
    rank = 1
    for att, peer in scores:
        # Someone dominates Wanho on both axes: no incentive at all.
        if att > wanho_att and peer > wanho_peer:
            return -1
        # Only employees with a strictly higher total push Wanho's rank down.
        if att + peer <= wanho_total:
            continue
        if att == top_att:
            rank += 1
            top_peer = peer
        elif peer >= top_peer:
            top_peer = peer
            top_att = att
            rank += 1
    return rank

print(solution([[2,2],[1,4],[3,2],[3,2],[2,1]]))
# 어떤 사원이 다른 임의의 사원보다 두 점수 모두 낮으면 인센티브 x
# 그렇지 않은 사원은 두 점수 합이 높은 순으로 인센티브 지급
# 동일 점수일시 그 수만큼 다음 석차는 건너 뜀
# 1등 2명일시, 1등 2명. 3등 1명
# scores 길이는 10만 이하
# [0]은 완호 점수
# 완호가 못 받을 시 -1
# 5 번째 사원은 3 번째 또는 4 번째 사원보다 근무 태도 점수와 동료 평가 점수가
# 모두 낮기 때문에 인센티브를 받을 수 없습니다.
# 2 번째 사원, 3 번째 사원, 4 번째 사원은 두 점수의 합이 5 점으로 최고점이므로 1 등입니다.
# 1 등이 세 명이므로 2 등과 3 등은 없고 1 번째 사원인 완호는 두 점수의 합이 4 점으로 4 등입니다.
| weeeeey/programmers | 인사고과.py | 인사고과.py | py | 1,404 | python | ko | code | 0 | github-code | 13 |
69897780819 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
def fbnq(n=10):
    """Print the integers 1..n and return their sum.

    Note: despite the name (suggesting Fibonacci), this computes the
    arithmetic sum 1 + 2 + ... + n. The upper bound, previously hard-coded
    to 10, is now a parameter defaulting to 10 (backward compatible).
    """
    i = 0
    sum_f = 0
    while i < n:
        i += 1
        sum_f = sum_f + i
        print(i)
    return sum_f

t = fbnq()
print(t) | felixPEK/q1 | day6_ex8.py | day6_ex8.py | py | 195 | python | en | code | 0 | github-code | 13 |
15458690383 | from node import Node
import xml.etree.ElementTree as ET
import random
class BayesNet:
    """Bayesian network built from an XML description; supports approximate
    inference via likelihood weighting."""

    def __init__(self, source):
        """Build the node list from an ElementTree whose children describe
        nodes via id/name/parent/probability fields."""
        self.nodes = []
        root = source.getroot()
        for node in root:
            newNode = Node()
            for field in node:
                if field.tag == 'id':
                    newNode.set_id(field.text)
                elif field.tag == 'name':
                    newNode.set_name(field.text)
                elif field.tag == 'parent':
                    newNode.set_parent(field.text)
                elif field.tag == 'probability':
                    newNode.set_probability(field.attrib, field.text)
            self.nodes.append(newNode)

    # Estimate P(X|e) based on N samples
    def likelihood_weighting(self, X, e, N):
        """Estimate P(X | e) from N weighted samples.

        FIX: the previous implementation counted matching samples with
        weight 1 and divided by N, which ignores the likelihood weights
        entirely. Likelihood weighting requires summing the sample weights
        of events where X holds and normalising by the total weight.
        """
        matched_weight = 0.0
        total_weight = 0.0
        for _ in range(N):
            x, w = self.weighted_sample(e)
            total_weight += w
            if X in x:
                matched_weight += w
        if total_weight == 0:
            return 0.0
        return matched_weight / total_weight

    def weighted_sample(self, e):
        """Draw one sample consistent with the evidence e.

        Returns (x, w): x lists each node's sampled state (its id for true,
        its negated id for false) and w is the likelihood weight of the
        evidence under the sampled parent states.
        """
        x = []
        w = 1.0
        for node in self.nodes:
            # Check whether this node is part of the evidence.
            is_evidence, node_state = node.is_evidence(e)
            if is_evidence:
                # Evidence variables are fixed; fold their likelihood into w:
                # w = w * P(currentNode | parents of currentNode).
                if node.has_parents():
                    # NOTE(review): this conditions on node.parents (the raw
                    # parent id list) rather than the states sampled into x —
                    # confirm that is the intended behavior.
                    w = w * node.get_probability(node_state, node.parents)
                else:
                    w = w * node.get_probability(node_state)
                x.append(node.id if node_state else node.negated_id())
            else:
                # Not evidence: sample the node given its (already sampled)
                # parents' states.
                if node.has_parents():
                    given_parents = []
                    for parent in node.parents:
                        if parent in x:
                            given_parents.append(parent)
                        else:
                            neg = "".join(['-', parent])
                            if neg in x:
                                given_parents.append(neg)
                    p_true = node.get_probability(True, given_parents)
                else:
                    p_true = node.get_probability(True)
                x.append(node.id if self._sample_true(p_true) else node.negated_id())
        return x, w

    @staticmethod
    def _sample_true(p_true):
        """Return True with probability p_true."""
        random.seed()
        rng = random.uniform(0, 1.0)
        return rng <= p_true
| khanhfumaster/comp3308_assignment2 | part4/classes/bayes_net.py | bayes_net.py | py | 2,147 | python | en | code | 0 | github-code | 13 |
26512131620 | '''
Faça um programa que leia um ângulo qualquer, e mostre na tela o seu seno, cosseno e tangente
'''
from math import sin, tan, cos, radians
ang = float(input("Digite o ângulo que você deseja saber: "))
# FIX: keep the angle in degrees for display and convert to radians only for
# the trig functions. Previously `ang` itself was overwritten with its radian
# value, so the "angle" shown in the output message was wrong.
rad = radians(ang)  # sin/cos/tan expect radians
sen = sin(rad)
coss = cos(rad)
tang = tan(rad)
print("O seno do ângulo {:.2f}º é {:.2f}º, o cosseno é {:.2f}º, e a tangente é {:.2f}º"
      .format(ang, sen, coss, tang))
| MLucasf/PythonExercises | ex018.py | ex018.py | py | 490 | python | pt | code | 0 | github-code | 13 |
11509894019 |
# coding: utf-8
# In[1]:
import pandas as pd
# In[32]:
class ndist:
    """Normalized Euclidean distance: both vectors are z-scored (using
    pandas' sample standard deviation, ddof=1) before the distance is taken.

    d(p,q) = sqrt(((p1-q1)**2) + ((p2-q2)**2) + ... + ((pn-qn)**2))

    FIX: the methods were plain functions inside a class body (no `self`,
    no decorator), which only worked when called on the class itself; they
    are now proper @staticmethods, so instance access works too. The
    duplicated per-column standardization code is folded into one loop.
    """

    @staticmethod
    def standardize_col(col, mean, sd):
        """Z-score a value/column: (value - mean) / sd."""
        return (col - mean) / sd

    @staticmethod
    def dataframe(p, q):
        """Return a DataFrame with both vectors z-scored and the squared
        element-wise differences (columns: p_normed, q_normed, sq_distances)."""
        data_frame = pd.DataFrame({'p': p, 'q': q})
        for col in ('p', 'q'):
            mean = data_frame[col].mean()
            sd = data_frame[col].std()  # sample std (ddof=1)
            data_frame[col] = data_frame[col].apply(ndist.standardize_col,
                                                    args=[mean, sd])
        data_frame['sq_distances'] = (data_frame['p'] -
                                      data_frame['q'])**2
        data_frame.columns = ['p_normed', 'q_normed', 'sq_distances']
        return data_frame

    @staticmethod
    def distance(p, q):
        """Euclidean distance between the z-scored vectors."""
        from math import sqrt
        return sqrt(ndist.dataframe(p, q)['sq_distances'].sum())

    @staticmethod
    def dimensions(p, q):
        """Number of dimensions (rows) of the paired vectors."""
        return len(ndist.dataframe(p, q))
# In[43]:
# Example usage (bare expressions: these only display output in a notebook;
# in a plain script they are no-ops).
ndist.dataframe([5,4,9],[18,11,12])
# In[44]:
ndist.distance([5,2,9],[5,8,1])
# In[45]:
ndist.dimensions([5,2,9],[5,8,1])
| adamrossnelson/distances | archive/ndist_Jan_28_2019.py | ndist_Jan_28_2019.py | py | 1,382 | python | en | code | 1 | github-code | 13 |
37970547080 | from collections import Counter
input_str = "..."
adapters = [0] + sorted(int(line) for line in input_str.split("\n"))
adapters.append(max(adapters) + 3) # Add phone adapter
# Part 1
jolt_differences = Counter(
[adapters[i] - adapters[i - 1] for i in range(1, len(adapters))]
)
print(
f"1-jolt differences: {jolt_differences[1]}.\n"
f"3-jolt differences: {jolt_differences[3]}.\n"
f"Product: {jolt_differences[1]*jolt_differences[3]}."
)
# Part 2
# Each element of this array represents in how many ways we can arrive at adapter[i]
in_paths = [0] * len(adapters)
in_paths[0] = 1
for i in range(0, len(adapters) - 1):
in_paths[i + 1] += in_paths[i]
if i + 2 < len(adapters) and (adapters[i + 2] - adapters[i]) < 4:
in_paths[i + 2] += in_paths[i]
if i + 3 < len(adapters) and (adapters[i + 3] - adapters[i]) < 4:
in_paths[i + 3] += in_paths[i]
print(f"Number of valid combinations of the adapters: {in_paths[-1]}")
| glumia/advent_of_code_2020 | day10.py | day10.py | py | 964 | python | en | code | 0 | github-code | 13 |
14600502464 | # -*- coding: utf-8 -*-
"""
Creates the name_database which provides access to a database of character names.
Classes:
name_database
"""
import sys
import dice
import trace_log as trace
# Extend the import path two levels up so sibling packages resolve.
sys.path.append('../../')
class NameDatabase:
"""
The database of character names, sorted by race and gender.
Methods:
__init__(self)
init_name_database(self)
get_races(self)
set_race(self, race)
get_num_males(self)
get_num_females(self)
get_male(self)
get_female(self)
get_either(self)
"""
def __init__(self):
trace.entry()
dice.randomize()
self.name_database = {}
self.init_name_database()
self.set_race("Beffraen")
self.males = []
self.females = []
self.current_race = ""
trace.exit()
    def init_name_database(self):
        """
        Set up the contents of the name database.

        Maps each race name to a two-element list: index 0 holds the male
        names and index 1 holds the female names (either list may be empty
        for some races).
        """
        self.name_database = {
            "Beffraen":
            [
                ["Draeg", "Gabran", "Maelgym", "Sereccan", "Shelbym", "Sherl", "Tegid"],
                ["Boghan", "Deira"]
            ],
            "Beorning":
            [
                ["Beoraborn", "Beorn", "Beornan", "Braig", "Egil", "Freobeort", "Grimbeorn",
                 "Grimbold", "Hallan", "Hethlind", "Imlahir", "Raudabern"],
                []
            ],
            "Black Numenorean":
            [
                ["Adûmir", "Aduntarik", "Akallazor", "Akbulkathar", "Alukhôr", "Ancantar",
                 "Anglach", "Arkhâd", "Armeirtän", "Arolic", "Athrazoc", "Balzathor", "Borathôr",
                 "Bragolmaitë", "Camdir", "Cyrmeirmûr", "Desinôr", "Durac", "Durbaran", "Eärantar",
                 "Falmar", "Fältur", "Fëagwath", "Fuinur", "Gastmorgath", "Gimilkhâd", "Gimilkhor",
                 "Gimilzôr", "Gulthuin", "Härderin", "Herudur", "Herumor", "Imralion", "Imrazim",
                 "Imrazôr", "Ingar", "Kaldûrmeir", "Khoradûr", "Krûsnak", "Leärdinoth", "Maben",
                 "Mireädur", "Morarthdur", "Morlammen", "Naldûrgath", "Nimruzagar", "Nûrmir",
                 "Oric", "Peldûr", "Pharaphion", "Phorakôn", "Pon Acark", "Raenar", "Sakal",
                 "Sakaladun", "Sakalthôr", "Sangarunya", "Sargan", "Seregul", "Tarfuluth",
                 "Teldûmeir", "Telicur", "Tharadoc", "Tredûrith", "Tredûrmerith", "Tûl-mir",
                 "Tulôr", "Ukandar", "Uthcû", "Valgavia", "Vilmûr", "Wyatan", "Zimrathôn",
                 "Zimtarik", "Zokhad", "Zuxzuldûr"],
                ["Akûrarii", "Araphor", "Bävire", "Beruthiel", "Eädur", "Eläemir", "Inzilbêth ",
                 "Miruimor", "Mûrabeth", "Rozilan", "Telerien", "Zimraphel"]
            ],
            "Dorwinadan":
            [
                ["Arcatia", "Ballin", "Baradi", "Behhrin", "Bendretta", "Cardily", "Davmps",
                 "Dolwin", "Drel", "Drusso", "Dudannis", "Forlet", "Garth", "Garvanon", "Gollo",
                 "Handel", "Jorga", "Kiral", "Korl", "Lada", "Legios", "Mikel", "Morse",
                 "Noralda", "Rambal", "Rant", "Rencil", "Rof", "Sethrian", "Tavlo", "Tieran",
                 "Vosca"],
                ["Biarda", "Ciarda", "Fregia", "Gilyn", "Julia", "Nenladil"]
            ],
            "Dunadan":
            [
                ["Aderil", "Adrazôr", "Aeghan", "Aegnor", "Aerandir", "Aervellon", "Agonar",
                 "Aladil", "Alagarn", "Alandur", "Aldamir", "Aldúrin", "Allurac", "Amadar",
                 "Amandil", "Amaron", "Amarthion", "Amdir", "Amferen", "Amlaith", "Amondil",
                 "Amrod", "Amrohir", "Amtaur", "Anaras", "Anardil", "Anarond", "Anborn", "Andril",
                 "Angbor", "Angelimar", "Angon", "Angrim", "Annoras", "Anvelig", "Aradan",
                 "Aradil", "Arador", "Araglas", "Arahâd", "Aranarth", "Aranel", "Araval", "Aravir",
                 "Arcambion", "Arcle", "Arcondur", "Ardehir", "Arfanhil", "Arganil", "Arinethir",
                 "Arlaith", "Arlend", "Arûkhor", "Atano", "Aurandir", "Avram", "Balan", "Balcam",
                 "Ballath", "Barach", "Baragin", "Baragund", "Barahir", "Baramor", "Baranfindel",
                 "Baranor", "Barendil", "Barfindil", "Belchamion", "Belcthir", "Belechael",
                 "Belechul", "Belecthor", "Beleg", "Belegdur", "Belegorn", "Belegund", "Beletar",
                 "Belvor", "Beran", "Berdil", "Beregar", "Beregond", "Beregor", "Beren",
                 "Berendúr", "Beretar", "Bergil", "Bergrand", "Berillan", "Betheal", "Bondan",
                 "Boranas", "Borandil", "Boranglim", "Borgil", "Boromir", "Borondir", "Brandir",
                 "Breglor", "Bregol", "Bregolas", "Brethil", "Brethildur", "Caerlinc", "Caldamir",
                 "Calendur", "Calimehtar", "Calimmacil", "Calimon", "Calion", "Calmacil",
                 "Calvellon", "Camagal", "Camallin", "Cambre", "Camdir", "Camlan", "Camlin",
                 "Caraglin", "Caramir", "Carandae", "Carandor", "Caranthir", "Carnendil",
                 "Carradar", "Cebervoth", "Celarin", "Celdrahil", "Celebdur", "Celebrindor",
                 "Celefarn", "Celefaroth", "Celepharn", "Celephir", "Cemendur", "Ceren", "Cimrion",
                 "Ciramir", "Círdus", "Cirion", "Ciryaher", "Ciryang", "Ciryatur", "Ciryon",
                 "Coratar", "Cormacar", "Cospatric", "Cristion", "Cuimarion", "Curistel",
                 "Curmegil", "Curudur", "Daeron", "Daeros", "Dagnir", "Dairos", "Dairuin",
                 "Damrod", "Darion", "Daroc", "Daroín", "Denethor", "Derufin", "Derulin",
                 "Dervorin", "Desirin", "Dindal", "Dintur", "Dior", "Direvel", "Dírhael",
                 "Dírhavel", "Dongorath", "Doramir", "Dorandrand", "Dorias", "Dorien", "Dorrin",
                 "Drégon", "Dregorsgil", "Dromil", "Duilin", "Duinhir", "Dunsûl", "Dúraladh",
                 "Duranil", "Durbil", "Durgin", "Durvar", "Éanfled", "Eärbaldol", "Eardil",
                 "Eärdil", "Eärnil", "Eärnur", "Echorion", "Ecthelion", "Edhelion", "Edrahil",
                 "Egalmoth", "Egulë", "Ekuris", "Elatar", "Eldacar", "Eldahil", "Eldamir",
                 "Elenaerion", "Elendin", "Elvir", "Encalion", "Eradan", "Erador", "Erchamion",
                 "Erchvir", "Erdil", "Eregdur", "Erelion", "Erellont", "Erestor", "Erhuan",
                 "Erthil", "Ervegil", "Ervithdin", "Esgaldor", "Estel", "Estelmo", "Everithil",
                 "Falastir", "Falathar", "Falather", "Falmathil", "Faltar", "Faltur", "Fanuidhol",
                 "Faradon", "Farahail", "Faramir", "Farandir", "Farnithain", "Fergerin",
                 "Findamir", "Findegil", "Finglin", "Finlong", "Finralin", "Finrod", "Fondil",
                 "Fordelin", "Forodil", "Fuindil", "Fuinur", "Gabbon", "Galadhrion", "Galadrahil",
                 "Galdor", "Galwë", "Gamallin", "Gathdîn", "Gedron", "Geiri", "Gelmir", "Gethron",
                 "Gevas", "Giladan", "Gilcúdor", "Girion", "Gloredhel", "Glorfindel", "Golantir",
                 "Golasgil", "Gontran", "Gonvegil", "Gordacar", "Gorlim", "Goromil", "Guldúmir",
                 "Gundor", "Gwathvoron", "Gwindor", "Haddil", "Hador", "Haedric", "Halach",
                 "Halamir", "Halbarad", "Haldan", "Haldarion", "Halgon", "Hallacar", "Hallas",
                 "Handir", "Harmadil", "Hathol", "Hearon", "Helvorn", "Henderch", "Herion",
                 "Herluin", "Heruvorn", "Hieryan", "Hiraew", "Hirgon", "Hirluin", "Hunthor",
                 "Huor", "Hurin", "Hydril", "Iderion", "Idrazor", "Imkel", "Imlach", "Imrahad",
                 "Imrahâd", "Imrahil", "Ingold", "Ingwë", "Inhael", "Intorin", "Ionel", "Iovin",
                 "Irdaal", "Irdamir", "Irhalmir", "Irmion", "Isildur", "Ithildir", "Ithilrain",
                 "Kelvarguin", "Kirvin", "Lhachglin", "Lindal", "Lith", "Lórin", "Lorindol",
                 "Lotharion", "Luinil", "Luthien", "Mablung", "Maeglin", "Magor", "Malbeth",
                 "Malborn", "Malcam", "Malfinwë", "Malion", "Mallach", "Mallin", "Mallindor",
                 "Mallor", "Malloth", "Malvagor", "Malvegil", "Marach", "Marados", "Marahil",
                 "Mardil", "Marendil", "Marmedon", "Meladorn", "Meldin", "Melforn", "Melrandir",
                 "Meneldil", "Meneldir", "Meneldor", "Menelmir", "Mengron", "Merembeleg", "Mereth",
                 "Methillir", "Minalcar", "Minastir", "Mindacil", "Minohtar", "Mirenil", "Mîrkano",
                 "Mirnidar", "Monach", "Moradan", "Morchaint", "Mordulin", "Morgalad", "Morvagor",
                 "Nadhaim", "Narmacil", "Naurdil", "Neithan", "Neldorn", "Nerumir", "Nimengel",
                 "Nimhir", "Nimír", "Nimrilien", "Nimroch", "Niniel", "Ninko", "Nísi", "Nomrel",
                 "Odo", "Odonil", "Ondoher", "Opperith", "Orbragol", "Orchaldor", "Orinas",
                 "Ormendel", "Orodreth", "Ostoher", "Oswy", "Othirhan", "Oxrandir", "Palandal",
                 "Palandar", "Palandir", "Palarcam", "Palomire", "Parigan", "Parmandil",
                 "Pelendur", "Perion", "Perorren", "Pilinnur", "Piréna", "Poddit", "Portnithor",
                 "Quenandil", "Queneldor", "Quiacil", "Radhruin", "Ragnir", "Ragnor", "Randi",
                 "Rathumus", "Rendail", "Revorn", "Rhovanhen", "Rhovannin", "Rhukar", "Rincar",
                 "Ringlin", "Ringmir", "Ringór", "Rinhil", "Roane", "Rocúrion", "Rodhel",
                 "Roensen", "Roginor", "Romer", "Romin", "Ronindil", "Ruinir", "Saerol",
                 "Sarvelich", "Seregdal", "Shakhôr", "Solorion", "Sorondothor", "Sûlistar",
                 "Surion", "Taladhan", "Talathorn", "Tamir", "Tarannon", "Tarassar", "Tarbeth",
                 "Tardegil", "Tarfil", "Targon", "Tarhad", "Tarminion", "Tarquillan", "Tauron",
                 "Teiglor", "Telchrist", "Telegorn", "Telemnar", "Telethal", "Telumehtar",
                 "Tensidir", "Tergon", "Terieth", "Terision", "Thalion", "Thireny", "Thorondir",
                 "Thorongil", "Thorûth", "Tigon", "Tillórin", "Tiranir", "Tirazôr", "Tirgil",
                 "Tirion", "Tirwin", "Tolodin", "Tonekil", "Túan", "Tuminir", "Tuor", "Turgon",
                 "Turin", "Turjomil", "Ulbar", "Urthel", "Uthrin", "Vagaig", "Valacar", "Valadan",
                 "Valandil", "Vardamavi", "Vardamir", "Veantur", "Vëantur", "Verylen", "Viliarith",
                 "Vilyatir", "Vinyaran", "Voromir", "Vorondil", "Voronthor", "Warris", "Zamin",
                 "Zardellum"],
                ["Aelindur", "Aeriel", "Aerin", "Aerinel", "Almiel", "Alquawen", "Anóriel",
                 "Áraliniel", "Aranwen", "Arienwen", "Arisiel", "Astrith", "Belewen", "Bellaniel",
                 "Bessandis", "Caladwen", "Calamere", "Caliel", "Calime", "Camarina", "Caraniel",
                 "Celebriel", "Celeserwen", "Colmorwë", "Corrian", "Darana", "Dinturien",
                 "Edhetariel", "Eldiriel", "Ellothiel", "Elmericel", "Elosian", "Emeldir", "Emerie",
                 "Erendis", "Erennis", "Esteliel", "Ethudil", "Faivë", "Fanuilë", "Fimalcá",
                 "Finduilas", "Finriel", "Firiel", "Gaerwen", "Galina", "Galwen", "Gamin", "Gillan",
                 "Gilmith", "Gilorwen", "Gilraen", "Gilronwen", "Gilweth", "Glíwen", "Gysiel",
                 "Haedoriel", "Helluin", "Hiraew", "Idril", "Ilmarë", "Imberin", "Imisiel", "Jin",
                 "Kireil", "Laurelach", "Lessith", "Lorin", "Lothiriel", "Lothíriel", "Lúthien",
                 "Lyana", "Malloriel", "Melian", "Melindwen", "Melloriel", "Melmereth", "Meriel",
                 "Merien", "Mindiel", "Míraladhwen", "Míriel", "Mírien", "Morwen", "Nariel",
                 "Nartheliel", "Nassiel", "Neldoriel", "Nienor", "Niniel", "Níriel", "Ólanwen",
                 "Pelenwen", "Pelewen", "Perlothiel", "Rophirë", "Rosíthil", "Rûmenna", "Saranelda",
                 "Seregonwen", "Serinde", "Serindë", "Sernesta", "Silivrien", "Silmarien",
                 "Silrien", "Sirith", "Sondinwë", "Sulimith", "Sulinwë", "Súlinwë", "Talfannan",
                 "Telperien", "Tollanwen", "Túriel", "Vanána", "Voronwë", "Wintila", "Yendílwë"]
            ],
            "Dunlending":
            [
                ["Adeyn", "Aidhan", "Albaraich", "Amithol", "Amthol", "Ancú", "Anduinil", "Aonghas",
                 "Arleg", "Baga", "Barnur", "Belligel", "Berma", "Beul", "Bheil", "Blaith",
                 "Bomaynee", "Borar", "Borkul", "Borru", "Brego", "Bruad", "Cadwallon",
                 "Cagh Monûnaw", "Cairmach", "Calach", "Calmuad", "Canth", "Cartmel", "Ceasgair",
                 "Cenne", "Cern", "Chulainn", "Cies", "Cinard", "Cisid", "Cluad", "Clúan", "Clyn",
                 "Cober", "Coel", "Coelmun", "Coeshay", "Crennen", "Cú", "Cuag", "Cunnat",
                 "Daonghlas", "Darnic", "Dervorin", "Drualphien", "Dumfa", "Dunadd", "Durth",
                 "Eagan", "Easgan", "Ebbo", "Edallaigh", "Eion", "Elharian", "Enion", "Eskerzen",
                 "Feannan", "Feirr", "Feldas", "Feundig", "Fimran", "Fiorel", "Foskat", "Furish",
                 "Furn", "Furth", "Fwen", "Fyn", "Gaoth", "Gariac", "Glurin", "Gorgan", "Gov",
                 "Guik", "Haedrec", "Harec", "Hasso", "Hoegwar", "Iarlless", "Iestin", "Illtud",
                 "Jerl", "Jeroibha", "Josherë", "Kasselrim", "Khathog", "Kurf", "Kurna", "Lanaigh",
                 "Larth", "Llwei", "Lumban", "Magone", "Marroc", "Maschbram", "Meórag", "Merro",
                 "Mert", "Moctallan", "Morthec", "Naum", "Nidd", "Nig", "Nudan", "Oravai",
                 "Oravarri", "Orcare", "Orvig", "Osgan", "Pad", "Padrey", "Pesc", "Pureneir",
                 "Ragi", "Raltin", "Raonull", "Ries", "Riscen", "Roggowen", "Rovik", "Rulart",
                 "Saddro", "Scammar", "Scoel", "Seammu", "Seinacaid", "Shoglic", "Sibroc", "Smardo",
                 "Sogran", "Solofhen", "Sult", "Surnir", "Suvac", "Talegi", "Telleman", "Tfalz",
                 "Thebo", "Thirrio", "Thrangir", "Torac", "Torifal", "Trmac", "Tughaibh", "Ulf",
                 "Uner", "Urchoid", "Urdrek", "Urlaglin", "Vagibreg", "Varen", "Voisiol", "Werlar",
                 "Wuftana", "Wulf"],
                ["Anrea", "Brennan", "Cea", "Cila", "Derna", "Derra", "Egwar", "Eribhen",
                 "Fecandra", "Feorna", "Haeldwyn", "Ishel", "Measgan", "Merwai", "Reghian", "Retha",
                 "Riadégha", "Sionnach", "Sudha", "Tanray", "Tughaib", "Urganna", "Uthanna",
                 "Ygana"]
            ],
            "Easterling":
            [
                ["Adajo", "Aegach", "Akonid", "Ariks", "Arkish", "Baleksar", "Barakat", "Bassan",
                 "Belechor", "Bór", "Borhan", "Borlach", "Borlad", "Borthand", "Braith", "Carfe",
                 "Celgor", "Chukka", "Demik", "Dudannis", "Dûmra", "Esseu", "Ethacali", "Ethem",
                 "Evit", "Geer", "Gol Makov", "Gorin", "Gorion", "Gozef", "Grallon", "Grasty",
                 "Grimling", "Guton", "Heludar", "Hord", "Hos", "Huil", "Hungh", "Huskash", "Huz",
                 "Hûz", "Jukath", "Jyaganoth", "Jyganoth", "Karamar", "Katrisel", "Kav",
                 "Kav Gorka", "Keemac", "Koumiss", "Lorgam", "Lorthand", "Mahto", "Meonid",
                 "Mercaver", "Mhôrlen", "Mungrod", "Neburcha", "Nevido", "Orash", "Ormar",
                 "Rechorca", "Rof", "Senzal", "Shabun", "Shakal", "Skauril", "Slovas", "Thuram",
                 "Tinta", "Tros", "Uldor", "Ulfang", "Ulfast", "Ulwarth", "Urdrath", "Vacros",
                 "Varchaz", "Vrak", "Wiliaruk", "Wiliatun", "Wiliazen", "Zorab"],
                ["Gisela", "Jasala", "Kadida", "Shanva", "Totila", "Tuva"]
            ],
            "Eriadoran":
            [
                ["Aelfred", "Aescstan", "Aethelan", "Algen", "Amalin", "Amplac", "Andril", "Aranas",
                 "Arondil", "Arteveld", "Atano", "Aysteac", "Bail", "Barahdell", "Barkwell",
                 "Barliman", "Beregond", "Bernar", "Bethlam", "Bill", "Bob", "Boinand", "Bondan",
                 "Braith", "Burthurin", "Carmil", "Carnion", "Cerwiff", "Cethwin", "Chal",
                 "Chapster", "Chiarold", "Cobman", "Colfen", "Cormac", "Cormacar", "Curudur",
                 "Curuvegil", "Cuthan", "Dagobert", "Dairuin", "Delrin", "Derelon", "Dethor",
                 "Dirkal", "Dongorath", "Dórmir", "Drake", "Drun", "Duffy", "Duoveris Cleg",
                 "Eamir", "Edrec", "Eliver", "Emendil", "Emerdan", "Envir", "Eowic", "Eratil",
                 "Ergrem", "Eríbhen", "Erig", "Erling", "Euric", "Farrenar", "Feinhíril", "Feld",
                 "Feldas", "Fimarn", "Firdok", "Forlong", "Fwen", "Gaem Wulsen", "Galun", "Gellain",
                 "Gendar", "Grethor", "Gulstaff", "Guntar", "Halbered", "Halfast", "Hallas", "Ham",
                 "Haren", "Harluinar", "Harran", "Harry", "Haver", "Herucalmo", "Hiiri", "Hiri",
                 "Hobson", "Hujai", "Ibal", "Irurn", "Jeirn", "Jeshan", "Kellir", "Korbrild",
                 "Kuball", "Laifrin", "Lamarod", "Lamril", "Len", "Lengha", "Lhachglin", "Limlach",
                 "Lindorië", "Lith", "Lorgas", "Lóthand", "Luinand", "Lúvagor", "Mablung", "Mag",
                 "Mallick", "Marluk", "Mat", "Michl", "Moff", "Nasen", "Navir", "Oget", "Olby",
                 "Orchaldor", "Ornil", "Pate", "Pegmar", "Pilkun", "Pultar", "Purdin", "Purth",
                 "Rannor", "Ravambor", "Rigdarabin", "Rory", "Rubb", "Rudiger", "Ruem Laer",
                 "Runnal", "Rush", "Sarkar", "Shebrim", "Shiril", "Sigmar", "Sisebuth", "Sovorn",
                 "Surk", "Svinder", "Talmabrith", "Tarhad", "Telemnar", "Telethal", "Theave",
                 "Thelgrom", "Thramir", "Thuidimer", "Tilmarin", "Tirrin", "Tolman", "Tregon",
                 "Tuggle", "Turgarin", "Turibor", "Turoth", "Turumir", "Ulgar", "Vengaree", "Virin",
                 "Virloch", "Welar", "Werlard", "Will", "Wilrith", "Zarby"],
                ["Angrid", "Aysteas", "Barelwen", "Bereth", "Bredda", "Brithwen", "Bura", "Cora",
                 "Currael", "Daisy", "Deniel", "Effie", "Egale", "Eilwen", "Elisa", "Eriel",
                 "Hannei", "Hiriel", "Holly", "Idris", "Iriel", "Kerit", "Maisy", "Mirrin",
                 "Morwen", "Murryelle", "Rellin", "Severtha", "Sirrin", "Tempi", "Torendra"]
            ],
            "Haradan":
            [
                ["Abdahkil", "Ablish", "Amrukh", "Arcil", "Aru-Sûm", "Ashdam", "Barthanan",
                 "Belphegor", "Bidash", "Bläs", "Brom", "Carlon", "Carnen Mek", "Casarac", "Cluth",
                 "Cudûma", "Culcamalin", "Dabadda", "Dejyk", "Del Imat", "Derei", "Dulish", "Dulo",
                 "Epef", "Es-amu", "Esfur", "Eshefar", "Flerit Klorin", "Garlan Det", "Gimmin",
                 "Hamid", "Harath do Ramam", "Harij", "Harith", "Ikûr", "Jaeru", "Jalib",
                 "Jamak Spijun", "Jarnir", "Kalatar", "Karaag", "Karamac", "Karan", "Klú Relortin",
                 "Krinda", "Kub Nara", "Kuran", "Leizha", "Ló-desmic", "Lonkuran", "Machun",
                 "Malezar", "Manari", "Matsûm", "Merul", "Min Oturna", "Moraiza Satark", "Nahir",
                 "Nard", "Ne-baraca", "Neddet", "Ne-eslem", "Ne-ula", "Ne-upka", "Ne-wull",
                 "Nomikon", "Obed", "Oeren", "Ombûr", "Orbul", "Orcir", "Ordun Halbor", "Orf Tello",
                 "Ormul", "Ossim", "Padua Bar", "Paji", "Par Shetti", "Pathan", "Pernelion",
                 "Peshtin", "Pon Olarti", "Pon Opar", "Pujist Din", "Qesset", "Rhavas", "Ricenaris",
                 "Runeren", "Sakur do 'Akil", "Samaub Narett", "Sazar Parn", "Sen Jerrek",
                 "Shardoz", "Shebbin Vûr", "Simbu", "Slú Carlon", "Slûcrac", "Sofan do Sofan",
                 "Sokol Sova", "Suljati Sey", "Tabaya Kas", "Tahar do Sakur", "Tarkas",
                 "Tartas Izain", "Tel Azef", "Tennith Borbul", "Terendil", "The Gusar", "The Póa",
                 "The Pust", "Tolodin", "Tónn", "Tor Mitari", "Ulaca", "Ulcathur", "Ulfacs",
                 "Ulrith", "Uma", "Umbin Swé", "Vamman Carl", "Wimbur", "Wote", "Yezmin",
                 "Yud do Sarsor", "Zäde", "Zimrakhil", "Zumman"],
                ["Arza sut Timman", "Bethin Omul", "Ebarthon", "Emuna", "Jefya", "Leriaj", "Lesjia",
                 "Lyli", "Nurna", "Shabla", "Shamara sut Katub", "Thena", "Tiena"]
            ],
            "Hillman":
            [
                ["Bragha", "Bram-op-Bran", "Bregg", "Brend", "Briam", "Broggha", "Jo-nag", "Krennt",
                 "Llewen", "Llewt", "Mong-Finn", "Mon-raggh", "Movran", "Nagwech", "Nalle",
                 "Paddro", "Seammu", "Sispar", "Twi Righa", "Twi Twir", "Vennolandua", "Wistan"],
                ["Ap-Brigg", "Ap-Coleen"]
            ],
            "Lossoth":
            [
                ["Chosum", "Culnun", "Daled", "Frannard", "Grimk", "Gromk", "Iltatuuli",
                 "Karhunkäsi", "Lufsen", "Lumipallo", "Nuorilintu", "Pitää", "Puolikarhu",
                 "Trimani", "Yhedksän"],
                ["Pieni", "Punakäsi", "Sadenainen", "Sinipilvi", "Unisoturi", "Vanha"]
            ],
            "Northman":
            [
                ["Aelfred", "Aelfric", "Agiluf", "Alboin", "Albwini", "Aldaric", "Aldhelm", "Aldor",
                 "Aldoric", "Alfraits", "Alfward", "Antharis", "Aradacer", "Asgaric", "Atagavia",
                 "Athugavia", "Audarik", "Augimund", "Bain", "Baldor", "Balg", "Bancadan", "Bard",
                 "Baric", "Barlof", "Baumyakund", "Beadarof", "Bemyakund", "Beneric", "Beortnov",
                 "Bonigild", "Brach", "Braegla", "Brand", "Breagla", "Sarador", "Brego",
                 "Tharendin", "Targon", "Breor", "Brëor", "Breorh", "Brocking", "Broehir",
                 "Haurian", "Brogdin", "Brug", "Brytta", "Buhrgavia", "Carloman", "Caviltar",
                 "Ceorl", "Charibert", "Chilperic", "Brenith", "Chlodomir", "Chlotar", "Cilis",
                 "Corl", "Cormac", "Harmandil", "Daef-Udra", "Daelgild", "Dagobert", "Dani",
                 "Darian", "Dartel", "Demarii", "Déor", "Déorwine", "Dieraglir", "Dorvic",
                 "Drafend", "Druhtiridya", "Drukka", "Dúnhere", "Dunnarth", "Earm", "Ebroin",
                 "Edorhil", "Edwodyn", "Ehwarik", "Elfhelm", "Ellollen", "Eloric", "Eniad",
                 "Eoaric", "Éoder", "Éodoric", "Éofor", "Éomund", "Eorein", "Eormenlic", "Éothain",
                 "Erkenbrand", "Eudail", "Falryen", "Fastred", "Fengel", "Fennric", "Fidoric",
                 "Folca", "Folcred", "Folcwine", "Folgar", "Folric", "Fornagath", "Forthwini",
                 "Fram", "Fréa", "Freaga", "Fréahár", "Fréaláf", "Fréalóf", "Fréalor", "Freamund",
                 "Fréawine", "Freca", "Fréga", "Froedhir", "Frumgar", "Fryancryn", "Fylaric",
                 "Galariks", "Galmod", "Gamling", "Gardagd", "Gartila", "Gárulf", "Gerse", "Girion",
                 "Gléowine", "Glyorivia", "Gnorn", "Goldwine", "Gordai", "Gorghiric", "Goshafoc",
                 "Gram", "Gríma", "Grimabalth", "Grimbold", "Gripa", "Gristlung", "Gudrinc",
                 "Guidariks", "Guntram", "Gurth", "Guthláf", "Guthwin", "Gwyn", "Haed", "Halfred",
                 "Háma", "Harding", "Heleder", "Helin", "Hemming", "Herefara", "Herewulf", "Herion",
                 "Herubrand", "Hestan", "Heth", "Hilderinc", "Hilman", "Hoeg Cuerd", "Hofding",
                 "Holting", "Horn", "Hréowalda", "Hrothgar", "Huc", "Hundin", "Hurm", "Hwaetrinc",
                 "Hygegrim", "Jerriad", "Jiord", "Kennit", "Konnul", "Korlin", "Krulla", "Kynoden",
                 "Lain", "Lann", "Lanning", "Lefwin", "Léod", "Léodurth", "Leovigild", "Léovric",
                 "Leowin", "Liam", "Ligrador", "Linlocc", "Lisgaria", "Lorril", "Lorthis",
                 "Maecwin", "Maethelgar", "Mahrbrand", "Mahrcared", "Mahrwini", "Marach", "Marhari",
                 "Marhcared", "Marhwini", "Marlo", "Matorn", "Merovech", "Mikilarn", "Nial",
                 "Nithya", "Odagavia", "Odavacer", "Ogar", "Orduclax", "Osric", "Oterics",
                 "Otogorth", "Pathirad", "Penda", "Pepin", "Portik", "Raendoric", "Rello",
                 "Rognachar", "Romauld", "Rotaris", "Saewic", "Saewulf", "Sahail", "Sallan", "Saym",
                 "Scoderath", "Sculding", "Shinrinc", "Sigfast", "Sigiswulf", "Sigwerd", "Sorandil",
                 "Sunlending", "Swertling", "Swithwulf", "Taraim", "Thal Éolsen", "Thandrain",
                 "Thang", "Thelas", "Thenesleag", "Théodolinda", "Théodwine", "Theoren", "Therge",
                 "Thiudawini", "Thordil", "Thorlavan", "Thuidimer", "Tonfall", "Tosti", "Uirdriks",
                 "Ulno", "Ulred", "Umbor", "Uphelb", "Utlash", "Valdor", "Vandorag", "Vellser",
                 "Vergandrieg", "Viclaf", "Vidugavia", "Vidurafin", "Vidustain", "Vilorc",
                 "Viloric", "Vogir", "Volaf", "Vormenric", "Vracoth", "Waggeorn", "Wakr",
                 "Walvoric", "Waulcho", "Waulrics", "Weriúch", "Wídfara", "Widuhund", "Wiglaf",
                 "Wistan", "Witbert", "Wolwin", "Wuilaric", "Wulf", "Wulfr", "Wuthgild", "Yarri",
                 "Ymp", "Ynarri"],
                ["Aldora", "Anni", "Ariberta", "Aud", "Beawyn", "Béawyn", "Béotta", "Blosoma",
                 "Bogatung", "Borgenda", "Brinwica", "Bronwyn", "Brunehaut", "Dunheuet", "Elfhild",
                 "Éothwyn", "Eudesuntha", "Flana", "Flota", "Forwen", "Fréawyn", "Fredegonde",
                 "Freowyn", "Fréowyn", "Fulda", "Gaervicca", "Gelda", "Gelmir", "Gretta",
                 "Gudelinda", "Haleth", "Hild", "Hildegripa", "Illinith", "Jirfelien", "Kelai",
                 "Laren", "Leofa", "Liwisintha", "Marhforn", "Marluh", "Marodwyn", "Minual",
                 "Odalinda", "Raedwyn", "Rémahild", "Riguntha", "Rinel", "Rose", "Sahali",
                 "Sauilswintha", "Shagedla", "Sigebeorta", "Spearwa", "Stanchela", "Sulwen",
                 "Súlwyn", "Syndrith", "Thraer", "Unn", "Valcrigge", "Vidumavi", "Wilda", "Woedwyn"]
            ],
            "Gondorian":
            [
                ["Allit", "Amrod", "Andra", "Araclin", "Aranna", "Arantar", "Arcle", "Argirion",
                 "Axor", "Balthrod", "Barmir", "Beldin", "Belebragol", "Beregond", "Boron",
                 "Bracken", "Brandir", "Bregor", "Brelam", "Brettring", "Calenorn", "Calmacil",
                 "Camulion", "Cannal", "Carcamir", "Carnendil", "Cealan", "Cendralion", "Civrui",
                 "Clennan", "Clothiel", "Coerba", "Conul", "Croggan", "Dagar", "Dagnir", "Dagobert",
                 "Damrod", "Derufin", "Dirhavel", "Domar", "Dorelas", "Doreoren", "Duinhir", "Eben",
                 "Ecuris", "Edacar", "Edwilber", "Eldanon", "Elegar", "Elin", "Eolson", "Erkam",
                 "Fallin", "Fallor", "Ferrin", "Findegil", "Fíriel", "Forlong", "Fornact", "Gael",
                 "Galad", "Galbar", "Galf", "Gam", "Gelmir", "Gerdon", "Gildor", "Gorlim",
                 "Grathian", "Grillic", "Hace", "Halbar", "Haldir", "Halifor", "Hallatan", "Halmir",
                 "Handir", "Hargon", "Hatharya", "Havnis", "Helbran", "Herion", "Herucalmo",
                 "Herumir", "Hirgon", "Horluin", "Hossadam", "Hunthor", "Hurin", "Imlach", "Ini",
                 "Iorlas", "Joraal", "Kaldir", "Kíron", "Krobon", "Lanios", "Lidimir", "Mablung",
                 "Maeflad", "Malbeth", "Maldring", "Malegorn", "Mard", "Meriot", "Merithdil",
                 "Midhroch", "Midmin", "Minasdan", "Mino", "Morreg", "Myall", "Myarnil", "Naurudûn",
                 "Neldorn", "Olby", "Oric", "Ormon", "Padderec", "Palanthrar", "Palol", "Palvano",
                 "Pauren", "Penemith", "Perdido", "Perion", "Pinto", "Portik", "Pwyll", "Ragnir",
                 "Ragnor", "Rastarin", "Rewin", "Rieldir", "Rillit", "Ringmir", "Rogan", "Rogeth",
                 "Rognir", "Rohtur", "Saeros", "Seregon", "Serendur", "Shebbin", "Shorrie",
                 "Sinyadal", "Siobal", "Syron", "Tárain", "Targon", "Tarlang", "Telissûring",
                 "Tergon", "Terimbrel", "Tharagun", "Tharanon", "Thoril", "Thorondil", "Thorondir",
                 "Tinindil", "Torgir", "Tuor", "Ulbar", "Ulbor", "Uldros", "Ullis", "Ulrad",
                 "Úrcamir", "Urlagin", "Urranta", "Vagor", "Vandor", "Varak", "Verden", "Vinië",
                 "Wafar", "Walec", "Wartik", "Widlo", "Wilrith", "Zarby"],
                ["Bereth", "Boromis", "Brilwen", "Caenesta", "Carnel", "Degla", "Dorien", "Emeldir",
                 "Fanariel", "Fíriel", "Frandica", "Garreth", "Gilwen", "Glindiel", "Grena",
                 "Haleth", "Idril", "Ioanna", "Ioreth", "Jesec Cael", "Kinda", "Lissuin", "Luinen",
                 "Miena", "Miriel", "Nanya", "Nimriel", "Odelard", "Raniel", "Stefa", "Sunil",
                 "Taska", "Treva", "Tyreath", "Uruiwen", "Yoruvë"]
            ],
            "Variag":
            [
                ["Azzad", "Curuband", "Gorovod", "Itana", "Mardrash", "Ovatha", "Uma", "Valhad"],
                []
            ],
            "Woodman":
            [
                ["Bardir", "Clemendan", "Gramberot", "Hírband", "Rothaar", "Theamond", "Theuderic",
                 "Thuiric", "Tiralgar", "Waulfa", "Windlore", "Woffung"],
                ["Amala", "Roenda", "Sigeberta", "Súlwine"]
            ],
            "Wose":
            [
                ["Ari-Ghân", "Ari-Ghín", "Ari-Lam", "Azakhad", "Borin", "Buri-Jun", "Buri-Khûrni",
                 "Clatu", "Daldin", "Dhân", "Effem", "Fanghîn", "Furin", "Ghân-buri-Ghân", "Ghîm",
                 "Gôr-khan-gôr", "Khûn-buri-Khûn", "Narvi", "Nhâk-Bûran", "Nivtur", "Om-buri-Om",
                 "Om-ura-Om", "Ôn-Eno", "Ôn-Ikana", "Ôn-Iko", "Ôn-Tomu", "Ôn-uri-Gès", "Or-Dîn",
                 "Or-Lân", "Or-Prâga", "Rhân-guri-Rhân", "Róin", "Viddis"],
                ["Pôn-ora-Pôn"]
            ],
            "Dwarf":
            [
                ["Ai", "Aldan", "Aldor", "Alf", "Althjof", "An", "Andvari", "Angrod", "Atilik",
                 "Aurvang", "Austri", "Azaghal", "Báin", "Bair", "Baldur", "Balin", "Balli",
                 "Balrim", "Barin", "Bávor", "Bifur", "Bildr", "Billing", "Bofur", "Bohór",
                 "Bombur", "Bowlin", "Bróin", "Bruni", "Brór", "Buri", "Burin", "Bwalin", "Cori",
                 "Craier", "Dain", "Dáin", "Dár", "Dáram", "Darim", "Darin", "Darzum", "Dhebun",
                 "Dhemim", "Dheo", "Dibin", "Dintam", "Dirn", "Dolgthrasir", "Dolin", "Dóm", "Dór",
                 "Dori", "Dralin", "Drarin", "Draupnir", "Drúhar", "Drúin", "Drús", "Duf",
                 "Duildin", "Durzil", "Dvalin", "Dwáin", "Dwalin", "Eikinskjaldi", "Falin", "Farin",
                 "Fíli", "Finn", "Fjalar", "Flói", "Fóli", "Fori", "Forin", "Fræg", "Frár", "Freri",
                 "Frerin", "Fror", "Frór", "Frósti", "Frúhar", "Frúin", "Fulla", "Fundin", "Gáin",
                 "Ghamim", "Ghar", "Gimbal", "Gimithor", "Gimlin", "Ginnar", "Glein", "Gloin",
                 "Glorin", "Grális", "Gróin", "Grolin", "Gror", "Gulla", "Gura", "Gurh", "Gurim",
                 "Gurin", "Gurn", "Halin", "Hannar", "Haugspori", "Hepti", "Hlevang", "Hor",
                 "Hornbori", "Ibír", "Ibûn", "Jari", "Kaidin", "Kalin", "Kallin", "Khadak", "Khain",
                 "Khanil", "Khanli", "Khîm", "Khorni", "Kíli", "Kóri", "Kuri", "Líli", "Lit",
                 "Lofar", "Lóni", "Miffli", "Miffli", "Mîm", "Mjothvitnir", "Moranar", "Motsognir",
                 "Nain", "Náin", "Nali", "Náli", "Nár", "Naric", "Narmire", "Naug", "Nedilli",
                 "Niping", "Nithi", "Nori", "Northri", "Nurís", "Nyi", "Nyr", "Nyrath", "Obun",
                 "Ohtar", "Oin", "Óin", "Omim", "Onar", "Ori", "Orn", "Purfin", "Rálin",
                 "Rathsvith", "Regin", "Rhomin", "Rhotti", "Rúrin", "Seldur", "Skafith", "Skirfir",
                 "Suthri", "Sviur", "Tali", "Thalin", "Tharangul", "Thekk", "Thelór", "Thíst",
                 "Thorin", "Thráin", "Thrangull", "Thrár", "Threlin", "Thrír", "Throdin", "Thror",
                 "Thrórin", "Thrúr", "Thúlin", "Thurin", "Tili", "Tíli", "Vestri", "Vigg", "Vili",
                 "Vindalf", "Virfir", "Vit", "Yngvi", "Zafor", "Zdori", "Zeddic", "Zorn", "Zrór"],
                ["Bís", "Brís", "Dís", "Drúis", "Durí", "Freris", "Grís", "Harnekil", "Silnoi",
                 "Thrís", "Tís", "Welís"]
            ],
            "Elf":
            [
                ["Adunavar", "Aegnor", "Aldan", "Aldohir", "Amdir", "Amdír", "Amras", "Amrod",
                 "Amroth", "Andovon", "Annael", "Aranto", "Aranwë", "Arculagar", "Arduin",
                 "Ardûval", "Arminas", "Arophel", "Ascarnil", "Bathor", "Beleg", "Belion",
                 "Bladorthin", "Bodmin", "Brandir", "Brethil", "Calendir", "Cambragol", "Camgalen",
                 "Camring", "Camthalion", "Caranfin", "Carihir", "Carnil", "Celedhring", "Celegorm",
                 "Celequar", "Círdor", "Coibor", "Cornen", "Curubor", "Daeron", "Daniros",
                 "Denethor", "Ecthelion", "Ectheon", "Edrahil", "Eglavirdan", "Eldarion",
                 "Elemmírë", "Elenril", "Elerior", "Elfaron", "Eluréd", "Elurín", "Elwë", "Eöl",
                 "Erestor", "Erocil", "Estelin", "Faleriod", "Falfed", "Fanar", "Fanari", "Fëabor",
                 "Fëatur", "Fendomë", "Filegdir", "Finculin", "Fingon", "Finrod", "Finwë",
                 "Froithir", "Fuinur", "Gaerdaer", "Galadlin", "Galador", "Galandeor", "Galdor",
                 "Galenlain", "Galerin", "Galion", "Galvilya", "Gelmir", "Gihellin", "Gildor",
                 "Gilraen", "Ginfilian", "Glorinadan", "Glosnar", "Gorthaur", "Gwilidhol",
                 "Gwindion", "Halatir", "Haldir", "Heladil", "Helkama", "Hílanor", "Hilvanar",
                 "Hiradur", "Hîrforn", "Huinen", "Indis", "Ingwë", "Istagol", "Kénwë", "Kheleglin",
                 "Khelekar", "Khelgin", "Khilia", "Klaen", "Laerion", "Larrithin", "Laurrë",
                 "Lenwë", "Lyaan", "Lyrin", "Mablung", "Maedhros", "Maeglin", "Maellin", "Maglor",
                 "Malthir", "Mendal", "Merethorn", "Meryalë", "Miles", "Moran", "Mornaur",
                 "Morthaur", "Mourfuin", "Nardhol", "Nenledil", "Nestador", "Olwë", "Orophin",
                 "Orrerë", "Palandor", "Pelnimloth", "Persuvious", "Ragnor", "Randae", "Rhovamir",
                 "Ringlin", "Ruindel", "Rúmil", "Saeros", "Sarkaxë", "Striuk'ir", "Sûlarin",
                 "Sûldun", "Súlherok", "Sûlherok", "Súlkano", "Súlor", "Taurclax", "Taurion",
                 "Taurnil", "Teletasarë", "Terelorn", "Terilaen", "Thalos", "Thanadirian",
                 "Tharúdan", "Turgon", "Turlindë", "Ufëa", "Ulcamer", "Vaal Gark", "Vairesûl",
                 "Valandor", "Valkrist", "Vallin", "Valmorgûl", "Valnaur", "Valsûl", "Vidarlin",
                 "Vilyadhol", "Voronwë"],
                ["Adaldrida", "Aiwë", "Amarië", "Ardana", "Aredhel", "Arhendhiril", "Ariel",
                 "Arien", "Arvairë", "Arverethiel", "Belladonna", "Bellindiel", "Brethilwen",
                 "Calime", "Derna", "Eariel", "Eärwen", "Eldebeth", "Elendor", "Elenwe", "Elenwë",
                 "Elindiel", "Elwing", "Erdíniel", "Fëamírë", "Fëatur", "Finduilas", "Gilmith",
                 "Gwaedun", "Idril", "Ivren", "Jesprin", "Lalaith", "Linsûl", "Losp'indel", "Lysa",
                 "Mally", "Marwen", "Merilwen", "Míriel", "Morloth", "Namirë", "Nernadel",
                 "Nimrodel", "Óriel", "Othariel", "Rána", "Rilia", "Saeraladhwen", "Silion",
                 "Sirnaur", "Tara", "Tathariel", "Teiglin", "Tirial", "Tiriel", "Tolwen", "Valglin",
                 "Vasariel", "Yavëkamba"]
            ],
            "Hobbit":
            [
                ["Adalgrim", "Adelard", "Amaranth", "Andwise", "Anson", "Ashturg", "Babbin",
                 "Balbo", "Berilac", "Bert", "Bilbo", "Bingo", "Blaggo", "Blanco", "Bodo", "Bowman",
                 "Broggo", "Bucca", "Bungo", "Carl", "Chuff", "Cleff", "Coldomac", "Coney",
                 "Cotman", "Cottar", "Dallo", "Dinodas", "Doddle", "Doderic", "Dodinas", "Droggo",
                 "Drogo", "Dromibar", "Dudo", "Elfstan", "Erling", "Eustace", "Everard", "Falco",
                 "Fastolph", "Fastred", "Fencon", "Ferdibrand", "Ferdinand", "Ferumbras",
                 "Filibert", "Flambard", "Folco", "Fortinbras", "Fosco", "Fredegar", "Frodo", "Gam",
                 "Gerontius", "Gorbadoc", "Gorbulas", "Gorhendad", "Gormadoc", "Gresham", "Griffo",
                 "Gundabald", "Hal", "Halfast", "Halfred", "Hamfast", "Hamson", "Harding",
                 "Hending", "Hildibrand", "Hildifons", "Hildigard", "Hildigrim", "Hob", "Hobson",
                 "Holfast", "Holman", "Hugo", "Ilberick", "Isembard", "Isembold", "Isengar",
                 "Isengrim", "Isumbras", "Isundras", "Jolly", "Kocho", "Largo", "Longo", "Lotho",
                 "Lotto", "Madoc", "Maitlow", "Manlow", "Marcho", "Marmadas", "Marmadoc", "Marroc",
                 "Menegilda", "Mentha", "Merimac", "Merimas", "Merry", "Milo", "Minto", "Moro",
                 "Mosco", "Muggrath", "Mungo", "Muzgash", "Nick", "Nob", "Odo", "Odovacar", "Olo",
                 "Orgulas", "Otho", "Paladin", "Pencho", "Permagin", "Pippin", "Pollo", "Polo",
                 "Ponto", "Porto", "Posco", "Reginard", "Robin", "Rorimac", "Rudigar", "Rufus",
                 "Sadoc", "Samwise", "Sancho", "Saradas", "Saradoc", "Seredic", "Sigismond", "Ted",
                 "Thoddo", "Tobold", "Togo", "Tolman", "Tom", "Tully", "Tunny", "Uklurg", "Wag",
                 "Wilcome", "Wilibald", "Will", "Willie", "Wiseman", "Worshem", "Zarbag"],
                ["Adamantha", "Angelica", "Asphodel", "Baromba", "Belba", "Bell", "Beryl", "Calamy",
                 "Camellia", "Celandine", "Chica", "Dahlia", "Daisy", "Diamond", "Donnamira",
                 "Dora", "Eglantine", "Elanor", "Esmeralda", "Estella", "Fairly", "Gilly",
                 "Goldilocks", "Hanna", "Hilda", "Holly", "Laura", "Leffly", "Lily", "Linda",
                 "Lobelia", "Lolly", "Malva", "Margott", "Marigold", "May", "Mayferry", "Melilot",
                 "Mimosa", "Mirabella", "Molly", "Myrtle", "Pansy", "Pearl", "Peony", "Pervinca",
                 "Pimpernel", "Poppy", "Precious", "Primrose", "Primula", "Prisca", "Rosa",
                 "Rosamunda", "Rose", "Rowan", "Ruby", "Salvia", "Tanta"]
            ],
            "Orc":
            [
                ["One-Fang", "Akargûn", "Arthrug", "Arthuan", "Azog", "Balcmeg", "Balkhmog",
                 "Barfka", "Bokdankh", "Bolg", "Bolvag", "Bralg", "Bugrug", "Bukra", "Bulkupar",
                 "Cro", "Dakalmog", "Daumdorût", "Dolgrist", "Dorglas", "Drurgandra", "Dunadd",
                 "Durba", "Durg-Orsh", "Fektalgh", "Fha-khorlash", "Forak", "Gaballol", "Garg",
                 "Garny", "Gaskbuz", "Ghardak", "Ghashurlagk", "Glashtoc", "Gorbag", "Gorbla",
                 "Gormuk", "Gorron", "Gorthak", "Grac", "Grachuk", "Grashukh", "Grashûkh",
                 "Grashur", "Grishnákh", "Grizbat", "Hagrakh", "Hukor", "Hurog", "Ikgor", "Karagat",
                 "Kargmaushat", "Kharghiz", "Lagduf", "Leegrash", "Lug", "Lugdush", "Lurd",
                 "Lurshas", "Lurshras", "Luzog", "Malkur", "Marlug", "Maugrath", "Mauhúr", "Mogshi",
                 "Mordanak", "Muagan", "Nadash", "Nagan", "Narkga", "Narlga", "Natak", "Nazog",
                 "Nurgash", "Nurl", "Obad", "Ogrod", "One Fang", "Orcobal", "Orthrod", "Pochack",
                 "Radbug", "Ragavaug", "Rashkûk", "Rask", "Regdûk", "Rekka", "Rhukska", "Rugat",
                 "Rulthak", "Savgak", "Scutsparg", "Shagog", "Shagrat", "Shagrath", "Shagrug",
                 "Shardakh", "Sharzig", "Shergnakh", "Skargnakh", "Skoralg", "Snaga", "Storlaga",
                 "Strulug", "Thergor", "Ufthak", "Uftog", "Ugluk", "Ukog", "Ukrish", "Ulgin",
                 "Ulzog", "Unhir", "Urfa", "Urfase", "Urgubal", "Urgurk", "Urmek", "Urudrak",
                 "Utor", "Utsar", "Uunk", "Virsh", "Volog", "Wargiz", "Yagrash", "Yazhgar", "Zalg",
                 "Zurtak"],
                []
            ]
        }
def get_races(self):
"""
Returns the set of races in the database.
:return: String containing the name of each race in the database.
"""
trace.entry()
trace.detail("Name database length %r" % len(self.name_database))
trace.exit()
return sorted(self.name_database.keys())
def set_race(self, race):
"""
Updates variables following a change to requested race.
:param race: The requested race.
"""
trace.detail("Set race to %r" % race)
self.current_race = race
self.males = self.name_database[race][0]
self.females = self.name_database[race][1]
trace.detail("Number of males %r" % len(self.males))
trace.detail("Number of females %r" % len(self.females))
    def get_num_males(self):
        """
        Returns the number of male names in the current database.
        :return: Count of male names for the currently selected race.
        """
        return len(self.males)
    def get_num_females(self):
        """
        Returns the number of female names in the current database.
        :return: Count of female names for the currently selected race.
        """
        return len(self.females)
    def get_male(self):
        """
        Returns a randomly chosen male name.
        :return: A male name for the current race.
        """
        # dice.dcustom presumably returns a 1-based roll -- the result is
        # mapped onto the list with roll - 1. TODO confirm in dice module.
        roll = dice.dcustom(len(self.males))
        trace.detail("Roll %r, gives %s" % (roll, self.males[roll-1]))
        return self.males[roll - 1]
    def get_female(self):
        """
        Returns a randomly chosen female name.
        :return: A female name for the current race.
        """
        # Same 1-based roll convention as get_male.
        roll = dice.dcustom(len(self.females))
        trace.detail("Roll %r, gives %s" % (roll, self.females[roll-1]))
        return self.females[roll - 1]
def get_either(self):
"""
Returns a name that is either male or female.
:return: The name returned, including a gender identifier.
"""
max_number = len(self.males) + len(self.females)
roll = dice.dcustom(max_number)
trace.detail("Roll %r" % roll)
trace.detail("Number of males %r" % len(self.males))
if roll <= len(self.males):
male: str = self.males[roll - 1]
trace.detail("Return %s" % male)
return male + " (M)"
else:
roll = roll - len(self.males)
trace.detail("Roll now %r" % roll)
trace.detail("Return %s" % self.females[roll-1])
female: str = self.females[roll - 1]
trace.detail("Return %s" % female)
return female + " (F)"
| AidanCopeland/merp | console/name_database/name_database.py | name_database.py | py | 42,682 | python | hr | code | 1 | github-code | 13 |
class LedMatrix(object):
    """Builds the LED index map for an 8-row display panel."""

    def __init__(self):
        # Layout constants: column stride, per-row pair offsets, top index.
        self.jump = 16
        self.altnum = (1, 3, 5, 7, 9, 11, 13, 15)
        self.start = 248
        self.array = []
        for row_idx in range(8):
            offset = self.altnum[row_idx]
            row = []
            # Walk down from the row's top index in strides of `jump`,
            # emitting each base index paired with its offset partner.
            for base in range(self.start + row_idx, -1, -self.jump):
                row.extend((base, base - offset))
            self.array.append(row)

    def ledarray(self):
        """Return the full 8-row index matrix."""
        return self.array
| johnjreiser/pisign | matrix.py | matrix.py | py | 440 | python | en | code | 0 | github-code | 13 |
73796061458 | import os
from pathlib import Path
from dotenv import load_dotenv
# Resolved filesystem locations used across the app. Note: the 'PROMTS'
# key spelling is part of the public config contract and is kept as-is.
CONFIG = {
    'MAIN': str(Path.cwd()),
    'DOCUMENTS': str(Path.cwd() / 'docs'),
}
CONFIG['DB_CONFIG'] = str(Path(CONFIG['DOCUMENTS']) / 'db_connect.json')
CONFIG['PROMTS'] = str(Path(CONFIG['DOCUMENTS']) / 'promts.json')
CONFIG['APP'] = str(Path(CONFIG['MAIN']) / 'app')
CONFIG['API_KEYS'] = str(Path(CONFIG['DOCUMENTS']) / 'key.txt')
CONFIG['MODELS'] = str(Path(CONFIG['APP']) / 'model')
# dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
# if os.path.exists(dotenv_path):
# load_dotenv(dotenv_path)
if __name__ == '__main__':
    # Smoke check: print the resolved paths space-separated on one line,
    # exactly as the original multi-argument print call did.
    print(*(CONFIG[key] for key in (
        'MAIN', 'DOCUMENTS', 'DB_CONFIG', 'PROMTS', 'APP', 'API_KEYS')))
| Nick2201/chat_gpt_assisstant | app/config.py | config.py | py | 815 | python | en | code | 0 | github-code | 13 |
14400998615 | __author__ = 'Faaiz'
from PySide.QtUiTools import *
from PySide.QtGui import *
from WallObserver import *
from Project import *
class ProjectPageHeader(QWidget, WallObserver):
    """Header widget that shows the current project's name on the wall."""
    def __init__(self,system):
        """Load the .ui layout, register on *system* as observer, start hidden.

        :param system: application object providing addObserver and fitText.
        """
        QWidget.__init__(self, None)
        self.system = system
        self.system.addObserver(self)
        loader = QUiLoader()
        dialog = loader.load("./UI/projectPageHeader.ui")
        self.nameText = dialog.findChild(QLabel,"name")
        layout = QVBoxLayout(self)
        layout.addWidget(dialog)
        layout.setContentsMargins(0,0,0,0)
        self.hide()
    def updateObserver(self,user,history):
        """Show the project name when the latest history item is a Project.

        NOTE(review): exact-type comparison deliberately(?) excludes Project
        subclasses; isinstance would include them -- confirm before changing.
        """
        if type(history[-1]) == Project:
            self.nameText.setText(history[-1].name)
            # 430x40 is the label's fixed area in the .ui layout.
            self.system.fitText(self.nameText,430,40)
            self.show()
        else:
            self.hide()
| sathachao/Wall | ProjectPageHeader.py | ProjectPageHeader.py | py | 840 | python | en | code | 0 | github-code | 13 |
15977486371 | import matplotlib.pyplot as plt
import scipy.special as ss
import numpy as np
from time import perf_counter
def path_difference(**kwargs):
    """Return a function computing k times the optical path difference.

    The returned function f(z, rho) evaluates the Gibson-Lanni optical path
    difference (actual layers minus nominal design layers) multiplied by the
    wave number k, at depth z and normalized radial coordinate rho.

    Expected kwargs: k, na, n_s, n_g, n_i, n_g_, n_i_, t_g, t_i, t_g_, t_i_.
    """
    k = kwargs.get("k")
    na = kwargs.get("na")
    n_s = kwargs.get("n_s")
    n_g = kwargs.get("n_g")
    n_i = kwargs.get("n_i")
    n_g_ = kwargs.get("n_g_")
    n_i_ = kwargs.get("n_i_")
    t_g = kwargs.get("t_g")
    t_i = kwargs.get("t_i")
    t_g_ = kwargs.get("t_g_")
    t_i_ = kwargs.get("t_i_")

    def f(z, rho):
        """k * OPD at depth z and radial coordinate rho."""
        # Actual layers: sample, coverslip, immersion medium.
        tot = n_s * z * (1 - (na * rho / n_s) ** 2) ** 0.5
        tot += n_g * t_g * (1 - (na * rho / n_g) ** 2) ** 0.5
        tot += n_i * t_i * (1 - (na * rho / n_i) ** 2) ** 0.5
        # Nominal (design) layers. BUGFIX: the nominal immersion term must
        # use its own refractive index n_i_ inside the square root; the
        # original used n_g_ there, skewing the OPD whenever n_i_ != n_g_.
        tot -= n_g_ * t_g_ * (1 - (na * rho / n_g_) ** 2) ** 0.5
        tot -= n_i_ * t_i_ * (1 - (na * rho / n_i_) ** 2) ** 0.5
        return k * tot

    return f
def path_length(**kwargs):
    """Return a function computing k times the optical path length.

    The returned function opl(z, rho) gives the optical path length inside
    the sample layer, multiplied by the wave number k.
    """
    wave_number = kwargs.get("k")
    aperture = kwargs.get("na")
    sample_index = kwargs.get("n_s")

    def opl(z, rho):
        """k * OPL at depth z and radial coordinate rho."""
        cos_term = (1 - (aperture * rho / sample_index) ** 2) ** 0.5
        return wave_number * sample_index * z * cos_term

    return opl
def GLA_int(**kwargs):
    """Gibson-Lanni-with-absorption integrand factor.

    Returns a function of depth z and radial coordinate rho evaluating
    exp(i * OPD - alpha * OPL), the exponential part of the integrand of
    the Gibson-Lanni absorption model.
    """
    absorption = kwargs.get("alpha", 1.0)  # absorption coefficient
    opd = path_difference(**kwargs)  # k * optical path difference
    opl = path_length(**kwargs)  # k * optical path length

    def integrand(z, rho):
        return np.exp(1.j * opd(z, rho) - absorption * opl(z, rho))

    return integrand
def fit_bessel_functions(f, a, upper, K):
    """Least-squares fit of f on [0, upper] by a sum of Bessel J0 terms.

    :param f: function to be approximated; must accept a numpy array
    :param a: 1-D array of scale factors inside the Bessel functions
    :param upper: right end of the fitting interval [0, upper]
    :param K: number of sample points on the interval
    :return: 1-D coefficient array c minimizing ||f(x) - J c||^2
    """
    x = np.linspace(0, upper, K)
    # Design matrix: J[i, j] = J0(a[j] * x[i]) on the sample grid.
    J = ss.j0(np.outer(x, a))
    F = f(x)
    # Solve the normal equations J^T J c = J^T F directly; solving the
    # linear system is cheaper and numerically safer than forming the
    # explicit matrix inverse as the original code did.
    c = np.linalg.solve(np.dot(J.T, J), np.dot(J.T, F))
    return c
def sum_of_bessel_functions(c, a, x):
    """Evaluate sum_i c[i] * J0(a[i] * x) at every point of x.

    c and a are 1-D arrays of equal length; x may be a 1-D array of any size.
    """
    basis = ss.j0(np.outer(x, a))  # one Bessel column per coefficient
    return basis @ c
def R(r, a, upper, **kwargs):
    """Closed-form radial terms R_m(r) used to approximate the PSF integral.

    :param r: horizontal norm sqrt(x^2 + y^2)
    :param a: 1-D array of inner coefficients of the Bessel functions
    :param upper: upper bound of the integration interval [0, upper]
    :return: array of R_m(r) values, one per coefficient in *a*

    NOTE(review): the denominator a**2 - beta**2 vanishes when k * r * NA
    equals one of the coefficients in *a*; presumably the choice of *a* in
    PSF() avoids this -- confirm before reusing elsewhere.
    """
    k = kwargs.get("k")
    NA = kwargs.get("na")
    beta = k * r * NA
    return (a * ss.j1(a * upper) * ss.j0(beta * upper) * upper - \
            beta * ss.j0(a * upper) * ss.j1(beta * upper) * upper) / \
            (a ** 2 - beta ** 2)
def PSF(rs, zs, N=50, K=100, **kwargs):
    """Evaluate the point spread function on the grid zs x rs.

    :param rs: 1-dimensional array of horizontal norms r = sqrt(x^2 + y^2)
    :param zs: 1-dimensional array containing the depths z
    :param N: number of Bessel functions used for the approximation
    :param K: number of points on which the Bessel approximation is fitted
    :return: 2-D array of PSF values, rows indexed by z, columns by r
    """
    # Upper bound of the interval [0, upper] to be integrated over:
    upper = 0.5
    # Inner coefficients of the Bessel functions:
    a = (3 * np.linspace(1, N, N) - 2) / upper
    # Fit a series of Bessel functions for each depth z;
    # cs holds the coefficients of this series at each depth z.
    cs = []
    for z in zs:
        f = lambda rho : GLA_int(**kwargs)(z, rho)
        cs.append(fit_bessel_functions(f, a, upper, K))
    # Compute the closed-form R terms used to approximate the integral;
    # Rs holds these values at each distance r.
    Rs = []
    for r in rs:
        Rs.append(R(r, a, upper, **kwargs))
    # Convert to numpy arrays
    cs = np.array(cs)
    Rs = np.array(Rs)
    # The weighted sum over c_i R_i, squared in modulus, gives the
    # 2-dimensional array of PSF values at each point (z, r).
    return np.abs(np.matmul(cs, Rs.T))**2
# Default physical constants of the imaging setup (SI units).
wavelength = 600e-9 # unit is meter
defaults = {
    "na" : 1.9, # numerical aperture
    "n_s" : 1.33, # refractive index of sample layer
    "n_g" : 1.5, # actual refractive index of cover slip
    "n_i" : 1.7, # actual refractive index of immersion layer
    "n_g_" : 1.5, # nominal refractive index of coverslip
    "n_i_" : 1.5, # nominal refractive index of immersion layer
    "t_g" : 170e-6, # actual coverslip thickness
    "t_i" : 130e-6, # actual distance between objective lens and coverslip
    "t_g_" : 150e-6, # nominal coverslip thickness
    "t_i_" : 150e-6, # nominal distance between objective lens and coverslip
    "k" : 2 * np.pi / wavelength, # wave number
    "alpha" : np.log(2) / (2 * np.pi / wavelength) * 10**3
    # absorption constant, chosen so the illumination halves after 1 mm
}
if __name__ == "__main__":
    # Horizontal (x) and lateral depth (y) plot boundaries, in meters.
    xmin = -18e-6
    xmax = 18e-6
    ymin = 0
    ymax = 3e-5
    # Alternative, wider boundaries:
    # xmin = -5e-4
    # xmax = 5e-4
    # ymin = 0
    # ymax = 1e-3
    xs = np.linspace(xmin, xmax, 1000)
    ys = np.linspace(ymin, ymax, 1000)
    X, Y = np.meshgrid(xs, ys)
    # Compute the PSF and report the wall-clock time taken.
    start_time = perf_counter()
    psf = PSF(xs, ys, **defaults)
    end_time = perf_counter()
    print("Computation time: %.3fs" % (end_time - start_time))
    # Alternatively load a previously saved PSF:
    # psf = np.load("psf.npy")
    # Display the PSF as a color mesh.
    fig = plt.figure(figsize=(8,6))
    plt.pcolormesh(X, Y, psf, vmin=0, vmax=35e-05)
    plt.xlabel("meter")
    plt.ylabel("meter")
    plt.colorbar(format='%.e')
    # plt.savefig("psf.png" % (wavelength * 10**9)) # save figure as png
    plt.show()
    # Optionally, save the PSF for later reuse:
    # np.save("psf.npy", psf)
| samuelhklumpers/mfi-photosynthetics | psf_generator.py | psf_generator.py | py | 7,129 | python | en | code | 1 | github-code | 13 |
# Read how many integers to add up, then sum 0..valor inclusive
# (equivalent to the original while loop starting at i = 0).
valor = int(input("Informe a quantidade de valores inteiros que deseja somar\n"))
soma = 0
for i in range(valor + 1):
    soma += i
print("A soma dos", valor, "primeiros números inteiros é", soma)
| LiajuX/Python-Exercises-2020 | Arquivo12-Ex.4.py | Arquivo12-Ex.4.py | py | 220 | python | pt | code | 0 | github-code | 13 |
30139217522 | from zou.app.services import (
base_service,
projects_service,
notifications_service,
)
from zou.app.utils import cache, events, fields, query as query_utils
from zou.app.models.entity import Entity, EntityLink
from zou.app.models.entity_type import EntityType
from zou.app.models.preview_file import PreviewFile
from zou.app.models.task import assignees_table
from zou.app.models.task import Task
from zou.app.services.exception import (
PreviewFileNotFoundException,
EntityLinkNotFoundException,
EntityNotFoundException,
EntityTypeNotFoundException,
)
def clear_entity_cache(entity_id):
    """Invalidate the memoized get_entity entry for *entity_id*."""
    cache.cache.delete_memoized(get_entity, entity_id)
def clear_entity_type_cache(entity_type_id):
    """Invalidate the memoized entity type caches for *entity_type_id*.

    Also clears the whole name-based cache, since a type change can make
    any name lookup stale.
    """
    cache.cache.delete_memoized(get_entity_type, entity_type_id)
    cache.cache.delete_memoized(get_entity_type_by_name)
def get_temporal_entity_type_by_name(name):
    """
    Return entity type matching *name*, retrying once when the memoized
    lookup cached a None result (e.g. the type was created after the first
    miss was cached).
    """
    entity_type = get_entity_type_by_name(name)
    if entity_type is None:
        # Drop the stale cached None and query again.
        cache.cache.delete_memoized(get_entity_type_by_name, name)
        entity_type = get_entity_type_by_name(name)
    return entity_type
@cache.memoize_function(240)
def get_entity_type(entity_type_id):
    """
    Return an entity type matching given id, as a dict. Results are
    memoized for 240 seconds.

    :raises EntityTypeNotFoundException: if no entity type matches the id.
    """
    return base_service.get_instance(
        EntityType, entity_type_id, EntityTypeNotFoundException
    ).serialize()
@cache.memoize_function(240)
def get_entity_type_by_name(name):
    """
    Return entity type matching *name*, creating it first if it doesn't
    exist. Results are memoized for 240 seconds.
    """
    entity_type = EntityType.get_by(name=name)
    if entity_type is None:
        entity_type = EntityType.create(name=name)
    return entity_type.serialize()
@cache.memoize_function(240)
def get_entity_type_by_name_or_not_found(name):
    """
    Return entity type matching *name*. Unlike get_entity_type_by_name, a
    missing type is NOT created.

    :raises EntityTypeNotFoundException: if no entity type has that name.
    """
    entity_type = EntityType.get_by(name=name)
    if entity_type is None:
        raise EntityTypeNotFoundException
    return entity_type.serialize()
def get_entity_raw(entity_id):
    """
    Return an entity matching given id, as an active record. Raises an
    exception if nothing is found.
    """
    return base_service.get_instance(
        Entity, entity_id, EntityNotFoundException
    )
@cache.memoize_function(120)
def get_entity(entity_id):
    """
    Return an entity matching given id, as a dict. Results are memoized for
    120 seconds. Raises an exception if nothing is found.
    """
    return base_service.get_instance(
        Entity, entity_id, EntityNotFoundException
    ).serialize()
def update_entity_preview(entity_id, preview_file_id):
    """
    Update given entity main preview. If entity or preview is not found, it
    raises an exception. Emits a "preview-file:set-main" event and a
    type-specific update event so listeners refresh the right model.
    """
    entity = Entity.get(entity_id)
    if entity is None:
        raise EntityNotFoundException
    preview_file = PreviewFile.get(preview_file_id)
    if preview_file is None:
        raise PreviewFileNotFoundException
    entity.update({"preview_file_id": preview_file.id})
    # Drop the memoized entry so the next get_entity sees the new preview.
    clear_entity_cache(str(entity.id))
    events.emit(
        "preview-file:set-main",
        {"entity_id": entity_id, "preview_file_id": preview_file_id},
        project_id=str(entity.project_id),
    )
    # Anything that is not a Shot/Scene/Sequence/Episode is treated as an
    # asset for event naming purposes.
    entity_type = EntityType.get(entity.entity_type_id)
    entity_type_name = "asset"
    if entity_type.name in ["Shot", "Scene", "Sequence", "Episode"]:
        entity_type_name = entity_type.name.lower()
    events.emit(
        "%s:update" % entity_type_name,
        {"%s_id" % entity_type_name: str(entity.id)},
        project_id=str(entity.project_id),
    )
    return entity.serialize()
def get_entities_for_project(
    project_id,
    entity_type_id,
    obj_type="Entity",
    episode_id=None,
    only_assigned=False,
):
    """
    Retrieve all entities related to given project of which entity is entity
    type.

    :param obj_type: serialization type name used for the returned dicts
    :param episode_id: when set, restrict to children of this episode
    :param only_assigned: when True, keep only entities with a task
        assigned to the current user
    """
    # Imported locally to avoid a circular import with user_service.
    from zou.app.services import user_service
    query = (
        Entity.query.filter(Entity.entity_type_id == entity_type_id)
        .filter(Entity.project_id == project_id)
        .order_by(Entity.name)
    )
    if episode_id is not None:
        query = query.filter(Entity.parent_id == episode_id)
    if only_assigned:
        query = query.outerjoin(Task).filter(
            user_service.build_assignee_filter()
        )
    result = query.all()
    return Entity.serialize_list(result, obj_type=obj_type)
def get_entity_links_for_project(project_id, page=None, limit=None):
    """
    Retrieve all entity links belonging to the given project.

    When *page* is a positive number, results are paginated through the
    query utilities (any non-positive or missing *limit* means no limit).
    Otherwise the full list is serialized manually.
    """
    query = EntityLink.query.join(
        Entity, EntityLink.entity_in_id == Entity.id
    ).filter(Entity.project_id == project_id)
    results = []
    if page is not None and page > 0:
        # Guard against comparing None with an int: the original code
        # raised a TypeError when page was set but limit was left as None.
        if limit is not None and limit < 1:
            limit = None
        results = query_utils.get_paginated_results(query, page, limit=limit)
    else:
        for entity_link in query.all():
            results.append(
                {
                    "id": entity_link.id,
                    "entity_in_id": entity_link.entity_in_id,
                    "entity_out_id": entity_link.entity_out_id,
                    "nb_occurences": entity_link.nb_occurences,
                    "label": entity_link.label,
                    "data": entity_link.data,
                    "type": "EntityLink",
                }
            )
    return results
def get_entities_and_tasks(criterions={}):
    """
    Get all entities for given criterions with related tasks for each entity.

    Supported criterions: project_id, entity_type_id and episode_id. Each
    returned entity dict carries a "tasks" list of serialized task dicts,
    each with its assignee ids.

    NOTE(review): the mutable dict default is shared between calls; it is
    only read here, so this is currently harmless.
    """
    if "episode_id" in criterions and criterions["episode_id"] == "all":
        return []
    entity_map = {}
    task_map = {}
    # task_id -> is the current user subscribed, for the whole query scope.
    subscription_map = notifications_service.get_subscriptions_for_user(
        criterions.get("project_id", None),
        criterions.get("entity_type_id", None),
    )
    # The outer joins yield one row per (entity, task, assignee) triple.
    query = (
        Entity.query.outerjoin(Task, Task.entity_id == Entity.id)
        .outerjoin(assignees_table)
        .add_columns(
            Task.id,
            Task.task_type_id,
            Task.task_status_id,
            Task.priority,
            Task.estimation,
            Task.duration,
            Task.retake_count,
            Task.real_start_date,
            Task.end_date,
            Task.start_date,
            Task.due_date,
            Task.last_comment_date,
            assignees_table.columns.person,
        )
    )
    if "entity_type_id" in criterions:
        query = query.filter(
            Entity.entity_type_id == criterions["entity_type_id"]
        )
    if "project_id" in criterions:
        query = query.filter(Entity.project_id == criterions["project_id"])
    if "episode_id" in criterions:
        query = query.filter(Entity.parent_id == criterions["episode_id"])
    for (
        entity,
        task_id,
        task_type_id,
        task_status_id,
        task_priority,
        task_estimation,
        task_duration,
        task_retake_count,
        task_real_start_date,
        task_end_date,
        task_start_date,
        task_due_date,
        task_last_comment_date,
        person_id,
    ) in query.all():
        entity_id = str(entity.id)
        entity.data = entity.data or {}
        if entity_id not in entity_map:
            # First row for this entity: build its serialized dict.
            status = "running"
            if entity.status is not None:
                status = str(entity.status.code)
            entity_map[entity_id] = {
                "id": str(entity.id),
                "name": entity.name,
                "status": status,
                "episode_id": str(entity.parent_id),
                "description": entity.description,
                "frame_in": entity.data.get("frame_in", None),
                "frame_out": entity.data.get("frame_out", None),
                "fps": entity.data.get("fps", None),
                "preview_file_id": str(entity.preview_file_id or ""),
                "canceled": entity.canceled,
                "data": fields.serialize_value(entity.data),
                "tasks": [],
            }
        if task_id is not None:
            task_id = str(task_id)
            if task_id not in task_map:
                # First row for this task: serialize it and attach it to
                # its entity; subsequent rows only add extra assignees.
                task_dict = fields.serialize_dict(
                    {
                        "id": task_id,
                        "estimation": task_estimation,
                        "entity_id": entity_id,
                        "end_date": task_end_date,
                        "due_date": task_due_date,
                        "duration": task_duration,
                        "is_subscribed": subscription_map.get(task_id, False),
                        "last_comment_date": task_last_comment_date,
                        "priority": task_priority or 0,
                        "real_start_date": task_real_start_date,
                        "retake_count": task_retake_count,
                        "start_date": task_start_date,
                        "task_status_id": str(task_status_id),
                        "task_type_id": str(task_type_id),
                        "assignees": [],
                    }
                )
                task_map[task_id] = task_dict
                entity_dict = entity_map[entity_id]
                entity_dict["tasks"].append(task_dict)
            if person_id:
                task_map[task_id]["assignees"].append(str(person_id))
    return list(entity_map.values())
def remove_entity_link(link_id):
    """
    Delete the entity link matching *link_id* and return it serialized.

    :raises EntityLinkNotFoundException: when no link matches *link_id*.
    """
    link = EntityLink.get_by(id=link_id)
    # The original wrapped everything in `except BaseException`, which also
    # reported genuine database errors as "not found"; check explicitly
    # instead and let real errors propagate.
    if link is None:
        raise EntityLinkNotFoundException
    link.delete()
    return link.serialize()
def get_not_allowed_descriptors_fields_for_vendor(
    entity_type="Asset", departments=(), projects_ids=()
):
    """
    For each project id, list the metadata descriptor field names that a
    vendor limited to *departments* is not allowed to see.

    A descriptor is hidden when it is restricted to some departments and
    none of them matches the vendor's departments. Defaults are immutable
    to avoid the shared mutable default argument pitfall.
    """
    # Build the set once instead of on every descriptor comparison.
    departments = set(departments)
    not_allowed_descriptors_field_names = {}
    for project_id in projects_ids:
        not_allowed_descriptors_field_names[project_id] = [
            descriptor["field_name"]
            for descriptor in projects_service.get_metadata_descriptors(
                project_id
            )
            if descriptor["entity_type"] == entity_type
            and descriptor["departments"] != []
            and len(departments & set(descriptor["departments"])) == 0
        ]
    return not_allowed_descriptors_field_names
def remove_not_allowed_fields_from_metadata(
    not_allowed_descriptors_field_names=(), data=None
):
    """
    Return a copy of *data* stripped of the keys listed in
    *not_allowed_descriptors_field_names*.

    Defaults are immutable/None instead of the original shared mutable
    default arguments.
    """
    if data is None:
        data = {}
    # Set membership keeps the filter O(1) per key.
    not_allowed = set(not_allowed_descriptors_field_names)
    return {
        key: value
        for key, value in data.items()
        if key not in not_allowed
    }
| cgwire/zou | zou/app/services/entities_service.py | entities_service.py | py | 10,409 | python | en | code | 152 | github-code | 13 |
29723093265 |
# 3. Tic-tac-toe game (original task: «Создайте программу для игры в
# "Крестики-нолики"»). The board is a flat list; free cells hold their
# 1..9 label, taken cells hold 'X' or 'O'.
lst = [1, 2, 3,
       4, 5, 6,
       7, 8, 9]
def print_lst():
    """Print the current board between horizontal borders and return it."""
    border = '-------------'
    print(border)
    for start in (0, 3, 6):
        left, mid, right = lst[start:start + 3]
        print('|', left, '|', mid, '|', right, '|')
    print(border)
    return lst
# All eight winning index triples: rows, columns and both diagonals.
win_combo = [[0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 3, 6], [1, 4, 7], [2, 5, 8], [0, 4, 8], [2, 4, 6]]
from random import randint
player_1 = input('Введите имя первого игрока: ')
player_2 = input('Введите имя второго игрока: ')
# Fair coin flip: randint(0, 1) yields 0 or 1 with equal probability.
# (The original used randint(0, 2), which returns 0, 1 or 2 and therefore
# let player_1 start two times out of three.)
flag = randint(0, 1)
if flag:
    print(f'Первый ход: игрок {player_1}')
else:
    print(f'Первый ход: игрок {player_2}')
def step_lst(step, symbol):
    """Write *symbol* into the board cell currently labelled *step*."""
    lst[lst.index(step)] = symbol
def result(win_combo):
    """Return the winner's name for the current board, or "" if no winner."""
    win = ""
    for a, b, c in win_combo:
        line = (lst[a], lst[b], lst[c])
        if line == ('X', 'X', 'X'):
            win = player_1
        if line == ('O', 'O', 'O'):
            win = player_2
    return win
def input_value(name):
    """Prompt the player *name* for a move and return it as an int."""
    return int(input(f'{name}, ваш ход: '))
# Main game loop: alternate turns until somebody wins or the board fills.
game_over = False
counter = 0  # total number of valid moves made by BOTH players
while not game_over:
    print_lst()
    if flag:
        symbol = 'X'
        step = input_value(player_1)
    else:
        symbol = 'O'
        step = input_value(player_2)
    # Reject out-of-range and occupied cells; the same player retries.
    if step < 1 or step > 9:
        print('Введите число от 1 до 9! ')
        continue
    if str(lst[step - 1]) in 'XO':
        print('Эта клетка занята! ')
        continue
    step_lst(step, symbol)
    flag = not flag
    counter += 1
    win = result(win_combo)
    if win != "":
        game_over = True
    elif counter == 9:
        # BUGFIX: the original only counted player 1's moves and declared
        # a draw after 4 of them, with the board still holding empty cells.
        # A draw happens only once all nine cells are filled with no winner.
        print('Ничья!')
        win = 'Дружба'
        break
print_lst()
print("Победил(а)", win)
5459884964 | # -*- coding: utf-8 -*-
"""
@contact: lishulong.never@gmail.com
@time: 2019/4/8 下午3:50
"""
import random
def test_c_profile():
    """Print 100 pseudo-random floats -- a small workload for cProfile."""
    for _ in range(100):
        print(random.random())


if __name__ == '__main__':
    test_c_profile()
| lishulongVI/Ilhabela | analysis/c_profile.py | c_profile.py | py | 240 | python | en | code | 0 | github-code | 13 |
40766829169 | import csv
import requests
import json
import pandas as pd
import openpyxl
class Video:
    """Simple record pairing a YouTube video's title with its channel name."""

    def __init__(self, title, channel):
        self.title = title
        self.channel = channel

    def __repr__(self):
        # Makes lists of videos readable while debugging.
        return "Video(title={0!r}, channel={1!r})".format(self.title, self.channel)
def GetVideoInfo(videoId):
    """Fetch the title and channel of *videoId* from the YouTube Data API.

    Returns a Video on HTTP 200 (with channel "Invalid" when the id does
    not resolve to a video), or None on any other status code.
    """
    response = requests.get("https://youtube.googleapis.com/youtube/v3/videos?part=snippet&id={0}&key=[Your Google API Key Here]"
            .format(videoId))
    if response.status_code == 200:
        responseObj = json.loads(response.text)
        try:
            title = responseObj["items"][0]["snippet"]["title"]
            channel = responseObj["items"][0]["snippet"]["channelTitle"]
            return Video(title, channel)
        except (KeyError, IndexError):
            # Narrowed from the original bare `except:`: only a missing
            # "items" entry means the id is invalid; anything else should
            # surface instead of being silently swallowed.
            return Video(videoId, "Invalid")
    else:
        return None
def GetVideoIds(fileName):
    """Read the first CSV column of *fileName* into a list of video ids."""
    with open(fileName, 'r') as csvFile:
        return [row[0] for row in csv.reader(csvFile)]
# Resolve every id from the exported watch-later CSV and write the
# (title, channel) pairs to an Excel sheet.
videoIds = GetVideoIds("./watch_later_csv.csv")
totalVideos = len(videoIds)
currentVideo = 0
titles = []
channels = []
for id in videoIds:
    currentVideo = currentVideo + 1
    finishedPercentage = currentVideo / totalVideos
    print("Processing Video {0} of {1} - {2:.2f}% Finished\n".format(currentVideo, totalVideos, (finishedPercentage * 100)))
    vid = GetVideoInfo(id)
    # Identity check instead of the original `vid != None` equality test.
    if vid is not None:
        titles.append(vid.title)
        channels.append(vid.channel)
    else:
        # GetVideoInfo returns None on any non-200 API response.
        print("Sorry, something went wrong with video: {0}\n".format(id))
df = pd.DataFrame({'Title':titles, 'Channel':channels})
df.to_excel("./watch_later.xlsx")
4299518495 |
from tkinter import messagebox
from tkinter import *
from tkinter import simpledialog
import tkinter
from tkinter import filedialog
from imutils import paths
import matplotlib.pyplot as plt
import numpy as np
from tkinter.filedialog import askopenfilename
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn import linear_model
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras import optimizers
from genetic_selection import GeneticSelectionCV
import webbrowser
from sklearn.metrics import mean_squared_error
main = tkinter.Tk()
main.title("Groundwater Level Prediction Using Hybrid Artificial Neural Network with Genetic Algorithm")
main.geometry("1300x1200")
# NOTE(review): `global` at module level is a no-op in Python; these lines
# only document which module-level names the functions below share.
global filename, dataset
global X, Y, X_train, X_test, y_train, y_test, Y1
global mse, text, pathlabel
'''
Grey wolf optimization models a pack of wolves (alpha, beta, delta and omega)
hunting as a group: the alpha leads and takes the optimal decisions, the
omega separates the prey from its group, and the beta and delta attack it.
For feature selection the same roles apply: the alpha step picks the
optimized feature subset, beta and delta compute the fitness of each feature
(the best-fitness features are selected), and the omega step removes
irrelevant features.
'''
def grayWolf(X, Y):
    """Grey-wolf-inspired feature selection.

    Shuffles the samples (random population), computes the covariance of
    the features, derives a fitness value per feature from the eigenvalue
    spectrum and returns a 0/1 mask (1 = feature selected).

    :param X: 2-D feature matrix (samples x features)
    :param Y: 1-D label vector aligned with X
    :return: numpy array of length X.shape[1]; 1.0 marks selected features
    """
    # (The original initialized an unused X_selected_features local here.)
    # Shuffle samples, keeping X and Y aligned.
    indices = np.arange(X.shape[0])
    np.random.shuffle(indices)
    X = X[indices]
    Y = Y[indices]
    features = X.T
    solution = np.cov(features.astype(float))
    # Eigenvalues act as per-feature scores (alpha wolf decision step).
    iterations, vectors = np.linalg.eig(solution)
    fitness = []
    for i in range(len(iterations)):
        fitness.append(round(iterations[i] / np.sum(iterations), 8))
    optimal_features = np.sort(fitness)[::-1]  # best fitness first
    selected_features = np.zeros(len(fitness))
    # Mark every feature beaten by some higher-ranked fitness value.
    for i in range(0, X.shape[1]):
        for j in range(len(fitness)):
            if optimal_features[i] > fitness[j]:
                selected_features[j] = 1
    return selected_features
def crowSearch(X, Y):
    """Crow-search feature selection.

    Tries feature-count prefixes from 2 to 11, scores each with the test
    accuracy of a logistic regression (the fitness), and returns the
    prefix length with the best fitness.
    """
    best_count = []
    best_fitness = 0
    for count in range(2, 12):
        prefix = X[:, 0:count]
        X_train, X_test, y_train, y_test = train_test_split(prefix, Y, test_size=0.2)
        model = linear_model.LogisticRegression(max_iter=1000)
        model.fit(X_train, y_train)
        fitness = accuracy_score(model.predict(X_test), y_test)
        print(str(fitness) + " " + str(count))
        # Keep the prefix length only when it strictly improves the fitness.
        if fitness > best_fitness:
            best_fitness = fitness
            best_count = count
    return best_count
def uploadDataset():
    """Ask the user for a dataset file and show its path in the GUI."""
    global filename
    text.delete('1.0', END)
    filename = askopenfilename(initialdir = "Dataset")
    pathlabel.config(text=filename)
    text.insert(END,"Dataset loaded\n\n")
def processDataset():
    """Load the selected CSV, label-encode 'Situation' and split features.

    Fills the module-level X (features), Y (encoded class from the last
    column) and Y1 (water level, second-to-last column).
    """
    global filename, dataset, X, Y, Y1
    text.delete('1.0', END)
    le = LabelEncoder()
    dataset = pd.read_csv(filename)
    # Treat missing cells as 0 before any numeric processing.
    dataset.fillna(0, inplace = True)
    text.insert(END,str(dataset.head()))
    dataset['Situation'] = pd.Series(le.fit_transform(dataset['Situation'].astype(str)))
    dataset = dataset.values
    # Column 0 is presumably an identifier and is dropped -- TODO confirm.
    X = dataset[:,1:dataset.shape[1]-1]
    Y = dataset[:,dataset.shape[1]-1]
    Y1 = dataset[:,dataset.shape[1]-2]
    Y = Y.astype('int')
def ANNwithCrow():
    """Select features with Crow Search + GA, train an ANN and report MSE.

    Resets the module-level mse list, writes progress to the GUI text box,
    opens an HTML table of predictions in the browser and plots actual vs
    predicted water levels.
    """
    global filename, X, Y, Y1, mse
    mse = []
    text.delete('1.0', END)
    text.insert(END,"Total features found in dataset before applying Crow Search GA : "+str(X.shape[1])+"\n")
    # Run crow search to pick the best feature-prefix length.
    crow_search_features = crowSearch(X, Y)
    X1 = X[:,0:crow_search_features]
    # Genetic algorithm wrapped around a logistic-regression estimator.
    estimator = linear_model.LogisticRegression(solver="liblinear", multi_class="ovr") #
    selector = GeneticSelectionCV(estimator, cv=5, verbose=1, scoring="accuracy", max_features=5, n_population=5, crossover_proba=0.5, mutation_proba=0.2,
                        n_generations=5, crossover_independent_proba=0.5, mutation_independent_proba=0.05, tournament_size=3, n_gen_no_change=2,
                        caching=True, n_jobs=-1)
    selector = selector.fit(X1, Y)  # optimize the crow-search features with the GA
    print(selector.support_)
    X_selected_features = X1[:,selector.support_==True] # keep selected features
    print(X_selected_features.shape)
    text.insert(END,"Total features found in dataset after applying Crow Search GA : "+str(X_selected_features.shape[1])+"\n")
    # Split the selected features into train and test partitions.
    X_train, X_test, y_train, y_test = train_test_split(X_selected_features, Y1, test_size=0.2)
    # Build the ANN: two hidden ReLU layers with dropout, linear output.
    ann_model = Sequential()
    ann_model.add(Dense(512, input_shape=(X_train.shape[1],)))
    ann_model.add(Activation('relu'))
    ann_model.add(Dropout(0.3))
    ann_model.add(Dense(512))
    ann_model.add(Activation('relu'))
    ann_model.add(Dropout(0.3))
    ann_model.add(Dense(1))
    ann_model.compile(optimizer="adam", loss='mse', metrics=['mae']) # compile the model
    hist = ann_model.fit(X_train, y_train, batch_size=16,epochs=100, validation_data=(X_test, y_test))  # train on train data, validate on test data
    predict = ann_model.predict(X_test)  # predict on the held-out test data
    error = mean_squared_error(predict, y_test)  # calculate MSE
    mse.append(error)
    text.insert(END,"ANN with Crow Search MSE : "+str(error)+"\n\n")
    # Render an HTML comparison table and open it in the default browser.
    output = '<table border=1 align=center>'
    output+= '<tr><th>Algorithm Name</th><th>Test Data Water Level</th><th>Predicted Water Level</th></tr>'
    for i in range(len(predict)):
        output+='<tr><td>ANN with Crow Search GA</td><td>'+str(y_test[i])+'</td><td>'+str(predict[i])+"</td></tr>"
    output+='</table></body></html>'
    f = open("output.html", "w")
    f.write(output)
    f.close()
    webbrowser.open("output.html",new=1)
    plt.plot(y_test, color = 'red', label = 'Available Test Data Water Level')
    plt.plot(predict, color = 'green', label = 'Predicted Water Level')
    plt.title('ANN with Crow Search Water Level Prediction')
    plt.xlabel('Test Data Values')
    plt.ylabel('Water Level Prediction')
    plt.legend()
    plt.show()
def ANNwithWolf():
    """Select features with Grey Wolf + GA, train an ANN and report MSE.

    Mirrors ANNwithCrow but seeds the GA with the grey-wolf feature mask;
    appends its MSE to the module-level mse list for the comparison graph.
    """
    global filename, X, Y, Y1, mse
    text.insert(END,"Total features found in dataset before applying Gray Wolf GA : "+str(X.shape[1])+"\n")
    gray_wolf_features = grayWolf(X, Y)
    # Keep only the columns flagged by the grey-wolf mask.
    X1 = X[:,gray_wolf_features==1]
    estimator = linear_model.LogisticRegression(solver="liblinear", multi_class="ovr") #
    selector = GeneticSelectionCV(estimator, cv=5, verbose=1, scoring="accuracy", max_features=5, n_population=5, crossover_proba=0.5, mutation_proba=0.2,
                        n_generations=5, crossover_independent_proba=0.5, mutation_independent_proba=0.05, tournament_size=3, n_gen_no_change=2,
                        caching=True, n_jobs=-1)
    selector = selector.fit(X1, Y)  # optimize the grey-wolf features with the GA
    print(selector.support_)
    X_selected_features = X1[:,selector.support_==True]
    print(X_selected_features.shape)
    text.insert(END,"Total features found in dataset after applying Gray Wolf GA : "+str(X_selected_features.shape[1])+"\n")
    X_train, X_test, y_train, y_test = train_test_split(X_selected_features, Y1, test_size=0.2)
    # Same ANN topology as ANNwithCrow for a fair MSE comparison.
    ann_model = Sequential()
    ann_model.add(Dense(512, input_shape=(X_train.shape[1],)))
    ann_model.add(Activation('relu'))
    ann_model.add(Dropout(0.3))
    ann_model.add(Dense(512))
    ann_model.add(Activation('relu'))
    ann_model.add(Dropout(0.3))
    ann_model.add(Dense(1))
    ann_model.compile(optimizer="adam", loss='mse', metrics=['mae'])
    hist = ann_model.fit(X_train, y_train, batch_size=16,epochs=100, validation_data=(X_test, y_test))
    predict = ann_model.predict(X_test)
    error = mean_squared_error(predict, y_test)
    mse.append(error)
    text.insert(END,"ANN with Gray Wolf MSE : "+str(error)+"\n\n")
    # Render an HTML comparison table and open it in the default browser.
    output = '<table border=1 align=center>'
    output+= '<tr><th>Algorithm Name</th><th>Test Data Water Level</th><th>Predicted Water Level</th></tr>'
    for i in range(len(predict)):
        output+='<tr><td>ANN with Grey Wolf GA</td><td>'+str(y_test[i])+'</td><td>'+str(predict[i])+"</td></tr>"
    output+='</table></body></html>'
    f = open("output.html", "w")
    f.write(output)
    f.close()
    webbrowser.open("output.html",new=1)
    plt.plot(y_test, color = 'red', label = 'Available Test Data Water Level')
    plt.plot(predict, color = 'green', label = 'Predicted Water Level')
    plt.title('ANN with Gray Wolf GA Water Level Prediction')
    plt.xlabel('Test Data Values')
    plt.ylabel('Water Level Prediction')
    plt.legend()
    plt.show()
def graph():
    """Show a bar chart comparing the MSE of the two ANN variants."""
    height = mse
    bars = ('ANN with Crow Search GA', 'ANN with Gray Wolf GA')
    y_pos = np.arange(len(bars))
    plt.bar(y_pos, height)
    plt.xticks(y_pos, bars)
    plt.title("ANN MSE Comparison Between Crow Search & Gray Wolf")
    plt.xlabel("Algorithm Names")
    # BUGFIX: the original called plt.title a second time here, which
    # silently overwrote the chart title; "MSE ERROR" labels the y axis.
    plt.ylabel("MSE ERROR")
    plt.show()
def close():
    """Terminate the application by destroying the Tk root window."""
    main.destroy()
def GUI():
    """Build the Tkinter interface and run the event loop.

    Creates the title banner, the action buttons (upload, preprocess, the
    two ANN runs, the comparison graph, exit) and the output text area,
    then blocks in mainloop().
    """
    global main, text, pathlabel
    font = ('times', 16, 'bold')
    title = Label(main, text='Groundwater Level Prediction Using Hybrid Artificial Neural Network with Genetic Algorithm')
    title.config(bg='brown', fg='white')
    title.config(font=font)
    title.config(height=3, width=120)
    title.place(x=0,y=5)
    font1 = ('times', 13, 'bold')
    upload = Button(main, text="Upload Ground Water Level Dataset", command=uploadDataset)
    upload.place(x=50,y=100)
    upload.config(font=font1)
    # Label that displays the path of the loaded dataset.
    pathlabel = Label(main)
    pathlabel.config(bg='brown', fg='white')
    pathlabel.config(font=font1)
    pathlabel.place(x=400,y=100)
    preprocess = Button(main, text="Preprocess Dataset", command=processDataset)
    preprocess.place(x=50,y=150)
    preprocess.config(font=font1)
    anncrow = Button(main, text="Run ANN with Crow Search GA", command=ANNwithCrow)
    anncrow.place(x=300,y=150)
    anncrow.config(font=font1)
    annwolf = Button(main, text="Run ANN with Gray Wolf GA", command=ANNwithWolf)
    annwolf.place(x=600,y=150)
    annwolf.config(font=font1)
    graphButton = Button(main, text="MSE Comparison Graph", command=graph)
    graphButton.place(x=50,y=200)
    graphButton.config(font=font1)
    exitButton = Button(main, text="Exit", command=close)
    exitButton.place(x=300,y=200)
    exitButton.config(font=font1)
    font1 = ('times', 12, 'bold')
    # Scrollable text area used by the handlers to report progress.
    text=Text(main,height=30,width=150)
    scroll=Scrollbar(text)
    text.configure(yscrollcommand=scroll.set)
    text.place(x=10,y=250)
    text.config(font=font1)
    main.config(bg='brown')
    main.mainloop()
if __name__ == "__main__":
    # Build the Tkinter interface and start the event loop.
    GUI()
| vinay-kumar-uppala/Major-Project-D9 | WaterLevelPrediction.py | WaterLevelPrediction.py | py | 11,635 | python | en | code | 0 | github-code | 13 |
7733715166 | import npyscreen
import curses
from phonebook.extra import notifications
class RecordsList(npyscreen.GridColTitles):
    """Grid widget listing phone-book records with per-key actions."""

    def __init__(self, *args, **keywords):
        super(RecordsList, self).__init__(*args, **keywords)
        # Key bindings: d = delete, e = edit, c = reload, l = swallowed no-op.
        self.add_handlers({
            curses.KEY_RIGHT: self.h_exit_right,
            "d": self.delete_record_listener,
            "e": self.edit_record_listener,
            "c": self.restore_list_listener,
            "l": self.stub,
        })
        self.values = []
        self.columns = 4
        self.col_titles = ['Name', 'Surname', 'Phone', 'Birthday']
        self.select_whole_line = True

    def delete_record_listener(self, *args, **keywords):
        """Ask for confirmation, then delete the highlighted record."""
        if not self.values:
            return
        row = self.selected_row()
        confirmed = notifications.spawn_notify_confirmation("DELETE THE RECORD")
        if confirmed:
            # (name, surname) is the record's unique key.
            self.find_parent_app().book.delete_record(name=row[0], surname=row[1])
            self.update_list()

    def edit_record_listener(self, *args, **keywords):
        """Open the editor form pre-loaded with the highlighted record."""
        if not self.values:
            return
        row = self.selected_row()
        app = self.find_parent_app()
        app.getForm('RECORDEDITOR').value = [row[0], row[1]]
        app.switchForm('RECORDEDITOR')

    def restore_list_listener(self, *args, **keyword):
        """Reload every record from the phone book."""
        self.update_list()

    def stub(self, *args, **keywords):
        """Deliberate no-op bound to the 'l' key."""
        pass

    def update_list(self):
        """Refresh self.values from the book and redraw the grid."""
        self.values = self.find_parent_app().book.get_all_records()
        self.display()
class RecordsBox(npyscreen.BoxTitle):
    """Titled box wrapping the RecordsList grid."""

    _contained_widget = RecordsList

    def __init__(self, *args, **kwargs):
        super(RecordsBox, self).__init__(*args, **kwargs)
        self.add_handlers({curses.KEY_RIGHT: self.h_exit_right})
        self.name = "Records"
        self.action_type = ''

    def update_list(self, *args, **keywords):
        """Redraw the box; reload all records unless showing search results."""
        if self.action_type == 'Search':
            # Keep the one-shot search results on screen, but fall back to
            # the full list on the next refresh.
            self.action_type = ''
        else:
            self.values = self.find_parent_app().book.get_all_records()
        self.display()
| fiskirton/Phone-book | phonebook/gui/widgets/records_list_widget.py | records_list_widget.py | py | 2,117 | python | en | code | 0 | github-code | 13 |
import numpy as np

# Replace this with your actual dataset
observations = [2.5, 3.0, 2.7, 3.2, 2.8, 3.5, 3.1, 2.9]

# Maximum-likelihood estimates of the Gaussian parameters: the mean is the
# sample average, and np.std's default ddof=0 gives the MLE (population)
# standard deviation.
data = np.asarray(observations)
sample_mean = data.mean()
sample_stddev = data.std()

print(f"Estimated Mean (μ_hat): {sample_mean:.4f}")
print(f"Estimated Standard Deviation (σ_hat): {sample_stddev:.4f}")
| Shivkisku/data_science_problems | GaussianDistributionEstimation.py | GaussianDistributionEstimation.py | py | 402 | python | en | code | 0 | github-code | 13 |
21635325904 | import speech_recognition as sr
from chatterbot import ChatBot
from chatterbot.training.trainers import ListTrainer
# Create a new instance of a ChatBot
bot = ChatBot("Terminal",
storage_adapter="chatterbot.adapters.storage.JsonDatabaseAdapter",
logic_adapters=[
"chatterbot.adapters.logic.MathematicalEvaluation",
"chatterbot.adapters.logic.TimeLogicAdapter",
"chatterbot.adapters.logic.ClosestMatchAdapter"
],
input_adapter="chatterbot.adapters.input.VariableInputTypeAdapter",
output_adapter="chatterbot.adapters.output.TerminalAdapter",
database="../database.db"
)
# Seed the bot with a small hard-coded conversation corpus.
bot.set_trainer(ListTrainer)
bot.train([
    "hello",
    "hi",
    "how are you",
    "i am fine",
    "that is good to hear",
    "thank you",
    "you are welcome",
    "sorry",
    "its okay",
    "what is your name",
    "my name is HURO",
])
print("Type something to begin...")
# The following loop will execute each time the user enters input
while True:
    try:
        # obtain audio from the microphone
        r = sr.Recognizer()
        with sr.Microphone() as source:
            print("\nSay something!")
            r.dynamic_energy_threshold=True
            #r.energy_threshold = 200
            r.adjust_for_ambient_noise(source, 0.5) # listen for 0.5 second to calibrate the energy threshold for ambient noise levels
            r.pause_threshold = 0.5
            r.dynamic_energy_adjustment_ratio = 2.5 # speech is louder than ambedient noise by a factor of 2.5
            audio = r.listen(source)
        # NOTE(review): recognize_sphinx is called twice on the same audio
        # (once to print, once for the bot) — redundant offline decoding.
        print(r.recognize_sphinx(audio))
        # We pass None to this method because the parameter
        # is not used by the TerminalAdapter
        bot_input = bot.get_response(r.recognize_sphinx(audio))
        print("\n")
    # Press ctrl-c or ctrl-d on the keyboard to exit
    except (KeyboardInterrupt, EOFError, SystemExit):
        break
| RoboticsClubIITK/2016_HuRo | Speech_Engine/Speak_n_chat.py | Speak_n_chat.py | py | 1,887 | python | en | code | 2 | github-code | 13 |
42786879104 | import tkinter as tk
from tkinter import ttk
import main
def button_clicked():
    """Copy the GUI parameter values into the PSO module, then run it."""
    # Transfer every tkinter variable into the matching `main` setting.
    for attr, var in (
        ("swarmSize", swarm_size),
        ("velocityMultiplier", velocity_multiplier),
        ("maxNumberOfIterations", iterations),
        ("c1", c1),
        ("c2", c2),
        ("w", w),
        ("pickedFunction", function),
    ):
        setattr(main, attr, var.get())
    main.mainPSO()
# creating window
root = tk.Tk()
root.title("PSO")
root.geometry('720x720')

# Var Defs — tkinter variables backing the parameter entry fields.
swarm_size = tk.IntVar()
velocity_multiplier = tk.DoubleVar()
iterations = tk.IntVar()
function = tk.StringVar()
c1 = tk.IntVar()
c2 = tk.IntVar()
w = tk.IntVar()

# GUI
label0 = ttk.Label(root, text="Hello there!")
label1 = ttk.Label(root, text="Maksymalna liczba iteracji")
iterations_entry = ttk.Entry(root, textvariable=iterations, width=10)
label2 = ttk.Label(root, text="Liczba cząsteczek")
swarm_entry = ttk.Entry(root, textvariable=swarm_size, width=10)
label3 = ttk.Label(root, text="Współczynnik dążenia do najlepszego lokalnego rozwiązania: ")
c1_entry = ttk.Entry(root, textvariable=c1, width=10)
label4 = ttk.Label(root, text="Współczynnik dążenia do najlepszego globalnego rozwiązania: ")
c2_entry = ttk.Entry(root, textvariable=c2, width=10)
label5 = ttk.Label(root, text="współczynnik bezwładności, określa wpływ prędkości w poprzednim kroku")
w_entry = ttk.Entry(root, textvariable=w, width=10)
label6 = ttk.Label(root, text="Mnożnik prędkości")
velocity_entry = ttk.Entry(root, textvariable=velocity_multiplier, width=10)
label7 = ttk.Label(root, text="Funkcja")
function_entry = ttk.Combobox(root, width=25, textvariable=function)
function_entry['value'] = ('Funkcja Rastringa',
                           'Funkcja Stołu Holdera',
                           'Funkcja Eggholder')
function_entry.current()
# Bug fix: the original chained .grid() onto the constructor, which binds
# None (grid's return value) to `button`; create the widget first, then
# place it.
button = ttk.Button(root, text="START", command=button_clicked)
button.grid(row=10, column=0)
label0.grid(row=0, column=0)
label1.grid(row=1, column=0)
iterations_entry.grid(row=1, column=1)
label2.grid(row=2, column=0)
swarm_entry.grid(row=2, column=1)
label3.grid(row=3, column=0)
c1_entry.grid(row=3, column=1)
label4.grid(row=4, column=0)
c2_entry.grid(row=4, column=1)
label5.grid(row=5, column=0)
w_entry.grid(row=5, column=1)
label6.grid(row=6, column=0)
velocity_entry.grid(row=6, column=1)
label7.grid(row=7, column=0)
function_entry.grid(row=7, column=1)
root.mainloop()
| FilGor/Python-ParticleSwarmOptimization | GUI.py | GUI.py | py | 2,378 | python | pl | code | 0 | github-code | 13 |
32952389742 | from glados import Module
from PIL import ImageFont, Image, ImageDraw
from os.path import join, dirname, realpath, exists
from os import makedirs
class Trumpify(Module):
    """GLaDOS chat module that renders text as a fake Trump tweet image."""
    # Pixel layout constants for the tweet template.
    left_margin = 56
    right_margin = 68
    font_size = 26
    font_pad = 2
    def __init__(self, server_instance, full_name):
        super(Trumpify, self).__init__(server_instance, full_name)
        # Rendered tweets are cached per-user under local_data_dir/trumpify.
        self.cache_dir = join(self.local_data_dir, 'trumpify')
        if not exists(self.cache_dir):
            makedirs(self.cache_dir)
    @Module.command('trumpify', '<user or text>', 'If user, converts their last message into a trump tweet. If text, '
                                                  'converts the text into a trump tweet.')
    async def trumpify(self, message, content):
        """Command entry point: tweet-ify a mentioned user's last message,
        falling back to the raw command text."""
        members, roles, error = self.parse_members_roles(message, content, membercount=1, rolecount=0)
        if error or len(members) == 0:
            text = content
        else:
            text = self.get_member_text(members[0])
            if not text:
                text = content
        # One cached image per requesting user, keyed by author id.
        file_name = join(self.local_data_dir, 'trumpify', message.author.id + '.png')
        self.generate_tweet(text, file_name)
        await self.client.send_file(message.channel, file_name)
    def get_member_text(self, member):
        """Return the member's most recent message content from the client's
        message cache, or None if they have none cached."""
        for msg in reversed(self.client.messages):
            if msg.author == member:
                return msg.content
        return None
    def generate_tweet(self, text, output_file_name):
        """Compose header + wrapped text + footer into a PNG at output_file_name."""
        this_path = dirname(realpath(__file__))
        # Load header and footer images
        header_file = join(this_path, 'trump-tweet-header.png')
        footer_file = join(this_path, 'trump-tweet-footer.png')
        header = Image.open(header_file, 'r')
        footer = Image.open(footer_file, 'r')
        # Create the background image and render the tweet text into the middle (making space for header and footer)
        font = ImageFont.truetype(join(this_path, 'DejaVuSerif.ttf'), self.font_size)
        lines = self.wrap_text(text, font, header.size[0])
        canvas_width = header.size[0]
        canvas_height = header.size[1] + footer.size[1] + len(lines) * (self.font_size + self.font_pad * 2)
        canvas = Image.new('RGB', (canvas_width, canvas_height), (255, 255, 255))
        draw = ImageDraw.Draw(canvas)
        for i, line in enumerate(lines):
            # 30px line advance == font_size + 2 * font_pad, matching
            # the canvas_height computation above.
            draw.text((self.left_margin, header.size[1] + i*30), line, (0, 0, 0), font=font)
        # Add footer and header
        canvas.paste(header, (0, 0))
        canvas.paste(footer, (0, canvas_height - footer.size[1]))
        canvas.save(output_file_name)
    def wrap_text(self, text, font, img_width):
        """Greedy word-wrap of text to fit between the margins of img_width.

        Returns a list of stripped lines."""
        max_width = img_width - self.left_margin - self.right_margin
        lines = list()
        buffer = ''
        for word in text.split():
            # buffer carries a leading space, so getsize measures
            # "buffer + word" slightly conservatively before stripping.
            if font.getsize(buffer + word)[0] < max_width:
                buffer += ' ' + word
            else:
                lines.append(buffer.strip())
                buffer = word
        if buffer:
            lines.append(buffer.strip())
        return lines
| TheComet/GLaDOS2 | modules/general/trumpify.py | trumpify.py | py | 3,079 | python | en | code | 4 | github-code | 13 |
27731721322 | import sys
def solution(n, arr):
    """Greedy interval scheduling: print and return the maximum number of
    non-overlapping meetings among the first n (start, end) pairs."""
    # Sort by end time (start time breaks ties); picking the earliest
    # finishing meeting is the classic optimal greedy choice.
    meetings = sorted(arr, key=lambda m: (m[1], m[0]))
    selected = 1
    last_end = meetings[0][1]
    for start, finish in meetings[1:n]:
        if last_end <= start:
            selected += 1
            last_end = finish
    print(selected)
    return selected
# Read n meetings as "start end" pairs from stdin (BOJ-style input),
# then solve.
if __name__=="__main__":
    n=int(sys.stdin.readline())
    arr=[]
    for i in range(n):
        arr.append(tuple(map(int,sys.stdin.readline().split())))
solution(n,arr) | Wolfsil/CodingTestComplete | python/난이도 어려움/회의실 배정.py | 회의실 배정.py | py | 454 | python | en | code | 0 | github-code | 13 |
10734445138 | class Solution:
def majorityElement(self, nums):
limit=len(nums)/3
counters=[0,0]
cands=[None,None]
# First pass to find the two possible candidates.
for elem in nums:
if elem==cands[0]:
counters[0]+=1
elif elem==cands[1]:
counters[1]+=1
elif counters[0]==0:
counters[0]+=1
cands[0]=elem
elif counters[1]==0:
counters[1]+=1
cands[1]=elem
elif elem==cands[1]:
counters[1]+=1
else:
counters[0]-=1
counters[1]-=1
# Second pass to make sure that both candidates occur more than n/3 times.
ans=[]
for cand in cands:
if nums.count(cand)>limit:
ans.append(cand)
return ans
| Therealchainman/LeetCode | problems/majority_element_ii/solution.py | solution.py | py | 896 | python | en | code | 0 | github-code | 13 |
15356158431 | import numpy as np
import torch.nn as nn
from config import *
class REINFORCE(nn.Module):
    """Policy network trained with REINFORCE (vanilla policy gradient)."""

    def __init__(self, no_states, no_actions):
        super(REINFORCE, self).__init__()
        self.no_states = no_states
        self.no_actions = no_actions
        # Two-layer MLP mapping a state to an action distribution.
        # Fix: bare nn.Softmax() relied on deprecated implicit-dim
        # behaviour (and warned at runtime); dim=-1 is explicit and
        # produces identical results for the 1-D and 2-D inputs used here.
        self.net = nn.Sequential(
            nn.Linear(no_states, 128),
            nn.ReLU(),
            nn.Linear(128, no_actions),
            nn.Softmax(dim=-1)
        )

    def forward(self, state):
        """Return the action distribution pi(.|state)."""
        policy = self.net(state)
        return policy

    @classmethod
    def train_model(cls, net, transitions, optimizer):
        """Run one REINFORCE update over a batch of transitions.

        ``gamma`` and ``device`` come from the project's config module
        (star-imported by this file).  Returns the scalar loss tensor.
        """
        states, actions, rewards, dones = transitions.state, transitions.action, transitions.reward, transitions.done

        states = torch.stack(states).to(device)
        actions = torch.stack(actions).to(device)
        rewards = torch.tensor(rewards).to(device)
        dones = torch.tensor(dones).to(device)

        # Discounted returns, computed backwards; a terminal step resets
        # the running return to zero via the inverted done mask.
        returns = torch.zeros_like(rewards)
        running_return = 0
        for t in reversed(range(len(rewards))):
            running_return = rewards[t] + gamma * running_return * ~dones[t]
            returns[t] = running_return

        policies = net(states)
        policies = policies.view(-1, net.no_actions)

        # log pi(a_t|s_t): actions are one-hot, so multiply-and-sum picks
        # out the log-probability of the action actually taken.
        log_policies = (torch.log(policies) * actions.detach()).sum(dim=1)

        loss = (-log_policies * returns).sum()

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        return loss

    def get_action(self, state):
        """Sample an action index from pi(.|state)."""
        policy = self.forward(state)
        policy = policy.squeeze().cpu().detach().numpy()
        action = np.random.choice(self.no_actions, 1, p=policy)[0]
        return action
| Syzygianinfern0/Stable-Baselines | Policy Gradients/1. REINFORCE/model.py | model.py | py | 1,744 | python | en | code | 0 | github-code | 13 |
22787858901 | class Solution(object):
def massage(self, nums):
length=len(nums)
if length==0:
return 0
if length<=2:
return max(nums)
result=[nums[0],max(nums[:2])]
for i in range(2,length):
temp=max(result[i-2]+nums[i],result[i-1])
result.append(temp)
return result[-1]
| lmb633/leetcode | 17.16massage.py | 17.16massage.py | py | 369 | python | en | code | 0 | github-code | 13 |
73485228497 | # A noob programmer was given two simple tasks: sum and sort the elements of the given array
# a = [a1, a2, ..., an]. He started with summing and did it easily, but decided to store the sum he found in some random position of the original array which
# was a bad idea. Now he needs to cope with the second task, sorting the original array a, and it's giving him trouble since he modified it.
# Given the array shuffled, consisting of elements a1, a2, ..., an, a1 + a2 + ... + an in random order, return the sorted array of original elements a1, a2,
# ..., an.
# Example
# For shuffled = [1, 12, 3, 6, 2], the output should be
# solution(shuffled) = [1, 2, 3, 6].
# 1 + 3 + 6 + 2 = 12, which means that 1, 3, 6 and 2 are original elements of the array.
# For shuffled = [1, -3, -5, 7, 2], the output should be
# solution(shuffled) = [-5, -3, 2, 7].
def solution(shuffled):
    """Return the sorted original array, with the stray sum element removed.

    ``shuffled`` holds a1..an plus (a1 + ... + an) in random order.  If the
    injected total is s, then total(shuffled) = 2*s, so the stray element is
    the (first) value x with 2*x == total.  Sorting plus one O(n) scan
    replaces the original O(n^2) re-summation per candidate (which also
    shadowed the builtin ``sum``).
    """
    shuffled.sort()
    total = sum(shuffled)
    for value in shuffled:
        if 2 * value == total:
            shuffled.remove(value)
            return shuffled
    # Unreachable for valid inputs (matches the original's implicit None).
| aslamovamir/codeSignalPractice | shuffled_array.py | shuffled_array.py | py | 1,358 | python | en | code | 0 | github-code | 13 |
21836796146 | import os.path
import openpyxl
def ticket_saver(theme, sender, send_time, path):
    """Append a ticket row (theme, sender, time) to ``<path>.xlsx``.

    Creates the workbook with a header row on first use, then writes the
    ticket into the first empty row of column A.

    NOTE(review): the workbook location is a hard-coded absolute user path;
    consider making it configurable.
    """
    my_path = f"/Users/sevak/PycharmProjects/VK_bot/{path}.xlsx"
    if os.path.isfile(my_path):
        wb = openpyxl.load_workbook(my_path)
        wb.active = 0
        sheet = wb.active
    else:
        wb = openpyxl.Workbook()
        wb.active = 0
        sheet = wb.active
        sheet['A1'] = 'Тема тикета'
        sheet['B1'] = 'От кого'
        sheet['C1'] = 'Когда'
    # Scan down column A for the first empty row below the header.
    # (Idiom fix: `is None` instead of `== None`; f-string cell addresses.)
    a = 2
    while sheet[f'A{a}'].value is not None:
        a += 1
    sheet[f'A{a}'].value = theme
    sheet[f'B{a}'].value = sender
    sheet[f'C{a}'].value = send_time
    wb.save(my_path)
    wb.close()
| PEBU3OP1/VK_bot | VK_bot/excel.py | excel.py | py | 788 | python | en | code | 0 | github-code | 13 |
25698509032 | #KNN Surprise from kaggle
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from sklearn.model_selection import train_test_split as train_test_split_sklearn
import surprise
from surprise.model_selection.split import train_test_split
from surprise.prediction_algorithms.knns import KNNBasic
from surprise import accuracy
import matplotlib.pyplot as plt
# Input data files are available in the "../input/" directory.
# For example, running this (by clicking run or pressing Shift+Enter) will list the files in the input directory
import os
# Load the Retail Rocket e-commerce dataset (events + item metadata) from a
# local folder; item properties ship in two CSV parts that are concatenated.
FOLDER = '/home/nick/Desktop/thesis/datasets/retail-rocket/'
events = pd.read_csv(FOLDER + 'events.csv')
category_tree = pd.read_csv(FOLDER + 'category_tree.csv')
item_properties_part1 = pd.read_csv(FOLDER + 'item_properties_part1.csv')
item_properties_part2 = pd.read_csv(FOLDER + 'item_properties_part2.csv')
item_properties_part = pd.concat([item_properties_part1, item_properties_part2])
print(events.head())
print(category_tree.head())
print(item_properties_part.head())
# Keep only the columns needed for collaborative filtering.
data = events[['visitorid','event','itemid']]
info_event_events = events.groupby(by=['event']).size()
print(info_event_events)
print(data.head())
transfrom_rating = []
# view = 1, addtocart = 2, transaction = 3
def transfrom_data(data_raw):
    """Convert event names to numeric implicit ratings (view=1,
    addtocart=2, transaction=3) and return (visitorid, itemid, rating).

    Bug fix: ratings were accumulated in the module-level
    ``transfrom_rating`` list, so a second call would carry stale entries
    and fail with a length mismatch; the accumulator is now local.
    """
    data = data_raw.copy()
    ratings = []
    for event in data.event:
        if event == 'view':
            ratings.append(1)
        elif event == 'addtocart':
            ratings.append(2)
        elif event == 'transaction':
            ratings.append(3)
    data['rating'] = ratings
    return data[['visitorid', 'itemid', 'rating']]
data_surprise = transfrom_data(data)
print(data_surprise.head())
# Split per rating level.
# NOTE(review): per transfrom_data, rating 2 = addtocart and 3 = transaction,
# so `data_transaction` (==2) and `data_addtocard` (==3) are swapped names.
data_view = data_surprise[data_surprise['rating']==1].reset_index(drop= True)
data_transaction = data_surprise[data_surprise['rating']==2].reset_index(drop= True)
data_addtocard = data_surprise[data_surprise['rating']==3].reset_index(drop= True)
# Subsample the dominant classes to build a smaller tuning set.
data_view_train, data_view_test = train_test_split_sklearn(data_view, test_size= 0.008)
data_transaction_train, data_transaction_test = train_test_split_sklearn(data_transaction, test_size= 0.33)
data_tuning = pd.concat([data_addtocard, data_view_test, data_transaction_test]).sort_values(by = 'rating').reset_index(drop=True)
print("The number item view ", data_tuning[data_tuning['rating']==1].shape[0])
print("The number item tranaction ", data_tuning[data_tuning['rating']==2].shape[0])
print("The number item addtacard ", data_tuning[data_tuning['rating']==3].shape[0])
print(data_tuning.head())
# Wrap the dataframe for the surprise library (ratings in [1, 3]).
reader = surprise.Reader(rating_scale=(1, 3))
data = surprise.Dataset.load_from_df(data_tuning, reader)
type(data)  # no-op in a script (REPL leftover)
trainset, testset = train_test_split(data, test_size=0.25)
type(trainset)  # no-op in a script (REPL leftover)
# Item-based KNN with cosine similarity.
sim_options = {'name': 'cosine',
               'user_based': False
               }
algo_knn_basic = KNNBasic(sim_options=sim_options)
predictions = algo_knn_basic.fit(trainset).test(testset)
result = pd.DataFrame(predictions, columns=['visitor_id', 'item_id', 'base_event', 'predict_event', 'details'])
result.drop(columns = {'details'}, inplace = True)
result['error'] = abs(result['base_event'] - result['predict_event'])
print(result.head())
# Histogram of predicted ratings, overall and per true-rating class.
result['predict_event'].hist(bins= 100, figsize= (20,10))
result[result['base_event']== 1]['predict_event'].hist(bins= 100, figsize= (20,10))
result[result['base_event']== 2]['predict_event'].hist(bins= 100, figsize= (20,10))
mae_model = accuracy.mae(predictions)
rmse_model = accuracy.rmse(predictions)
print(mae_model,rmse_model)
| mindis/thesis | retail-rocket /knnbasic.py | knnbasic.py | py | 3,575 | python | en | code | 0 | github-code | 13 |
6148864096 | import settings
import uasyncio as asyncio
from primitives.pushbutton import Pushbutton
from homie.constants import FALSE, TRUE, BOOLEAN
from homie.device import HomieDevice
from homie.node import HomieNode
from homie.property import HomieProperty
from machine import Pin
def reset(led):
    """Feed the watchdog, switch the LED off and hard-reset the board."""
    import machine
    machine.WDT().feed()
    led(0)
    machine.reset()
class SmartSocket(HomieNode):
    """Homie node driving a latching relay on an OW8266-02Q wifi socket.

    The relay is switched by pulsing one of two pins (r_on / r_off) low
    for 100 ms; a push button toggles the state, and a long press resets
    the board.
    """

    def __init__(self):
        super().__init__(
            id="relay", name="Wifi Power Socket", type="OW8266-02Q"
        )
        self.led = Pin(4, Pin.OUT, value=1)
        self.r_on = Pin(12, Pin.OUT)
        self.r_off = Pin(5, Pin.OUT)

        self.p_power = HomieProperty(
            id="power",
            name="Relay",
            settable=True,
            retained=True,
            datatype=BOOLEAN,
            default=FALSE,
            restore=True,
            on_message=self.on_power_msg,
        )
        self.add_property(self.p_power)

        self.button = Pushbutton(Pin(14, Pin.IN, Pin.PULL_UP))
        self.button.release_func(self.toggle, ())
        self.button.long_func(reset, (self.led,))

    async def off(self):
        """Pulse the 'off' coil low for 100 ms."""
        self.r_off(0)
        await asyncio.sleep_ms(100)
        self.r_on(1)

    async def on(self):
        """Pulse the 'on' coil low for 100 ms."""
        self.r_on(0)
        await asyncio.sleep_ms(100)
        self.r_off(1)

    def on_power_msg(self, topic, payload, retained):
        """MQTT set-handler for the power property.

        Bug fix: off()/on() are coroutines, and the original called them
        without scheduling, so they never actually ran; schedule them on
        the uasyncio loop instead.
        """
        if payload == FALSE:
            asyncio.create_task(self.off())
        elif payload == TRUE:
            asyncio.create_task(self.on())

    async def toggle(self):
        """Button handler: flip the relay and mirror the state to MQTT."""
        if self.p_power.value == TRUE:
            await self.off()
            self.p_power.value = FALSE
        else:
            await self.on()
            self.p_power.value = TRUE
def main():
    """Create the Homie device with the relay node and run it forever."""
    device = HomieDevice(settings)
    device.add_node(SmartSocket())
    device.run_forever()
# Start the device when flashed as the board's main script.
if __name__ == "__main__":
    main()
| microhomie/microhomie | examples/obi-socket/main.py | main.py | py | 1,871 | python | en | code | 78 | github-code | 13 |
21000711676 | import numpy as np
import random
values = [3.0, 4.0, 1.0, 2.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
def get_percentile(values, N):
    """Return [0.0] followed by the N-1 interior percentile cut points of
    ``values`` (at the rounded 100*i/N percent marks)."""
    return [0.0] + [np.percentile(values, round(i * 100 / N))
                    for i in range(1, N)]
def get_percentile_number(N):
    """Return the quartile-bucket index of value N, relative to the
    module-level ``values`` list (buckets from get_percentile(values, 4)).

    Values at or below the smallest bound map to 0; values at or above the
    largest map to the last index.  Unreachable ``break`` statements that
    followed each ``return`` have been removed.

    NOTE(review): percentiles[i + 1] can raise IndexError on the final
    iteration if N falls in no bucket; unchanged from original behaviour.
    """
    percentiles = get_percentile(values, 4)
    for i in range(len(percentiles)):
        if N >= percentiles[i] and N < percentiles[i + 1]:
            return i
        elif N <= min(percentiles):
            return 0
        elif N >= max(percentiles):
            return len(percentiles) - 1
def values_equalization(values, add_random=True):
    """Map each value to (bucket_index * step), optionally jittered with
    uniform noise in [0, step); prints the cut points and the result."""
    percentiles = get_percentile(values, 4)
    print(percentiles)
    step = 1 / len(percentiles)
    equalized = []
    for value in values:
        bucket = get_percentile_number(value)
        # Draw noise only when requested, keeping RNG usage identical.
        noise = random.random() * step if add_random else 0.0
        equalized.append(bucket * step + noise)
    print(equalized)
values_equalization(values, add_random=True)
| somasan/projectone | six/six35.py | six35.py | py | 1,179 | python | en | code | 0 | github-code | 13 |
22943010034 | import env_examples # Modifies path, DO NOT REMOVE
from sympy import Symbol
import numpy as np
from src import Circuit, CoordinateSystem, VoltageSource, Wire, World
if __name__ == "__main__":
WORLD_SHAPE = (101, 101)
BATTERY_VOLTAGE = 1.0
HIGH_WIRE_RESISTANCE = 1.0
LOW_WIRE_RESISTANCE = 0.01
cartesian_variables = Symbol("x"), Symbol("y")
x, y = cartesian_variables
x_expression_vertical = 1*x
y_expression_vertical = 1*y
vertical_eqs = (x_expression_vertical, y_expression_vertical)
x_expression_horizontal = 1*x
y_expression_horizontal = 1*y
horizontal_eqs = (x_expression_horizontal, y_expression_horizontal)
#Considérons les deux rayons du circuit D, soit le petit rayon de 20 et le grand rayon de 30. L'angle du rayon le plus haut est de
# 60 degrés donc pi/3 et l'angle le plus bas est de 15 degrés donc pi/12. Cependant, puisque que le programme considère
# seulement les angles entre 0 et pi/4, on devra diviser les angles par 4. Dans un premier temps, on peu créer
#les fils qui ne requierts pas d'angle (qui ne sont pas en forme d'arc de cercle) et ensuite, à l'aide d'une boucle for, on peut faire une multitude de petits déplacements
#qui donneront une forme circulaire pour les deux boucles.
wires = [
VoltageSource((20*np.cos(np.pi/24), 20*np.sin(np.pi/24)), (20*np.cos(7*np.pi/144), (20*np.sin(7*np.pi/144))), horizontal_eqs, cartesian_variables, BATTERY_VOLTAGE),
Wire((30*np.cos(np.pi/24), 30*np.sin(np.pi/24)), (30*np.cos(7*np.pi/144), 30*np.sin(7*np.pi/144)), horizontal_eqs, cartesian_variables, HIGH_WIRE_RESISTANCE),
Wire((20*np.cos(np.pi/12), 20*np.sin(np.pi/12)), (30*np.cos(np.pi/12), 30*np.sin(np.pi/12)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE),
Wire((30*np.cos(np.pi/48), 30*np.sin(np.pi/48)), (20*np.cos(np.pi/48), 20*np.sin(np.pi/48)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE),
]
wires1 = []
#On peut ensuite faire la forloop pour la partie du bas (avant la source et les résistances):
#wires2 = []
for i in range(3):
wires1.append(Wire((30*np.cos(np.pi/48+np.pi*i/144), 30*np.sin(np.pi/48+np.pi*i/144)),(30*np.cos(np.pi/48+np.pi*(i+1)/144), 30*np.sin(np.pi/48+np.pi*(i+1)/144)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE))
wires.append(Wire((20*np.cos(np.pi/48+np.pi*i/144), 20*np.sin(np.pi/48+np.pi*i/144)),(20*np.cos(np.pi/48+np.pi*(i+1)/144), 20*np.sin(np.pi/48+np.pi*(i+1)/144)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE))
#On fait la boucle pour la partie du haut après la source et les résistances
for j in range(5):
wires1.append(Wire((30*np.cos(7*np.pi/144+np.pi*j/144), 30*np.sin(7*np.pi/144+np.pi*j/144)),(30*np.cos(7*np.pi/144+np.pi*(j+1)/144), 30*np.sin(7*np.pi/144+np.pi*(j+1)/144)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE))
wires.append(Wire((20*np.cos(7*np.pi/144+np.pi*j/144), 20*np.sin(7*np.pi/144+np.pi*j/144)),(20*np.cos(7*np.pi/144+np.pi*(j+1)/144), 20*np.sin(7*np.pi/144+np.pi*(j+1)/144)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE))
#w = Wire((30*np.cos(np.pi/48+np.pi*i/144), 30*np.sin(np.pi/48+np.pi*i/144)),(30*np.cos(np.pi/48+np.pi*(i+1)/144), 30*np.sin(np.pi/48+np.pi*(i+1)/144)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE)
#print(w._start_position)
ground_position = ((20*np.cos(np.pi/24)), (20*np.sin(np.pi/24)))
count = 0
for w in wires1:
print('start: '+ str(count) +str(w._start_position))
print('__________________')
print('stop: '+ str(count) +str(w._stop_position))
count += 1
s = Wire((30*np.cos(np.pi/24), 30*np.sin(np.pi/24)), (30*np.cos(7*np.pi/144), 30*np.sin(7*np.pi/144)), horizontal_eqs, cartesian_variables, HIGH_WIRE_RESISTANCE)
print('start' + str(s._start_position))
print('stop' + str(s._stop_position))
print('_______________')
a =Wire((20*np.cos(np.pi/12), 20*np.sin(np.pi/12)), (30*np.cos(np.pi/12), 30*np.sin(np.pi/12)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE)
b = Wire((30*np.cos(np.pi/48), 30*np.sin(np.pi/48)), (20*np.cos(np.pi/48), 20*np.sin(np.pi/48)), horizontal_eqs, cartesian_variables, LOW_WIRE_RESISTANCE)
print('b start:' + str(b.start_position))
print('b stop:' + str(b.stop_position))
print('a start' + str(a.start_position))
print('a fin' + str(a.stop_position))
c = VoltageSource((20*np.cos(np.pi/24), 20*np.sin(np.pi/24)), (20*np.cos(7*np.pi/144), (20*np.sin(7*np.pi/144))), horizontal_eqs, cartesian_variables, BATTERY_VOLTAGE)
print('source start:' + str(c._start_position))
print('source stop:' + str(c.stop_position))
#circuit = Circuit(wires, ground_position)
#world = World(circuit=circuit, coordinate_system=CoordinateSystem.CARTESIAN, shape=WORLD_SHAPE)
#world.show_circuit({0: ()})
#world.compute()
#world.show_all() | AlexandreBeliveau/Devoir-electromag | examples/CircuitDCartTest.py | CircuitDCartTest.py | py | 4,989 | python | fr | code | 0 | github-code | 13 |
33255582329 | from django.core.management.base import BaseCommand
from dynamic_initial_data.base import InitialDataUpdater
class Command(BaseCommand):
    """Management command running dynamic initial-data updates."""

    help = 'Call the InitialData.update_initial_data command for all apps. Use --app to update only one app.'

    def add_arguments(self, parser):
        """Register the --verbose and --app command-line options."""
        parser.add_argument(
            '--verbose', action='store_true', dest='verbose', default=False,
            help='Determines if we should display which apps are being updated'
        )
        parser.add_argument(
            '--app', dest='app', default=None, help='Updates a single app'
        )

    def handle(self, *args, **options):
        """Update one app when --app is given, otherwise all of them."""
        updater = InitialDataUpdater(options)
        app_label = options['app']
        if app_label:
            updater.update_app(app_label)
        else:
            updater.update_all_apps()
| ambitioninc/django-dynamic-initial-data | dynamic_initial_data/management/commands/update_initial_data.py | update_initial_data.py | py | 811 | python | en | code | 12 | github-code | 13 |
32125707115 | # -*- coding: utf-8 -*-
"""Module ga4gh.drs.util.method_types.https.py
Contains the HTTPS class, a child of MethodType. HTTPS contains submethods to
download DRS object bytes according to the https url scheme.
"""
from ga4gh.drs.exceptions.drs_exceptions import DownloadSubmethodException
from ga4gh.drs.util.method_types.method_type import DownloadSubmethod
from ga4gh.drs.util.method_types.method_type import MethodType
class HTTPS(MethodType):
    """Download DRS object bytes according to https url scheme

    Attributes:
        download_submethods (list): multiple methods to attempt byte download
    """

    def __init__(self, json, drs_obj):
        """Instantiates an HTTPS object

        Arguments:
            json (dict): parsed AccessMethod JSON, used to set other attributes
            drs_obj (DRSObject): reference to parent DRSObject object
        """

        super(HTTPS, self).__init__(json, drs_obj)
        # Ordered list of strategies tried in turn by the base class; for
        # plain https there is only a single strategy.
        self.download_submethods = [
            self.__download_by_https
        ]

    @DownloadSubmethod()
    def __download_by_https(self, write_config):
        """Download submethod, get object bytes by https

        Arguments:
            write_config (dict): config to write downloaded file
        """

        url = self.access_url.get_url()
        # The parent's private helper is name-mangled, hence the explicit
        # _MethodType__ prefix to reach it from this subclass.
        self._MethodType__download_by_requests_package(url, write_config)
| ga4gh/ga4gh-drs-client | ga4gh/drs/util/method_types/https.py | https.py | py | 1,358 | python | en | code | 6 | github-code | 13 |
8082172388 | import cv2
import numpy as np
from numpy import ndarray
import torch
import torch.nn.functional as F
WINDOW_NAME: str = '16 x 9 test'
def getWebcamCapture() -> cv2.VideoCapture:
    """Open webcam 0 configured for 1920x1080 capture."""
    cap = cv2.VideoCapture(0)
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
    return cap
def get16x9(capture: cv2.VideoCapture) -> ndarray:
    """Grab one frame from the capture device and return just the image
    (the success flag is discarded)."""
    success, frame = capture.read()
    return frame
def makeWindow() -> None:
    """Create the named OpenCV preview window."""
    cv2.namedWindow(WINDOW_NAME)
def showImage(img: ndarray):
    """Display img in the shared preview window."""
    cv2.imshow(WINDOW_NAME, img)
def scaleImage(img: ndarray, img_size: tuple[int, int]) -> ndarray:
    """Resize img to img_size with bilinear interpolation on the GPU.

    The uint8 image is moved to CUDA, permuted to channels-first, resized,
    permuted back and returned as a uint8 numpy array on the CPU.  The
    leftover debug prints of intermediate shapes have been removed.

    NOTE(review): requires a CUDA device (.cuda() raises on CPU-only
    machines), and permute(2, 1, 0) also swaps the spatial axes, so
    img_size effectively applies to the transposed image — confirm intent.
    """
    chw = torch.from_numpy(img).cuda().float().permute(2, 1, 0)[None]
    resized = F.interpolate(chw, size=img_size, mode='bilinear')[0].permute(2, 1, 0)
    return resized.type(torch.uint8).cpu().numpy()
# Demo: show the raw 1080p frame, then a quarter-resolution GPU rescale.
makeWindow()
cam = getWebcamCapture()
img0 = get16x9(cam)
print(img0.shape)
showImage(img0)
# cv2.waitKey()
img1 = scaleImage(img0, (1920 // 4, 1080 // 4))
showImage(img1)
cv2.waitKey()
| Alethon/GpuInference | rect_processing/utils.py | utils.py | py | 1,104 | python | en | code | 0 | github-code | 13 |
71276692818 | from __future__ import print_function
'''Procedure'''
#1-4 N/A
#5. The integer, long, and float data types can represent six million
#6. The second one: type('tr' + 5), because you cannot concatenate a string and
#integer. Both data types have to be the same if you are concatenating or
#adding them.
#7. slogan[-7] outputs the seventh to last character in the string. It should
#output 'h'
#8. slogan[17:] will slice slogan to output 'best'
#9. slogan[:13] + 'cool' will output My school is cool
#10a. This will output 7 since there are seven characters in the string
#10b. This will output 'theate' since it starts at the beginning since the front
#bound is 0 and ends one character before the actual length of the string
#which is why the string is missing one character.
#11. This returns true since the string 'test goo' is within the string being
#checked which means that the consition has been met.
#12.
def how_eligible(essay):
    '''
    Parameters: essay -> string
    Scores the 'eligibility' of the essay: one point for each of the four
    punctuation marks ?, !, comma and double-quote that appears at least
    once in the text.
    Returns: int in 0..4
    '''
    return sum(1 for mark in ('?', '!', ',', '"') if mark in essay)
#1.3.5 Function Test
print(how_eligible('This? "Yes." No, not really!'))
print(how_eligible('Really, not a compound sentence.'))
#Reflection
#The function test outputted the expected values which were 4 and 1. Based on
#these results, I believe that I have succesfully completed this assignment. | Anshul2004/pythonPer2_2018-2019 | 1.3.5/Kashyap_1.3.5.py | Kashyap_1.3.5.py | py | 1,796 | python | en | code | 0 | github-code | 13 |
43581349086 | from kivy.app import App
from kivy.lang import Builder
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.utils import platform
# Platform gate: on Android pull real build info via pyjnius; elsewhere
# substitute stub objects so the UI still renders.
if platform == "android":
    from jnius import autoclass
    BuildVersion = autoclass('android.os.Build$VERSION')
    PythonActivity = autoclass('org.renpy.android.PythonActivity')
    activity = PythonActivity.mActivity
    SettingsSecure = autoclass('android.provider.Settings$Secure')
    ANDROID_ID = SettingsSecure.getString(activity.getContentResolver(),
                                          SettingsSecure.ANDROID_ID)
else:
    class BV():
        # Desktop stand-in mirroring android.os.Build$VERSION's fields.
        def __init__(self, codename, incremental, release, sdk, sdk_int):
            self.CODENAME = codename
            self.INCREMENTAL = incremental
            self.RELEASE = release
            self.SDK = sdk
            self.SDK_INT = sdk_int
    BuildVersion = BV("Stub", "Wat", "Maybe", "Nope", 0)
    ANDROID_ID = "Fake ID"
# Minimal kv layout: the Test widget stacks labels vertically.
kv = """
<Test>:
    orientation: 'vertical'
    Label:
        text: "Android Info"
"""
Builder.load_string(kv)
class Test(BoxLayout):
    """Vertical box listing Android build/version info as labels."""

    def __init__(self, **kwargs):
        super(Test, self).__init__(**kwargs)
        # One label per build field, in a fixed display order.
        for heading, value in (
            ("CODENAME", BuildVersion.CODENAME),
            ("INCREMENTAL", BuildVersion.INCREMENTAL),
            ("RELEASE", BuildVersion.RELEASE),
            ("SDK", BuildVersion.SDK),
            ("SDK_INT", BuildVersion.SDK_INT),
            ("ANDROID_ID", ANDROID_ID),
        ):
            self.append_widget(heading, value)

    def append_widget(self, heading, text):
        """Add a 'heading: text' label to the layout."""
        self.add_widget(Label(text="{}: {}".format(heading, text)))
class TestApp(App):
    # Kivy application entry point: the root widget is a single Test layout.
    def build(self):
        return Test()
# Run the app only when executed as a script, not on import.
if __name__ == '__main__':
    TestApp().run()
| brousch/playground | playground/andverinfo/main.py | main.py | py | 1,763 | python | en | code | 2 | github-code | 13 |
12797406030 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# automatically opens chrome and performs google search
def search_google(query):
    """Open Chrome, load Google and submit `query` in the search box.

    :param query: search terms typed into the "q" input field
    """
    # Local import: the file already depends on selenium, so no new dependency.
    from selenium.webdriver.common.by import By

    browser = webdriver.Chrome()
    browser.get("https://www.google.com")
    # find_element_by_name() was removed in Selenium 4; use the By locator API.
    search = browser.find_element(By.NAME, "q")
    search.send_keys(query)
    search.send_keys(Keys.RETURN)
    # NOTE(review): the browser is intentionally left open for the user;
    # call browser.quit() here if the window should close after searching.
7141726869 | import os
from logging import getLogger
from typing import Any, Dict, List, Optional
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from django.db import models
from django.db.models import JSONField, Q
from django.utils.translation import gettext_lazy as _
from botocore.exceptions import ClientError
from web3._utils.normalizers import normalize_abi
from web3.contract import Contract
from gnosis.eth.clients import Sourcify
from gnosis.eth.django.models import EthereumAddressField
from gnosis.eth.ethereum_client import EthereumClientProvider, EthereumNetwork
from .clients import EtherscanApi
from .clients.etherscan_api import EtherscanApiConfigurationError
logger = getLogger(__name__)
def get_file_storage():
    """Return S3 storage when AWS is configured, the local default storage otherwise."""
    if not settings.AWS_CONFIGURED:
        return default_storage
    # Imported lazily so 'django-storages' is only required when S3 is actually used.
    from storages.backends.s3boto3 import S3Boto3Storage
    return S3Boto3Storage()
def validate_abi(value: Dict[str, Any]):
    """Django field validator: reject empty or non-parseable Ethereum contract ABIs.

    :raises ValidationError: when `value` is empty or `normalize_abi` rejects it
    """
    reason = None
    if not value:
        reason = 'Empty ABI not allowed'
    else:
        try:
            normalize_abi(value)
        except ValueError as exc:
            reason = str(exc)
    if reason is not None:
        raise ValidationError(
            _('%(value)s is not a valid Ethereum Contract ABI: %(reason)s'),
            params={'value': value, 'reason': reason},
        )
class ContractAbi(models.Model):
    """
    This model holds contract ABIs. Contract ABIS don't have to be tied to a contract
    (e.g. generic ERC20/721 ABI)
    """
    abi = JSONField(validators=[validate_abi])
    description = models.CharField(max_length=200, blank=True)
    relevance = models.SmallIntegerField(default=100)  # A lower number will indicate more relevance

    def __str__(self):
        return f'ContractABI {self.relevance} - {self.description}'

    def clean(self):
        """
        Reject duplicated ABIs.

        Excludes the current row (by pk) so that saving an *existing* instance
        does not collide with itself — the previous implementation raised on
        every update because objects.get(abi=...) found the instance's own row.

        :raises ValidationError: if another row already stores the same ABI
        """
        queryset = ContractAbi.objects.filter(abi=self.abi)
        if self.pk is not None:
            queryset = queryset.exclude(pk=self.pk)
        contract_abi = queryset.first()
        if contract_abi is not None:
            raise ValidationError(_(f'Abi cannot be duplicated. Already exists: '
                                    f'{contract_abi.pk} - {contract_abi.description}'))

    def abi_functions(self) -> List[str]:
        """Return the names of all ABI entries of type 'function'."""
        return [x['name'] for x in self.abi if x['type'] == 'function']
def get_contract_logo_path(instance: 'Contract', filename):
# file will be uploaded to MEDIA_ROOT/<address>
_, extension = os.path.splitext(filename)
return f'contracts/logos/{instance.address}{extension}' # extension includes '.'
class ContractManager(models.Manager):
    def create_from_address(self, address: str, network_id: int = 1) -> Contract:
        """
        Create a Contract row for `address`, taking name/ABI from Sourcify when
        verified there, otherwise falling back to the Etherscan API (which
        yields an ABI but no name).

        :param address: contract address to look up
        :param network_id: Ethereum network id (1 = mainnet)
        :return: the created Contract; returns None when Etherscan is not
            configured for the network, and also (implicitly) when neither
            source provides anything — NOTE(review): callers must handle None
        """
        sourcify = Sourcify()
        contract_metadata = sourcify.get_contract_metadata(address, network_id=network_id)
        if contract_metadata:
            if contract_metadata.abi:
                # Reuse an existing ABI row if the same ABI was stored before,
                # refreshing its description with the verified contract name.
                contract_abi, _ = ContractAbi.objects.update_or_create(abi=contract_metadata.abi,
                                                                       defaults={
                                                                           'description': contract_metadata.name,
                                                                       })
            else:
                contract_abi = None
            return super().create(
                address=address,
                name=contract_metadata.name,
                contract_abi=contract_abi,
            )
        else:  # Fallback to etherscan API (no name for contract)
            try:
                etherscan = EtherscanApi(EthereumNetwork(network_id), api_key=settings.ETHERSCAN_API_KEY)
                abi = etherscan.get_contract_abi(address)
                if abi:
                    # Etherscan gives no description, so don't overwrite an
                    # existing ABI row — get-or-create with empty description.
                    try:
                        contract_abi = ContractAbi.objects.get(abi=abi)
                    except ContractAbi.DoesNotExist:
                        contract_abi = ContractAbi.objects.create(abi=abi, description='')
                    return super().create(
                        address=address,
                        name='',
                        contract_abi=contract_abi,
                    )
            except EtherscanApiConfigurationError:
                # Etherscan is not configured for this network; give up quietly.
                return

    def fix_missing_logos(self) -> int:
        """
        Syncs contracts with empty logos with files that exist on S3 and match the address. This usually happens
        when logos are uploaded to storage out of band — presumably; TODO confirm.
        :return: Number of synced logos
        """
        synced_logos = 0
        for contract in self.without_logo():
            # Point the FileField at the expected path, then probe the storage
            # backend (accessing .size raises if the file does not exist).
            filename = get_contract_logo_path(contract, f'{contract.address}.png')
            contract.logo.name = filename
            try:
                if contract.logo.size:
                    synced_logos += 1
                    contract.save(update_fields=['logo'])
                    logger.info('Found logo on url %s', contract.logo.url)
            except (ClientError, FileNotFoundError):  # Depending on aws or filesystem
                logger.error('Error retrieving url %s', contract.logo.url)
        return synced_logos
class ContractQuerySet(models.QuerySet):
    # Matches contracts whose logo field is NULL or the empty string.
    no_logo_query = Q(logo=None) | Q(logo='')

    def with_logo(self):
        """Contracts that have a logo file set."""
        return self.exclude(self.no_logo_query)

    def without_logo(self):
        """Contracts missing a logo file."""
        return self.filter(self.no_logo_query)
class Contract(models.Model):
    """On-chain contract with optional scraped name, display name, logo and ABI."""
    objects = ContractManager.from_queryset(ContractQuerySet)()
    address = EthereumAddressField(primary_key=True)
    name = models.CharField(max_length=200, blank=True, default='')
    display_name = models.CharField(max_length=200, blank=True, default='')
    logo = models.ImageField(blank=True, default='',
                             upload_to=get_contract_logo_path, storage=get_file_storage)
    contract_abi = models.ForeignKey(ContractAbi, on_delete=models.SET_NULL, null=True, default=None, blank=True,
                                     related_name='contracts')

    def __str__(self):
        # contract_abi_id avoids a DB query just to test FK presence
        has_abi = self.contract_abi_id is not None
        logo = ' with logo' if self.logo else ' without logo'
        return f'Contract {self.address} - {self.name} - with abi {has_abi}{logo}'

    def get_main_name(self):
        """
        :return: `display_name` if available, else use scraped `name`
        """
        return self.display_name if self.display_name else self.name

    def sync_abi_from_api(self, network: Optional[EthereumNetwork] = None) -> bool:
        """
        Sync ABI from EtherScan
        :param network: Can be provided to save requests to the node
        :return: True if updated, False otherwise
        """
        ethereum_client = EthereumClientProvider()
        network = network or ethereum_client.get_network()
        # Pass the configured API key, consistent with
        # ContractManager.create_from_address (previously omitted here).
        etherscan_api = EtherscanApi(network, api_key=settings.ETHERSCAN_API_KEY)
        abi = etherscan_api.get_contract_abi(self.address)
        if abi:
            contract_abi, _ = ContractAbi.objects.update_or_create(abi=abi)
            self.contract_abi = contract_abi
            self.save(update_fields=['contract_abi'])
            return True
        return False
| harmony-one/multisig-transaction-service | safe_transaction_service/contracts/models.py | models.py | py | 7,009 | python | en | code | 4 | github-code | 13 |
36214609232 | '''
Adaptation from https://github.com/ternaus/TernausNetV2
'''
import torch.nn as nn
class ConvRelu(nn.Module):
    """3x3 same-padding convolution followed by an in-place ReLU."""

    def __init__(self, in_: int, out: int):
        super().__init__()
        # Attribute names are kept so state_dict keys stay checkpoint-compatible.
        self.conv = nn.Conv2d(in_, out, 3, padding=1)
        self.activation = nn.ReLU(inplace=True)

    def forward(self, x):
        # Convolution and activation fused into one expression.
        return self.activation(self.conv(x))
| artyompal/kaggle_salt | code_gazay/salt/src/components/TernausNetV2/conv_relu.py | conv_relu.py | py | 389 | python | en | code | 0 | github-code | 13 |
8840846428 | # populate_product_review.py
import time
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db import transaction
from faker import Faker
from product.enum.review import RateEnum
from product.enum.review import ReviewStatusEnum
from product.models.product import Product
from product.models.review import ProductReview
faker = Faker()
User = get_user_model()
class Command(BaseCommand):
    help = "Seed ProductReview model."

    def add_arguments(self, parser):
        """Register the optional positional `total_reviews` argument."""
        parser.add_argument(
            "total_reviews",
            type=int,
            help="Indicates the number of reviews to be seeded.",
            default=1000,
            nargs="?",
        )

    def handle(self, *args, **options):
        """Create up to `total_reviews` unique (user, product) reviews, then
        attach a deterministic faker comment per configured language."""
        total_reviews = options["total_reviews"]
        total_time = 0
        start_time = time.time()

        available_languages = [
            lang["code"] for lang in settings.PARLER_LANGUAGES[settings.SITE_ID]
        ]

        if total_reviews < 1:
            self.stdout.write(
                self.style.WARNING("Total number of reviews must be greater than 0.")
            )
            return

        users = list(User.objects.all())
        products = list(Product.objects.all())

        if not users or not products:
            self.stdout.write(
                self.style.ERROR(
                    "Insufficient data. Aborting seeding ProductReview model."
                )
            )
            return

        if not available_languages:
            self.stdout.write(self.style.ERROR("No languages found."))
            return

        rate_choices = [choice[0] for choice in RateEnum.choices]
        status_choices = [choice[0] for choice in ReviewStatusEnum.choices]

        objects_to_insert = []
        # One upfront query instead of a ProductReview.exists() round-trip per
        # candidate pair, and a set for O(1) duplicate checks (was an O(n)
        # list scan). The faker call sequence is unchanged, so the generated
        # data is identical to the previous implementation.
        seen_pairs = set(ProductReview.objects.values_list("user_id", "product_id"))
        with transaction.atomic():
            for _ in range(total_reviews):
                user = faker.random_element(users)
                product = faker.random_element(products)
                rate = faker.random_element(rate_choices)
                status = faker.random_element(status_choices)

                pair = (user.pk, product.pk)
                if pair not in seen_pairs:
                    seen_pairs.add(pair)
                    objects_to_insert.append(
                        ProductReview(
                            product=product,
                            user=user,
                            rate=rate,
                            status=status,
                        )
                    )

            ProductReview.objects.bulk_create(objects_to_insert)

            for review in objects_to_insert:
                for lang in available_languages:
                    # Seed per (review, language) so comments are reproducible.
                    # NOTE(review): review.id after bulk_create is only
                    # populated on backends that return ids (e.g. PostgreSQL).
                    lang_seed = hash(f"{review.id}{lang}")
                    faker.seed_instance(lang_seed)
                    comment = faker.text(max_nb_chars=250)
                    review.set_current_language(lang)
                    review.comment = comment
                    review.save()

        end_time = time.time()
        execution_time = end_time - start_time
        total_time += execution_time

        self.stdout.write(
            self.style.SUCCESS(
                f"{len(objects_to_insert)} ProductReview instances created successfully "
                f"in {execution_time:.2f} seconds."
            )
        )
| vasilistotskas/grooveshop-django-api | core/management/commands/populate_product_review.py | populate_product_review.py | py | 3,600 | python | en | code | 4 | github-code | 13 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.