seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
3947171528 | from django.http import HttpResponse
# from django.conf import settings
from api.nfl.NflDataScraper import NflDataScraper
from api.nfl.NflScoreBoard import NflScoreBoard
from api.nfl.NflTeams import NflTeams
from api.nfl.NflSchedule import NflSchedule
from api.nfl.NflStanding import NflStanding
from api.nfl.BeautifulTeam import BeautifulTeam
import urllib.parse
import time
import os
import sys
import json
import api
path = os.path.dirname(__file__)
pth = os.path.dirname(api.__file__)
data_path = "/home/shapjvcv/public_html/nflproapi/api/nfl/"
def index(request):
    """Debug endpoint: store the current time in the session and echo the
    requested team's NFL profile URL plus a few timestamps.

    Query params:
        team: NFL team abbreviation (default 'NE').
    """
    q = request.GET.get('team', 'NE')
    uj = 'http://www.nfl.com/teams/profile?team=' + q
    # Remember when this view was last hit for this session.
    request.session['p_time'] = time.time()
    # The key was just written above, so it is always present; read it back
    # directly (the original re-checked membership and built an unused path).
    p_time2 = request.session['p_time']
    print(p_time2)
    new2 = time.time()
    new3 = time.time() + 30
    return HttpResponse(uj + " |||| " + str(p_time2) + " Now" + str(new2) + "Now 3 " + str(new3))
def point_table(request):
    """Return the NFL standings table as a JSON HTTP response.

    Re-scrapes at most once every `thread` seconds per session; otherwise
    serves the cached JSON file written by the scraper on the last refresh.
    """
    pt_time = 0
    thread = 30  # minimum seconds between live scrapes for this session
    now = time.time()
    if 'pt_time' in request.session:
        pt_time = request.session['pt_time']
    if now > pt_time:
        # Cache expired: scrape fresh data and push the deadline forward.
        # (The original assigned a throwaway 'less 30' marker here first.)
        nfl = NflDataScraper()
        point = nfl.point_table('http://www.espn.in/nfl/standings', 'glossary__title')
        request.session['pt_time'] = time.time() + thread
    else:
        # Serve the JSON file the scraper wrote on the last refresh.
        json2 = os.path.dirname(api.__file__) + "/nfl/data/point-table.json"
        with open(json2) as f:
            data = json.load(f)
        point = json.dumps(data, sort_keys=True, indent=4)
    return HttpResponse(point, content_type="application/json")
def schedule(request):
    """Return the NFL schedule as a JSON HTTP response.

    Re-scrapes at most once every `thread` seconds per session; otherwise
    serves the cached JSON file written by the scraper on the last refresh.
    """
    schedule_time = 0
    thread = 30  # minimum seconds between live scrapes for this session
    now = time.time()
    if 'schedule_time' in request.session:
        schedule_time = request.session['schedule_time']
    if now > schedule_time:
        # Cache expired: scrape fresh data and push the deadline forward.
        score_obj = NflSchedule()
        urlok = "https://www.nfl.com/standings/division/2018/REG"
        point = score_obj.schedule(urlok, 'score-strip-game')
        request.session['schedule_time'] = time.time() + thread
    else:
        # Serve the JSON file the scraper wrote on the last refresh.
        json2 = os.path.dirname(api.__file__) + "/nfl/data/schedule.json"
        with open(json2) as f:
            data = json.load(f)
        point = json.dumps(data, sort_keys=True, indent=4)
    return HttpResponse(point, content_type="application/json")
def teams(request):
    """Return one team's scraped profile as a JSON HTTP response.

    Query params:
        team: NFL team abbreviation (default 'NE').

    Re-scrapes at most once every `thread` seconds per session; otherwise
    serves the cached per-team JSON file written on the last refresh.
    """
    q = request.GET.get('team', 'NE')
    url = 'http://www.nfl.com/teams/profile?team=' + q
    pt_team = 0
    thread = 30  # minimum seconds between live scrapes for this session
    now = time.time()
    if 'pt_team' in request.session:
        pt_team = request.session['pt_team']
    if now > pt_team:
        # Cache expired: scrape the team page and push the deadline forward.
        team = BeautifulTeam()
        point = team.run(url)
        request.session['pt_team'] = time.time() + thread
    else:
        # Serve the cached per-team JSON written on the last refresh.
        json2 = os.path.dirname(api.__file__) + "/nfl/data/" + q + ".json"
        with open(json2) as f:
            data = json.load(f)
        point = json.dumps(data, sort_keys=True, indent=4)
    return HttpResponse(point, content_type="application/json")
def score_board(request):
    """Scrape the live NFL score board and return it as an HTTP response."""
    scraper = NflScoreBoard()
    board = scraper.score_board('https://www.nfl.com/standings/division/2018/REG', 'score-strip-game')
    return HttpResponse(board)
def standing(request):
    """Scrape the current NFL standings and return them as an HTTP response."""
    scraper = NflStanding()
    table = scraper.standing('https://www.nfl.com/standings/division/2018/REG', 'td')
    return HttpResponse(table)
| shaponpal6/Python-Django-Selenium-web-scraper | views.py | views.py | py | 3,647 | python | en | code | 1 | github-code | 13 |
33809383581 |
import torch
import copy
import numpy as np
from .base import ParallelCollector
import torch.multiprocessing as mp
import torchrl.policies as policies
class SingleTaskParallelCollectorBase(ParallelCollector):
    """Parallel data collector for a single task.

    Starts training worker processes (using the parent class'
    train_worker_process) and evaluation worker processes that roll out
    episodes with a local copy of the shared policy, synchronizing rounds
    through multiprocessing barriers and returning results via queues.
    """

    def __init__(self,
            reset_idx = False,
            **kwargs):
        # reset_idx: if True, evaluation resets the env with an explicit
        # episode index (env.reset_with_index) instead of a plain reset().
        self.reset_idx = reset_idx
        super().__init__(**kwargs)

    @staticmethod
    def eval_worker_process(shared_pf,
            env_info, shared_que, start_barrier, terminate_mark, reset_idx):
        """Evaluation loop executed in a child process.

        Each round (gated by start_barrier) copies the shared policy's
        current weights, runs env_info.eval_episodes episodes, and puts the
        per-episode rewards and the success rate on shared_que. Exits when
        terminate_mark is set to 1.
        """
        # Deep-copy once so the local policy has its own parameter storage.
        pf = copy.deepcopy(shared_pf)
        # Multi-head policies expect a task index when queried.
        idx_flag = isinstance(pf, policies.MultiHeadGuassianContPolicy)
        env_info.env.eval()
        env_info.env._reward_scale = 1
        while True:
            start_barrier.wait()
            if terminate_mark.value == 1:
                break
            # Sync local weights with the latest shared policy snapshot.
            pf.load_state_dict(shared_pf.state_dict())
            eval_rews = []
            done = False
            success = 0
            for idx in range(env_info.eval_episodes):
                if reset_idx:
                    eval_ob = env_info.env.reset_with_index(idx)
                else:
                    eval_ob = env_info.env.reset()
                rew = 0
                current_success = 0
                while not done:
                    # act = pf.eval( torch.Tensor( eval_ob ).to(env_info.device).unsqueeze(0))
                    if idx_flag:
                        # NOTE(review): task_idx is not defined anywhere in
                        # this scope -- this branch raises NameError when a
                        # multi-head policy is used; confirm the intended
                        # index (env rank?).
                        act = pf.eval( torch.Tensor( eval_ob ).to(env_info.device).unsqueeze(0), [task_idx] )
                    else:
                        act = pf.eval( torch.Tensor( eval_ob ).to(env_info.device).unsqueeze(0))
                    eval_ob, r, done, info = env_info.env.step( act )
                    rew += r
                    if env_info.eval_render:
                        env_info.env.render()
                    # An episode counts as a success if any step reported one.
                    current_success = max(current_success, info["success"])
                eval_rews.append(rew)
                done = False
                success += current_success
            shared_que.put({
                'eval_rewards': eval_rews,
                'success_rate': success / env_info.eval_episodes
            })

    def start_worker(self):
        """Spawn the training and evaluation worker processes."""
        self.workers = []
        self.shared_que = self.manager.Queue()
        # +1 so the main process also participates in each barrier round.
        self.start_barrier = mp.Barrier(self.worker_nums+1)
        self.terminate_mark = mp.Value( 'i', 0 )
        self.eval_workers = []
        self.eval_shared_que = self.manager.Queue()
        self.eval_start_barrier = mp.Barrier(self.eval_worker_nums+1)
        for i in range(self.worker_nums):
            self.env_info.env_rank = i
            p = mp.Process(
                target=self.__class__.train_worker_process,
                args=( self.__class__, self.funcs,
                    self.env_info, self.replay_buffer,
                    self.shared_que, self.start_barrier,
                    self.terminate_mark))
            p.start()
            self.workers.append(p)
        for i in range(self.eval_worker_nums):
            eval_p = mp.Process(
                target=self.__class__.eval_worker_process,
                args=(self.pf,
                    self.env_info, self.eval_shared_que, self.eval_start_barrier,
                    self.terminate_mark, self.reset_idx))
            eval_p.start()
            self.eval_workers.append(eval_p)

    def eval_one_epoch(self):
        """Trigger one evaluation round and aggregate all workers' results.

        Returns:
            dict: 'eval_rewards' (concatenated episode returns) and
            'mean_success_rate' (success rate averaged over eval workers).
        """
        self.eval_start_barrier.wait()
        eval_rews = []
        mean_success_rate = 0
        for _ in range(self.eval_worker_nums):
            worker_rst = self.eval_shared_que.get()
            eval_rews += worker_rst["eval_rewards"]
            mean_success_rate += worker_rst["success_rate"]
        return {
            'eval_rewards':eval_rews,
            'mean_success_rate': mean_success_rate / self.eval_worker_nums
        }
| RchalYang/Soft-Module | torchrl/collector/para/mt.py | mt.py | py | 3,822 | python | en | code | 90 | github-code | 13 |
16820486640 | from django import forms, template
register = template.Library()
@register.filter
def cvtmem(mem):
    """Format a memory amount given in KB as a human-readable string.

    Returns '' unchanged for the empty string; otherwise scales the value
    through KB/MB/GB and renders it with two decimals, e.g. 2048 -> '2.00MB'.

    Args:
        mem: memory in kilobytes, as an int or numeric string ('' allowed).

    Returns:
        str: formatted value with unit suffix.
    """
    if mem == '':
        return ''
    mem = int(mem)
    units = ['KB', 'MB', 'GB']
    unit_index = 0
    # Clamp at the largest known unit: the original kept dividing and then
    # indexed past the end of `units` (IndexError) for values >= 1024**4 KB.
    while mem > 1024 and unit_index < len(units) - 1:
        unit_index += 1
        mem /= 1024
    return '%.2f%s' % (mem, units[unit_index])
| ryanrain2016/FreeEye | FreeEye/HostManage/templatetags/cvtmem.py | cvtmem.py | py | 301 | python | en | code | 5 | github-code | 13 |
17574448107 | # from cgi import print_directory
# a=int(input("Enter number of rows you wanna print the star pattern"))
# b=bool(int(input("Enter 1 for normal pattern of star pattern \n Enter 0 for inverted star pattern ")))
# if b==True:
# for i in range(1,a+1,2):
# print(i*"*")
# if i==a:
# print("You result is as above and our program is leaving now")
# elif b==False:
# while a!=0:
# print(a*"*")
# a=a-1
# if a==0:
# print("You result is as above and our program is leaving now")
# Prompt for the pattern size and orientation, then print a right-angled
# triangle of '*' characters.
a = int(input(('Enter number of rows of stars you want')))
b = bool(int(input("Enter 1 for normal patern \t and 0 for reversed pattern")))
if b == True:
    # Normal: 1 star up to `a` stars.
    for i in range(1, a + 1):
        print(i * '*')
else:
    # Reversed: `a` stars down to 1 star, the mirror of the normal pattern.
    # Bug fix: the original iterated range(a+1, 1, -1), printing a+1..2 stars
    # instead of a..1 (the commented-out predecessor above printed a..1).
    for i in range(a, 0, -1):
        print(i * "*")
| Raunakjha00/Python | ununderstandable2.py | ununderstandable2.py | py | 902 | python | en | code | 0 | github-code | 13 |
71842979859 | # -*- coding: utf-8 -*-
"""
This file manages the post-processing and plotting of the graph
@author: Jan Straub
"""
# Imports
from networkx import Graph
from networkx import get_node_attributes, shortest_path, draw, draw_networkx_nodes, draw_networkx_labels
from scipy.interpolate import BPoly, splprep, splev
from numpy import asarray, linspace, arange
import matplotlib.pyplot as plt
from post_processing import remove_unused_grid_nodes, remove_nodes_within_radius, fermat_torricelli_point_calculation
from helper import find_path_nodes, find_node_by_id, calculate_edge_control_points, find_edge_between_nodes, calculate_distance_between_positions
def bezier_curve_calculation(networkxGraph, environmentNodeList,
        environmentTerminalNodeList, environmentEdgeList,
        pathList, smoothing):
    """Compute one sampled Bezier curve per terminal-to-terminal path.

    For each (start, end) pair in pathList the shortest path through
    networkxGraph is found; its node positions (or, with smoothing > 0, the
    precomputed per-edge control points) become the Bezier control points,
    and the resulting curve is sampled at 100 points. Also prints the summed
    polyline length of all curves.

    Args:
        networkxGraph (networkx.Graph): Graph built from the environment.
        environmentNodeList (list): All node objects.
        environmentTerminalNodeList (list): Terminal node objects.
        environmentEdgeList (list): Edge objects in the network.
        pathList (list): (start, end) position pairs to connect.
        smoothing (int): > 0 routes the curve through edge control points.

    Returns:
        list: One transposed (2, 100) array of curve samples per path.
    """
    bezierPointList = []
    originalDistance = 0
    for path in pathList:
        startPoint, endPoint = path
        finishedPathList, calculatedPathList = [], []
        startNode, endNode = None, None
        # NOTE(review): `[startPoint, endPoint] or ...` is a non-empty list
        # and therefore always truthy, so this condition is always True and
        # finishedPathList (re-created each iteration) never deduplicates
        # anything -- confirm the intended membership test.
        if [startPoint, endPoint] or [endPoint, startPoint] not in finishedPathList:
            finishedPathList.append(path)
            startNode, endNode = find_path_nodes(startPoint, endPoint,
                environmentTerminalNodeList)
            calculatedPathList = shortest_path(networkxGraph,
                startNode.nodeObjectId,
                endNode.nodeObjectId)
        controlPoints = []
        # Drop the start node id; its position is appended explicitly below.
        calculatedPathList.pop(0)
        controlPoints.append(startNode.position)
        if smoothing > 0:
            # Route through each traversed edge's precomputed control points.
            lastNode = startNode
            for pathNode in calculatedPathList:
                nextNode = find_node_by_id(environmentNodeList, pathNode)
                for point in find_edge_between_nodes(environmentEdgeList,
                        lastNode, nextNode).edgeControlPointsList:
                    controlPoints.append(point)
                lastNode = nextNode
            controlPoints.append(endNode.position)
        else:
            # No smoothing: control points are the node positions themselves.
            for pathNode in calculatedPathList:
                controlPoints.append(find_node_by_id(environmentNodeList,
                    pathNode).position)
        npControlPoints = asarray(controlPoints)
        # Bernstein-polynomial (Bezier) curve over [0, 1], sampled 100 times.
        curve = BPoly(npControlPoints[:, None, :], [0, 1])
        curvePoints = curve(linspace(0, 1, 100))
        bezierPointList.append(curvePoints.T)
        # Accumulate the polyline length of the sampled curve.
        for i in range(len(curvePoints) - 1):
            originalDistance += calculate_distance_between_positions(curvePoints[i], curvePoints[i + 1])
    print(f"bundles distance: {originalDistance}")
    return bezierPointList
def cubic_spline_calculation(networkxGraph, environmentNodeList,
        environmentTerminalNodeList, environmentEdgeList,
        pathList, smoothing):
    """Compute one interpolating B-spline per terminal-to-terminal path.

    For each (start, end) pair in pathList the shortest path through
    networkxGraph is found; the per-edge control points along it are fitted
    with scipy's splprep/splev and sampled at 101 parameter values.

    Args:
        networkxGraph (networkx.Graph): Graph built from the environment.
        environmentNodeList (list): All node objects.
        environmentTerminalNodeList (list): Terminal node objects.
        environmentEdgeList (list): Edge objects in the network.
        pathList (list): (start, end) position pairs to connect.
        smoothing (int): Must be > 0 so edge control points exist.

    Returns:
        list: One splev() result (x-samples, y-samples) per path.

    Raises:
        ValueError: If smoothing is not at least 1.
    """
    cubicSplineList = []
    for path in pathList:
        startPoint, endPoint = path
        finishedPathList, calculatedPathList = [], []
        startNode, endNode = None, None
        # NOTE(review): `[startPoint, endPoint] or ...` is always truthy, so
        # this condition is always True (same issue as in
        # bezier_curve_calculation) -- confirm the intended membership test.
        if [startPoint, endPoint] or [endPoint, startPoint] not in finishedPathList:
            finishedPathList.append(path)
            startNode, endNode = find_path_nodes(startPoint, endPoint,
                environmentTerminalNodeList)
            calculatedPathList = shortest_path(networkxGraph,
                startNode.nodeObjectId,
                endNode.nodeObjectId)
        controlPoints = []
        # Drop the start node id; its position is appended explicitly below.
        calculatedPathList.pop(0)
        controlPoints.append(startNode.position)
        if smoothing > 0:
            # Route through each traversed edge's precomputed control points.
            lastNode = startNode
            for pathNode in calculatedPathList:
                nextNode = find_node_by_id(environmentNodeList, pathNode)
                for point in find_edge_between_nodes(environmentEdgeList,
                        lastNode, nextNode).edgeControlPointsList:
                    controlPoints.append(point)
                lastNode = nextNode
            controlPoints.append(endNode.position)
            npControlPoints = asarray(controlPoints)
            # Fit an interpolating spline (s=0) and sample it densely.
            tck = splprep(npControlPoints.transpose(), s = 0)[0]
            unew = arange(0, 1.01, 0.01)
            cubicSplineList.append(splev(unew, tck))
        else:
            raise ValueError ("Cubic spline plot must have SMOOTHING factor of at least 1")
    return cubicSplineList
def plot_graph(path, jsonFileName, outerIteration, innerIteration,
        savedNetwork, pathList, smoothing,
        postProcessingSelection):
    """Post-process the best network and save its plot as PNG and PDF.

    Runs the post-processing pipeline (prune unused grid nodes, compute
    Fermat-Torricelli points, merge nearby nodes), builds a networkx graph
    of the result, and renders it with the selected style.

    Args:
        path (str): Output directory containing a 'plots' subfolder.
        jsonFileName (str): Base name used in the saved file names.
        outerIteration (int): Outer iteration count (file name suffix).
        innerIteration (int): Inner iteration count (file name suffix).
        savedNetwork (object): Network with the lowest cost.
        pathList (list): Original (start, end) path pairs.
        smoothing (int): Smoothing parameter for curve bundling.
        postProcessingSelection (int): 0 = straight Steiner edges,
            1 = Bezier curves, 2 = cubic splines.

    Raises:
        ValueError: If postProcessingSelection is not 0, 1 or 2.
    """
    environmentNodeList = savedNetwork.environmentNodeList
    environmentTerminalNodeList = savedNetwork.environmentTerminalNodeList
    environmentEdgeList = savedNetwork.environmentEdgeList
    removeEdge = savedNetwork.remove_edge
    createSteinerEdge = savedNetwork.create_steiner_edge
    # Post-processing pipeline: prune, relocate Steiner points, merge.
    remove_unused_grid_nodes(environmentNodeList,
        environmentTerminalNodeList,
        removeEdge, createSteinerEdge,
        environmentEdgeList)
    fermat_torricelli_point_calculation(environmentNodeList)
    remove_nodes_within_radius(environmentNodeList, removeEdge,
        createSteinerEdge)
    networkxGraph, sizeValues, nodeLabels, colorValues = Graph(), [], {}, []
    for node in environmentNodeList:
        x, y = node.position
        networkxGraph.add_node(node.nodeObjectId, pos = (x, y))
        if node in environmentTerminalNodeList:
            # Terminals: visible black markers (labels left empty).
            nodeLabels[node.nodeObjectId] = ""
            sizeValues.append(10)
            colorValues.append("black")
        else:
            # Steiner/grid nodes: size 0, i.e. effectively hidden.
            nodeLabels[node.nodeObjectId] = ""
            sizeValues.append(0)
            colorValues.append("green")
    if postProcessingSelection == 0:
        plot = plot_steiner_graph(environmentEdgeList, networkxGraph,
            sizeValues, nodeLabels, colorValues)
    elif postProcessingSelection == 1:
        plot = plot_bezier_graph(environmentEdgeList,
            environmentNodeList,
            environmentTerminalNodeList,
            networkxGraph,
            pathList, smoothing, sizeValues,
            colorValues)
    elif postProcessingSelection == 2:
        plot = plot_cubic_spline_graph(environmentEdgeList,
            environmentNodeList, environmentTerminalNodeList,
            networkxGraph,
            pathList, smoothing, sizeValues,
            colorValues)
    else:
        raise ValueError(f"postProcessingSelection value {postProcessingSelection} is not defined")
    plot.savefig(path + f"/plots/{jsonFileName}_{outerIteration}-{innerIteration}.png", dpi = 300)
    plot.savefig(path + f"/plots/{jsonFileName}_{outerIteration}-{innerIteration}.pdf")
    plot.clf()
def plot_steiner_graph(environmentEdgeList, networkxGraph,
        sizeValues, nodeLabels, colorValues):
    """Draw the post-processed Steiner graph with straight edges.

    Args:
        environmentEdgeList (list): All edge objects of the network.
        networkxGraph (networkx.Graph): Graph pre-populated with nodes.
        sizeValues (list): Per-node marker sizes.
        nodeLabels (dict): Node id -> label text.
        colorValues (list): Per-node colors.

    Returns:
        module: matplotlib.pyplot holding the drawn figure.
    """
    for edge in environmentEdgeList:
        networkxGraph.add_edge(edge.start.nodeObjectId, edge.end.nodeObjectId)
    # Bug fix: the original called plt.figure() twice, leaking an empty
    # throwaway figure on every invocation.
    fig = plt.figure(figsize = (10, 10))
    fig.patch.set_visible(False)
    pos = get_node_attributes(networkxGraph, 'pos')
    draw(networkxGraph, pos, node_size = sizeValues, node_color = colorValues)
    draw_networkx_nodes(networkxGraph, pos, node_size = sizeValues)
    draw_networkx_labels(networkxGraph, pos, nodeLabels)
    return plt
def plot_bezier_graph(environmentEdgeList,
        environmentNodeList,
        environmentTerminalNodeList,
        networkxGraph,
        pathList, smoothing, sizeValues,
        colorValues):
    """Draw the network with Bezier-curve edge bundling.

    Args:
        environmentEdgeList (list): All edge objects of the network.
        environmentNodeList (list): All node objects.
        environmentTerminalNodeList (list): Terminal node objects.
        networkxGraph (networkx.Graph): Graph pre-populated with nodes.
        pathList (list): Original (start, end) path pairs.
        smoothing (int): Smoothing parameter for curve bundling.
        sizeValues (list): Per-node marker sizes.
        colorValues (list): Per-node colors.

    Returns:
        module: matplotlib.pyplot holding the drawn figure.
    """
    # Precompute per-edge control points used by the curve calculation.
    calculate_edge_control_points(environmentEdgeList, smoothing)
    for edge in environmentEdgeList:
        networkxGraph.add_edge(edge.start.nodeObjectId, edge.end.nodeObjectId)
    bezierPointList = bezier_curve_calculation(networkxGraph,
        environmentNodeList,
        environmentTerminalNodeList,
        environmentEdgeList,
        pathList, smoothing)
    # Bug fix: the original called plt.figure() twice, leaking an empty
    # throwaway figure on every invocation.
    fig = plt.figure(figsize = (10, 10))
    fig.patch.set_visible(False)
    for bezierCurve in bezierPointList:
        plt.plot(*bezierCurve, color = "black", linewidth = 0.5)
    pos = get_node_attributes(networkxGraph, 'pos')
    draw_networkx_nodes(networkxGraph, pos, node_size = sizeValues,
        node_color = colorValues)
    return plt
def plot_cubic_spline_graph(environmentEdgeList, environmentNodeList,
        environmentTerminalNodeList, networkxGraph,
        pathList, smoothing, sizeValues, colorValues):
    """Draw the network with cubic-spline edge bundling.

    Args:
        environmentEdgeList (list): All edge objects of the network.
        environmentNodeList (list): All node objects.
        environmentTerminalNodeList (list): Terminal node objects.
        networkxGraph (networkx.Graph): Graph pre-populated with nodes.
        pathList (list): Original (start, end) path pairs.
        smoothing (int): Smoothing parameter (must be >= 1 for splines).
        sizeValues (list): Per-node marker sizes.
        colorValues (list): Per-node colors.

    Returns:
        module: matplotlib.pyplot holding the drawn figure.
    """
    # Precompute per-edge control points used by the spline calculation.
    calculate_edge_control_points(environmentEdgeList, smoothing)
    for edge in environmentEdgeList:
        networkxGraph.add_edge(edge.start.nodeObjectId, edge.end.nodeObjectId)
    cubicSplineList = cubic_spline_calculation(networkxGraph,
        environmentNodeList,
        environmentTerminalNodeList,
        environmentEdgeList,
        pathList, smoothing)
    # Bug fix: the original called plt.figure() twice, leaking an empty
    # throwaway figure on every invocation.
    fig = plt.figure(figsize = (10, 10))
    fig.patch.set_visible(False)
    for cubicSpline in cubicSplineList:
        plt.plot(cubicSpline[0], cubicSpline[1], color = "black")
    pos = get_node_attributes(networkxGraph, 'pos')
    draw_networkx_nodes(networkxGraph, pos, node_size = sizeValues,
        node_color = colorValues)
    return plt
def plot_original_graph(path, jsonFileName, nodeList, pathList):
    """Plot the unprocessed input graph and save it as PNG and PDF.

    Args:
        path (str): Output directory containing a 'plots' subfolder.
        jsonFileName (str): Base name used in the saved file names.
        nodeList (list): (x, y) node positions.
        pathList (list): (start_index, end_index) edge pairs.
    """
    networkxGraph, sizeValues, nodeLabels = Graph(), [], {}
    for index, node in enumerate(nodeList):
        x, y = node
        networkxGraph.add_node(index, pos = (x, y))
        nodeLabels[index] = index
        sizeValues.append(200)
    for edge in pathList:
        networkxGraph.add_edge(edge[0], edge[1])
    # Bug fix: the original called plt.figure() twice, leaking an empty
    # throwaway figure on every invocation.
    fig = plt.figure(figsize = (10, 10))
    pos = get_node_attributes(networkxGraph, 'pos')
    draw(networkxGraph, pos, node_size = sizeValues)
    draw_networkx_nodes(networkxGraph, pos, node_size = sizeValues)
    draw_networkx_labels(networkxGraph, pos, nodeLabels)
    plt.savefig(path + f"/plots/original_{jsonFileName}.png", dpi = 300)
    plt.savefig(path + f"/plots/original_{jsonFileName}.pdf")
    plt.clf()
| JanMStraub/Local-Iterative-Optimization-for-Graph-Bundling | code/output.py | output.py | py | 14,075 | python | en | code | 1 | github-code | 13 |
3298959736 | import wx
import cv2
import os
import numpy as np
from contour import *
from PIL import Image
from model.dbconnect import *
from props.InputProp import *
from props.FormField import *
class EditForm(wx.Frame):
    """Window for reviewing and editing a single stored banana record.

    Loads the record's front/back/side photos, re-runs the HSV color filter
    and Haar-cascade detection on them, derives measurements (width, height,
    length, weight, quality percent, shelf life, status), and shows them in
    an editable form that can be written back to the database.
    """

    def __init__(self, parent, title, id = 0):
        # id: primary key of the record to edit (shadows the builtin `id`).
        wx.Frame.__init__(self, parent, title=title, size=(1000, 700), style = wx.SYSTEM_MENU | wx.CLOSE_BOX | wx.CAPTION )
        self._db = DB()
        self.parent = parent
        # Fetch the stored record; indices 5/6/7 are the image file names.
        get_user = self._db.get_id(id)
        # homedir = os.path.expanduser('~')
        dir_path = os.path.dirname(os.path.realpath(__file__))
        img_width = 250
        img_height = 150
        self.finger_weight = 2000
        self.total_weight = 0
        # Default status; filteredColor() flips it to "Approved" if quality > 50%.
        self.status = "Disapproved"
        panel = wx.Panel(self)
        panel.SetBackgroundColour("STEEL BLUE")
        self.imagepanel = wx.Panel(panel, size=(500, 620))
        self.imagepanel.SetBackgroundColour("WHITE")
        form_panel = wx.Panel(panel, size=(500, 500))
        form_panel.SetBackgroundColour("STEEL BLUE")
        sizer = wx.GridBagSizer(10,10)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        vbox = wx.BoxSizer(wx.VERTICAL)
        # header = wx.Panel(panel, size=(1000, 100), style=wx.RAISED_BORDER)
        # header.SetBackgroundColour("MEDIUM SLATE BLUE")
        # font = wx.Font(30, wx.ROMAN, wx.ITALIC, wx.FONTWEIGHT_BOLD)
        # _vbox = wx.BoxSizer(wx.VERTICAL)
        # self.header_title = wx.StaticText(header, 1, "Banana Data System", style = wx.ALIGN_CENTER, size=(1000, 50))
        # self.header_title.SetFont(font)
        # _vbox.Add(self.header_title, 0, wx.ALIGN_CENTER_VERTICAL, 1)
        # font = wx.Font(20, wx.ROMAN, wx.ITALIC, wx.FONTWEIGHT_BOLD)
        # objname = wx.StaticText(header, 1, "Banana 1", style = wx.ALIGN_CENTER, size=(1000, 30))
        # objname.SetFont(font)
        # _vbox.Add(objname, 1, wx.ALIGN_CENTER_VERTICAL, 1)
        # header.SetSizer(_vbox)
        # vbox.Add(header, 0, wx.ALIGN_CENTER_VERTICAL, 1)
        _hbox = wx.BoxSizer(wx.HORIZONTAL)
        _vbox = wx.BoxSizer(wx.VERTICAL)
        field = FormField()
        menubar = field.setMenuBar()
        field.setPanel(self.imagepanel)
        font = wx.Font(14, wx.ROMAN, wx.ITALIC, wx.FONTWEIGHT_BOLD)
        # Left column: labeled filtered/detected image pairs per view.
        label = field.setLabel("Front")
        _vbox.Add(label, 0, wx.ALIGN_CENTER_HORIZONTAL, 1)
        front_box = self.getImage(str(get_user[5]), img_width, img_height, "\\..\\images\\front\\", "front")
        _vbox.Add(front_box, 0, wx.ALIGN_TOP, 1)
        label = field.setLabel("Back")
        _vbox.Add(label, 0, wx.ALIGN_CENTER_HORIZONTAL, 1)
        back_box = self.getImage(str(get_user[6]), img_width, img_height, "\\..\\images\\back\\", "back")
        _vbox.Add(back_box, 0, wx.ALIGN_TOP, 1)
        label = field.setLabel("Side")
        _vbox.Add(label, 0, wx.ALIGN_CENTER_HORIZONTAL, 1)
        side_box = self.getImage(str(get_user[7]), img_width, img_height, "\\..\\images\\side\\", "side")
        _vbox.Add(side_box, 0, wx.ALIGN_TOP, 1)
        hbox.Add(_vbox, 0, wx.ALIGN_TOP, 1)
        # Right column: the editable measurement form.
        field = FormField()
        field.setPanel(form_panel)
        field.setFont(14)
        label_font = wx.Font(14, wx.ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
        label = field.setLabel("Width (CM)")
        sizer.Add(label, pos = (0, 0), flag = wx.ALL, border = 10)
        input_font = wx.Font(20, wx.ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        self.input_width = field.setTextField("")
        sizer.Add(self.input_width, pos = (0, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Height(CM)")
        sizer.Add(label, pos = (1, 0), flag = wx.ALL, border = 10)
        self.input_height = field.setTextField("")
        sizer.Add(self.input_height, pos = (1, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Length(CM)")
        sizer.Add(label, pos = (2, 0), flag = wx.ALL, border = 10)
        self.input_length = field.setTextField("")
        sizer.Add(self.input_length, pos = (2, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Weight(GRAMS)")
        sizer.Add(label, pos = (3, 0), flag = wx.ALL, border = 10)
        self.input_weight = field.setTextField("")
        sizer.Add(self.input_weight, pos = (3, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Quality")
        sizer.Add(label, pos = (4, 0), flag = wx.ALL, border = 10)
        self.input_pix = field.setTextField("")
        sizer.Add(self.input_pix, pos = (4, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Shelf Life")
        sizer.Add(label, pos = (5, 0), flag = wx.ALL, border = 10)
        self.input_life = field.setTextField("")
        sizer.Add(self.input_life, pos = (5, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        label = field.setLabel("Status")
        sizer.Add(label, pos = (6, 0), flag = wx.ALL, border = 10)
        self.input_stat = field.setTextField("")
        sizer.Add(self.input_stat, pos = (6, 1), span = (1, 2), flag = wx.EXPAND|wx.ALL, border = 10)
        self.submit_btn = wx.Button(form_panel, label='Update', size=(400, 30))
        sizer.Add(self.submit_btn, pos = (7, 0), span = (2, 2), flag = wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, border = 10)
        form_panel.SetSizer(sizer)
        hbox.Add(form_panel, 1, wx.ALIGN_LEFT, border = 10)
        vbox.Add(hbox, 0, wx.ALIGN_TOP, 1)
        # Bottom row: navigation buttons.
        btn_panel = wx.Panel(panel, size=(1000, 200))
        btn_panel.SetBackgroundColour("STEEL BLUE")
        btn_hbox = wx.BoxSizer(wx.HORIZONTAL)
        back_record = wx.Button(btn_panel, label='Back to records', size=(150, 50))
        btn_hbox.Add(back_record, 0, wx.SHAPED, border = 1)
        back_eval = wx.Button(btn_panel, label='Back to Evaluate', size=(150, 50))
        btn_hbox.Add(back_eval, 0, wx.SHAPED, border = 1)
        view_sq = wx.Button(btn_panel, label='View S.Q.', size=(150, 50))
        btn_hbox.Add(view_sq, 0, wx.SHAPED, border = 1)
        btn_cancel = wx.Button(btn_panel, label='Cancel', size=(150, 50))
        btn_hbox.Add(btn_cancel, 0, wx.SHAPED, border = 1)
        btn_panel.SetSizer(btn_hbox)
        vbox.AddSpacer(30)
        vbox.Add(btn_panel, 1, wx.ALIGN_TOP, 1)
        panel.SetSizer(vbox)
        self.SetMenuBar(menubar)
        # Wire up button handlers.
        self.Bind(wx.EVT_BUTTON, lambda event: self.insertData(event, id), self.submit_btn)
        self.Bind(wx.EVT_BUTTON, lambda event: self.goToEvaluate(event, self), back_eval)
        self.Bind(wx.EVT_BUTTON, lambda event: self.goToRecord(event, self), back_record)
        self.Bind(wx.EVT_BUTTON, lambda event: self.goToMenu(event, self), btn_cancel)
        self.Bind(wx.EVT_BUTTON, lambda event: self.showQualityModal(event), view_sq)
        self.Bind(wx.EVT_MENU, lambda event: field.menuHandler(event, self))
        self.Bind(wx.EVT_CLOSE, self.onClose)
        self.fieldSetValues()
        self.Centre()
        self.Show(True)

    def insertData(self, event, item_id):
        """Persist the currently computed measurements for record *item_id*."""
        self.db = DB()
        prop = InputProp(float(self.total_weight), float(self.height), float(self.width), float(self.length), 1,
            int(round(self.percent)), self.status, self.life)
        self.db.updateData(prop, item_id)
        SuccessDialog(self, "Success").ShowModal()

    def showQualityModal(self, event):
        """Open the standard-quality reference table as a modal dialog."""
        QualityModal(self, "Standard Quality").ShowModal()

    def getImage(self, imageStr, img_width, img_height, directory, objtype):
        """Load one view's image, run filter + detection, and build its row.

        Args:
            imageStr: image file name stored in the DB record.
            img_width, img_height: display size for the scaled bitmaps.
            directory: relative image folder (e.g. front/back/side).
            objtype: view name; "side" contributes length, others width/height.

        Returns:
            wx.BoxSizer holding the filtered and detection bitmaps side by side.
        """
        _hbox = wx.BoxSizer(wx.HORIZONTAL)
        dir_path = os.path.dirname(os.path.realpath(__file__))
        #front filter
        front_filter = wx.Image(dir_path + directory + imageStr, wx.BITMAP_TYPE_ANY)
        _front_filter = cv2.imread(dir_path + directory + imageStr)
        # Brighten before filtering (gamma 2.5).
        _front_filter = self.adjust_gamma(_front_filter, 2.5)
        filtercol = self.filteredColor(_front_filter)
        front_w = filtercol['width']
        front_h = filtercol['height']
        front_p = filtercol['percent']
        front_i = filtercol['image']
        front_l = filtercol['life']
        self.length = 0
        if objtype == "side":
            # The side view's width reading is used as the banana's length.
            self.length = front_w
        else:
            self.width = front_w
            self.height = front_h
        self.front_percent = front_p
        self.front_life = front_l
        self.percent = self.front_percent
        self.life = self.front_life
        # NOTE(review): weight is estimated as width*height*length; with
        # length reset to 0 for non-side views this is 0 until the side image
        # is processed -- confirm intended formula.
        self.total_weight = round((self.width * self.height * self.length), 2)
        front_filter.SetData(front_i.tostring())
        _front_img = front_filter.Scale(img_width, img_height)
        self.videobmp = wx.StaticBitmap(self.imagepanel, wx.ID_ANY, wx.Bitmap(_front_img))
        _hbox.Add(self.videobmp, 0, wx.ALIGN_CENTER_VERTICAL, 1)
        #front detection
        front_detection = wx.Image(dir_path + directory + imageStr, wx.BITMAP_TYPE_ANY)
        _front_detection = cv2.imread(dir_path + directory + imageStr)
        _front_detection = self.adjust_gamma(_front_detection, 1.5)
        detectioncol = self.detectionObj(_front_detection)
        front_detection.SetData(detectioncol.tostring())
        _front_cascade = front_detection.Scale(img_width, img_height)
        self.videobmp2 = wx.StaticBitmap(self.imagepanel, wx.ID_ANY, wx.Bitmap(_front_cascade))
        _hbox.Add(self.videobmp2, 0, wx.ALIGN_CENTER_VERTICAL, 1)
        return _hbox

    def filteredColor(self, obj):
        """Isolate green pixels and derive size/quality/shelf-life metrics.

        Sets self.status to "Approved" when more than 50% of the detected
        object's pixels are green.

        Returns:
            dict with 'image', 'height', 'width', 'percent' and 'life'.
        """
        # self.filtered = cv2.resize(obj, (self.resized_w, self.resized_h), interpolation = cv2.INTER_AREA)
        life = 14  # maximum shelf life in days, scaled by green percentage
        hsv = cv2.cvtColor(obj, cv2.COLOR_BGR2HSV)
        lower_green = np.array([30, 0, 0])
        upper_green = np.array([70, 255, 255])
        mask = cv2.inRange(hsv, lower_green, upper_green)
        no_pixels = cv2.countNonZero(mask)
        res = cv2.bitwise_and(obj, obj, mask= mask)
        size = get_size(res)
        percent = (no_pixels / size['size']) * 100
        life = life * (percent/100)
        if percent > 50:
            self.status = "Approved"
        return {'image' : size['image'], 'height' : size['height'],
            'width' : size['width'], 'percent' : percent, 'life' : life}

    def detectionObj(self, obj):
        """Run the banana Haar cascade and draw bounding boxes onto *obj*."""
        detection = cv2.CascadeClassifier('cascade/BananaCascade.xml')
        obj_detect = cv2.cvtColor(obj, cv2.COLOR_BGR2GRAY)
        # Large windows: whole-hand detections.
        faces = detection.detectMultiScale(obj_detect, scaleFactor=1.1, minNeighbors=4,
            maxSize=(400, 400), minSize=(200, 200), flags = cv2.CASCADE_SCALE_IMAGE)
        for (x,y,w,h) in faces:
            cv2.rectangle(obj,(x,y),(x+w,y+h),(255,0,0),2)
        # Small windows: individual finger detections.
        fingers = detection.detectMultiScale(obj_detect, scaleFactor=1.1, minNeighbors=10, maxSize=(30, 70), minSize=(30, 70), flags = cv2.CASCADE_SCALE_IMAGE)
        # self.total_weight += len(fingers) * self.finger_weight
        for (x,y,w,h) in fingers:
            cv2.rectangle(obj,(x,y),(x+w,y+h),(255,0,0),2)
        return obj

    def fieldSetValues(self):
        """Copy the computed measurements into the form's text fields."""
        self.input_width.SetValue(str(self.width))
        self.input_height.SetValue(str(self.height))
        self.input_pix.SetValue(str(round(self.percent)) + "%")
        self.input_weight.SetValue(str(round(self.total_weight, 2)))
        self.input_stat.SetValue(self.status)
        self.input_life.SetValue(str(round(self.life)))
        self.input_length.SetValue(str(self.length))

    def onClose(self, event):
        """Destroy the frame on window close."""
        self.Destroy()

    def goToEvaluate(self, event, parent):
        """Open the camera-capture evaluation screen and hide this window."""
        capture = cv2.VideoCapture(0)
        frame = wx.Frame(None)
        module = __import__("CameraCapture")
        cam = getattr(module, "CameraCapture")(frame, capture)
        cam.Show()
        parent.Hide()

    def goToRecord(self, event, parent):
        """Open the record list screen and hide this window."""
        module = __import__("form.FlexList", fromlist=['FlexList'])
        menu = getattr(module, "FlexList")("Record")
        menu.Show()
        parent.Hide()

    def goToMenu(self, event, parent):
        """Open the main menu and hide this window."""
        module = __import__("MenuForm")
        menu = getattr(module, "MenuForm")("Main Menu")
        menu.Show()
        parent.Hide()

    def adjust_gamma(self, image, gamma=1.0):
        """Apply gamma correction to *image* via a 256-entry lookup table."""
        invGamma = 1.0 / gamma
        table = np.array([((i / 255.0) ** invGamma) * 255
            for i in np.arange(0, 256)]).astype("uint8")
        return cv2.LUT(image, table)
class SuccessDialog(wx.Dialog):
    """Small modal dialog confirming that the record was updated."""

    def __init__(self, parent, title):
        super(SuccessDialog, self).__init__(parent, title = title, size = (250, 150))
        panel = wx.Panel(self,)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        label_font = wx.Font(20, wx.ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
        self.label = wx.StaticText(panel, 1, "Successfully Update")
        self.label.SetFont(label_font)
        hbox.Add(self.label, 1, wx.ALIGN_CENTER_HORIZONTAL, border = 10)
        # Bug fix: the sizer was built but never attached to the panel, so
        # the label was never laid out by it.
        panel.SetSizer(hbox)
class QualityModal(wx.Dialog):
    """Modal dialog showing the standard-quality reference table
    (weight class vs. size class) for banana grading."""

    def __init__(self, parent, title):
        super(QualityModal, self).__init__(parent, title = title, size = (500, 400))
        panel = wx.Panel(self, size=(500, 400))
        vbox = wx.BoxSizer(wx.VERTICAL)
        label_font = wx.Font(14, wx.ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
        # Table rows: header first, then one (weight, size) pair per grade.
        # The strings are user-visible and kept exactly as displayed.
        table_rows = [
            ("Weight (KG)", "Size (CM))"),
            ("Small \n1.8 KG - 2.4 KG", "Small \n10 cm"),
            ("Medium \n2.5 KG - 3.4 KG", "Small \n10 cm - 14 cm"),
            ("Large \n3.5 KG - 4.0 KG", "Large \n15 cm"),
        ]
        for left_text, right_text in table_rows:
            row_sizer = wx.BoxSizer(wx.HORIZONTAL)
            left_label = wx.StaticText(panel, 1, left_text)
            left_label.SetFont(label_font)
            row_sizer.Add(left_label, 1, wx.ALL, border = 10)
            row_sizer.AddSpacer(10)
            right_label = wx.StaticText(panel, 1, right_text)
            right_label.SetFont(label_font)
            row_sizer.Add(right_label, 1, wx.ALL, border = 10)
            vbox.Add(row_sizer, 0, wx.ALIGN_TOP, border = 10)
        panel.SetSizer(vbox)
9585866242 | import logging
import pygame
import math
from typing import List
from pygame.math import Vector2
from pygame.sprite import Sprite, Group
from pygame.mask import from_surface
from plat.core.utils import *
from plat.core.components import BaseComponent
class MoverMixin:
""" Call self.calculate_newpos() on child update method. """
AXIS_X = 1
AXIS_Y = 2
AXIS_BOTH = 3
AXIS_NONE = 4
# NOTE:
FRICTION = -0.16
FRICTION_AXIS = AXIS_X
INPUT_VEL_MULTIPLIER = Vector2(2, 2)
    @property
    def direction(self):
        """Unit vector of the current velocity, or (0, 0) when stationary."""
        return self.velocity.normalize() if self.velocity.length() > 0 else Vector2(0,0)

    @direction.setter
    def direction(self, newdir):
        # NOTE(review): this scales velocity component-wise by the normalized
        # new direction rather than re-aiming the velocity along it --
        # confirm the intended semantics before relying on this setter.
        self.velocity = self.velocity * newdir.normalize()
def new(self):
super().new()
self.velocity = Vector2(0, 0)
self.base_acceleration = (0, 0)
self.acceleration = Vector2(self.base_acceleration)
self.last_pos = None
def calculate_newpos(self):
self.last_pos = self.pos
self.input_vel = self.get_input_vel()
self.calculated_vel = self._get_calculated_vel()
self.acceleration = self._calculate_acceleration()
self.velocity = self._calculate_velocity()
self.pos = self._calculate_position()
def _get_calculated_vel(self):
vel = Vector2(self.input_vel)
vel.x *= self.INPUT_VEL_MULTIPLIER.x
vel.y *= self.INPUT_VEL_MULTIPLIER.y
return vel
def _calculate_acceleration(self) -> Vector2:
""" Returns new acceleration in current update """
acc = self.calculate_acceleration()
print('acc before joystick input:', acc)
acc = acc + self.calculated_vel
acc = self._calculate_friction(acc)
print('acc after frition: ', acc)
return acc
def _calculate_friction(self, acc) -> Vector2:
# if self.FRICTION_AXIS == self.AXIS_X:
# # print(f"(acc) {acc.x} + {self.velocity.x} * {self.FRICTION} = {acc.x + self.velocity.x * self.FRICTION}")
# acc.x += self.velocity.x * self.FRICTION
# elif self.FRICTION_AXIS == self.AXIS_Y:
# acc.y += self.velocity.y * self.FRICTION
# elif self.FRICTION_AXIS == self.AXIS_BOTH:
# acc += self.velocity * self.FRICTION
# elif self.FRICTION_AXIS == self.AXIS_NONE:
# pass
# else:
# raise ValueError(f'Unkown FRICTION_AXIS for {self}')
acc += self.velocity * self.FRICTION
return acc
def calculate_acceleration(self) -> Vector2:
return Vector2(self.base_acceleration)
def _calculate_velocity(self) -> Vector2:
""" Returns new velocity in current update. """
vel = self.velocity + self.acceleration
vel.x = 0 if abs(vel.x) < 0.1 else vel.x
vel.y = 0 if abs(vel.y) < 0.1 else vel.y
return vel
def _calculate_position(self):
if self.FRICTION_AXIS != self.AXIS_NONE:
# print(f"(pos) {self.pos} + {self.velocity} + 0.5 * {self.acceleration} = {self.pos + self.velocity + 0.5 * self.acceleration}")
return self.pos + self.velocity + 0.5 * self.acceleration
else:
return self.pos + self.velocity
class JoyMoverMixin(MoverMixin):
    """Joystick-driven movement: reads stick axes as the input velocity."""
    AXIS_DEADZONE = 0.30  # axis magnitudes below this are treated as 0
    FRICTION = -0.5
    def new(self):
        super().new()
        # Joystick is owned by the game object -- presumably set up before
        # any mover is created; verify against game initialisation.
        self.joy = self.game.joystick
        self.joyinput = (0, 0)
    def get_input_vel(self):
        # (x, y) stick deflection with deadzone and clamping applied.
        return self._normalized_axis_value(0), self._normalized_axis_value(1)
    def _normalized_axis_value(self, axis):
        """Return joy.get_axis(axis) with the deadzone applied, clamped to
        the open interval (-1, 1) via +/-0.99."""
        value = self.joy.get_axis(axis)
        if abs(value) < self.AXIS_DEADZONE:
            return 0
        return max(min(value, 0.99), -0.99)
class GravityMixin(MoverMixin):
    """Adds a constant downward base acceleration to a mover."""
    GRAVITY = 0.1
    def new(self):
        super().new()
        # Positive y is down in screen coordinates.
        self.base_acceleration = (0, self.GRAVITY)
class CollisionableMixin(BaseComponent):
    """Gives a component a collision mask and grid-collision queries."""
    # TODO: Add flags to enable collisions on each side
    COLLIDE_LEFT = True
    COLLIDE_RIGHT = True
    COLLIDE_TOP = True
    COLLIDE_BOTTOM = True
    def new(self):
        self.mask = None
        super().new()
    def on_update(self):
        super().on_update()
        # Rebuild the pixel mask from the current image every frame.
        if self.image:
            self.mask = from_surface(self.image)
    def get_collissions(self) -> List[Sprite]:
        """Return grid sprites hit after nudging the rect one step along the
        current direction (y first, then x), restoring the rect afterwards.

        NOTE(review): relies on self._parse_direction and on
        self.game.state.obj.grid.children.sprites -- both defined elsewhere;
        confirm their contracts.
        """
        dir_ = self._parse_direction(self.direction)
        self.rect.y += dir_.y
        cols = pygame.sprite.spritecollide(self, self.game.state.obj.grid.children.sprites, False)
        self.rect.y -= dir_.y
        self.rect.x += dir_.x
        cols += pygame.sprite.spritecollide(self, self.game.state.obj.grid.children.sprites, False)
        self.rect.x -= dir_.x
        return cols
    def _check_collisions(self):
        # Dispatch each hit to the per-hit hook below.
        hits = self.get_collissions()
        for hit in hits:
            self._check_collision(hit)
    def _check_collision(self, hit):
        # Hook: subclasses react to a single collision.
        pass
    def on_collision_start(self, other: 'CollisionableMixin'):
        # Hook: called when a collision begins (by external machinery).
        pass
    def on_collision_end(self, other: 'CollisionableMixin'):
        # Hook: called when a collision ends (by external machinery).
        pass
class AnimationMixin(BaseComponent):
    """Drives frame-based animations selected by name."""
    def new(self):
        self.animations = self.get_animations()
        self.current_animation = self.default_animation()
        super().new()
    def get_animations(self) -> dict:
        """Return a dict of name -> animation pairs (override in subclass)."""
        return {}
    def default_animation(self) -> str:
        """Return the name of the default animation (override in subclass)."""
        return ''
    def on_update(self):
        super().on_update()
        self.animate()
    def change_animation(self, name):
        """Switch to animation ``name``; raises KeyError if unknown."""
        if name not in self.animations.keys():
            raise KeyError(name)
        if name != self.current_animation:
            # Reset the outgoing animation, switch, then reset the new one.
            self.animations[self.current_animation].reset()
            self.current_animation = name
            self.animations[self.current_animation].reset()
    def animate(self):
        # Advance to the next frame once the animation's delay has elapsed.
        now = pygame.time.get_ticks()
        anim = self.animations[self.current_animation]
        if now - anim.last_update > anim.delay:
            frame = anim.get_next_frame(now)
            self.image = frame
| manuelpepe/PLAT | plat/core/mixins.py | mixins.py | py | 6,167 | python | en | code | 0 | github-code | 13 |
12376488285 | #!/usr/bin/env python
import vlc
from tkinter import *
from tkinter import filedialog
import json
from tkinter import messagebox
class VideoFrameLabeler:
    """Tkinter + python-vlc tool for attaching text labels to video timestamps.

    Annotations are a dict of {time_in_ms (str): label} persisted as a JSON
    file next to the video (same basename, .json extension).
    """
    def __init__(self):
        # Build the window, player and all panes, then block in mainloop().
        self._init_root_window()
        self._init_media_player()
        self._bind_keyboard_events()
        main_pane = PanedWindow(self.root_window, orient=VERTICAL)
        browse_media_pane = self._build_browse_media_pane(main_pane)
        media_control_pane = self._build_media_control_pane(main_pane)
        time_text = self._build_play_percentage_text(main_pane)
        label_drop_down_menu = self._build_label_dropdown_menu(main_pane)
        save_button = self._build_save_button(main_pane)
        add_button = self._build_add_button(main_pane)
        goto_button = self._build_goto_button(main_pane)
        delete_button = self._build_delete_button(main_pane)
        label_list = self._build_label_list(main_pane)
        main_pane.add(browse_media_pane)
        main_pane.add(media_control_pane)
        main_pane.add(time_text)
        main_pane.add(label_drop_down_menu)
        main_pane.add(add_button)
        main_pane.add(goto_button)
        main_pane.add(delete_button)
        main_pane.add(label_list)
        main_pane.add(save_button)
        main_pane.pack()
        self.root_window.mainloop()
    def _init_root_window(self):
        # Fixed-size always-on-top control window.
        self.root_window = Tk()
        self.root_window.title('Video Frame Labeler')
        self.root_window.wm_attributes("-topmost", 1)
        self.root_window.geometry("280x800")
    def _init_media_player(self):
        # VLC player plus play/pause state tracking via player events.
        self.media_player = vlc.MediaPlayer()
        self.is_playing = False
        self.directory = './'
        events = self.media_player.event_manager()
        events.event_attach(vlc.EventType.MediaPlayerPaused, self._on_paused)
        events.event_attach(vlc.EventType.MediaPlayerPlaying, self._on_playing)
        events.event_attach(vlc.EventType.MediaPlayerStopped, self._on_paused)
        # NOTE(review): MediaListEndReached is a media-list event attached to
        # a MediaPlayer event manager, and _on_end_reached below is never
        # attached -- MediaPlayerEndReached was probably intended; confirm.
        events.event_attach(vlc.EventType.MediaListEndReached, self._on_paused)
    def _bind_keyboard_events(self):
        """Wire keyboard shortcuts to the button callbacks.

        NOTE(review): the keycode ranges in key_press_callback look like X11
        keycodes (1-0, q-p, a-l, z-m rows) -- platform specific; verify.
        """
        root = self.root_window
        def play_callback(_):
            self._play_button_callback()
        def forward_callback(_):
            self._forward_button_callback()
        def fast_forward_callback(_):
            self._fast_forward_button_callback()
        def backward_callback(_):
            self._backward_button_callback()
        def fast_backward_callback(_):
            self._fast_backward_button_callback()
        def browse_callback(_):
            self._build_load_media_callback()
        def add_callback(_):
            self._on_add_callback()
        def delete_callback(_):
            self._on_delete_callback()
        def save_callback(_):
            self._on_save_callback()
        def goto_callback(_):
            self._on_goto_callback()
        def key_press_callback(e):
            # Map a keyboard row position to a label index and add that label.
            key_code = e.keycode
            if 9 < key_code < 20 or 23 < key_code < 34 or 37 < key_code < 47 or 51 < key_code < 59:
                code = -1
                if 9 < key_code < 20:
                    code = key_code - 10
                elif 23 < key_code < 34:
                    code = key_code - 14
                elif 37 < key_code < 47:
                    code = key_code - 18
                elif 51 < key_code < 59:
                    code = key_code - 23
                if -1 < code < len(self.label_list):
                    self.label_listbox.select_clear(0, 'end')
                    self.label_listbox.select_set(code)
                    self._on_add_callback()
        root.bind("<space>", play_callback)
        root.bind("<Right>", fast_forward_callback)
        root.bind("<Left>", fast_backward_callback)
        root.bind("<Up>", forward_callback)
        root.bind("<Down>", backward_callback)
        root.bind("<Control_L><b>", browse_callback)
        root.bind("<Control_L><d>", delete_callback)
        root.bind("<Control_L><a>", add_callback)
        root.bind("<Control_L><s>", save_callback)
        root.bind("<Control_L><g>", goto_callback)
        root.bind("<KeyPress>", key_press_callback)
    def _build_browse_media_pane(self, master):
        # Path entry + Browse button.
        pane = PanedWindow(master, orient=HORIZONTAL)
        self.url_input_entry = Entry(pane)
        self.browse_button = Button(
            pane, text="Browse", command=self._build_load_media_callback)
        pane.add(self.url_input_entry)
        pane.add(self.browse_button)
        return pane
    def _build_media_control_pane(self, master):
        # Transport controls: << < Play > >>.
        pane = PanedWindow(master, orient=HORIZONTAL)
        self.play_button = Button(
            pane, text="Play", command=self._play_button_callback)
        forward_button = Button(
            pane, text=">", command=self._forward_button_callback)
        fast_forward_button = Button(
            pane, text=">>", command=self._fast_forward_button_callback)
        backward_button = Button(
            pane, text="<", command=self._backward_button_callback)
        fast_backward_button = Button(
            pane, text="<<", command=self._fast_backward_button_callback)
        pane.add(fast_backward_button)
        pane.add(backward_button)
        pane.add(self.play_button)
        pane.add(forward_button)
        pane.add(fast_forward_button)
        return pane
    def _build_play_percentage_text(self, master):
        # Label showing playback position as a percentage, updated by VLC.
        self.time_percent = StringVar()
        time_label = Label(master, textvariable=self.time_percent)
        time_label.config(font=("Verdana", 16))
        time_label.pack()
        self.vlc_event_manager = self.media_player.event_manager()
        self.vlc_event_manager.event_attach(vlc.EventType.MediaPlayerTimeChanged, self._on_media_time_changed)
        return time_label
    def _build_label_dropdown_menu(self, master):
        # Listbox of available labels, each prefixed with its shortcut key
        # (1-0, then qwerty rows).
        self.label_list = self._load_option_list()
        self.label_listbox = Listbox(master, height=len(self.label_list))
        i = 0
        arr = ['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', 'a', 's',
               'd', 'f', 'g', 'h', 'j', 'k', 'l', 'z', 'x', 'c', 'v', 'b', 'n', 'm']
        for label in self.label_list:
            if i < 10:
                c = str((i + 1) % 10)
            else:
                c = arr[i - 10]
            self.label_listbox.insert(i, c + ": " + label)
            i = i + 1
        self.label_listbox.select_set(0)
        return self.label_listbox
    def _build_save_button(self, master):
        return Button(master, text='Save', command=self._on_save_callback)
    def _build_delete_button(self, master):
        return Button(master, text='Delete', command=self._on_delete_callback)
    def _build_add_button(self, master):
        return Button(master, text='Add', command=self._on_add_callback)
    def _build_goto_button(self, master):
        return Button(master, text='Goto', command=self._on_goto_callback)
    def _build_label_list(self, master):
        # Listbox showing annotations already attached to the current video.
        self.video_label_listbox = Listbox(master, height=10)
        return self.video_label_listbox
    @staticmethod
    def _load_option_list():
        """Read the available labels, one per line, from option_list.txt."""
        lines = []
        with open('option_list.txt') as file:
            for line in file:
                line = line.strip()
                lines.append(line)
        return lines
    def _build_load_media_callback(self):
        """Pick a video, start playback and load its annotation JSON.

        NOTE(review): try/finally with no except -- if _load_json raises
        (e.g. no JSON exists yet for this video), the annotation list is
        refreshed but the exception still propagates; confirm intent.
        """
        filename = filedialog.askopenfilename(initialdir=self.directory,
                                              title="Select a Video")
        if len(filename) > 0:
            self.url_input_entry.delete(0, 'end')
            self.url_input_entry.insert(0, filename)
            try:
                media = vlc.Media(filename)
                self.media_player.set_media(media)
                self.media_player.play()
                self.annotations = {}
                self._load_json()
            finally:
                self._reload_annotations()
    def _play_button_callback(self):
        # Toggle play/pause; button text is updated by the VLC events.
        if self.is_playing:
            self.media_player.pause()
        else:
            self.media_player.play()
    def _forward_button_callback(self):
        # Skip ahead 5 seconds.
        u_time = self.media_player.get_time() + 5000
        self.media_player.set_time(u_time)
    def _backward_button_callback(self):
        # Skip back 5 seconds.
        u_time = self.media_player.get_time() - 5000
        self.media_player.set_time(u_time)
    def _fast_forward_button_callback(self):
        # Increase playback rate by 1x.
        self.media_player.set_rate(self.media_player.get_rate() + 1)
    def _fast_backward_button_callback(self):
        # Decrease playback rate by 1x.
        self.media_player.set_rate(self.media_player.get_rate() - 1)
    def _reload_annotations(self):
        # Rebuild the annotation listbox from self.annotations.
        self.video_label_listbox.delete(0, 'end')
        for k, v in self.annotations.items():
            self.video_label_listbox.insert(0, str(k) + ": " + v)
        self.video_label_listbox.select_set(0)
    def _on_paused(self, _):
        self.is_playing = False
        self.play_button['text'] = 'Play'
    def _on_playing(self, _):
        self.is_playing = True
        self.play_button['text'] = 'Pause'
    def _on_end_reached(self, _):
        # Rewind on end of media. NOTE(review): never attached to any event.
        self.media_player.set_time(0)
    def _on_add_callback(self):
        # Record the currently selected label at the current playback time.
        key = self.media_player.get_time()
        value = self.label_list[self.label_listbox.curselection()[0]]
        self.annotations[str(key)] = value
        self._reload_annotations()
    def _on_save_callback(self):
        # Persist annotations to <video basename>.json.
        filename = self._get_json_filename()
        with open(filename, 'w') as f:
            json.dump(self.annotations, f)
        messagebox.showinfo(title='Saved', message='Saved to ' + filename)
    def _on_delete_callback(self):
        # Remove the annotation selected in the annotation listbox.
        selected = self.video_label_listbox.get(
            self.video_label_listbox.curselection()).split(':')[0]
        del self.annotations[selected]
        self._reload_annotations()
    def _on_goto_callback(self):
        # Seek to the timestamp of the selected annotation.
        selected = self.video_label_listbox.get(
            self.video_label_listbox.curselection()).split(':')[0]
        self.media_player.set_time(int(selected))
    def _on_media_time_changed(self, _):
        # Update the percentage readout (called from the VLC event thread).
        percent = self.media_player.get_position() * 100
        rounded = round(percent, 2)
        self.time_percent.set(str(rounded) + '%')
    def _get_json_filename(self):
        # Derive the sidecar JSON path from the video path in the entry box;
        # also remembers the directory for the next file dialog.
        directory = self.url_input_entry.get().rsplit('/', 1)
        self.directory = directory[0]
        filename = directory[1].rsplit('.', 1)
        return directory[0] + '/' + filename[0] + '.json'
    def _load_json(self):
        # Load existing annotations; raises if the JSON file does not exist.
        filename = self._get_json_filename()
        with open(filename) as f:
            self.annotations = json.load(f)
# Launch the application at import/run time.
VideoFrameLabeler()
| UdaraWanasinghe/VideoFrameLabeler | main.py | main.py | py | 10,573 | python | en | code | 1 | github-code | 13 |
26885299285 | #!/usr/bin/env python3
import os, sys
import json
import numpy as np
import matplotlib.pyplot as plt
import colorcet as cc
import sklearn.gaussian_process as gp
import utils
import opt.objective as objective
# Figure configuration: optimizer log path, sample counts per panel, grid
# resolution, initial log10 axis bounds, contour levels and text position.
BAYES_LOG_PATH = '../opt/bayes/data/expDecay4.json'
N_SAMPLES = (100, 115, 130, 200)
NX, NY = 40, 40
XMIN, XMAX = 18, 22.2
YMIN, YMAX = 15, 20
LEVELS = np.linspace(-1.5, 2.4, 100)
XTEXT, YTEXT = 1.5e18, 3e19
def plot_bayes(ax, input, output):
    """Fit a GP surrogate to (log_nD, log_nNe) -> objective samples and draw
    its predicted surface plus the sampled points and the optimum on ``ax``.

    NOTE(review): ``input`` shadows the builtin of the same name.
    Returns the contour set for use with a shared colourbar.
    """
    reg = gp.GaussianProcessRegressor(
        kernel=gp.kernels.Matern(length_scale=[1., 1.], nu=2.5),
        # alpha=1e-6,
        normalize_y=True,
        n_restarts_optimizer=5,
        random_state=420,
    )
    reg.fit(input, output)
    # Predict on a log-spaced grid over the module-level axis bounds.
    x = np.logspace(XMIN, XMAX, NX)
    y = np.logspace(YMIN, YMAX, NY)
    xy = np.log10([[xx, yy] for xx in x for yy in y])
    mu = reg.predict(xy)
    nD = 10 ** xy[:,0]
    nNe = 10 ** xy[:,1]
    cntr = ax.tricontourf(nD, nNe, np.log10(mu), levels=LEVELS, cmap=cc.cm.diverging_bwr_40_95_c42)
    # add each sampled point
    nD = 10 ** input[:,0]
    nNe = 10 ** input[:,1]
    ax.scatter(nD, nNe, c='k', s=1)
    # Highlight the sampled optimum with a red star.
    nD_, nNe_, obj_ = utils.get_optimum(10**input[:,0], 10**input[:,1], output)
    ax.scatter(nD_, nNe_, c='r', marker='*', s=60)
    print(utils.get_optimum(input[:,0], input[:,1], output))
    ax.set_yscale('log')
    ax.set_xscale('log')
    ax.set_xticks([1e18, 1e20, 1e22])
    ax.set_yticks([1e16, 1e18, 1e20])
    ax.set_xticklabels([r'$10^{18}$', r'$10^{20}$', r'$10^{22}$'])
    ax.set_yticklabels([r'$10^{16}$', r'$10^{18}$', r'$10^{20}$'])
    return cntr
def main():
    """Build a 2x2 figure of GP-predicted objective surfaces after
    increasing numbers of Bayesian-optimization samples."""
    with open(BAYES_LOG_PATH) as file:
        log = list(map(json.loads, file))
    input = np.array([[sample['params']['log_nD'], sample['params']['log_nNe']] for sample in log], dtype=np.float32)
    output = -1 * np.array([sample['target'] for sample in log], dtype=np.float32)
    # NOTE(review): mutates module-level bounds so plot_bayes() picks up the
    # data-driven axis limits -- a side effect worth refactoring away.
    global XMIN, XMAX, YMIN, YMAX
    XMIN, XMAX = input[:,0].min(), input[:,0].max()
    YMIN, YMAX = input[:,1].min(), input[:,1].max()
    print(output.min())
    utils.setFigureFonts()
    fig, axs = plt.subplots(ncols=2, nrows=2, figsize=utils.FIGSIZE_2X2, sharey=True, sharex=True)
    axs = [ax for row in axs for ax in row]
    # One panel per sample-count prefix of the optimization log.
    for n_samples, ax, alph in zip(N_SAMPLES, axs, ('a', 'b', 'c', 'd')):
        x = input[:n_samples,:]
        y = output[:n_samples]
        cntr = plot_bayes(ax, x, y)
        ax.set_xlim(10**XMIN, 10**XMAX)
        ax.set_ylim(10**YMIN, 10**YMAX)
        ax.text(XTEXT, YTEXT, r'$\rm (' + alph +')$')
        # ax.set_title(r'$' + str(n_samples) + r'{\rm \:samples}$')
    # # colourbar settings
    ticks = np.linspace(-1, 2, 4)
    cbar_ax = fig.add_axes([.9, 0.2, utils.COLOURBAR_WIDTH, 0.7])
    cbar = fig.colorbar(cntr, cax=cbar_ax, ticks=ticks)
    cbar.ax.set_title(r'$\mathcal{L}_1$')
    cbar.ax.set_yticklabels([r'$10^{-1}$', r'$10^0$', r'$10^1$', r'$10^2$'])
    # add text
    # x, y = 1.5e18, 3e19
    # ax1.text(x, y, r"${\rm (a)\,scan}+{\rm Powell's}$")
    # ax2.text(x, y, r"${\rm (b)\,BayesOpt}$")
    # ax1.set_title(r"${\rm scan}+{\rm Powell's\:method}$")
    # ax2.set_title(r"${\rm Bayesian\:optimization}$")
    fig.supylabel(r'$n_{\rm Ne}\;({\rm m}^{-3})$', x=.04, y=.55)
    fig.supxlabel(r'$n_{\rm D}\;({\rm m}^{-3})$', y=.04)
    plt.tight_layout()
    # axs[0].set_ylabel(r'$n_{\rm Ne}\;({\rm m}^{-3})$')
    # axs[2].set_ylabel(r'$n_{\rm Ne}\;({\rm m}^{-3})$')
    # axs[2].set_xlabel(r'$n_{\rm D}\;({\rm m}^{-3})$')
    # axs[3].set_xlabel(r'$n_{\rm D}\;({\rm m}^{-3})$')
    fig.subplots_adjust(wspace=.1, hspace=.05, right=.85)
    plt.show()
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main())
| peterhalldestam/exjobb | py/figs/plotProcession.py | plotProcession.py | py | 3,688 | python | en | code | 2 | github-code | 13 |
10927229368 | import glob
import os
import struct
class Debug():
    """Decorator that logs calls when the class-wide log level permits.

    A call is logged when ``Debug.global_log_level <= self.level``; with the
    defaults (global level 1, instance level 0) logging is off.

    Fix: wrap with functools.wraps so the decorated function keeps its
    __name__/__doc__ metadata.
    """
    global_log_level = 1

    def __init__(self, level=None):
        # Treat None (and other falsy values) as level 0.
        self.level = level if level else 0

    def __call__(self, wrapped_function):
        from functools import wraps  # local import keeps module deps unchanged

        @wraps(wrapped_function)  # preserve the wrapped function's metadata
        def wrapper_function(*args, **kwargs):
            if Debug.global_log_level <= self.level:
                print("[DEBUG] Called {}".format(locals()))
                print("[DEBUG] Called {}".format(wrapped_function))
            return wrapped_function(*args, **kwargs)
        return wrapper_function
class InputDataset():
    """Context manager around a binary input file; records the file size so
    the read loop can test ``offset < dataset`` (see __gt__ below)."""
    def __init__(self, file_path):
        self.file_obj = open(file_path, 'rb')
        self.file_size = os.stat(file_path).st_size
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        self.file_obj.close()
    def __gt__(self, other):
        # Reflected comparison: ``int_offset < dataset`` falls back to this
        # method, so it returns True while ``other`` is still inside the file.
        if other < self.file_size:
            return True
        else:
            return False
class Reader():
    """Decodes fixed-layout binary packets as described by an XML-like
    ``packet_structure`` (header attributes + payload variables)."""
    @Debug()
    def _unpack_int(self, raw):
        # 4-byte little/native-endian signed int.
        return struct.unpack('i', raw)[0]
    @Debug()
    def _unpack_str(self, raw):
        # 4-byte ASCII/UTF-8 string.
        return struct.unpack('4s', raw)[0].decode('utf-8')
    @Debug()
    def _unpack_double(self, raw):
        # NOTE(review): 'f' unpacks a 4-byte float, not a C double ('d').
        return struct.unpack('f', raw)[0]
    @Debug()
    def read_files(self, pattern, packet_structure):
        """Read every file matching ``pattern`` and return a list of packets,
        each a list of {name, value} dicts.

        Field offsets/sizes/types come from packet_structure (presumably an
        lxml objectify tree -- confirm against the caller).
        """
        fields = [
            attribute.attrib for attribute in packet_structure.header.attribute
        ]
        fields.extend([
            variable.attrib for variable in packet_structure.payload.variable
        ])
        all_packets = []
        packet_count = 0
        for entry in glob.glob(pattern):
            packet_offset = 0
            with InputDataset(entry) as input_dataset:
                print(input_dataset)
                # ``packet_offset < input_dataset`` uses InputDataset.__gt__.
                while packet_offset < input_dataset:
                    packet_count += 1
                    # NOTE(review): hard cap of 3 packets -- looks like a
                    # leftover debugging limit; confirm before shipping.
                    if packet_count == 4:
                        break
                    single_packet = []
                    for field in fields:
                        field_offset = packet_offset + int(field['offset'])
                        field_size = int(field['size'])
                        input_dataset.file_obj.seek(field_offset)
                        raw = input_dataset.file_obj.read(field_size)
                        # Dispatch to _unpack_<type> when available, else
                        # fall back to a single unsigned byte.
                        if hasattr(self, '_unpack_{}'.format(field['type'])):
                            value = getattr(self, '_unpack_{}'.format(field['type']))(raw)
                        else:
                            value = struct.unpack('B', raw)[0]
                        single_packet.append({
                            'name': field['name'],
                            'value': value
                        })
                    # Next packet starts after the last field of this one.
                    packet_offset += int(fields[-1]['offset']) + int(fields[-1]['size'])
                    all_packets.append(single_packet)
        return all_packets
| porvik/python-training-b1 | code/module_1/src/binary_reader/inp/__init__.py | __init__.py | py | 2,837 | python | en | code | 0 | github-code | 13 |
24694458376 | import pickle
import pickle

# Seed the cafe menu data file (pickled dict of category -> [name, price
# variant A, price variant B] rows) and reset the money counter file.
# Fix: use context managers so the files are closed even on error.
list_shop = {'Coffee': [['Kohi Americano', '100', '110'],
                        ['Tokyo Cappucino', '110', '120'],
                        ['Caffe Latte', '110', '120']],
             'Tea': [['Hakone Jasmine Tea', '75', '80'],
                     ['Chamomile Tea', '80', '85'],
                     ['Matcha Latte', '90', '100']],
             'Hot Choco': [['Signature Hot Chocolate', '100', '120'],
                           ['Vanilla Chocolate', '110', '120']],
             'Iced Beverages': [['Iced Tokyo Kohi', '140', '150'],
                                ['Iced Macadamia', '130', '140'],
                                ['Iced Mochaccino', '130', '140'],
                                ['Iced Macchiato', '130', '140']]}
with open('Menu_A_C.dat', 'wb') as menu_file:
    pickle.dump(list_shop, menu_file)
print('Done')
with open('moni.txt', 'w') as money_file:
    money_file.write('0')
| KiranEaswar/Cafe-Machine | Cafe_startup.py | Cafe_startup.py | py | 840 | python | en | code | 0 | github-code | 13 |
17198907774 | import argparse
import pandas as pd
from tqdm import tqdm
def parse_arguments():
    """Parse CLI options: -f/--files (extraction CSV paths) and
    -n/--number (how many reviews to consider)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', '--files', nargs='+',
                        help='The path to the files containing extractions \
                        per review and item.')
    parser.add_argument('-n', '--number', type=int, default=10000,
                        help='The number of reviews to be considered.')
    args = parser.parse_args()
    return args
def measure_growth(files, number):
    """Print, as CSV rows, how many distinct "modifier;aspect" extractions
    have been seen after each block of 1000 shuffled reviews, per file."""
    frames, all_sets = [], [set() for _ in range(len(files))]
    for f in tqdm(files):
        dat = pd.read_csv(f)
        # One "modifier;aspect" string per extraction row.
        dat['ext'] = \
            dat.apply(lambda x: x['modifier'] + ';' + x['aspect'], axis=1)
        rev_data = dat.groupby('review_id').agg(ext=('ext', lambda x: set(x)))
        rev_data = rev_data.reset_index()
        # sample(frac=1) shuffles the reviews.
        frames.append(rev_data.sample(frac=1))
    print('reviews,' + ','.join(files))
    for i in range(number):
        to_print = ''
        for j, f in enumerate(frames):
            if i % 1000 == 0:
                to_print += ',' + str(len(all_sets[j]))
            all_sets[j] = all_sets[j].union(f.iloc[i]['ext'])
        # NOTE(review): to_print already starts with ',', so each row has a
        # double comma after the index -- confirm downstream parsing expects it.
        if i % 1000 == 0:
            print(str(i) + ',' + to_print)
def main():
    """Entry point: forward the parsed CLI options to measure_growth()."""
    cli = parse_arguments()
    measure_growth(files=cli.files, number=cli.number)
main()
| sampoauthors/Sampo | scripts/extraction_space_analysis.py | extraction_space_analysis.py | py | 1,366 | python | en | code | 2 | github-code | 13 |
26431722331 | from collections import deque
with open("./input.txt") as f:
hands = f.read().strip().split("\n\n")
p1 = deque([int(line.strip()) for line in hands[0].strip().splitlines()[1:]])
p2 = deque([int(line.strip()) for line in hands[1].strip().splitlines()[1:]])
while len(p1) != 0 and len(p2) != 0:
c1 = p1.popleft()
c2 = p2.popleft()
if c1 > c2:
p1 += [c1, c2]
else:
p2 += [c2, c1]
winner = p1 if len(p1) > 0 else p2
print("Answer 1:", sum([(i + 1) * c for i, c in enumerate(reversed(winner))]))
def recurse(p1, p2, level=0):
    """Play a game of Recursive Combat (AoC 2020 day 22, part 2).

    Mutates the two deques in place and returns ``(winning_player, score)``
    where the score is the winner's deck scored bottom-up; a repeated game
    state awards the (sub)game to player 1 with score 0.
    """
    history = set()
    while p1 and p2:
        state = (tuple(p1), tuple(p2))
        if state in history:
            # Infinite-game rule: player 1 wins this (sub)game outright.
            return 1, 0
        history.add(state)
        top1, top2 = p1.popleft(), p2.popleft()
        if len(p1) >= top1 and len(p2) >= top2:
            # Both players can recurse: play a sub-game on deck copies.
            round_winner = recurse(deque(list(p1)[:top1]),
                                   deque(list(p2)[:top2]),
                                   level=level + 1)[0]
        else:
            round_winner = 1 if top1 > top2 else 2
        if round_winner == 1:
            p1.extend((top1, top2))
        else:
            p2.extend((top2, top1))
    winning_deck = p1 if p1 else p2
    player = 1 if p1 else 2
    score = sum(pos * card for pos, card in enumerate(reversed(winning_deck), start=1))
    return player, score
with open("./input.txt") as f:
hands = f.read().strip().split("\n\n")
p1 = deque([int(line.strip()) for line in hands[0].strip().splitlines()[1:]])
p2 = deque([int(line.strip()) for line in hands[1].strip().splitlines()[1:]])
print("Answer 2:", recurse(p1, p2)[1])
| korylprince/adventofcode | 2020/22/main.py | main.py | py | 1,560 | python | en | code | 1 | github-code | 13 |
16760889794 | import pandas as pd
import numpy as np
import gc
from sklearn.preprocessing import LabelEncoder
from utils import categorical_mode as mode
from lightgbm import LGBMClassifier
class prev_applications:
    """Feature engineering + LightGBM model over the Home Credit
    previous-applications table, keyed by SK_ID_CURR."""
    def preprocess(self, df):
        """Replace sentinel values with NaN, in place.

        NOTE(review): the .loc[...] call below operates on a *copy* returned
        by .loc[:, cols], so that inplace replace does not modify df -- the
        365243.0 sentinels likely survive; confirm and fix upstream.
        """
        df.replace("NXA", np.nan, inplace=True)
        df.replace("XAP", np.nan, inplace=True)
        df.loc[:,['DAYS_FIRST_DRAWING','DAYS_FIRST_DUE','DAYS_LAST_DUE_1ST_VERSION','DAYS_LAST_DUE','DAYS_TERMINATION']].replace(365243.0, np.nan, inplace=True)
    def generate_features(self, df):
        """Add ratio/difference features and label-encode categoricals, in
        place on df.

        NOTE(review): the final groupby/agg result is bound to the local name
        ``df`` and never returned, so the aggregation is discarded and the
        caller keeps the un-aggregated frame -- confirm intended behaviour.
        """
        df['RATIO_AMT_APPLICATION_TO_AMT_CREDIT'] = (df['AMT_APPLICATION'] / df['AMT_CREDIT'])
        df['RATIO_AMT_CREDIT_TO_AMT_ANNUITY'] = df['AMT_CREDIT'] / df['AMT_ANNUITY']
        df['RATIO_AMT_APPLICATION_TO_AMT_ANNUITY'] = df['AMT_APPLICATION'] / df['AMT_ANNUITY']
        df['DIFF_AMT_CREDIT_AMT_GOODS_PRICE'] = df['AMT_CREDIT'] - df['AMT_GOODS_PRICE']
        df['DIFF_AMT_APPLICATION_AMT_GOODS_PRICE'] = df['AMT_APPLICATION'] - df['AMT_GOODS_PRICE']
        df['DIFF_RATE_DOWN_PAYMENT_RATE_INTEREST_PRIMARY'] = df['RATE_DOWN_PAYMENT'] - df['RATE_INTEREST_PRIMARY']
        df['DIFF_DAYS_LAST_DUE_DAYS_FIRST_DUE'] = df['DAYS_LAST_DUE'] - df['DAYS_FIRST_DUE']
        # Collect all numerical feautures
        numerical_features = [
            'AMT_ANNUITY',
            'AMT_APPLICATION',
            'AMT_CREDIT',
            'AMT_DOWN_PAYMENT',
            'AMT_GOODS_PRICE',
            'HOUR_APPR_PROCESS_START',
            'RATE_DOWN_PAYMENT',
            'RATE_INTEREST_PRIMARY',
            'RATE_INTEREST_PRIVILEGED',
            'SELLERPLACE_AREA',
            'DAYS_FIRST_DRAWING',
            'DAYS_FIRST_DUE',
            'DAYS_LAST_DUE_1ST_VERSION',
            'DAYS_LAST_DUE',
            'DAYS_TERMINATION',
            'DAYS_DECISION',
            'RATIO_AMT_APPLICATION_TO_AMT_CREDIT',
            'RATIO_AMT_CREDIT_TO_AMT_ANNUITY',
            'RATIO_AMT_APPLICATION_TO_AMT_ANNUITY',
            'DIFF_AMT_CREDIT_AMT_GOODS_PRICE',
            'DIFF_AMT_APPLICATION_AMT_GOODS_PRICE',
            'DIFF_RATE_DOWN_PAYMENT_RATE_INTEREST_PRIMARY',
            'DIFF_DAYS_LAST_DUE_DAYS_FIRST_DUE',
        ]
        # Collect all categorical features
        categorical_features = [
            'NAME_CONTRACT_TYPE',
            'WEEKDAY_APPR_PROCESS_START',
            'FLAG_LAST_APPL_PER_CONTRACT',
            'NFLAG_LAST_APPL_IN_DAY',
            'NAME_CASH_LOAN_PURPOSE',
            'NAME_CONTRACT_STATUS',
            'NAME_PAYMENT_TYPE',
            'CODE_REJECT_REASON',
            'NAME_TYPE_SUITE',
            'NAME_CLIENT_TYPE',
            'NAME_GOODS_CATEGORY',
            'NAME_PORTFOLIO',
            'NAME_PRODUCT_TYPE',
            'CHANNEL_TYPE',
            'NAME_SELLER_INDUSTRY',
            'NAME_YIELD_GROUP',
            'PRODUCT_COMBINATION',
            'NFLAG_INSURED_ON_APPROVAL',
        ]
        # Label Encode each categorical feature
        # Encoders are cached on self so predict() reuses fit()'s encoders
        # (though fit_transform below re-fits them each call).
        if hasattr(self, "encoders") == False:
            self.encoders = {}
        for feature in categorical_features:
            label_encoder = self.encoders.get(feature, LabelEncoder())
            df[feature] = label_encoder.fit_transform(df[feature])
            self.encoders[feature] = label_encoder
        # Define aggregation function for each numerical variable
        aggregations = {}
        for feature in numerical_features:
            aggregations[feature] = ['min', 'max', 'mean', 'var']
        # Replace nan values with value in previous column
        df.fillna(method = 'ffill', inplace = True)
        # Define aggregation function for each categorical variable
        for feature in categorical_features:
            aggregations[feature] = [mode]
        '''
        df = entire table - prev_applications
        aggreagates = group by id and calculate summary
        '''
        df = df.groupby('SK_ID_CURR').agg({**aggregations})
        df.columns = pd.Index([col_name+"_"+method.upper() for col_name, method in df.columns.tolist()])
    def fit(self, df):
        """Preprocess, engineer features, and fit a balanced LGBMClassifier
        on everything except TARGET. Returns self."""
        #do preprocessing
        #feature engineering
        #fit model
        #return self
        self.preprocess(df)
        self.generate_features(df)
        lgb = LGBMClassifier(class_weight='balanced')
        # NOTE(review): indexing a DataFrame with a set is rejected by newer
        # pandas -- consider list(...) here.
        train_cols = set(df.columns) - {'TARGET'}
        lgb.fit(df[train_cols], df['TARGET'])
        self.lgb = lgb
        return self
    def predict(self, x_test):
        """Apply the same preprocessing/feature pipeline and return a
        DataFrame of positive-class probabilities indexed by SK_ID_CURR."""
        #same preprocessing as in fit
        #predict from model instead of fit
        #return predictions
        L1 = pd.DataFrame(index = x_test['SK_ID_CURR'].unique())
        print("Pre-processing data.....")
        self.preprocess(x_test)
        print("Building new features.....")
        self.generate_features(x_test)
        print("Preparing output.....")
        pred = self.lgb.predict_proba(x_test)[:, 1]
        L1['LGB_PREV_APPLICATIONS'] = pred
        return L1
def solution(want, number, discount):
    """Count the membership-start days on which every wanted item is covered.

    Each start day covers a window of consecutive ``discount`` entries; the
    window is joined if its item counts exactly match ``want``/``number``.

    Generalization: the window length is ``sum(number)`` instead of the
    problem's hard-coded 10 (identical behaviour when sum(number) == 10).
    Uses dict-of-counts comparison instead of sorting each window.

    :param want: item names the customer wants
    :param number: how many of each item (parallel to ``want``)
    :param discount: item discounted on each day
    :return: number of valid start days
    """
    window = sum(number)
    # Multiset of wanted items; zero-count entries are dropped so they
    # cannot block a match.
    wanted = {item: n for item, n in zip(want, number) if n}
    days = 0
    for start in range(len(discount) - window + 1):
        seen = {}
        for item in discount[start:start + window]:
            seen[item] = seen.get(item, 0) + 1
        if seen == wanted:
            days += 1
    return days
74829038096 | from django.shortcuts import render, redirect
from .forms import RatingForm
from .models import Rating
from .filters import RatingFilter
def home(request):
    """Render the rating form; persist a valid submission then jump to search."""
    form = RatingForm(request.POST) if request.method == "POST" else RatingForm()
    if form.is_bound and form.is_valid():
        form.save(commit=True)
        return redirect('search')
    # GET, or POST with validation errors: show the (possibly bound) form.
    return render(request, 'home.html', {"form": form})
def search(request):
    """List all ratings, filtered by the request's querystring parameters."""
    filtered = RatingFilter(request.GET, queryset=Rating.objects.all())
    return render(request, 'search.html', {"rating": filtered})
4675424577 | import time
import RPi.GPIO as GPIO
from socket import *
# One-time module-level GPIO configuration for the ultrasonic sensor.
# Squash warnings
GPIO.setwarnings(False)
# GPIO Mode (BOARD / BCM)
GPIO.setmode(GPIO.BCM)
# GPIO Pins
GPIO_TRIGGER = 18
GPIO_ECHO = 24
# Set GPIO direction (IN/OUT)
GPIO.setup(GPIO_TRIGGER, GPIO.OUT)
GPIO.setup(GPIO_ECHO, GPIO.IN)
def measure_distance():
    """Return the echo-ranged distance in centimetres from the ultrasonic
    sensor (trigger pulse, then time the echo line high period)."""
    # Set the trigger to high
    GPIO.output(GPIO_TRIGGER, True)
    # Set the trigger to low after some time
    # NOTE(review): 1e-7 s is far shorter than the ~10 us trigger pulse the
    # usual HC-SR04 datasheet calls for -- confirm the sensor still fires.
    time.sleep(0.0000001)
    GPIO.output(GPIO_TRIGGER, False)
    # StartTime, StopTime
    startTime = time.time()
    stopTime = time.time()
    # Save StartTime
    # NOTE(review): both busy-wait loops spin forever if the echo line never
    # toggles (sensor unplugged); a timeout would make this robust.
    while GPIO.input(GPIO_ECHO) == 0:
        startTime = time.time()
    while GPIO.input(GPIO_ECHO) == 1:
        stopTime = time.time()
    # return distance
    # Elapsed time * speed of sound (34300 cm/s), halved for the round trip.
    return ((stopTime - startTime) * 34300) / 2
# if __name__ == '__main__':
# try:
# while True:
# distance = measure_distance()
# if distance < 10:
# print('Obstacle in front - {} cm, stopping vehicle!'.format(distance))
# time.sleep(1)
# except Exception as e:
# print('Stopped accessing ultrasonic sensor, due to following error: {}'.format(e))
# GPIO.cleanup()
| hkanumilli/Autonomous-RC-Vehicle | RC_Car/ultrasonic.py | ultrasonic.py | py | 1,237 | python | en | code | 0 | github-code | 13 |
32071352978 | #!/usr/bin/env python
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.model_selection import GridSearchCV
def load_npz(path):
    """Load the feature matrix ``X`` and label vector ``Y`` from a .npz
    archive and return them as ``(X, y)``."""
    archive = np.load(path)
    return archive["X"], archive["Y"]
def quality_scores(y_true, y_pred):
    """Return accuracy, sensitivity (TPR) and specificity (TNR) for a
    binary classification as a dict."""
    accuracy = accuracy_score(y_true, y_pred)
    # confusion_matrix().ravel() yields (tn, fp, fn, tp) for binary labels.
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
    sensitivity = tp / (tp + fn)
    specificity = tn / (tn + fp)
    return {
        "accuracy": accuracy,
        "sensitivity": sensitivity,
        "specificity": specificity,
    }
X_train, y_train = load_npz("${Xy_train}")
X_test, y_test = load_npz("${Xy_test}")
df = pd.read_csv("${FEATURES}")
selected_features = [str(x).split(",") for x in df.features]
clf = RandomForestClassifier()
param_grid = {
"n_estimators": [200, 500],
"max_features": ["auto", "log2"],
"max_depth": [4, 6, 8],
"criterion": ["gini", "entropy"],
}
scores = []
for alpha, features in zip(df.alpha, selected_features):
if features == ["nan"]:
s = {"accuracy": None, "sensitivity": None, "specificity": None}
scores.append(s)
continue
features = [int(x) for x in features]
X_train_alpha = X_train[:, features]
X_test_alpha = X_test[:, features]
cv_clf = GridSearchCV(clf, param_grid)
cv_clf.fit(X_train_alpha, y_train)
y_pred = cv_clf.predict(X_test_alpha)
score = quality_scores(y_test, y_pred)
scores.append(score)
data_out = {
"DATASET": df.DATASET,
"fold": df.fold,
"AM": df.AM,
"alpha": df.alpha,
"features": df.features,
"accuracy": [x["accuracy"] for x in scores],
"sensitivity": [x["sensitivity"] for x in scores],
"specificity": [x["specificity"] for x in scores],
}
df_out = pd.DataFrame(data_out)
df_out.to_csv("accuracy.csv", index=False)
| PeterJackNaylor/knockoff-MMD-HSIC | src/templates/rf.py | rf.py | py | 1,924 | python | en | code | 1 | github-code | 13 |
2355587497 | # 977 - Squares of Sorted Array
# https://leetcode.com/problems/squares-of-a-sorted-array/
class Solution:
    # list the methods to be run against the test cases
    implementations = ["get_sorted_squares"]

    def get_sorted_squares(self, nums: list) -> list:
        """Return the squares of an already-sorted list, in non-decreasing
        order, in a single pass.

        Two pointers walk inward from both ends of ``nums``; because the
        input is sorted, the element with the larger absolute value squares
        to the larger result, so the output list can be filled from its last
        slot backwards.

        Time: O(n). Space: O(n) for the output list.
        """
        lo, hi = 0, len(nums) - 1
        result = [0] * len(nums)
        # Fill result right-to-left with the larger square each step.
        for slot in range(len(nums) - 1, -1, -1):
            if abs(nums[lo]) > abs(nums[hi]):
                result[slot] = nums[lo] ** 2
                lo += 1
            else:
                result[slot] = nums[hi] ** 2
                hi -= 1
        return result
# =============================== DRIVER CODE ================================
if __name__ == "__main__":
    # Project-local pretty-printing test harness (not pytest).
    from class_print_tests import PrintTests as PT

    # enter test cases: ['description', [inputs], expected_result]
    test_cases = [
        ["Example 1", [[1, 3, 7, 10, 12, 15]], [1, 9, 49, 100, 144, 225]],
        ["Example 2", [[-7, -3, 2, 3, 11]], [4, 9, 9, 49, 121]],
        ["Single Element - Positive", [[3]], [9]],
        ["Single Element - Negative", [[-3]], [9]],
        ["Two Elements", [[-1, 5]], [1, 25]],
    ]

    # run test cases and print results using PrintTests class
    pt = PT(Solution(), test_cases)
    pt.run()
| andrewt110216/algorithms-and-data-structures | leetcode/p0977_solution.py | p0977_solution.py | py | 2,212 | python | en | code | 0 | github-code | 13 |
73860137617 | import numpy as np
import keras
from keras.models import Sequential, Model as K_Model, save_model, load_model
from keras.layers import (Input, Dense, Dropout, Flatten, Concatenate, Reshape,
GRU, LSTM, RepeatVector, Permute, concatenate, multiply, maximum, add)
from keras.optimizers import SGD, Adam
from keras.layers.normalization import BatchNormalization
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.regularizers import l2
from keras import backend as K
from keras.utils import plot_model
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os
import datetime
import glob
import random
from random import shuffle
import utils
from utils import Training_Data_Loader
from custom_losses_and_metrics import (selective_hinge as s_hinge,
mean_hinge_accuracy as m_hinge_acc,
logistic_loss as log_loss,
mean_logistic_loss_accuracy as m_log_acc,
selective_binary_accuracy as s_binary_acc,
normed_selective_binary_accuracy as ns_binary_acc,
average_mean_accuracy as avg_mean_acc,
first_prediction_accuracy as fp_acc)
#class skeleton for models
class Model:
    """Base class for skip-prediction models.

    Handles artifact folders, timestamps, data generators, compilation with
    the project's custom losses/metrics, training, evaluation, prediction,
    persistence and plotting. Subclasses implement ``build_model`` and must
    set ``self.model`` to a Keras model with inputs named 'tracks_input' /
    'session_input' and an output named 'output'.
    """

    def __init__(self, model_name = 'skeleton'):
        self.model_name = model_name
        self.set_shape()
        self.create_folder()
        self.set_timestamp()

    def set_shape(self):
        # Fixed tensor shapes: 20 tracks x 40 features per session,
        # 10 session-level features, 20 skip predictions.
        self.input_shape_tracks = (20,40,)
        self.input_shape_sessions = (10,)
        self.output_shape = (20,)

    def create_folder(self):
        # Each model gets its own directory for checkpoints and plots.
        self.path = 'models/' + self.model_name + '/'
        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def set_timestamp(self):
        # Timestamp suffix used to disambiguate saved artifacts.
        now = datetime.datetime.now()
        self.now = now.strftime("%Y-%m-%d_%H:%M")

    def build_model(self):
        """Subclass hook: must assemble and assign ``self.model``."""
        pass

    def generate_train_data(self, path, validation_nr = 0, batch_size = 64,
            iterations_per_file = 10):
        """Endless generator over the training log files.

        The file at index *validation_nr* is held out (see
        ``generate_valid_data``); a few random slices are sampled from each
        file before cycling to the next one.
        """
        tracks_logs = sorted(glob.glob(path + "/training_set_preproc/log_*.csv"))
        del tracks_logs[validation_nr]
        shuffle(tracks_logs)
        n = len(tracks_logs)
        idx = 0
        while True:
            track = tracks_logs[idx]
            loader = Training_Data_Loader(track, batch_size)
            # Sample at most iterations_per_file distinct slices per file.
            random_idx = random.sample(range(loader.n_slices),
                min(iterations_per_file,loader.n_slices))
            idx = (idx + 1 ) % n
            for k in range(len(random_idx)):
                x_rnn, x_fc, y = loader.get_data(random_idx[k])
                yield ({'tracks_input': x_rnn, 'session_input': x_fc},
                    {'output': y})

    def generate_valid_data(self, path, validation_nr = 0, batch_size = 64):
        """Endless generator over the single held-out validation file,
        serving shuffled batches of size *batch_size*."""
        tracks_logs = sorted(glob.glob(path + "/training_set_preproc/log_*.csv"))
        track = tracks_logs[validation_nr]
        # Session file lives next to its track log with a 'session_' prefix.
        session = os.path.dirname(track) + '/session_' + os.path.basename(track)
        x_rnn, x_fc, y = utils.load_training_data_simple(track, session)
        n_slices = int(x_rnn.shape[0] / batch_size)
        random_idx = np.random.randint(x_rnn.shape[0], size=x_rnn.shape[0])
        while True:
            for k in range(n_slices):
                random_idx_slices = random_idx[k*batch_size:(k+1)*batch_size]
                batch_x_rnn = x_rnn[random_idx_slices,:,:]
                batch_x_fc = x_fc[random_idx_slices,:]
                batch_y = y[random_idx_slices,:]
                yield ({'tracks_input': batch_x_rnn, 'session_input': batch_x_fc},
                    {'output': batch_y})

    def generate_test_data(self, path):
        """Endless generator over test files (inputs only, no labels)."""
        tracks_logs = sorted(glob.glob(path + "/test_set_preproc/log_*.csv"))
        n = len(tracks_logs)
        idx = 0
        while True:
            track = tracks_logs[idx]
            session = os.path.dirname(track) + '/session_' + os.path.basename(track)
            x_rnn, x_fc = utils.load_test_data_simple(track, session)
            idx = (idx + 1 ) % n
            yield ({'tracks_input': x_rnn, 'session_input': x_fc})

    def compile(self, optimizer = 'Adam', loss = 's_hinge', lr = 0.001):
        """Compile ``self.model`` with one of the project's custom losses.

        *loss* may be a key ('s_hinge', 'm_hinge_acc', 'log_loss',
        'm_log_acc') or any loss accepted by Keras directly.
        """
        optimizer_v = []
        if optimizer == 'Adam':
            optimizer_v = Adam(lr = lr)
        elif optimizer == 'SGD':
            optimizer_v = SGD(lr = lr)
        if loss == 's_hinge':
            self.model.compile(optimizer = optimizer_v, loss = s_hinge,
                metrics=[ns_binary_acc, avg_mean_acc, fp_acc])
        elif loss == 'm_hinge_acc':
            self.model.compile(optimizer = optimizer_v, loss = m_hinge_acc,
                metrics=[ns_binary_acc, avg_mean_acc, fp_acc])
        elif loss == 'log_loss':
            self.model.compile(optimizer = optimizer_v, loss = log_loss,
                metrics=[ns_binary_acc, avg_mean_acc, fp_acc])
        elif loss == 'm_log_acc':
            self.model.compile(optimizer = optimizer_v, loss = m_log_acc,
                metrics=[ns_binary_acc, avg_mean_acc, fp_acc])
        else:
            self.model.compile(optimizer = optimizer_v, loss = loss,
                metrics=[ns_binary_acc, avg_mean_acc, fp_acc])

    def fit(self, x_train_rnn, x_train_fc, y_train, x_valid_rnn = None,
            x_valid_fc = None, y_valid = None, epochs=50, batch_size=64,
            patience = 5, verbosity = 0):
        """Train on in-memory arrays with early stopping and checkpoints.

        Returns the Keras History object.
        """
        # define callbacks
        self.callbacks = [EarlyStopping(monitor='val_loss', patience=patience),
            ModelCheckpoint(filepath=self.path + self.model_name + '_{epoch:02d}_{val_average_mean_accuracy:.4f}.h5',
                monitor='val_loss', save_best_only=False)]
        # NOTE(review): both validation_data and validation_split are
        # passed; Keras uses validation_data when provided -- confirm the
        # intent when the *_valid arrays are left as None.
        self.history = self.model.fit({'tracks_input': x_train_rnn, 'session_input': x_train_fc},
            {'output': y_train},
            validation_data=({'tracks_input': x_valid_rnn, 'session_input': x_valid_fc},
                {'output': y_valid}),
            epochs=epochs, batch_size=batch_size, callbacks = self.callbacks, verbose = verbosity,
            validation_split = 0.2)
        n_epochs = len(self.history.history['loss'])
        print('Model trained for %u epochs' % n_epochs)
        return self.history

    def fit_generator(self, path, epochs=50, batch_size=64, patience = 5,
            steps_per_epoch = 500, validation_steps = 100, verbosity = 0,
            iterations_per_file = 50):
        """Train from the file-backed generators; the held-out validation
        file is chosen at random. Returns the Keras History object."""
        # define callbacks
        self.callbacks = [EarlyStopping(monitor='val_loss', patience=patience),
            ModelCheckpoint(filepath=self.path + self.model_name + '_{epoch:02d}_{val_average_mean_accuracy:.4f}.h5',
                monitor='val_loss', save_best_only=False)]
        n_files = len(glob.glob(path + "/training_set_preproc/log_*.csv"))
        n = random.randint(0,n_files - 1)
        self.history = self.model.fit_generator(
            self.generate_train_data(path, batch_size = batch_size,
                validation_nr = n, iterations_per_file = iterations_per_file),
            validation_data =
            self.generate_valid_data(path, batch_size = batch_size, validation_nr = n),
            epochs = epochs, callbacks = self.callbacks, verbose = verbosity,
            steps_per_epoch = steps_per_epoch, validation_steps = validation_steps)
        n_epochs = len(self.history.history['loss'])
        print('Model trained for %u epochs' % n_epochs)
        return self.history

    def evaluate(self, x_rnn, x_fc, y, verbosity=0):
        """Evaluate on in-memory arrays and print each metric as a percent."""
        eval = self.model.evaluate({'tracks_input': x_rnn, 'session_input': x_fc},
            {'output': y}, verbose=verbosity)
        # Index 0 is the loss; metrics start at 1.
        for i in range(1,len(eval)):
            print("%s: %.2f%%" % (self.model.metrics_names[i], (eval[i]*100)))
        return eval

    def predict(self, x_rnn, x_fc, verbosity=0, write_to_file = False,
            overwrite = True, path = None):
        """Predict skip probabilities; optionally write a submission file."""
        y_pred = self.model.predict({'tracks_input': x_rnn, 'session_input': x_fc},
            verbose=verbosity)
        if write_to_file:
            # Recover session length from session feature 0.
            # NOTE(review): assumes feature 0 encodes (length - 10) / 10 --
            # confirm against the preprocessing code.
            session_length = x_fc[:,0] * 10 + 10
            if path == None:
                path = '../../data/submissions'
            path = path + '/' + self.model_name + '_' + self.now + '.txt'
            utils.save_submission(y_pred,session_length, path, overwrite = overwrite)

    def save_model(self):
        """Save the model (weights + optimizer state) to the model folder."""
        path = self.path + self.model_name + '_' + self.now + '.h5'
        save_model(self.model,path,overwrite=True,include_optimizer=True)

    def load_model(self, name):
        """Load a saved .h5 model, registering the custom losses/metrics."""
        path = self.path + name + '.h5'
        self.model = load_model(path,
            custom_objects={'selective_hinge': s_hinge,
                'mean_hinge_accuracy' : m_hinge_acc,
                'logistic_loss' : log_loss,
                'mean_logistic_loss_accuracy' : m_log_acc,
                'normed_selective_binary_accuracy' : ns_binary_acc,
                'average_mean_accuracy' : avg_mean_acc,
                'first_prediction_accuracy' : fp_acc})

    def plot_model(self):
        """Render the architecture diagram to a PNG in the model folder."""
        path = self.path + self.model_name + '_' + self.now + '_architecture.png'
        plot_model(self.model, to_file=path)

    def print_summary(self):
        print(self.model.summary())

    def plot_training(self):
        """Save per-metric train/valid curves and the loss curve as PNGs."""
        # Extract the metrics from the history
        keys = self.history.history.keys()
        for key in keys:
            # Each 'val_<metric>' key pairs with a '<metric>' key.
            if key.find('val_') > -1 and key.find('val_loss') == -1:
                # Plot training & validation metric values
                plt.plot(self.history.history[key[4:]])
                plt.plot(self.history.history[key])
                plt.title('Model ' + key[4:])
                plt.ylabel(key[4:])
                plt.xlabel('Epoch')
                plt.legend(['Train', 'Valid'], loc='upper left')
                plot_name = (self.path + self.model_name + '_'
                    + self.now + '_' + key[4:] + '.png')
                plt.savefig(plot_name, bbox_inches='tight')
                plt.clf()
        # Plot training & validation loss values
        plt.plot(self.history.history['loss'])
        plt.plot(self.history.history['val_loss'])
        plt.title('Model loss')
        plt.ylabel('Loss')
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Valid'], loc='upper left')
        plot_name = (self.path + self.model_name + '_'
            + self.now + '_loss.png')
        plt.savefig(plot_name, bbox_inches='tight')
# Hybrid Models which treat the session and the track information in parallel before merging
class Hybrid(Model):
    """Two-branch model: an LSTM stack over the track sequence in parallel
    with a dense stack over the session features, merged before a final
    dense head that emits the 20 skip predictions."""

    def __init__(self, model_name = 'hybrid'):
        self.model_name = model_name
        self.set_shape()
        self.create_folder()
        self.set_timestamp()

    def build_model(self,
            rnn_layer_sizes = np.array([20, 20, 20]),
            dense_layer_parallel_sizes = np.array([10, 20]),
            dense_layer_sequential_sizes = np.array([32, 20]),
            dropout_prob_rnn = 0.3,
            dropout_prob_dense = 0.3,
            lambda_reg_rnn = 0.001,
            lambda_reg_dense = 0.001,
            merge = 'multiply'):
        """Assemble ``self.model``.

        *merge* selects how the branches are combined: 'multiply', 'add',
        'concatenate' or 'maximum'. For all but 'multiply' the dense
        branch output is repeated across the 20 time steps first.
        """
        self.rnn_layer_sizes = rnn_layer_sizes
        self.dense_layer_parallel_sizes = dense_layer_parallel_sizes
        self.dense_layer_sequential_sizes = dense_layer_sequential_sizes
        self.dropout_prob_rnn = dropout_prob_rnn
        self.dropout_prob_dense = dropout_prob_dense
        self.lambda_reg_rnn = lambda_reg_rnn
        self.lambda_reg_dense = lambda_reg_dense
        self.merge = merge
        # Element-wise merges require matching final widths.
        if dense_layer_parallel_sizes[-1] != rnn_layer_sizes[-1]:
            print('Dimensions of last layers of RNN and of parallel dense network must agree!')
            return
        # define inputs
        tracks_input = Input(self.input_shape_tracks , dtype='float32', name='tracks_input')
        session_input = Input(self.input_shape_sessions , dtype='float32', name='session_input')
        # RNN side
        x_rnn = LSTM(self.rnn_layer_sizes[0],return_sequences=True,
            kernel_regularizer=l2(self.lambda_reg_rnn))(tracks_input)
        for i in range(1, self.rnn_layer_sizes.size):
            x_rnn = LSTM(self.rnn_layer_sizes[i],return_sequences=True,
                kernel_regularizer=l2(self.lambda_reg_rnn))(x_rnn)
        x_rnn = BatchNormalization()(x_rnn)
        out_rnn = Dropout(self.dropout_prob_rnn)(x_rnn)
        # dense side
        x_fc = Dense(self.dense_layer_parallel_sizes[0], activation='relu',
            kernel_regularizer=l2(self.lambda_reg_dense))(session_input)
        for i in range(1, self.dense_layer_parallel_sizes.size):
            x_fc = Dense(self.dense_layer_parallel_sizes[i], activation='relu',
                kernel_regularizer=l2(self.lambda_reg_dense))(x_fc)
        x_fc = BatchNormalization()(x_fc)
        out_fc = Dropout(self.dropout_prob_dense)(x_fc)
        x = []
        # merge RNN and dense side
        if self.merge == 'multiply':
            x = multiply([out_rnn, out_fc])
        elif self.merge == 'add':
            out_fc = RepeatVector(20)(out_fc)
            x = add([out_rnn, out_fc])
        elif self.merge == 'concatenate':
            out_fc = RepeatVector(20)(out_fc)
            x = concatenate([out_rnn, out_fc], axis = -1)
        elif self.merge == 'maximum':
            out_fc = RepeatVector(20)(out_fc)
            x = maximum([out_rnn, out_fc])
        else:
            print('Choose proper merge variation: multiply, add, concatenate or maximum')
        # sequential dense head; last layer is linear (hinge-style losses)
        for i in range(self.dense_layer_sequential_sizes.size - 1):
            x = Dense(self.dense_layer_sequential_sizes[i], activation='relu',
                kernel_regularizer=l2(self.lambda_reg_dense))(x)
        x = Dense(self.dense_layer_sequential_sizes[-1], activation='linear',
            kernel_regularizer=l2(self.lambda_reg_dense))(x)
        output = Reshape(self.output_shape, name = 'output')(x)
        # create model and compile it
        self.model = K_Model(inputs=[tracks_input, session_input], outputs=[output])
# Single RNN models which merge all the information before processing it
class Single_RNN_Full(Model):
    """Single-branch model: session features are repeated across the 20
    time steps and concatenated with the track features before one LSTM
    stack processes everything."""

    def __init__(self, model_name = 'single_rnn_full'):
        self.model_name = model_name
        self.set_shape()
        self.create_folder()
        self.set_timestamp()

    def build_model(self,
            rnn_layer_sizes = np.array([20, 20, 20]),
            dense_layer_sequential_sizes = np.array([32, 20]),
            dropout_prob_rnn = 0.3,
            dropout_prob_dense = 0.3,
            lambda_reg_rnn = 0.001,
            lambda_reg_dense = 0.001,
            multiple_concatenate = False):
        """Assemble ``self.model``. With *multiple_concatenate* the session
        features are re-concatenated before every LSTM layer instead of
        only once at the input."""
        self.rnn_layer_sizes = rnn_layer_sizes
        self.dense_layer_sequential_sizes = dense_layer_sequential_sizes
        self.dropout_prob_rnn = dropout_prob_rnn
        self.dropout_prob_dense = dropout_prob_dense
        self.lambda_reg_rnn = lambda_reg_rnn
        self.lambda_reg_dense = lambda_reg_dense
        self.multiple_concatenate = multiple_concatenate
        # define inputs
        tracks_input = Input(self.input_shape_tracks , dtype='float32', name='tracks_input')
        session_input = Input(self.input_shape_sessions , dtype='float32', name='session_input')
        # Concatenate sessions and tracks
        session_rep = RepeatVector(20)(session_input)
        x_input = concatenate([tracks_input, session_rep], axis = -1)
        # RNN part
        x = LSTM(self.rnn_layer_sizes[0],return_sequences=True,
            kernel_regularizer=l2(self.lambda_reg_rnn))(x_input)
        if self.multiple_concatenate:
            for i in range(1, self.rnn_layer_sizes.size):
                x = concatenate([x, session_rep], axis = -1)
                x = LSTM(self.rnn_layer_sizes[i],return_sequences=True,
                    kernel_regularizer=l2(self.lambda_reg_rnn))(x)
        else:
            for i in range(1, self.rnn_layer_sizes.size):
                x = LSTM(self.rnn_layer_sizes[i],return_sequences=True,
                    kernel_regularizer=l2(self.lambda_reg_rnn))(x)
        x = BatchNormalization()(x)
        x = Dropout(self.dropout_prob_rnn)(x)
        # dense head; last layer is linear (hinge-style losses)
        for i in range(self.dense_layer_sequential_sizes.size - 1):
            x = Dense(self.dense_layer_sequential_sizes[i], activation='relu',
                kernel_regularizer=l2(self.lambda_reg_dense))(x)
        x = Dense(self.dense_layer_sequential_sizes[-1], activation='linear',
            kernel_regularizer=l2(self.lambda_reg_dense))(x)
        output = Reshape(self.output_shape, name = 'output')(x)
        # create model
        self.model = K_Model(inputs=[tracks_input, session_input], outputs=[output])
| christiansprecher/sequential_skip_prediction_challenge | code/neural_nets/models.py | models.py | py | 16,331 | python | en | code | 2 | github-code | 13 |
class Solution:
    """House Robber: maximize the sum of non-adjacent house values.

    Three equivalent solvers are provided: memoized top-down recursion,
    plain (exponential) recursion, and bottom-up tabulation.
    """

    def robTopDownDP(self, hval):
        """Memoized recursion; O(n) time, O(n) space."""
        # cache[i] holds the best haul for houses 0..i (-1 = not computed).
        cache = [-1] * (len(hval) + 1)

        def best(values, i):
            if i < 0:
                return 0
            if cache[i] >= 0:
                return cache[i]
            # Either rob house i (and skip i-1) or skip house i entirely.
            cache[i] = max(best(values, i - 2) + values[i],
                           best(values, i - 1))
            return cache[i]

        return best(hval, len(hval) - 1)

    def robTopDown(self, hval):
        """Plain recursion, O(2^n) — reference implementation only."""
        def best(values, i):
            if i < 0:
                return 0
            return max(best(values, i - 2) + values[i],
                       best(values, i - 1))

        return best(hval, len(hval) - 1)

    def robBottomUp(self, hval):
        """Iterative tabulation; O(n) time, O(n) space."""
        count = len(hval)
        if count == 0:
            return 0
        if count == 1:
            return hval[0]
        if count == 2:
            return max(hval[0], hval[1])
        table = [0] * count
        table[0] = hval[0]
        table[1] = max(hval[0], hval[1])
        for i in range(2, count):
            table[i] = max(hval[i] + table[i - 2], table[i - 1])
        return table[-1]
class Solution_House_Rob_2:
    """House Robber II: houses are arranged in a circle, so the first and
    the last house are adjacent and cannot both be robbed."""

    def rob(self, a: list) -> int:
        """Return the maximum loot for circularly arranged house values *a*.

        Runs the linear house-robber DP twice — once excluding the last
        house, once excluding the first — and returns the better result.
        O(n) time, O(n) space.

        Fix: the original annotated *a* as ``List[int]`` without importing
        ``typing.List``, which raised NameError at class-definition time;
        the builtin ``list`` is used instead.
        """
        n = len(a)
        # Small inputs where the circular constraint is trivial.
        if n == 0:
            return 0
        if n == 1:
            return a[0]
        if n == 2:
            return max(a[0], a[1])
        if n == 3:
            return max(a[0], a[1], a[2])
        dp1, dp2 = [0] * n, [0] * n
        # dp1: best loot using houses 0 .. n-2 (last house excluded).
        dp1[0], dp1[1] = a[0], max(a[0], a[1])
        for i in range(2, n - 1):
            dp1[i] = max(dp1[i - 2] + a[i], dp1[i - 1])
        # dp2: best loot using houses 1 .. n-1 (first house excluded).
        dp2[1], dp2[2] = a[1], max(a[1], a[2])
        for i in range(3, n):
            dp2[i] = max(dp2[i - 2] + a[i], dp2[i - 1])
        return max(max(dp1), max(dp2))
| eriktoor/interview-practice | dp/houseRobber.py | houseRobber.py | py | 3,776 | python | en | code | 0 | github-code | 13 |
42340417219 | #用静态方法抓取静态网页
import scrapy
import pandas as pd
from selenium import webdriver
from lxml import etree
from scrapy.http.response.html import HtmlResponse
from scrapy.selector.unified import SelectorList
from zh.items import ZhItem
class EastmoneySpider(scrapy.Spider):
    """Static-page spider for the Eastmoney portfolio ranking board.

    Scrapes the five ranking tabs (1-day, 5-day, 20-day, 250-day, overall)
    from the landing page and writes each tab to its own Excel workbook.
    """
    name = 'Eastmoney'
    allowed_domains = ['group.eastmoney.com']
    start_urls = ["http://group.eastmoney.com"]

    def parse(self, response):
        # One 'data' node per ranking tab on the board.
        content = response.xpath("//div[@class='mod-center']//div[@class='mod-center-dom']//"
            "div[@class='mod-center-right']//"
            "div[@class='combin_rank list']//div[@class='info_ul']//"
            "div[@class='data']")
        # Per-tab accumulators: names, win rates, revenues.
        # NOTE(review): the local `re` shadows the usual regex module name.
        na = list()
        wi = list()
        re = list()
        for c in content:
            name = c.xpath(".//li[@class='w110']//a//text()").getall()
            win = c.xpath(".//li[@class='w70']//text()").getall()
            # The revenue column uses a different width class on some tabs,
            # so both selectors are tried (w80 first, then w70).
            revenue1 = c.xpath(".//li[@class='w80 red checked']//text()").getall()
            revenue2 = c.xpath(".//li[@class='w70 red checked']//text()").getall()
            url = c.xpath(".//li[@class='w110']//a//@href").getall()  # NOTE(review): collected but unused
            if len(revenue1) != 0:
                revenue = revenue1
            else:
                revenue = revenue2
            na.append(name)
            wi.append(win)
            re.append(revenue)
        # Generate the Excel workbooks (columns: name, win rate, revenue).
        df1 = {'姓名': na[0], '胜率': wi[0], '收益': re[0]}
        sku_df1 = pd.DataFrame(df1)
        sku_df1.to_excel('1日排行.xlsx', index=False)
        df2 = {'姓名': na[1], '胜率': wi[1], '收益': re[1]}
        sku_df2 = pd.DataFrame(df2)
        sku_df2.to_excel('5日排行.xlsx', index=False)
        df3 = {'姓名': na[2], '胜率': wi[2], '收益': re[2]}
        sku_df3 = pd.DataFrame(df3)
        sku_df3.to_excel('20日排行.xlsx', index=False)
        df4 = {'姓名': na[3], '胜率': wi[3], '收益': re[3]}
        sku_df4 = pd.DataFrame(df4)
        sku_df4.to_excel('250日排行.xlsx', index=False)
        df5 = {'姓名': na[4], '胜率': wi[4], '收益': re[4]}
        sku_df5 = pd.DataFrame(df5)
        sku_df5.to_excel('总排行.xlsx', index=False)
        pass

    def parse_item(self,response):
        # Placeholder item callback; yields an empty ZhItem.
        item = ZhItem()
        yield item
31941539040 | from typing import List
class Solution:
    def findDuplicate(self, nums: List[int]) -> int:
        """Return the duplicated value in *nums*.

        *nums* has n+1 integers drawn from 1..n, so exactly one value is
        repeated. Binary-search the value range: if more than ``pivot``
        elements are <= ``pivot``, the duplicate lies at or below it
        (pigeonhole). O(n log n) time, O(1) extra space, array untouched.
        """
        lo, hi = 1, len(nums) - 1
        while lo < hi:
            pivot = (lo + hi) // 2
            not_above = sum(1 for value in nums if value <= pivot)
            if not_above > pivot:
                hi = pivot
            else:
                lo = pivot + 1
        return lo
if __name__ == "__main__":
    # Ad-hoc smoke test: expected output is 2, then 3.
    solu = Solution()
    print(solu.findDuplicate([1, 3, 4, 2, 2]))
    print(solu.findDuplicate([3, 1, 3, 4, 2]))
| wylu/leetcodecn | src/python/explore/binarysearch/exercise/寻找重复数.py | 寻找重复数.py | py | 578 | python | en | code | 3 | github-code | 13 |
9642579408 | from setuptools import find_packages, setup
package_name = 'hi_connect'

# ROS 2 (ament_python) package manifest.
setup(
    name=package_name,
    version='0.0.0',
    packages=find_packages(exclude=['test']),
    data_files=[
        # Register the package with the ament resource index.
        ('share/ament_index/resource_index/packages',
            ['resource/' + package_name]),
        ('share/' + package_name, ['package.xml']),
    ],
    install_requires=['setuptools'],
    zip_safe=True,
    maintainer='hirobon',
    maintainer_email='hirobon1690@gmail.com',
    description='TODO: Package description',
    license='TODO: License declaration',
    tests_require=['pytest'],
    entry_points={
        # Executables runnable via `ros2 run hi_connect <name>`.
        'console_scripts': [
            'connect = hi_connect.connect:main',
            'connect_sim = hi_connect.connect_sim:main',
            'connect_seiton = hi_connect.connect_seiton:main',
            'connect_em = hi_connect.connect_em:main',
        ],
    },
)
| KeioRoboticsAssociation/hi_connect | setup.py | setup.py | py | 880 | python | en | code | 1 | github-code | 13 |
70457620177 | import requests
import os.path
import cv2
import numpy as np
def get(year=2021, month=11, day=6, hour=10, minute=0):
    """Return the local path of the Kachelmannwetter analysis image for the
    given timestamp, downloading it into ``images/`` unless a cached copy
    already exists.

    Fixes: the original wrote the download via ``open(...).write(...)``
    without ever closing the file handle; a ``with`` block now guarantees
    the handle is closed.
    """
    url = "https://img1.kachelmannwetter.com/images/data/cache/wwanalyze/wwanalyze_%04d_%02d_%02d_262_%02d%02d.png" % (year, month, day, hour, minute)
    # Plain requests are rejected, so mimic a browser user agent.
    headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'}
    image_path = 'images/' + os.path.basename(url)
    if os.path.exists(image_path):
        print('File exists')
    else:
        print('download file from ' + url)
        response = requests.get(url, headers=headers)
        with open(image_path, "wb") as fh:
            fh.write(response.content)
    return image_path
def check(image_path):
    """Inspect a 100x100 crop of the downloaded analysis image.

    If the crop is mostly blue the image is treated as uninformative and
    the file is deleted; otherwise the yellow fraction (the signal of
    interest) is reported.
    """
    img = cv2.imread(image_path)
    # Crop window: top-left (x, y), size w x h; a = pixel count.
    x = 200
    y = 170
    w = 100
    h = 100
    a = ( w * h )
    crop_img = img[y:y+h, x:x+w]
    # Color thresholds. NOTE(review): cv2.imread yields BGR, so these
    # triples are BGR ranges, not HSV — confirm the channel order matches
    # the colors the names suggest.
    YELLOW_MIN = np.array([40, 235, 235], np.uint8)
    YELLOW_MAX = np.array([60, 255, 255], np.uint8)
    BLUE_MIN = np.array([190, 70, 0], np.uint8)
    BLUE_MAX = np.array([220, 120, 40], np.uint8)
    dst = cv2.inRange(crop_img, YELLOW_MIN, YELLOW_MAX)
    yellow = cv2.countNonZero(dst)
    dst = cv2.inRange(crop_img, BLUE_MIN, BLUE_MAX)
    blue = cv2.countNonZero(dst)
    # More than half blue: discard the image entirely.
    if( blue/ a > 0.5 ):
        print('Percentage of blue is: ' + str(blue/a*100) + '%. Delete.')
        os.remove(image_path)
        return
    print('Percentage of yellow is: ' + str(yellow/a*100) + '%')
    #show(crop_img)
def show(img):
    """Debug helper: display *img* in an OpenCV window until a key is pressed."""
    cv2.namedWindow("opencv")
    cv2.imshow("opencv",img)
    cv2.waitKey(0)
    # NOTE(review): bare `exit` is a no-op (the builtin is referenced but
    # never called) — probably meant exit() or cv2.destroyAllWindows().
    exit
| codeispoetry/sea_of_fog | kachelmann.py | kachelmann.py | py | 1,591 | python | en | code | 0 | github-code | 13 |
9333213375 | import json
import re
import pandas as pd
import plotly.offline as py
import plotly.graph_objs as go
import matplotlib.pyplot as plt
from wordcloud import WordCloud
from Tweet import Tweet
def parse_json():
    """Load the EGC tweet dump and return a list of Tweet objects
    (text, favorite count, retweet count, parsed creation time)."""
    list_all_tweet = []
    with open('../resources/EGC_tweets.json', 'r', encoding="utf8") as f:
        tweet_dict = json.load(f)
    for t in tweet_dict:
        text = t['text']
        fav = t['favorite_count']
        rt = t['retweet_count']
        # Drop the fixed '+0000' offset so pandas can parse the ctime format.
        date_tweet = str(t['created_at']).replace('+0000', '')
        date_tweet = pd.to_datetime(date_tweet, format='%c')
        tweet = Tweet(text,fav,rt,date_tweet)
        list_all_tweet.append(tweet)
    return list_all_tweet
def parse_json_no_RT():
    """Like parse_json, but skip retweets (payloads carrying a
    'retweeted_status' key) and return only original tweets."""
    list_all_tweet = []
    with open('../resources/EGC_tweets.json', 'r', encoding="utf8") as f:
        tweet_dict = json.load(f)
    for t in tweet_dict:
        if 'retweeted_status' not in t:
            text = t['text']
            fav = t['favorite_count']
            rt = t['retweet_count']
            # Drop the fixed '+0000' offset so pandas can parse the ctime format.
            date_tweet = str(t['created_at']).replace('+0000', '')
            date_tweet = pd.to_datetime(date_tweet, format='%c')
            tweet = Tweet(text,fav,rt,date_tweet)
            list_all_tweet.append(tweet)
    return list_all_tweet
def get_RT_Tweets():
    """Return the number of retweets in the EGC tweet dump.

    A tweet is a retweet when its API payload carries a
    'retweeted_status' key. The original built a throwaway list of tweet
    texts just to take its length; this counts directly instead.
    """
    with open('../resources/EGC_tweets.json', 'r', encoding="utf8") as f:
        tweet_dict = json.load(f)
    return sum(1 for t in tweet_dict if 'retweeted_status' in t)
def get_most_rt_fav_tweets(listTweets):
    """Write the most-retweeted and most-liked tweets to a report file.

    Each record in *listTweets* is indexable: index 1 is the favorite
    count and index 2 the retweet count.

    Fixes: the original sorted the whole list twice just to take the
    first element (max suffices), and wrote both report lines without a
    separator so they ran together on one line.
    """
    most_rt = max(listTweets, key=lambda tweet: tweet[2])
    most_fav = max(listTweets, key=lambda tweet: tweet[1])
    with open('../out/most_rt_and_liked.txt', 'w') as tweet_file:
        tweet_file.write('Most rt tweet : ' + str(most_rt) + '\n')
        tweet_file.write('Most fav tweet : ' + str(most_fav) + '\n')
def get_Most_Quoted_Person():
    """Bar-chart the 20 most-mentioned users in original tweets (not
    replies, not retweets) and save it to ../out/top_quoted_user.png."""
    listmentions = []
    list_user = []
    #myre = re.compile('тЭД|ЁЯез')
    with open('../resources/EGC_tweets.json', 'r', encoding="utf8") as f:
        tweet_dict = json.load(f)
    for t in tweet_dict:
        # Keep only original tweets that mention at least one user.
        if t['in_reply_to_status_id'] is None and t['entities']['user_mentions'] and 'retweeted_status' not in t:
            listmentions.append(t['entities']['user_mentions'])
            #print(t)
    for userm in listmentions:
        for e in userm:
            #word = myre.sub('', str(e))
            #word = e['name'].encode('ascii', 'ignore').decode('ascii')
            list_user.append(str(e['name']))
    unique_words = set(list_user)  # NOTE(review): computed but never used
    pd.Series(list_user).value_counts().head(20).plot.bar(
        figsize=(14, 7), fontsize=16, color='lightcoral'
    )
    plt.gca().set_title(' top quoted user', fontsize=20)
    plt.gca().set_xticklabels(plt.gca().get_xticklabels(), rotation=45, ha='right', fontsize=16)
    #plt.show()
    plt.savefig('../out/top_quoted_user.png')
def get_Most_RT_Person():
    """Bar-chart the 20 most-retweeted users (excluding the conference
    account itself) and save it to ../out/top_rt_user.png."""
    listmentions = []
    list_user = []
    with open('../resources/EGC_tweets.json', 'r', encoding="utf8") as f:
        tweet_dict = json.load(f)
    for t in tweet_dict:
        # Keep only retweets (payloads with 'retweeted_status') that
        # mention at least one user and are not replies.
        if t['in_reply_to_status_id'] is None and t['entities']['user_mentions'] and 'retweeted_status' in t:
            listmentions.append(t['entities']['user_mentions'])
    for userm in listmentions:
        for e in userm:
            # The conference's own account would dominate the ranking.
            if e['name'] != "Association EGC":
                #print(e['name'])
                list_user.append(str(e['name']))
    unique_words = set(list_user)  # NOTE(review): computed but never used
    pd.Series(list_user).value_counts().head(20).plot.bar(
        figsize=(14, 7), fontsize=16, color='lightcoral'
    )
    plt.gca().set_title(' top retweeted user', fontsize=20)
    plt.gca().set_xticklabels(plt.gca().get_xticklabels(), rotation=45, ha='right', fontsize=16)
    #plt.show()
    plt.savefig('../out/top_rt_user.png')
def get_tweet_activity(listTweets):
    """Render a plotly histogram of tweet timestamps over time.

    Each record's index 3 is its creation datetime; the resulting figure
    is opened via plotly's offline plotter.
    """
    #print(listTweets)
    list_time = []
    for t in listTweets:
        list_time.append(t[3])
    trace = go.Histogram(
        x=list_time,
        marker=dict(
            color='blue'
        ),
        opacity=0.75
    )
    layout = go.Layout(
        title='Tweet Activity',
        height=450,
        width=1200,
        xaxis=dict(
            title='Month and year'
        ),
        yaxis=dict(
            title='Tweet Quantity'
        ),
        bargap=0.2,
    )
    data = [trace]
    fig = go.Figure(data=data, layout=layout)
    py.offline.plot(fig)
35962564491 | import os
import sys
import joblib
import numpy as np
import torch
from copy import deepcopy
import gym
from .wrappers import ObjectiveWrapper, ObservationDictWrapper, WarpFrame, get_objective_wrapper
from .crafter_wrappers import CrafterRenderWrapper, ImageToPyTorch, CrafterMonitorWrapper
CRAFTER_KWARGS = dict(
size=(84, 84),
render_centering=False,
health_reward_coef=0.0,
immortal=True,
idle_death=100
)
CRAFTER_ORIGINAL_KWARGS = dict(
size=(84, 84),
render_centering=False,
vanila=True
)
MINIGRID_KWARGS = dict(
default=dict(achievement_reward=True),
keycorridor=dict(room_size=5, num_rows=3),
distractions_hard=dict(room_size=5, num_rows=9, num_nodes=15),
distractions=dict(room_size=5, num_rows=5, num_nodes=8),
distractions_easy=dict(room_size=5, num_rows=2, num_nodes=4, max_steps=300)
)
ENV_KWARGS = dict(
crafter=CRAFTER_KWARGS,
minigrid=MINIGRID_KWARGS,
)
def get_env(flags):
    """Return the base environment family from ``flags.env`` — the part
    before the first dash (e.g. 'crafter-limited' -> 'crafter')."""
    base, _, _ = flags.env.partition('-')
    return base
def make_env(env_name, kwargs={}, flags=None, dummy_env=False):
    """Construct a wrapped crafter or minigrid environment.

    *env_name* is '<base>' or '<base>-<variant>' (e.g.
    'minigrid-keycorridor'). Settings are resolved from *kwargs* first
    (popped), then *flags*, then per-key defaults.
    NOTE(review): the mutable default ``kwargs={}`` is deep-copied on
    entry, so the shared-default pitfall is avoided; *dummy_env* is not
    used anywhere in this body.
    """
    kwargs = deepcopy(kwargs)
    env_names = env_name.split('-')
    base_env = env_names[0]
    is_crafter = base_env == 'crafter'
    is_minigrid = base_env == 'minigrid'
    env = None
    # Resolve the environment class lazily so the unused family's package
    # need not be installed.
    if is_crafter:
        from crafter.env import Env as CrafterEnv
        env_cls = CrafterEnv
    elif is_minigrid:
        import gym_minigrid.envs
        from gym_minigrid.wrappers import RGBImgPartialObsWrapper, ImgObsWrapper
        MINIGRID_ENVS = dict(
            keycorridor=gym_minigrid.envs.KeyCorridor,
            blockedunlockpickup=gym_minigrid.envs.BlockedUnlockPickup,
            distractions=gym_minigrid.envs.Distractions
        )
        env_cls = MINIGRID_ENVS[env_names[1]]
    else:
        raise NotImplementedError(f'Unrecognized env: {base_env}')

    def get_key(key, default=None):
        # Resolution order: explicit kwargs (popped) > flags > default.
        if key in kwargs:
            return kwargs.pop(key)
        elif flags is not None:
            return flags.get(key, default)
        else:
            return default

    env_id = get_key("env_id")
    crafter_monitor = get_key("use_crafter_monitor", False)
    # Base keyword arguments for the chosen family/variant.
    if len(env_names) == 1:
        env_kwargs = deepcopy(ENV_KWARGS[base_env])
    else:
        env_kwargs = deepcopy(ENV_KWARGS[base_env].get('default', {}))
        if is_minigrid and env_names[1] == 'distractions':
            # Difficulty flags select among the distraction presets.
            if get_key("distractions_hard", False):
                env_kwargs.update(ENV_KWARGS[base_env].get("distractions_hard", {}))
            elif get_key("distractions_easy", False):
                env_kwargs.update(ENV_KWARGS[base_env].get("distractions_easy", {}))
        else:
            env_kwargs.update(ENV_KWARGS[base_env].get(env_names[1], {}))
    if is_crafter:
        # 'crafter_original' replaces (not merges) the kwargs wholesale.
        if get_key("crafter_original", False):
            env_kwargs = deepcopy(CRAFTER_ORIGINAL_KWARGS)
        if get_key("crafter_limited", False):
            env_kwargs["partial_achievements"] = "limited"
            env_kwargs["idle_death"] = 500
    # Objective-selection configuration.
    num_objectives = get_key("num_objectives")
    objective_selection_algo = get_key("objective_selection_algo")
    causal_graph_load_path = get_key("causal_graph_load_path")
    include_new_tasks = get_key("include_new_tasks", True)
    done_if_reward = get_key("done_if_reward", False)
    graph_no_jumping = get_key("graph_no_jumping", False)
    # Whatever remains in kwargs is forwarded to the env constructor.
    env_kwargs.update(kwargs)
    if env is None:
        env = env_cls(**env_kwargs)
    if is_crafter:
        env = CrafterRenderWrapper(env)
        if crafter_monitor and env_id is not None:
            save_dir = get_key("savedir") + "/crafter_monitor"
            os.makedirs(save_dir, exist_ok=True)
            env = CrafterMonitorWrapper(env, env_id, save_dir, save_freq=30)
    if is_minigrid:
        env = RGBImgPartialObsWrapper(env)
        env = ImgObsWrapper(env)
        env = WarpFrame(env, grayscale=False)
    if is_crafter or is_minigrid:
        # Channel-first frames packed into an observation dict under 'frame'.
        env = ImageToPyTorch(env)
        env = ObservationDictWrapper(env, 'frame')
    if num_objectives is not None:
        # Choose how sub-objectives are sampled during training: uniformly
        # at random, or guided by a precomputed causal graph.
        if objective_selection_algo == 'random':
            selection = ('random', {})
        elif causal_graph_load_path is not None:
            graph = joblib.load(causal_graph_load_path)
            selection = ('graph', {'graph': graph, 'no_jumping': graph_no_jumping})
        else:
            selection = ('random', {})
        env = get_objective_wrapper(selection[0], selection[1], env, num_objectives, include_new_tasks=include_new_tasks, done_if_reward=done_if_reward)
    return env
| pairlab/iclr-23-sea | torchbeast/env/env.py | env.py | py | 4,535 | python | en | code | 4 | github-code | 13 |
class Ass2:
    """Wrap a sentence and expose a word-order reversal."""

    def __init__(self, string):
        self.s = string

    def reverse(self):
        """Return the sentence with its words in reverse order."""
        return " ".join(reversed(self.s.split()))


a = Ass2("I love my india")
print(a.reverse())
| harshtheking100/Python | OOP/Ass2.py | Ass2.py | py | 251 | python | en | code | 0 | github-code | 13 |
15190389790 | #!/usr/bin/env python3.4
# coding: latin-1
# (c) Massachusetts Institute of Technology 2015-2018
# (c) Brian Teague 2018-2019
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Color Translation
-----------------
Translate measurements from one color's scale to another, using a two-color
or three-color control.
To use, set up the **Controls** list with the channels to convert and the FCS
files to compute the mapping. Click **Estimate** and make sure to check that
the diagnostic plots look good.
.. object:: Add Control, Remove Control
Add and remove controls to compute the channel mappings.
.. object:: Use mixture model?
If ``True``, try to model the **from** channel as a mixture of expressing
cells and non-expressing cells (as you would get with a transient
transfection), then weight the regression by the probability that the
the cell is from the top (transfected) distribution. Make sure you
check the diagnostic plots to see that this worked!
.. note::
You cannot have any operations before this one which estimate model
parameters based on experimental conditions. (Eg, you can't use a
**Density Gate** to choose morphological parameters and set *by* to an
experimental condition.) If you need this functionality, you can access it
using the Python module interface.
.. plot::
import cytoflow as flow
import_op = flow.ImportOp()
import_op.tubes = [flow.Tube(file = "tasbe/mkate.fcs")]
ex = import_op.apply()
color_op = flow.ColorTranslationOp()
color_op.controls = {("Pacific Blue-A", "FITC-A") : "tasbe/rby.fcs",
("PE-Tx-Red-YG-A", "FITC-A") : "tasbe/rby.fcs"}
color_op.mixture_model = True
color_op.estimate(ex)
color_op.default_view().plot(ex)
ex = color_op.apply(ex)
'''
import warnings
from traitsui.api import View, Item, EnumEditor, Controller, VGroup, \
ButtonEditor, HGroup, InstanceEditor
from envisage.api import Plugin, contributes_to
from traits.api import (provides, Callable, Tuple, List, Str, HasTraits,
File, Event, Dict, on_trait_change, Bool, Constant,
Property)
from pyface.api import ImageResource
import cytoflow.utility as util
from cytoflow.operations.color_translation import ColorTranslationOp, ColorTranslationDiagnostic
from cytoflow.views.i_selectionview import IView
from cytoflowgui.view_plugins.i_view_plugin import ViewHandlerMixin, PluginViewMixin
from cytoflowgui.op_plugins import IOperationPlugin, OpHandlerMixin, OP_PLUGIN_EXT, shared_op_traits
from cytoflowgui.subset import ISubset, SubsetListEditor
from cytoflowgui.color_text_editor import ColorTextEditor
from cytoflowgui.op_plugins.i_op_plugin import PluginOpMixin, PluginHelpMixin
from cytoflowgui.vertical_list_editor import VerticalListEditor
from cytoflowgui.workflow import Changed
from cytoflowgui.serialization import camel_registry, traits_repr, traits_str, dedent
# Use the traits-aware repr so get_notebook_code() can serialize the
# operation faithfully into notebook source.
ColorTranslationOp.__repr__ = traits_repr
class _Control(HasTraits):
    """One channel mapping: translate *from_channel* onto *to_channel*'s
    scale, estimated from the control FCS file *file*."""
    from_channel = Str  # channel to convert
    to_channel = Str    # channel whose scale is the target
    file = File         # path to the multi-color control FCS file
    def __repr__(self):
        return traits_repr(self)
class ColorTranslationHandler(OpHandlerMixin, Controller):
    """TraitsUI handler for the color-translation operation: wires the
    add/remove control buttons and builds the operation's edit views."""
    # Button events; traits notification dispatches to the *_fired methods.
    add_control = Event
    remove_control = Event
    # MAGIC: called when add_control is set
    def _add_control_fired(self):
        self.model.controls_list.append(_Control())
    # MAGIC: called when remove_control is set; drops the last control, if any.
    def _remove_control_fired(self):
        if self.model.controls_list:
            self.model.controls_list.pop()
    def control_traits_view(self):
        """View for a single _Control row: from/to channel pickers plus a
        file field.  Channel choices come from the previous workflow item."""
        return View(HGroup(Item('from_channel',
                                editor = EnumEditor(name = 'handler.context.previous_wi.channels')),
                           Item('to_channel',
                                editor = EnumEditor(name = 'handler.context.previous_wi.channels')),
                           Item('file',
                                show_label = False)),
                    handler = self)
    def default_traits_view(self):
        """Main operation view: editable controls list, mixture-model toggle,
        subset editor, and the Estimate! button."""
        return View(VGroup(Item('controls_list',
                                editor = VerticalListEditor(editor = InstanceEditor(view = self.control_traits_view()),
                                                            style = 'custom',
                                                            mutable = False),
                                style = 'custom'),
                    Item('handler.add_control',
                         editor = ButtonEditor(value = True,
                                               label = "Add a control")),
                    Item('handler.remove_control',
                         editor = ButtonEditor(value = True,
                                               label = "Remove a control")),
                    label = "Controls",
                    show_labels = False),
                    Item('mixture_model',
                         label = "Use mixture\nmodel?"),
                    VGroup(Item('subset_list',
                                show_label = False,
                                editor = SubsetListEditor(conditions = "context.previous_wi.conditions",
                                                          metadata = "context.previous_wi.metadata",
                                                          when = "'experiment' not in vars() or not experiment")),
                           label = "Subset",
                           show_border = False,
                           show_labels = False),
                    Item('do_estimate',
                         editor = ButtonEditor(value = True,
                                               label = "Estimate!"),
                         show_label = False),
                    shared_op_traits)
class ColorTranslationPluginOp(PluginOpMixin, ColorTranslationOp):
    """GUI wrapper around ColorTranslationOp.

    Replaces the operation's ``controls`` dict with an editable
    ``controls_list`` (rebuilt into ``controls`` at estimate time), adds
    subset-editor support, and fires workflow-change notifications when
    estimate parameters change.
    """
    handler_factory = Callable(ColorTranslationHandler)
    add_control = Event
    remove_control = Event
    # `controls` is derived from controls_list in estimate(); marked
    # transient so it is not serialized with the workflow.
    controls = Dict(Tuple(Str, Str), File, transient = True)
    controls_list = List(_Control, estimate = True)
    mixture_model = Bool(False, estimate = True)
    translation = Constant(None)
    @on_trait_change('controls_list_items, controls_list:+', post_init = True)
    def _controls_changed(self):
        # Any change to the list or to a control's traits invalidates the estimate.
        self.changed = (Changed.ESTIMATE, ('controls_list', self.controls_list))
    # bits to support the subset editor
    subset_list = List(ISubset, estimate = True)
    subset = Property(Str, depends_on = "subset_list.str")
    # MAGIC - returns the value of the "subset" Property, above
    def _get_subset(self):
        return " and ".join([subset.str for subset in self.subset_list if subset.str])
    @on_trait_change('subset_list.str')
    def _subset_changed(self, obj, name, old, new):
        self.changed = (Changed.ESTIMATE, ('subset_list', self.subset_list))
    def default_view(self, **kwargs):
        """Return the diagnostic view bound to this operation."""
        return ColorTranslationPluginView(op = self, **kwargs)
    def estimate(self, experiment):
        """Validate the controls, rebuild ``controls`` from controls_list and
        delegate to ColorTranslationOp.estimate.

        Always fires an ESTIMATE_RESULT change (even on failure) so the GUI
        refreshes.  Raises CytoflowOpError on duplicate from-channels or if
        any prior operation was estimated from an experimental condition.
        """
        # Reject the same from-channel appearing in two controls.
        for i, control_i in enumerate(self.controls_list):
            for j, control_j in enumerate(self.controls_list):
                if control_i.from_channel == control_j.from_channel and i != j:
                    raise util.CytoflowOpError("Channel {0} is included more than once"
                                               .format(control_i.from_channel))
        # check for experiment metadata used to estimate operations in the
        # history, and bail if we find any
        for op in experiment.history:
            if hasattr(op, 'by'):
                for by in op.by:
                    if 'experiment' in experiment.metadata[by]:
                        raise util.CytoflowOpError('experiment',
                                                   "Prior to applying this operation, "
                                                   "you must not apply any operation with 'by' "
                                                   "set to an experimental condition.")
        self.controls = {}
        for control in self.controls_list:
            self.controls[(control.from_channel, control.to_channel)] = control.file
        if not self.subset:
            warnings.warn("Are you sure you don't want to specify a subset "
                          "used to estimate the model?",
                          util.CytoflowOpWarning)
        try:
            ColorTranslationOp.estimate(self, experiment, subset = self.subset)
        except:
            raise
        finally:
            # Notify the GUI whether the estimate succeeded or raised.
            self.changed = (Changed.ESTIMATE_RESULT, self)
    def should_clear_estimate(self, changed, payload):
        """Clear the fitted state whenever an estimate parameter changed."""
        if changed == Changed.ESTIMATE:
            return True
        return False
    def clear_estimate(self):
        """Drop all fitted state and notify the GUI."""
        self._coefficients.clear()
        self._trans_fn.clear()
        self._sample.clear()
        self.changed = (Changed.ESTIMATE_RESULT, self)
    def get_notebook_code(self, idx):
        """Return Jupyter-notebook source that reproduces this operation as
        step *idx* of the workflow."""
        op = ColorTranslationOp()
        op.copy_traits(self, op.copyable_trait_names())
        for control in self.controls_list:
            op.controls[(control.from_channel, control.to_channel)] = control.file
        return dedent("""
        op_{idx} = {repr}
        op_{idx}.estimate(ex_{prev_idx}{subset})
        ex_{idx} = op_{idx}.apply(ex_{prev_idx})
        """
        .format(repr = repr(op),
                idx = idx,
                prev_idx = idx - 1,
                subset = ", subset = " + repr(self.subset) if self.subset else ""))
class ColorTranslationViewHandler(ViewHandlerMixin, Controller):
    """Handler for the diagnostic view: shows only the warning and error
    panes (yellow and red respectively), each visible when non-empty."""
    def default_traits_view(self):
        return View(Item('context.view_warning',
                         resizable = True,
                         visible_when = 'context.view_warning',
                         editor = ColorTextEditor(foreground_color = "#000000",
                                                  background_color = "#ffff99")),
                    Item('context.view_error',
                         resizable = True,
                         visible_when = 'context.view_error',
                         editor = ColorTextEditor(foreground_color = "#000000",
                                                  background_color = "#ff9191")))
@provides(IView)
class ColorTranslationPluginView(PluginViewMixin, ColorTranslationDiagnostic):
    """GUI wrapper for the color-translation diagnostic plot."""
    handler_factory = Callable(ColorTranslationViewHandler)
    def plot_wi(self, wi):
        # Plot against the *previous* workflow item's result: the diagnostic
        # shows the data the estimate was computed from.
        self.plot(wi.previous_wi.result)
    def should_plot(self, changed, payload):
        """Replot only when a new estimate result arrives."""
        if changed == Changed.ESTIMATE_RESULT:
            return True
        return False
    def get_notebook_code(self, idx):
        """Return notebook source that reproduces this diagnostic plot."""
        view = ColorTranslationDiagnostic()
        view.copy_traits(self, view.copyable_trait_names())
        view.subset = self.subset
        return dedent("""
        op_{idx}.default_view({traits}).plot(ex_{prev_idx})
        """
        .format(traits = traits_str(view),
                idx = idx,
                prev_idx = idx - 1))
@provides(IOperationPlugin)
class ColorTranslationPlugin(Plugin, PluginHelpMixin):
    """Envisage plugin that registers the color-translation operation with
    the GUI's operation menu (under "Gates")."""
    id = 'edu.mit.synbio.cytoflowgui.op_plugins.color_translation'
    operation_id = 'edu.mit.synbio.cytoflow.operations.color_translation'
    short_name = "Color Translation"
    menu_group = "Gates"
    def get_operation(self):
        """Return a fresh operation instance for a new workflow item."""
        return ColorTranslationPluginOp()
    def get_icon(self):
        return ImageResource('color_translation')
    @contributes_to(OP_PLUGIN_EXT)
    def get_plugin(self):
        return self
### Serialization
# camel_registry dumpers/loaders let the GUI save and reload workflows as
# YAML ("camel") documents.  Only the estimate parameters are persisted;
# the fitted state is recomputed after loading.
@camel_registry.dumper(ColorTranslationPluginOp, 'color-translation', version = 1)
def _dump(op):
    return dict(controls_list = op.controls_list,
                mixture_model = op.mixture_model,
                subset_list = op.subset_list)
@camel_registry.loader('color-translation', version = 1)
def _load(data, version):
    return ColorTranslationPluginOp(**data)
@camel_registry.dumper(_Control, 'color-translation-control', version = 1)
def _dump_control(c):
    return dict(from_channel = c.from_channel,
                to_channel = c.to_channel,
                file = c.file)
@camel_registry.loader('color-translation-control', version = 1)
def _load_control(data, version):
    return _Control(**data)
@camel_registry.dumper(ColorTranslationPluginView, 'color-translation-view', version = 1)
def _dump_view(view):
    return dict(op = view.op)
@camel_registry.loader('color-translation-view', version = 1)
def _load_view(data, ver):
    return ColorTranslationPluginView(**data)
| kkiwimagi/cytoflow-microscopy-kiwi | cytoflowgui/op_plugins/color_translation.py | color_translation.py | py | 13,411 | python | en | code | 0 | github-code | 13 |
17128026268 | from csv import DictReader
from actores.models import Organizacion, Institucion
import logging
consola = logging.getLogger("consola")
def run(file_path):
    """Load institutions from a comma-delimited CSV file.

    Each row is handed to procesarLinea(); rows are numbered from 1 so the
    log output matches spreadsheet line numbers.
    """
    # BUG FIX: the original used the Python-2-only `file()` builtin and never
    # closed the handle.  open() works on both Python 2 and 3, and the
    # context manager closes the file once every row has been processed.
    with open(file_path) as csv_file:
        reader = DictReader(csv_file, delimiter=',')
        for i, linea in enumerate(reader, 1):
            procesarLinea(i, linea)
def procesarLinea(num, linea):
    """Persist one CSV row as an Institucion linked to an Organizacion.

    Empty organisation cells fall back to the "Sin Datos" placeholder.
    Returns the saved Institucion instance.
    """
    consola.info("Procesando Linea "+str(num)+": "+str(linea))
    # Normalise every cell we use by stripping surrounding whitespace.
    campos = {clave: linea[clave].strip()
              for clave in ('institucion', 'organizacion', 'calle', 'numero',
                            'piso', 'depto', 'cp', 'localidad', 'provincia',
                            'lon', 'lat')}
    # An empty organisation column maps to the shared placeholder record.
    nombre_org = campos['organizacion'] if campos['organizacion'] != '' else "Sin Datos"
    organizacion, _ = Organizacion.objects.get_or_create(nombre=nombre_org)
    institucion, _ = Institucion.objects.get_or_create(
        nombre=campos['institucion'],
        ubicacion='POINT(%s %s)' % (campos['lon'], campos['lat']))
    # Copy the plain address fields straight onto the model instance.
    for atributo in ('calle', 'numero', 'piso', 'depto', 'cp',
                     'localidad', 'provincia'):
        setattr(institucion, atributo, campos[atributo])
    institucion.organizacion = organizacion
    institucion.save()
    return institucion
| martinfr/ara | ara/scripts/cargar_instituciones.py | cargar_instituciones.py | py | 1,653 | python | es | code | 0 | github-code | 13 |
31942625800 | from typing import List
# @lc code=start
class Solution:
    def closestCost(self, baseCosts: List[int], toppingCosts: List[int],
                    target: int) -> int:
        """Return the achievable dessert cost closest to *target*.

        One base is mandatory; each topping may be used 0, 1 or 2 times.
        Ties in distance are broken toward the lower cost.  Implemented as a
        depth-first search with pruning: once a partial cost overshoots the
        best distance so far, no extension can improve on it.
        """
        best = min(baseCosts)
        topping_count = len(toppingCosts)
        def explore(cost: int, index: int) -> None:
            nonlocal best
            # Cost only grows, so overshooting past the current best
            # distance means every extension is worse — stop here.
            if cost - target > abs(best - target):
                return
            distance = abs(cost - target)
            best_distance = abs(best - target)
            if distance < best_distance or (distance == best_distance and cost < best):
                best = cost
            if index == topping_count:
                return
            # Try 0, 1 or 2 copies of the current topping.
            for copies in range(3):
                explore(cost + toppingCosts[index] * copies, index + 1)
        for base_cost in baseCosts:
            explore(base_cost, 0)
        return best
# @lc code=end
if __name__ == '__main__':
    # Ad-hoc smoke test: the three LeetCode examples for problem 1774.
    solu = Solution()
    baseCosts = [1, 7]
    toppingCosts = [3, 4]
    target = 10
    print(solu.closestCost(baseCosts, toppingCosts, target))  # expected: 10
    baseCosts = [2, 3]
    toppingCosts = [4, 5, 100]
    target = 18
    print(solu.closestCost(baseCosts, toppingCosts, target))  # expected: 17
    baseCosts = [10]
    toppingCosts = [1]
    target = 1
    print(solu.closestCost(baseCosts, toppingCosts, target))  # expected: 10
| wylu/leetcodecn | src/python/p1700to1799/1774.最接近目标价格的甜点成本.py | 1774.最接近目标价格的甜点成本.py | py | 1,216 | python | zh | code | 3 | github-code | 13 |
14323098709 | from django.db import models
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from kpbt.accounts.models import BowlerProfile
from kpbt.centers.models import BowlingCenter
from kpbt.teams.models import Team, TeamRoster
from kpbt.games.models import Series
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.files import File
from kptracker.settings import SCHEDULEFILES_FOLDER as SCHEDULEDIR
from kptracker.settings import BACKUPS_FOLDER as BACKUPSDIR
from collections import deque
from itertools import islice
from dateutil import rrule
import datetime
import itertools
from num2words import num2words
import math, json
class League(models.Model):
    """A bowling league hosted at a bowling center.

    Tracks membership (teams/bowlers), weekly pairings, scoring, and JSON
    score backups that allow earlier weeks to be rescored.
    """
    bowling_center = models.ForeignKey('BowlingCenter', on_delete=models.SET_NULL, null=True,
                                       related_name='leagues', verbose_name=('bowling center'))
    bowlers = models.ManyToManyField('BowlerProfile', through='LeagueBowler')
    secretary = models.OneToOneField(User, on_delete=models.SET_NULL, null=True)
    name = models.CharField(max_length=32)
    # current_week: the week the league has advanced to.
    # week_pointer: the week currently being viewed/processed (may be
    # rewound for rescoring; advance_week() re-syncs it to current_week).
    current_week = models.PositiveSmallIntegerField(default=1)
    week_pointer = models.PositiveSmallIntegerField(default=1)
    def __str__(self):
        return self.bowling_center.name + ", " + self.name
    def set_center(self, center_name):
        """Attach this league to the center named *center_name* (404 if absent).
        Does not call save()."""
        center = get_object_or_404(BowlingCenter, name=center_name)
        self.bowling_center = center
    def set_name(self, name):
        # NOTE(review): unlike set_secretary(), this does not call save().
        self.name = name
    def set_secretary(self, user):
        self.secretary = user
        self.save()
    def create_pairings(self):
        """Create WeeklyPairings for every scheduled week from a canned CSV.

        The CSV (one file per team count, e.g. ``8teams.csv``) holds one
        line per week of comma-separated ``teamA-teamB`` pairs.  An odd team
        count is rounded up so the canned schedule includes a bye slot.
        """
        num_teams = self.leaguerules.num_teams
        num_weeks = self.schedule.num_weeks # // 2
        if num_teams % 2:
            num_teams += 1
        filename = str(num_teams) + 'teams'
        filedir = SCHEDULEDIR + filename + '.csv'
        # NOTE(review): `pairings` is never used after this point.
        pairings = [None] * num_weeks
        with open(filedir) as schedule:
            schedule.readline() #skip first line to allow week number to align with list index
            raw_weekly_pairings = schedule.readlines()
        week_number_counter=1
        for raw_pairings in raw_weekly_pairings:
            weekly_pairing_list = raw_pairings.strip('\n').split(',')
            pair_counter = 1
            for pair in weekly_pairing_list:
                teams = pair.split('-')
                team_one = Team.objects.get(league=self, number=teams[0])
                team_two = Team.objects.get(league=self, number=teams[1])
                # NOTE(review): pair_counter is an int, so math.ceil() here is a no-op.
                new_pairing = WeeklyPairings.objects.create(league=self, team_one=team_one, team_two=team_two, week_number=week_number_counter, lane_pair = math.ceil(pair_counter))
                new_pairing.save()
                pair_counter +=1
            week_number_counter += 1
    def rescore(self, rescore_week):
        """Re-score weeks *rescore_week* .. current_week-1 after restoring
        league state from the backup taken before *rescore_week*."""
        cw = self.current_week
        rules = self.leaguerules
        #1. Reload league from earlier backup to prepare for rescoring effort
        reset_week = rescore_week - 1
        # NOTE(review): leftover debug print; consider the logging module.
        print('reset week: ', reset_week)
        self.reset_weekly_from_backup(reset_week)
        #2. For ever week between reset_week and current week
        #   a. Reset series team points values to 0
        #   b. recalculate the applied average/handicap for that series
        #   c. Call score_league for that week after resetting
        for i in range (rescore_week, cw):
            series_data = Series.objects.filter(league=self, week_number=i)
            #Delete week's results records
            WeeklyResults.objects.filter(league=self, week_number=i).delete()
            for series in series_data:
                #Recalculate league_average and handicap values
                lb = get_object_or_404(LeagueBowler, league=self, bowler=series.bowler.id)
                if self.week_pointer == 1:
                    average = lb.league_average
                else:
                    average = lb.calc_average()
                if rules.is_handicap:
                    # Handicap is a percentage of the pins under scratch,
                    # clamped at zero for bowlers above scratch.
                    handicap = (rules.handicap_percentage / 100) * (rules.handicap_scratch - average)
                    if handicap < 0:
                        handicap = 0
                else:
                    handicap = 0
                series.applied_average = average
                series.applied_handicap = handicap
                series.save()
            self.score_week(i)
    def reset_weekly_from_backup(self, reset_week):
        """Restore Team, LeagueBowler and TeamRoster fields from the JSON
        backup written for week *reset_week* (see create_weekly_score_backup)."""
        backup_filename= str(self.id) + '_' + str(reset_week) + '.json'
        with open(BACKUPSDIR + backup_filename, 'r') as bkup:
            backup = json.load(bkup)
        # NOTE(review): create_weekly_score_backup keys the "teams" dict by
        # team.id, but this lookup filters on number=… — verify id and
        # number always agree, otherwise the wrong team is restored/404s.
        for teams in backup['teams'].items():
            team = get_object_or_404(Team, league=self, number=teams[0])
            for key,value in teams[1].items():
                setattr(team, key, value)
            team.save()
        for lb_records in backup['league_bowler_records'].items():
            lb_record = get_object_or_404(LeagueBowler, league=self, id=lb_records[0])
            for key, value in lb_records[1].items():
                setattr(lb_record, key, value)
            lb_record.save()
        for tr_records in backup['team_roster_records'].items():
            tr = get_object_or_404(TeamRoster, id=tr_records[0])
            for key, value in tr_records[1].items():
                setattr(tr, key, value)
            tr.save()
    def score_week(self, week_number):
        """Score every lane pairing for *week_number*.

        For each pairing: computes handicapped game and series totals,
        awards game/series points (ties split game points), writes a
        WeeklyResults row per team, and folds the scores into Team,
        LeagueBowler and TeamRoster aggregates.
        """
        this_week = WeeklyPairings.objects.filter(league=self, week_number=week_number).order_by('lane_pair')
        rules = self.leaguerules
        for pair in this_week:
            team_one = pair.team_one
            team_two = pair.team_two
            results_one = WeeklyResults.objects.create(league=self, team=team_one, week_number=week_number, lane_pair=pair.lane_pair, opponent=team_two)
            results_two = WeeklyResults.objects.create(league=self, team=team_two, week_number=week_number, lane_pair=pair.lane_pair, opponent=team_one)
            # NOTE(review): opponent is already set by the create() calls
            # above; these two assignments are redundant.
            results_one.opponent = team_two
            results_two.opponent = team_one
            team_one_series = Series.objects.filter(league=self, team=team_one, week_number=week_number)
            team_two_series = Series.objects.filter(league=self, team=team_two, week_number=week_number)
            game_points = rules.game_point_value
            series_points = rules.series_point_value
            weekly_points = rules.total_weekly_points()
            team_one_total_series = 0
            team_two_total_series = 0
            team_one_points = 0
            team_two_points = 0
            for i in range(1, 4): #Games number 1-3
                game = 'g' + str(i)
                t1_hc_score = Series.calc_team_handicap_game_score(team_one, week_number, i, team_one_series)
                setattr(results_one, game, t1_hc_score)
                team_one_total_series += t1_hc_score
                t2_hc_score = Series.calc_team_handicap_game_score(team_two, week_number, i, team_two_series)
                setattr(results_two, game, t2_hc_score)
                team_two_total_series += t2_hc_score
                # Game points: winner takes all; a tie splits the game points.
                if t1_hc_score > t2_hc_score:
                    team_one_points += game_points
                elif t1_hc_score < t2_hc_score:
                    team_two_points += game_points
                else:
                    team_one_points += game_points / 2
                    team_two_points += game_points / 2
            results_one.series = team_one_total_series
            results_two.series = team_two_total_series
            # NOTE(review): a tied series awards the series points to neither
            # team (no split, unlike tied games) — confirm that is intended.
            if team_one_total_series > team_two_total_series:
                team_one_points += series_points
            elif team_one_total_series < team_two_total_series:
                team_two_points += series_points
            results_one.points_won = team_one_points
            results_one.points_lost = weekly_points - team_one_points
            results_two.points_won = team_two_points
            results_two.points_lost = weekly_points - team_two_points
            results_one.save()
            results_two.save()
            team_one.update_points(team_one_points, weekly_points - team_one_points)
            team_two.update_points(team_two_points, weekly_points - team_two_points)
            team_one.update_team_pinfall(team_one_series)
            team_two.update_team_pinfall(team_two_series)
            # Fold each bowler's series into their league and roster stats.
            for series1 in team_one_series:
                lb_record = get_object_or_404(LeagueBowler, league=self, bowler= series1.bowler)
                lb_record.update(series1)
                tr_record = get_object_or_404(TeamRoster, bowler=series1.bowler, team=series1.team)
                tr_record.update_games(series1)
            for series2 in team_two_series:
                lb_record = get_object_or_404(LeagueBowler, league=self, bowler= series2.bowler)
                lb_record.update(series2)
                tr_record = get_object_or_404(TeamRoster, bowler=series2.bowler, team=series2.team)
                tr_record.update_games(series2)
    def create_weekly_score_backup(self, week_number):
        """Dump Team, LeagueBowler and TeamRoster score state for
        *week_number* to ``<league_id>_<week>.json`` in BACKUPSDIR so that
        reset_weekly_from_backup()/rescore() can restore it later."""
        #week_number = self.week_pointer
        backup_filename= str(self.id) + '_' + str(week_number) + '.json'
        # NOTE(review): this write handle is never flushed/closed before the
        # file is re-read below — the read may see incomplete data.  Prefer
        # a `with` block here.
        backup = open(BACKUPSDIR + backup_filename, 'w')
        backup_dict = {}
        #Backups file header information
        header_dict = { "league_name": self.name , "week" : str(week_number) }
        backup_dict.update( {"header" : header_dict})
        #Teams Backup Info
        teams_set = self.teams.all()
        teams = {}
        for team in teams_set:
            team_dict = {team.id : {"total_scratch_pins" : team.total_scratch_pins, "total_handicap_pins": team.total_handicap_pins, "total_pinfall" : team.total_pinfall, "team_points_won" : team.team_points_won, "team_points_lost" : team.team_points_lost}}
            teams.update(team_dict)
        backup_dict.update({"teams" : teams})
        #LeagueBowler/TeamRoster Backups
        lb_records = LeagueBowler.objects.filter(league=self)
        tr_records = TeamRoster.objects.filter(team_id__in=teams)
        team_rosters_dict = {}
        lb_records_dict = {}
        for lb in lb_records:
            lb_dict = {lb.id : {"league_average" : lb.league_average, "games_bowled" : lb.games_bowled, "league_total_scratch" : lb.league_total_scratch, "league_total_handicap" : lb.league_total_handicap, "league_high_scratch_game" : lb.league_high_scratch_game, "league_high_handicap_game" : lb.league_high_handicap_game, "league_high_scratch_series" : lb.league_high_scratch_series, "league_high_handicap_series" : lb.league_high_handicap_series} }
            lb_records_dict.update(lb_dict)
        backup_dict.update({"league_bowler_records" : lb_records_dict})
        for tr in tr_records:
            tr_dict = {tr.id : { "games_with_team" : tr.games_with_team }}
            team_rosters_dict.update(tr_dict)
        backup_dict.update({"team_roster_records" : team_rosters_dict})
        json.dump(backup_dict, backup, indent=4)
        # NOTE(review): `data` is loaded but never used, and the trailing
        # close() targets the handle already closed by the `with` block.
        with open(BACKUPSDIR + backup_filename) as backup:
            data = json.load(backup)
        backup.close()
    def advance_week(self):
        # Move the league to the next week; does not call save().
        self.current_week += 1
        self.week_pointer = self.current_week
    def set_week_pointer(self, week_selection):
        # Point at an arbitrary week (used when rescoring); does not save().
        self.week_pointer = week_selection
class LeagueRules(models.Model):
    """Per-league configuration: format, handicap rules and point values."""
    league = models.OneToOneField(League, on_delete=models.CASCADE)
    DESIGNATION = (
        ('A', 'Adult'),
        ('S', 'Senior'),
        ('J', 'Junior'),
        ('N', 'Any'),
    )
    GENDER = (
        ('M', 'Men'),
        ('W', 'Women'),
        ('X', 'Mixed'),
    )
    num_teams = models.PositiveSmallIntegerField()
    designation = models.CharField(max_length=1, choices=DESIGNATION)
    gender = models.CharField(max_length=1, choices=GENDER)
    playing_strength = models.PositiveSmallIntegerField(default=1)
    max_roster_size = models.PositiveSmallIntegerField(default=9)
    entering_average = models.PositiveSmallIntegerField(default=0)
    # Handicap (see League.rescore):
    #   handicap_percentage% of (handicap_scratch - bowler average), min 0.
    is_handicap = models.BooleanField(default=False)
    handicap_scratch = models.PositiveSmallIntegerField(default=0)
    handicap_percentage = models.PositiveSmallIntegerField(default=0)
    bye_team_point_threshold = models.PositiveSmallIntegerField(default=0)
    absentee_score = models.PositiveSmallIntegerField(default=0)
    # Points contested per game and for the series total (see score_week).
    game_point_value = models.PositiveSmallIntegerField(default=0)
    series_point_value = models.PositiveSmallIntegerField(default=0)
    def total_weekly_points(self):
        """Points contested per match: three games plus the series."""
        return (3 * self.game_point_value) + self.series_point_value
class LeagueBowler(models.Model):
    """Through-model between League and BowlerProfile holding the bowler's
    per-league statistics (average, highs, running totals)."""
    bowler = models.ForeignKey(BowlerProfile, on_delete=models.CASCADE)
    league = models.ForeignKey(League, on_delete=models.CASCADE)
    games_bowled = models.PositiveSmallIntegerField(default=0)
    league_average = models.PositiveSmallIntegerField(default=0)
    league_high_scratch_game = models.PositiveSmallIntegerField(default=0)
    league_high_handicap_game = models.PositiveSmallIntegerField(default=0)
    league_high_scratch_series = models.PositiveSmallIntegerField(default=0)
    league_high_handicap_series = models.PositiveSmallIntegerField(default=0)
    league_total_scratch = models.PositiveSmallIntegerField(default=0)
    league_total_handicap = models.PositiveSmallIntegerField(default=0)
    def __str__(self):
        return self.bowler.get_name()
    def update(self, series):
        """Fold one Series into this bowler's league statistics and save.

        Game scores come from series.get_scores_list() as strings; a score
        beginning with 'A' marks an absent game and is skipped entirely
        (it counts toward neither the totals nor games_bowled).
        """
        series_scratch_score = 0
        series_handicap_score = 0
        games_played_counter = 0
        handicap = series.applied_handicap
        # NOTE(review): `average` is assigned but never used below.
        average = series.applied_average
        #if not scores:
            #scores = Series.objects.filter(league=self.league, bowler=self.bowler, week_number=self.league.week_pointer)
        scores = series.get_scores_list()
        for score in scores:
            if score[0] == 'A':
                #Bowler was absent for this game, does not count toward league stats
                pass
            else:
                games_played_counter += 1
                series_scratch_score += int(score)
                if int(score) > self.league_high_scratch_game: #Update highest scratch score
                    self.league_high_scratch_game = int(score)
                game_handicap_score = int(score) + int(handicap)
                series_handicap_score += game_handicap_score
                if game_handicap_score > self.league_high_handicap_game: #Update highest handicap game score
                    self.league_high_handicap_game = game_handicap_score
        self.games_bowled += games_played_counter
        self.league_total_scratch += series_scratch_score
        if series_scratch_score > self.league_high_scratch_series:
            self.league_high_scratch_series = series_scratch_score
        self.league_total_handicap += series_handicap_score
        if series_handicap_score > self.league_high_handicap_series:
            self.league_high_handicap_series = series_handicap_score
        self.update_average()
        self.save()
    def update_average(self):
        # NOTE(review): raises ZeroDivisionError when games_bowled is 0, and
        # assigns a float to a PositiveSmallIntegerField — confirm the
        # truncation on save is the intended rounding behaviour.
        self.league_average = self.league_total_scratch / self.games_bowled
    def calc_average(self):
        """Return the current scratch average without mutating the record."""
        return self.league_total_scratch / self.games_bowled
class Schedule(models.Model):
    """When a league bowls: season start/end dates, weekday and start time."""
    WEEKDAY = (
        ('MO', 'Monday'),
        ('TU', 'Tuesday'),
        ('WE', 'Wednesday'),
        ('TH', 'Thursday'),
        ('FR', 'Friday'),
        ('SA', 'Saturday'),
        ('SU', 'Sunday'),
    )
    league = models.OneToOneField(League, on_delete=models.CASCADE)
    date_starting = models.DateField()
    date_ending = models.DateField()
    num_weeks = models.PositiveSmallIntegerField(default=0)
    start_time = models.TimeField()
    day_of_week = models.CharField(max_length=2, choices=WEEKDAY)
    def calc_num_weeks(self):
        """Set num_weeks to the count of weekly occurrences from
        date_starting through date_ending.  Does not call save()."""
        weeks = rrule.rrule(rrule.WEEKLY, dtstart=self.date_starting, until=self.date_ending)
        self.num_weeks = weeks.count()
class WeeklyResults(models.Model):
    """One team's scored match for a week: per-game handicapped scores,
    series total and points won/lost.  Created by League.score_week and
    deleted/rebuilt when a week is rescored."""
    league = models.ForeignKey(League, on_delete=models.CASCADE, related_name='results')
    week_number = models.PositiveSmallIntegerField(default=0)
    lane_pair = models.PositiveSmallIntegerField(default=0)
    team = models.ForeignKey(Team, on_delete=models.CASCADE)
    opponent = models.ForeignKey(Team, on_delete=models.CASCADE, related_name='opponent')
    average = models.PositiveSmallIntegerField(default=0)
    handicap = models.PositiveSmallIntegerField(default=0)
    # g1..g3: the team's handicapped score for each of the three games.
    g1 = models.PositiveSmallIntegerField(default=0)
    g2 = models.PositiveSmallIntegerField(default=0)
    g3 = models.PositiveSmallIntegerField(default=0)
    series = models.PositiveSmallIntegerField(default=0)
    points_won = models.PositiveSmallIntegerField(default=0)
    points_lost = models.PositiveSmallIntegerField(default=0)
class WeeklyPairings(models.Model):
    """One head-to-head matchup (team_one vs team_two) on a lane pair in a
    given week of a league's schedule (see League.create_pairings)."""
    league = models.ForeignKey(League, on_delete=models.CASCADE, related_name='pairings')
    team_one = models.ForeignKey(Team, on_delete=models.CASCADE, related_name='first_pair')
    team_two = models.ForeignKey(Team, on_delete=models.CASCADE, related_name='second_pair')
    week_number = models.PositiveSmallIntegerField(default=0)
    lane_pair = models.PositiveSmallIntegerField(default=0)
    def __str__(self):
        return str(self.team_one.number) + " - " + str(self.team_two.number)
    def get_lanes_by_pairnumber(self):
        """Render the physical lane numbers for this pair, e.g. pair 3 -> '5 - 6'."""
        return str(self.lane_pair *2 - 1) + ' - ' + str(self.lane_pair*2)
| cjtrombley/KPTracker | kptracker_serv/kpbt/leagues/models.py | models.py | py | 15,579 | python | en | code | 0 | github-code | 13 |
5144705836 | #!/user/bin/python3
# -*- codeing:utf-8 -*-
# Time : 2019/10/16 10:31
# Author : LiuShiHua
# Desc :
# Import scikit-learn's linear_model module
from sklearn import linear_model
# Import the train/test split helper
from sklearn.model_selection import train_test_split
# Import NumPy for array handling
import numpy as np
# Import pandas for tabular data
import pandas as pd
# Import matplotlib for plotting
import matplotlib.pyplot as plt
# Read the workbook into a DataFrame named cost_and_click
cost_and_click = pd.DataFrame(pd.read_excel('cost_and_click.xlsx'))
# Peek at the first 5 rows (NOTE(review): return value is discarded in script mode)
cost_and_click.head()
# Advertising cost is the independent variable X (2-D: one feature column)
X = np.array(cost_and_click[['cost']])
# Click count is the dependent variable Y
Y = np.array(cost_and_click['click'])
# Show the row counts of X and Y
print(X.shape)
print(Y.shape)
# Use the STXihei (Chinese) font at size 15 so the axis labels render
plt.rc('font', family='STXihei', size=15)
# Scatter plot of cost (X) vs clicks (Y) with colour/marker/alpha settings
plt.scatter(X, Y, 60, color='blue', marker='o', linewidth=3, alpha=0.8)
# x-axis label ("cost")
plt.xlabel('成本')
# y-axis label ("clicks")
plt.ylabel('点击量')
# chart title ("advertising cost vs click volume analysis")
plt.title('广告成本与点击量分析')
# Background grid colour, style, width and transparency
plt.grid(color='#95a5a6', linestyle='--', linewidth=1, axis='both', alpha=0.4)
# Show the chart
plt.show()
# Randomly split the data into training and test sets; the test set is 40%
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.4, random_state=0)
# Show the number of training rows
print(X_train.shape)
print(y_train.shape)
# Fit a linear regression model on the training set
clf = linear_model.LinearRegression()
clf.fit(X_train, y_train)
# Slope (coefficient) of the fitted regression line
print(clf.coef_)
# Intercept of the fitted regression line
print(clf.intercept_)
# Coefficient of determination, R squared (NOTE(review): result is discarded)
clf.score(X_train, y_train)
# Predict the dependent variable for a given cost
print(clf.predict([[20000]]))
# Predict on the held-out test set and compare with the observed values
print(list(clf.predict(X_test)))
print(list(y_test))
# Sum of squared errors on the test set
print(((y_test - clf.predict(X_test)) **2).sum()) | yingtian648/my_test | module/machine_learn/20191016.py | 20191016.py | py | 2,120 | python | zh | code | 0 | github-code | 13 |
34629021012 | #!/usr/bin/python3
# Imports: pika talks to the RabbitMQ broker; subprocess captures the output
# of the native sensor-reading binary.
import pika
import subprocess
import time

# Broker connection: plain-text credentials against the remote RabbitMQ host.
credentials = pika.PlainCredentials("client", "clientpass")
conn_params = pika.ConnectionParameters("136.244.101.115", credentials = credentials)
conn_broker = pika.BlockingConnection(conn_params)
channel = conn_broker.channel()

# Read the sensor until a plausible value comes back.  Faulty DHT reads
# always report a humidity above 145, so start with a sentinel of 255 and
# retry while the reading is still in the error range.
humidity = 255.00
while humidity > 145:
    proc0 = subprocess.Popen('./pobierzDane', stdout=subprocess.PIPE)
    # stdout.read() returns bytes; str() yields "b'...'", which is why the
    # slices below start at offset 2 (past the "b'" prefix).
    wiadomoscPrzed1 = str(proc0.stdout.read())
    temp = float(wiadomoscPrzed1[2:6])
    humidity = float(wiadomoscPrzed1[8:12])
    date = wiadomoscPrzed1[15:25]
    # BUG FIX: the original bound this value to the name `time`, shadowing
    # the imported `time` module for the rest of the script.
    time_str = wiadomoscPrzed1[28:36]

# Build the JSON-ish payload expected by the consumer.
wiadomosc = str("{\nTemp : '" + str(temp) + "',\nHumid : '" + str(humidity) + "',\nDate : '" + date + "',\nTime : '" + time_str + "'\n}")

# NOTE(review): a commented-out multi-sample averaging/validation pass used
# to live here (collecting readings into a list and reconciling them); it
# was dead code and has been removed — restore from history if needed.

print("wysylam : " + wiadomosc)
msg = "".join(wiadomosc)
msg_props = pika.BasicProperties()
msg_props.content_type = "text/plain"
channel.basic_publish(exchange="data_exchange", properties=msg_props, routing_key="raspberry", body=msg)
conn_broker.close()
| xyanteos/Skrypty-RaspberryPI | SKRYPTmqtt.py | SKRYPTmqtt.py | py | 3,542 | python | pl | code | 1 | github-code | 13 |
15666186317 | """
Script to put most recent letterboxd reviews into JSON file.
"""
from datetime import datetime
from feedparser import parse
from json import dump
from sys import argv

RSS_URL = "https://letterboxd.com/g30rg3/rss/"

# Output path comes from the first CLI argument, defaulting to films.json.
if len(argv) < 2:
    output_filepath = "films.json"
else:
    output_filepath = argv[1]

# Fetch the Letterboxd RSS feed and pull out its entries.
feed = parse(RSS_URL)
posts = feed.entries

# Keep only entries that carry a watch date, normalised to a flat dict; the
# watch date becomes a Unix timestamp so it sorts and serializes trivially.
reviews = []
for post in posts:
    watchdate = post.get("letterboxd_watcheddate")
    if watchdate is None:
        continue
    date_obj = datetime.strptime(watchdate, "%Y-%m-%d")
    reviews.append({
        "title": post.get("letterboxd_filmtitle"),
        "link": post.get("link"),
        "rating": post.get("letterboxd_memberrating"),
        "watched": int(date_obj.timestamp())
    })

# Most recently watched first.
sorted_reviews = sorted(reviews, key=lambda i: i["watched"], reverse=True)

# BUG FIX: the original used open()/close() without a context manager, so
# the handle leaked if serialization raised; `with` closes it either way.
with open(output_filepath, "w") as f:
    dump(sorted_reviews, f)
| Ge0rg3/georgeom.net | scripts/update_films.py | update_films.py | py | 1,005 | python | en | code | 3 | github-code | 13 |
4787858508 | import re
def map_destinations(text):
    """Extract destinations wrapped in matching '=' or '/' delimiters.

    A destination is a capitalised word of 3+ letters enclosed by the same
    delimiter on both sides (e.g. "=Hawai=" or "/Cyprus/").  Returns the two
    report lines: the comma-separated destination list and the total travel
    points (sum of the destinations' lengths).
    """
    # BUG FIX: the original character class was [=|\/], which wrongly
    # accepted '|' as a delimiter; only '=' and '/' are valid.  The
    # backreference \1 requires the closing delimiter to match the opener.
    matched = re.findall(r"([=/])([A-Z][A-Za-z]{2,})\1", text)
    destinations = [match[1] for match in matched]
    travel_points = sum(len(name) for name in destinations)
    return ("Destinations: " + ", ".join(destinations),
            "Travel Points: " + str(travel_points))


if __name__ == "__main__":
    # Guarded so importing this module does not block on stdin.
    first_line, second_line = map_destinations(input())
    print(first_line)
    print(second_line)
14957188207 | import pymysql
from crawler.tools import contentFix
class SQLHandler(object):
    """Thin wrapper around a pymysql connection for the crawler's article table.

    SECURITY FIX: all queries now use parameterized ``%s`` placeholders instead
    of Python %-string formatting, which was vulnerable to SQL injection and
    broke on titles/contents containing quotes.
    """

    def __init__(self):
        # autocommit=True: no explicit commit() calls are needed after writes.
        self.__conn = pymysql.connect(host='127.0.0.1', user='root',
                                      password='0000', db='crawler', charset='utf8', autocommit=True)
        self.cursor = self.__conn.cursor()

    def __contains__(self, article):
        """Return True if an article with the same title AND content is stored."""
        query_result = self.queryArticleTitle(article.title)
        if not query_result:
            return False
        # Column index 2 is `content` in tbl_article.
        return query_result[0][2] == article.content

    def query(self, sql, params=None):
        """Execute *sql* (optionally parameterized) and return all rows."""
        self.cursor.execute(sql, params)
        return self.cursor.fetchall()

    def execute(self, sql, params=None):
        """Execute *sql* (optionally parameterized) without fetching results."""
        self.cursor.execute(sql, params)

    def insertArticle(self, article):
        """Insert a new article unless an identical title+content row exists."""
        if article in self:
            return
        sql = ('insert into tbl_article(title, content, public_comments_count, likes_count, slug, '
               'views_count, total_rewards_count, first_shared_at, like_rate) '
               'values (%s, %s, %s, %s, %s, %s, %s, null, %s)')
        params = (article.title, article.content, article.public_comments_count,
                  article.likes_count, article.slug, article.views_count,
                  article.total_rewards_count, article.like_rate)
        # Let database errors propagate to the caller (the original caught and
        # immediately re-raised them, which was a no-op).
        self.execute(sql, params)

    def queryArticleTitle(self, title):
        """Return all rows whose title exactly matches *title*."""
        return self.query("select * from tbl_article where title = %s", (title,))

    def queryArticle(self, keyword):
        """Fuzzy-search articles whose title or content contains *keyword*.

        Returns None when nothing matches (kept for backward compatibility).
        """
        pattern = "%{}%".format(keyword)
        result = list(self.query("select * from tbl_article where title like %s", (pattern,)))
        result.extend(self.query("select * from tbl_article where content like %s", (pattern,)))
        # Author-based search is not implemented yet (author table pending).
        if not result:
            return None
        # De-duplicate while keeping a deterministic order
        # (the original used set(), whose order varies between runs).
        return list(dict.fromkeys(result))
| money666-sxy/crawler_backend | crawler/db_handle/sql_handler.py | sql_handler.py | py | 3,208 | python | en | code | 0 | github-code | 13 |
39890564366 | from tkinter import *
from tkinter.ttk import Treeview
from tkinter import messagebox
import tkinter as tk
import sys
import PIL
from PIL import ImageTk
from PIL import Image
import tkinter.ttk as ttk
from tkinter.ttk import Combobox
from datetime import datetime
import mysql.connector
def quit(ekran):
    """Destroy the given window, then terminate the process with exit code 5000."""
    ekran.destroy()
    raise SystemExit(5000)
def kullanici_ekran(veritab, ekran, isim, mycursor):
    """Open the customer main window (order, order history, profile buttons).

    veritab: open MySQL connection; ekran: parent Tk window;
    isim: Entry widget holding the logged-in customer's e-mail;
    mycursor: shared DB cursor passed on to the sub-screens.
    """
    kullaniciEkran = Toplevel(ekran)
    kullaniciEkran.title("Kullanıcı Arayüzü")
    kullaniciEkran.geometry("1280x600+20+0")
    kullaniciEkran.resizable(FALSE, FALSE)
    # Background image must stay referenced (my_image) or Tk garbage-collects it.
    my_image = ImageTk.PhotoImage(Image.open("bgimage.jpg"))
    arkaplan = Label(kullaniciEkran, image=my_image)
    arkaplan.place(x=0, y=0)
    kullaniciEkran.iconbitmap("YICO.ico")
    # Left-hand navigation buttons: order / my orders / my profile.
    siparisVer = Button(kullaniciEkran, compound=TOP, text="Sipariş Ver",
                        command=lambda: siparisVerFonk(veritab, kullaniciEkran, my_image, isim),bg="#2a1415",fg="White",font=("helvetica","15","bold"))
    siparisVer.place(x=0, y=120, height=100, width=205)
    siparislerim = Button(kullaniciEkran, text="Siparişlerim", compound=TOP,
                          command=lambda: siparislerimFonk(kullaniciEkran, my_image, isim, mycursor),bg="#2a1415",fg="White",font=("helvetica","15","bold"))
    siparislerim.place(x=0, y=220, height=100, width=205)
    profilim = Button(kullaniciEkran, text="Profilim", compound=TOP,
                      command=lambda: profilimFonk(kullaniciEkran, my_image, isim, mycursor,veritab),bg="#2a1415",fg="White",font=("helvetica","15","bold"))
    profilim.place(x=0, y=320, height=100, width=205)
    # Static company blurb shown in the centre of the window.
    MenulerNeler1 = Label(kullaniciEkran,
                          text="Catering şirketi olarak \n İstanbul’da günlük tabldot yemek servisi hizmetiyle \n birlikte sizlerin yanınızdayız. \n Catering şirketimiz İstanbul 'da yemek üretimi ve servis hizmetlerine \n 1979 yılında Şişli Bomonti iş merkezinde \n küçük çapta bir mutfak ile, yüz kişiye yemek servisi yaparak, çalışmaya başladı.\n Kuruluş zamanımızda geniş çaplı firmalar kurumsal yemek ihtiyaçlarını\n kendi kurumsal bünyeleri içerisinde bir veya iki personel ile karşılıyorlardı. \n Küçük ve orta çaplı firmalar ise ihtiyaçları olan yemeği yeni kurulmakta olan \n tabldot firmalarından veya çevredeki lokantalardan temin ediyorlardı.",font=("helvetica",15),bg="#b89073",fg="black",width=75,height=17, border=0)
    MenulerNeler1.place(x=205,y=120)
    # Greeting: fetch the customer's first/last name by e-mail.
    # NOTE(review): data4/data5 are raw fetchall() lists, so the label renders
    # tuple syntax (e.g. [('Ali',)]) rather than a plain name — confirm intent.
    mycursor5=veritab.cursor()
    mycursor5.execute("SELECT ad FROM musteriler WHERE mail = %s", (isim.get(),))
    data4 = mycursor5.fetchall()
    mycursor5.execute("SELECT soyad FROM musteriler WHERE mail = %s", (isim.get(),))
    data5=mycursor5.fetchall()
    textyazisii=("Hoşgeldin",data4 , data5 )
    label2 = Label(master=kullaniciEkran,
                   text=textyazisii, bg="#502b26", fg="white",width=25,height=2)
    label2.place(x=205,y=35)
    # Company logo, kept referenced via the widget attribute.
    logo= Image.open("YLOGO_1.png")
    logoyukle = ImageTk.PhotoImage(logo)
    goruntulogo = Label(kullaniciEkran, image=logoyukle ,bg="#2a1415" ,border=0)
    goruntulogo.image = logoyukle
    goruntulogo.place(x=45, y=5)
def profilimFonk(kullaniciEkran, my_image, isim, mycursor,veritab):
    """Open the profile window where the customer can update password/name/surname.

    The actual UPDATE statements run in gonderguncelleFonk when the
    "GÜNCELLE!" button is pressed.
    """
    profilimEkran = Toplevel(kullaniciEkran)
    profilimEkran.title("Profilim Arayüzü")
    profilimEkran.geometry("1280x600+20+0")
    profilimEkran.resizable(FALSE, FALSE)
    arkaplan = Label(profilimEkran, image=my_image)
    arkaplan.place(x=0, y=0)
    # Trace callback: clamps the password entry to at most 8 characters.
    def limitSizePas(*args):
        value = sifresinir.get()
        if len(value) > 8: sifresinir.set(value[:8])
    sifresinir = StringVar()
    sifresinir.trace('w', limitSizePas)
    sifrelab = Label(profilimEkran, text="Şifreniz : (Max: 8)", width=15, font="30", bg="#2a1415" ,fg="white" )
    sifrelab.place(x=410, y=210)
    sifreentt = Entry(profilimEkran, width=25, show="*", textvariable=sifresinir , font="30", bg="#b89073" , border="0")
    sifreentt.place(x=590, y=210)
    adlab = Label(profilimEkran, text="Adınız :",width=15, font="30", bg="#2a1415" ,fg="white")
    adlab.place(x=410, y=250)
    adentt = Entry(profilimEkran, width=25 , font="30", bg="#b89073" , border="0")
    adentt.place(x=590, y=250)
    soyadlab = Label(profilimEkran, text="Soyadınız :",width=15, font="30", bg="#2a1415" ,fg="white")
    soyadlab.place(x=410, y=290)
    soyadentt = Entry(profilimEkran, width=25 , font="30", bg="#b89073" , border="0")
    soyadentt.place(x=590, y=290)
    # NOTE(review): the three .get() calls below run immediately after the
    # (still empty) entries are created, so these locals are always "" and are
    # never used — candidates for removal.
    mycursor2=veritab.cursor()
    sifreentry=sifreentt.get()
    adentry=adentt.get()
    soyadentry=soyadentt.get()
    gonderguncelle = Button(profilimEkran, text="GÜNCELLE!", command=lambda: gonderguncelleFonk(profilimEkran,mycursor2,veritab,sifreentt, adentt, soyadentt,isim,kullaniciEkran), font=3, width=18 ,bg="#502b26" ,fg="#b89073")
    gonderguncelle.place(x=590, y=330)
def gonderguncelleFonk(profilimEkran,mycursor2,veritab,sifreentt, adentt, soyadentt,isim,kullaniciEkran):
    """Apply profile updates: for each non-empty field, UPDATE the customer row
    (and, for name/surname, the denormalized copies in `siparisler`).

    Closes the customer main window afterwards so the user logs in again.
    """
    # NOTE(review): the mycursor2 parameter is immediately shadowed by a fresh
    # cursor here — the parameter could be dropped from the signature.
    mycursor2=veritab.cursor()
    # First name: update both the customer record and the order history copy.
    if len(adentt.get()) > 0:
        sorgu=("UPDATE musteriler SET ad = %s WHERE mail = %s")
        deger=(adentt.get(),isim.get())
        mycursor2.execute(sorgu,deger)
        veritab.commit()
        sorgu=("UPDATE siparisler SET ad = %s WHERE musterimail = %s")
        deger=(adentt.get(),isim.get())
        mycursor2.execute(sorgu,deger)
        veritab.commit()
    # Password: only stored in `musteriler`.
    if len(sifreentt.get()) > 0:
        sorgu=("UPDATE musteriler SET sifre = %s WHERE mail = %s")
        deger=(sifreentt.get(),isim.get())
        mycursor2.execute(sorgu,deger)
        veritab.commit()
    # Surname: same two-table update as the first name.
    if len(soyadentt.get()) > 0:
        sorgu=("UPDATE musteriler SET soyad = %s WHERE mail = %s")
        deger=(soyadentt.get(),isim.get())
        mycursor2.execute(sorgu,deger)
        veritab.commit()
        sorgu=("UPDATE siparisler SET soyad = %s WHERE musterimail = %s")
        deger=(soyadentt.get(),isim.get())
        mycursor2.execute(sorgu,deger)
        veritab.commit()
    messagebox.showinfo("Bilgilendirme", "Güncelleme Başarılı! Yeniden Giriş Yapınız!",parent=profilimEkran)
    # Force a re-login so the UI reflects the new credentials.
    kullaniciEkran.destroy()
def siparisvermefonk(siparisverme,veritab,acilan_kutu, acilanicecek, textbox, isim, my_image,deger,yemekadet,icecekadet):
    """Read the order form, compute the total (20% discount from 1000 TL up)
    and open the confirmation window.

    Unit prices: 100 TL per menu, 5 TL per drink.
    """
    acilankutu = acilan_kutu.get()
    adreskutu = textbox.get("1.0",END)
    acilanicecek=acilanicecek.get()
    kullanicimail =isim.get()
    katalog=deger.get()
    yemekadeti=yemekadet.get()
    icecekadeti=icecekadet.get()
    if yemekadeti == "Yemek Adeti Seçiniz":
        messagebox.showinfo("Uyarı", "Yemek adeti seçiniz!",parent=siparisverme)
        # BUGFIX: the original fell through after the warning and crashed on
        # int("Yemek Adeti Seçiniz") below.
        return
    yfiyatint=int(yemekadeti)
    ifiyatint=int(icecekadeti)
    yfiyat = int(100*yfiyatint)   # 100 TL per menu
    ifiyat= int(5*ifiyatint)      # 5 TL per drink
    cevape="Evet"
    cevaph="Hayır"
    if yfiyat+ifiyat <1000:
        # Below the 1000 TL threshold: no discount applies.
        toplam = int(yfiyat+ifiyat)
        oncekifiyat="İndirim Yok!"
        aradakiFark="İndirim Yok!"
        sonuc = cevaph
    else:
        # 20% discount at 1000 TL and above.
        oncekifiyat=int(yfiyat+ifiyat)
        toplam=int((yfiyat+ifiyat)*80/100)
        # BUGFIX: the discount amount was computed as toplam-oncekifiyat,
        # which displayed a negative value.
        aradakiFark = int(oncekifiyat-toplam)
        sonuc = cevape
    onay = Toplevel()
    onay.geometry("1280x600+20+0")
    onay.title("Sipariş Onay Ekranı")
    onay.resizable(FALSE,FALSE)
    onay.iconbitmap("YICO.ico")
    frame = Label(master=onay, image=my_image)
    frame.place(x=0,y=0)
    textyazisi=("Seçilen Menü = {} \n Yemek Miktarı = {} \n Seçilen İçecek = {} \n İçecek Miktarı = {} \n Katalog Var Mı = {}\n Teslimat Adresi= {} \n Toplam Tutar = {} \n %20 İndirim Var Mı={} \n İndirimden Önceki Fiyat = {} \n İndirim Tutarı = {} ".format(acilankutu,yemekadeti,acilanicecek,icecekadeti,katalog,adreskutu,toplam,sonuc,oncekifiyat,aradakiFark))
    label = Label(master=onay, text=textyazisi, bg="#2a1415" ,fg="white", border="0",width=100,height=20)
    label.place(x=320,y=125)
    onayButon = Button(
        master= onay,
        text = "SİPARİŞİ ONAYLA",
        command = lambda: siparisOnay(onay,veritab,isim,acilankutu,acilanicecek, adreskutu, kullanicimail,katalog,yemekadeti,icecekadeti,toplam),
        bg="white"
    )
    onayButon.place(x=640,y=450)
    onay.wait_window()
def siparisOnay(onay,veritab,isim,acilankutu,acilanicecek,adreskutu,kullanicimail,katalog,yemekadeti,icecekadeti,toplam):
    """Look up the customer's first/last name and insert the confirmed order.

    BUGFIX: the original extracted the name by slicing/replacing
    ``str(cursor.fetchmany(1))``, which breaks for names containing quotes or
    parentheses. ``fetchone()`` yields the value directly.
    """
    ad_cursor = veritab.cursor()
    ad_cursor.execute("SELECT ad FROM musteriler WHERE mail = %s", (isim.get(),))
    ad_satiri = ad_cursor.fetchone()
    musteri_adi = ad_satiri[0] if ad_satiri else ""
    soyad_cursor = veritab.cursor()
    soyad_cursor.execute("SELECT soyad FROM musteriler WHERE mail = %s", (isim.get(),))
    soyad_satiri = soyad_cursor.fetchone()
    musteri_soyadi = soyad_satiri[0] if soyad_satiri else ""
    # Persist the order (parameterized insert, as in the original).
    kayit_cursor = veritab.cursor()
    sorgu = "INSERT INTO siparisler (ad,soyad,menu,menu_adet,icecek,icecek_adet,toplam,katalog,adres,musterimail) VALUES (%s,%s,%s,%s,%s, %s,%s,%s,%s,%s)"
    deger = (musteri_adi,musteri_soyadi,acilankutu,yemekadeti,acilanicecek,icecekadeti,toplam,katalog,adreskutu,kullanicimail)
    kayit_cursor.execute(sorgu, deger)
    veritab.commit()
    messagebox.showinfo("Uyarı","Siparişiniz Başarıyla Oluşturuldu!",parent=onay)
def siparisVerFonk(veritab, kullaniciEkran, my_image, isim):
    """Open the order form: menu/drink comboboxes, quantities, address box,
    catalog checkbox and the four menu preview images.

    Submitting delegates to siparisvermefonk for validation and pricing.
    """
    siparisverme = Toplevel(kullaniciEkran)
    siparisverme.geometry("1280x600+20+0")
    siparisverme.title("Sipariş Verme Ekranı")
    siparisverme.resizable(FALSE, FALSE)
    frame = Label(master=siparisverme, image=my_image)
    frame.pack(expand=True, fill="both")
    siparisverme.iconbitmap("YICO.ico")
    # Selectable options for the comboboxes.
    menuler = ["Geleneksel Menü", "Modern Sofra Menü", "Kış Menü"]
    icecekler = ["İÇECEK YOK","KOLA","FANTA","ICE TEA", "LIMONATA","AYRAN"]
    yemekadeti=[1,2,3,4,5,6,7,8,9,10]
    icecekadeti=[0,1,2,3,4,5,6,7,8,9,10]
    acilan_kutu = Combobox(siparisverme, values=menuler, width=37, height=14)
    acilan_kutu.set("MENÜ SEÇİMİ YAPINIZ")
    acilan_kutu.place(x=50, y=100)
    yemekadet = Combobox(siparisverme, values=yemekadeti, width=37, height=50)
    yemekadet.set("Yemek Adeti Seçiniz")
    yemekadet.place(x=50, y=140)
    acilanicecek = Combobox(siparisverme, values=icecekler, width=37, height=50)
    acilanicecek.set("İÇECEK SEÇİMİ YAPINIZ")
    acilanicecek.current(0)
    acilanicecek.place(x=50, y=180)
    # Drink quantity defaults to 0 via current(0).
    icecekadet = Combobox(siparisverme, values=icecekadeti, width=37, height=50)
    icecekadet.set("İçecek Adeti Seçiniz")
    icecekadet.current(0)
    icecekadet.place(x=50, y=220)
    # Checkbox stores the literal strings "Evet"/"Hayır" in `deger`.
    deger=StringVar()
    sirketkatalogu=Checkbutton(siparisverme,text="Katalog Gönder",variable=deger,onvalue="Evet",offvalue="Hayır" , bg="White")
    sirketkatalogu.deselect()
    sirketkatalogu.place(x=110,y=470)
    adres = Label(siparisverme, text="Adresinizi Giriniz :", font=("bold"),bg="#2a1415" ,fg="white" ,border = 0)
    adres.place(x=50, y=265)
    textbox = Text(siparisverme, bg="#2a1415" ,fg="white", width=30, height=10)
    textbox.place(x=50, y=288)
    siparisverbuton = Button(
        master=siparisverme,
        text="SİPARİŞ VER", bg="white",width=10, height=2,
        command=lambda: siparisvermefonk(siparisverme,veritab,acilan_kutu,acilanicecek, textbox, isim, my_image,deger,yemekadet,icecekadet)
    )
    siparisverbuton.place(x=125, y=500)
    # Menu preview images; each Label keeps a reference via its .image attribute
    # so Tk does not garbage-collect the PhotoImage.
    resim1 = Image.open("MENU.png")
    yukle = ImageTk.PhotoImage(resim1)
    goruntu1 = Label(siparisverme,text="Menü-1", image=yukle, border=0)
    goruntu1.image = yukle
    goruntu1.place(x=305, y=100)
    resim2 = Image.open("MENU22.png")
    yukle2 = ImageTk.PhotoImage(resim2)
    goruntu2 = Label(siparisverme,text="Menü-2", image=yukle2, border=0)
    goruntu2.image = yukle2
    goruntu2.place(x=535 , y=100)
    resim3 = Image.open("MENU33.png")
    yukle3 = ImageTk.PhotoImage(resim3)
    goruntu3 = Label(siparisverme,text="Menü-3", image=yukle3, border=0)
    goruntu3.image = yukle3
    goruntu3.place(x=765 , y=100)
    resim4 = Image.open("icecek2.png")
    yukle4 = ImageTk.PhotoImage(resim4)
    goruntu4 = Label(siparisverme,text="Menü-4", image=yukle4, border=0)
    goruntu4.image = yukle4
    goruntu4.place(x=995 , y=100)
def siparislerimFonk(kullaniciEkran, my_image, isim, mycursor):
    """Show the logged-in customer's order history in a treeview window."""
    siparislerim = Toplevel(kullaniciEkran)
    siparislerim.title("Siparişlerim")
    siparislerim.geometry("1280x600+20+0")
    siparislerim.resizable(FALSE, FALSE)
    frame = Label(master=siparislerim, image=my_image)
    frame.place(x=0, y=0)
    siparislerim.iconbitmap("YICO.ico")
    # Only this customer's rows, keyed by the e-mail held in `isim`.
    mycursor.execute("SELECT * FROM siparisler WHERE musterimail = %s", (isim.get(),))
    data = mycursor.fetchall()
    # Column order must match the siparisler table layout.
    columns = ["SiparişId","Ad","Soyad","Seçilen Menü","Sipariş Adeti","İçecek","İçecek Adeti","Toplam","Siparişte Katalog Var Mı", "Adres","Sipariş Tarihi", "Email"]
    treeview_olustur(master=siparislerim, columns=columns, data=data,width=550)
def girisYap(veritab, mycursor, isim, sifre, ekran):
    """Validate the customer login form; on a DB match open the customer window."""
    mail_degeri = isim.get()
    sifre_degeri = sifre.get()
    # Guard clauses: both fields must be filled before hitting the database.
    if mail_degeri == "":
        messagebox.showinfo("Uyarı", "Önce maili giriniz!")
        return
    if sifre_degeri == "":
        messagebox.showinfo("Uyarı", "Önce şifreyi giriniz!")
        return
    mycursor.execute("SELECT * FROM musteriler WHERE mail=%s AND sifre=%s",
                     (mail_degeri, sifre_degeri))
    eslesen_kayitlar = mycursor.fetchall()
    if eslesen_kayitlar:
        messagebox.showinfo("Uyarı", "Giriş Başarılı!")
        kullanici_ekran(veritab, ekran, isim, mycursor)
    else:
        messagebox.showinfo("Uyarı", "Eposta/Şifre Hatalı!")
def gonderFonk(uyeol,veritab, mailent, sifreent, adent, soyadent):
    """Validate the sign-up form and insert the new customer row."""
    # Required fields, checked in display order; the first empty one wins.
    zorunlu_alanlar = (
        (mailent, "Önce maili giriniz!"),
        (sifreent, "Önce şifreyi giriniz!"),
        (adent, "Önce adınızı giriniz!"),
        (soyadent, "Önce soyadınızı giriniz!"),
    )
    for alan, uyari in zorunlu_alanlar:
        if alan.get() == "":
            messagebox.showinfo("Uyarı", uyari, parent=uyeol)
            return
    kayit_cursor = veritab.cursor()
    kayit_cursor.execute(
        "INSERT INTO musteriler (mail,sifre,ad,soyad) VALUES (%s, %s, %s, %s)",
        (mailent.get(), sifreent.get(), adent.get(), soyadent.get()),
    )
    veritab.commit()
    messagebox.showinfo("Bilgilendirme", "Üyeliğiniz Başarılı!", parent=uyeol)
def uyeol(veritab, ekran, my_image):
    """Open the sign-up window (mail, password, name, surname) and block until
    it is closed; submission delegates to gonderFonk."""
    # NOTE(review): the local Toplevel shadows this function's own name.
    uyeol = Toplevel(ekran)
    uyeol.geometry("1280x600")
    uyeol.title("Uye Ol")
    uyeol.resizable(FALSE, FALSE)
    frame = Label(master=uyeol, image=my_image)
    frame.pack(expand=True, fill="both")
    uyeol.iconbitmap("YICO.ico")
    mailab = Label(uyeol, text="Mail Adresiniz : ",width=15, font="30", bg="#2a1415" ,fg="white" )
    mailab.place(x=410,y=170)
    mailent = Entry(uyeol, width=25,font="30", bg="#b89073" , border="0")
    mailent.place(x=590, y=170)
    # Trace callback: clamps the password entry to at most 8 characters.
    def limitSizePas(*args):
        value = sifresinir.get()
        if len(value) > 8: sifresinir.set(value[:8])
    sifresinir = StringVar()
    sifresinir.trace('w', limitSizePas)
    sifrelab = Label(uyeol, text="Şifreniz : (Max: 8)", width=15, font="30", bg="#2a1415" ,fg="white" )
    sifrelab.place(x=410, y=210)
    sifreent = Entry(uyeol, width=25, show="*", textvariable=sifresinir , font="30", bg="#b89073" , border="0")
    sifreent.place(x=590, y=210)
    adlab = Label(uyeol, text="Adınız :",width=15, font="30", bg="#2a1415" ,fg="white")
    adlab.place(x=410, y=250)
    adent = Entry(uyeol, width=25 , font="30", bg="#b89073" , border="0")
    adent.place(x=590, y=250)
    soyadlab = Label(uyeol, text="Soyadınız :",width=15, font="30", bg="#2a1415" ,fg="white")
    soyadlab.place(x=410, y=290)
    soyadent = Entry(uyeol, width=25 , font="30", bg="#b89073" , border="0")
    soyadent.place(x=590, y=290)
    gonder = Button(uyeol, text="UYE OL!", command=lambda: gonderFonk(uyeol,veritab, mailent, sifreent, adent, soyadent), font=3, width=18 ,bg="#502b26" ,fg="#b89073")
    gonder.place(x=590, y=330)
    # Modal-ish behaviour: wait until the sign-up window is closed.
    uyeol.wait_window()
def menubar(master):
    """Attach the application menu bar with a single 'Hakkında' (About) entry."""
    def hakkindaFonk():
        # About dialog listing the application's authors.
        messagebox.showinfo("Hakkında", " Bu uygulama \n Karadeniz Teknik Üniversitesi \n İktisadi ve İdari Bilimler Fakültesi \n Yönetim Bilişim Sistemleri Bölümü öğrencilerinden \n Oğuzcan Uzunöner ve Canan Arabacı \n tarafından geliştirilmiştir." , parent=master)
    ana_cubuk = Menu(master)
    hakkinda_menusu = Menu(ana_cubuk, tearoff=0)
    hakkinda_menusu.add_command(label="Hakkında", command=hakkindaFonk)
    ana_cubuk.add_cascade(label="Uygulama", menu=hakkinda_menusu)
    master.config(menu=ana_cubuk)
def veriye_odaklan(treeview,master):
    """Show the full address (column index 9) of the double-clicked treeview row."""
    secili_satir = treeview.item(treeview.selection())
    degerler = list(secili_satir["values"])
    # An empty selection yields too few values; only real data rows qualify.
    if len(degerler) > 2:
        messagebox.showinfo("Tam Adres", degerler[9], parent=master)
def treeview_olustur(master, columns, data, width):
    """Build a scrollable Treeview inside *master*, fill it with *data* rows
    and bind double-click to veriye_odaklan (full-address popup).

    columns: list of heading texts; width: pixel width applied to every column.
    """
    # Scrollbars first so they claim the window edges before the treeview packs.
    y_scrollbar = Scrollbar(master=master, orient="vertical")
    y_scrollbar.pack(side="right", fill="y")
    x_scrollbar = Scrollbar(master=master, orient="horizontal")
    x_scrollbar.pack(side="bottom", fill="x")
    treeview = Treeview(master=master, columns=columns, show="headings")
    treeview.pack(expand=True, fill="both")
    # Wire scrollbars and treeview to each other in both directions.
    x_scrollbar["command"] = treeview.xview
    y_scrollbar["command"] = treeview.yview
    treeview["yscrollcommand"] = y_scrollbar.set
    treeview["xscrollcommand"] = x_scrollbar.set
    # Treeview columns are addressed as "#1".."#n" (1-based).
    for i, j in enumerate(columns):
        treeview.column(
            column=f"#{i + 1}",
            width=width,
            anchor=CENTER
        )
        treeview.heading(
            column=f"#{i + 1}",
            text=j
        )
    for index, row in enumerate(data):
        treeview.insert(
            parent="",
            index=index,
            values=row
        )
    # Double-click on a row pops up its full address.
    treeview.bind(
        sequence="<Double-Button-1>",
        func=lambda event: veriye_odaklan(treeview,master)
    )
def personelekran(veritab,isim, mycursor,personelekrangiris,my_image):
    """Open the staff dashboard: greeting, total revenue, per-menu and per-drink
    report buttons, and a treeview listing every order."""
    personelEkran = Toplevel(personelekrangiris)
    personelEkran.title("Personel Arayüzü")
    personelEkran.geometry("1280x600+20+0")
    personelEkran.resizable(FALSE, FALSE)
    arkaplan = Label(personelEkran, image=my_image)
    arkaplan.place(x=0, y=0)
    personelEkran.iconbitmap("YICO.ico")
    # Greeting built from the staff member's name/surname.
    # NOTE(review): data4/data5 are raw fetchall() lists, so the label renders
    # tuple syntax — confirm whether a plain name was intended.
    mycursor.execute("SELECT ad FROM personel WHERE mail = %s", (isim.get(),))
    data4 = mycursor.fetchall()
    mycursor.execute("SELECT soyad FROM personel WHERE mail = %s", (isim.get(),))
    data5=mycursor.fetchall()
    textyazisii=("Hoşgeldin",data4 , data5)
    label2 = Label(master=personelEkran, text=textyazisii, font=("Helvetica","15","bold"),bg="#b89073", fg="white",width=30,height=5)
    label2.place(x=250,y=50)
    # Total revenue across all orders.
    mycursor.execute("SELECT SUM(toplam) FROM siparisler")
    kazanc=mycursor.fetchone()
    toplamkazanctext=("Kazancınız", kazanc)
    ToplamKazanc = Label(master=personelEkran,text=toplamkazanctext,font=("Helvetica","15","bold"),bg="#b89073", fg="white",width=30,height=5).place(x=650,y=50)
    # Per-menu report buttons.
    geleneksel = Button(personelEkran)
    geleneksel.config(text="Geleneksel Menü Rapor", command=lambda: gelenekselFonk(veritab,personelEkran,my_image), font="30"
                      , width=23 ,bg="#502b26" ,fg="#b89073" )
    geleneksel.place(x=250, y=200)
    kis = Button(personelEkran)
    kis.config(text="Kış Menü Rapor", command=lambda: kisFonk(veritab,personelEkran,my_image), font="30"
               , width=23 ,bg="#502b26" ,fg="#b89073" )
    kis.place(x=522, y=200)
    modern = Button(personelEkran)
    modern.config(text="Modern Sofra Menü Rapor", command=lambda: modernFonk(veritab,personelEkran,my_image), font="30"
                  , width=23 ,bg="#502b26" ,fg="#b89073" )
    modern.place(x=794, y=200)
    # Per-drink report buttons.
    kola = Button(personelEkran)
    kola.config(text="Kola Rapor", command=lambda: kolaFonk(veritab,personelEkran,my_image), font="30"
                , width=23 ,bg="#502b26" ,fg="#b89073" )
    kola.place(x=25, y=250)
    fanta = Button(personelEkran)
    fanta.config(text="Fanta Rapor", command=lambda: fantaFonk(veritab,personelEkran,my_image), font="30"
                 , width=23 ,bg="#502b26" ,fg="#b89073" )
    fanta.place(x=275, y=250)
    icetea = Button(personelEkran)
    icetea.config(text="Ice Tea Rapor", command=lambda: iceteaFonk(veritab,personelEkran,my_image), font="30"
                  , width=23 ,bg="#502b26" ,fg="#b89073" )
    icetea.place(x=525, y=250)
    limonata = Button(personelEkran)
    limonata.config(text="Limonata Rapor", command=lambda: limonataFonk(veritab,personelEkran,my_image), font="30"
                    , width=23 ,bg="#502b26" ,fg="#b89073" )
    limonata.place(x=775, y=250)
    ayran = Button(personelEkran)
    ayran.config(text="Ayran Rapor", command=lambda: ayranFonk(veritab,personelEkran,my_image), font="30"
                 , width=23 ,bg="#502b26" ,fg="#b89073" )
    ayran.place(x=1025, y=250)
    # All orders, shown in a treeview at the bottom of the dashboard.
    mycursor.execute("SELECT * FROM siparisler")
    data = mycursor.fetchall()
    columns = ["SiparişId","Ad","Soyad","Seçilen Menü","Sipariş Adeti","İçecek","İçecek Adeti","Toplam","Siparişte Katalog Var Mı", "Adres","Sipariş Tarihi", "Email"]
    frame =Frame(personelEkran, width=500,height=150)
    frame.pack(side="bottom")
    treeview_olustur(master=frame, columns=columns, data=data,width=330)
def _rapor_penceresi(veritab, personelEkran, my_image, sorgu, columns):
    """Shared body of the eight report handlers below: open a standard report
    window, run *sorgu* and show the result in a treeview.

    CONSISTENCY FIX: the eight *Fonk functions were byte-for-byte copies that
    differed only in the SQL string and column headings; they are now thin
    wrappers around this helper.
    """
    rapor = Toplevel(personelEkran)
    rapor.title("Personel Arayüzü")
    rapor.geometry("1280x600+20+0")
    rapor.resizable(FALSE, FALSE)
    arkaplan = Label(rapor, image=my_image)
    arkaplan.place(x=0, y=0)
    personelEkran.iconbitmap("YICO.ico")
    mycursor = veritab.cursor()
    mycursor.execute(sorgu)
    data = mycursor.fetchall()
    treeview_olustur(master=rapor, columns=columns, data=data, width=550)


def gelenekselFonk(veritab,personelEkran,my_image):
    """Report: units sold of 'Geleneksel Menü'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT menu, SUM(menu_adet) as 'menü adeti' FROM siparisler WHERE menu = 'Geleneksel Menü'",
                     ["Menü", "Satılan Adet"])


def kisFonk(veritab,personelEkran,my_image):
    """Report: units sold of 'Kış Menü'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT menu, SUM(menu_adet) as 'menü adeti' FROM siparisler WHERE menu = 'Kış Menü'",
                     ["Menü", "Satılan Adet"])


def modernFonk(veritab,personelEkran,my_image):
    """Report: units sold of 'Modern Sofra Menü'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT menu, SUM(menu_adet) as 'menü adeti' FROM siparisler WHERE menu = 'Modern Sofra Menü'",
                     ["Menü", "Satılan Adet"])


def kolaFonk(veritab,personelEkran,my_image):
    """Report: units sold of the drink 'KOLA'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT icecek, SUM(icecek_adet) as 'İçecek adeti' FROM siparisler WHERE icecek = 'KOLA'",
                     ["İçecek", "Satılan Adet"])


def fantaFonk(veritab,personelEkran,my_image):
    """Report: units sold of the drink 'FANTA'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT icecek, SUM(icecek_adet) as 'İçecek adeti' FROM siparisler WHERE icecek = 'FANTA'",
                     ["İçecek", "Satılan Adet"])


def iceteaFonk(veritab,personelEkran,my_image):
    """Report: units sold of the drink 'ICE TEA'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT icecek, SUM(icecek_adet) as 'İçecek adeti' FROM siparisler WHERE icecek = 'ICE TEA'",
                     ["İçecek", "Satılan Adet"])


def limonataFonk(veritab,personelEkran,my_image):
    """Report: units sold of the drink 'LIMONATA'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT icecek, SUM(icecek_adet) as 'İçecek adeti' FROM siparisler WHERE icecek = 'LIMONATA'",
                     ["İçecek", "Satılan Adet"])


def ayranFonk(veritab,personelEkran,my_image):
    """Report: units sold of the drink 'AYRAN'."""
    _rapor_penceresi(veritab, personelEkran, my_image,
                     "SELECT icecek, SUM(icecek_adet) as 'İçecek adeti' FROM siparisler WHERE icecek = 'AYRAN'",
                     ["İçecek", "Satılan Adet"])
def personelgirisYap(veritab, mycursor, isim, sifre,personelekrangiris,my_image):
    """Validate the staff login form against the `personel` table; on success
    open the staff dashboard.

    NOTE(review): near-duplicate of girisYap (which checks `musteriler`) —
    candidate for sharing a common implementation.
    """
    def kullanici_giris(tup):
        # tup = (mail, password); returns matching staff rows.
        mycursor.execute("SELECT * FROM personel WHERE mail=%s AND sifre=%s", tup)
        return (mycursor.fetchall())
    data = (
        isim.get(),
        sifre.get()
    )
    if isim.get() == "":
        messagebox.showinfo("Uyarı", "Önce maili giriniz!")
    elif sifre.get() == "":
        messagebox.showinfo("Uyarı", "Önce şifreyi giriniz!")
    else:
        res = kullanici_giris(data)
        if res:
            messagebox.showinfo("Uyarı", "Giriş Başarılı!")
            personelekran(veritab,isim, mycursor,personelekrangiris,my_image)
        else:
            messagebox.showinfo("Uyarı", "Eposta/Şifre Hatalı!")
def personelgirisi(veritab,ekran):
    """Replace the main window with the staff login window.

    Destroys the customer-facing root window and starts a new Tk main loop
    for the staff login form; submission delegates to personelgirisYap.
    """
    ekran.destroy()
    personelekrangiris=Tk()
    personelekrangiris.title("Personel Ekranı")
    personelekrangiris.geometry("1280x600+20+0")
    personelekrangiris.resizable(FALSE, FALSE)
    my_image = ImageTk.PhotoImage(Image.open("bgimage.jpg"))
    arkaplan = Label(master=personelekrangiris, image=my_image)
    arkaplan.place(x=0, y=0)
    personelekrangiris.iconbitmap("YICO.ico")
    mycursor = veritab.cursor()
    isimSor = Label(personelekrangiris, text="Mail :",font="30", width=15, bg="#2a1415" ,fg="#b89073" )
    isimSor.place(x=430,y=210)
    isim = Entry(personelekrangiris ,font="30", bg="#b89073")
    isim.place(x=628,y=210)
    sifreSor = Label(personelekrangiris, text="Şifreniz(Max:8) : ", font="30", width=15 ,bg="#2a1415" ,fg="#b89073" )
    sifreSor.place(x=430, y=250)
    # Trace callback: clamps the password entry to at most 8 characters.
    def limitSizePas(*args):
        value = sifresinir.get()
        if len(value) > 8: sifresinir.set(value[:8])
    sifresinir = StringVar()
    sifresinir.trace('w', limitSizePas)
    sifre = Entry(personelekrangiris, show="*",textvariable=sifresinir ,font="30" , width=20 , bg="#b89073")
    sifre.place(x=628, y=250)
    buton = Button(personelekrangiris)
    buton.config(text="Giriş yap!", command=lambda: personelgirisYap(veritab, mycursor, isim, sifre,personelekrangiris,my_image), font="30"
                 , width=23 ,bg="#502b26" ,fg="#b89073" )
    buton.place(x=510, y=290)
    personelekrangiris.mainloop()
def main():
    """Application entry point: connect to the `catering` MySQL database and
    show the main window with customer login, sign-up and staff-login actions."""
    # NOTE(review): credentials are hard-coded (root, empty password) — fine
    # for a local demo, not for deployment.
    veritab = mysql.connector.connect(
        host="localhost",
        user="root",
        passwd="",
        database="catering"
    )
    mycursor = veritab.cursor()
    ekran = Tk()
    ekran.title("Catering Sistemi")
    ekran.geometry("1280x600+20+0")
    ekran.resizable(FALSE, FALSE)
    menubar(ekran)
    # Background image must stay referenced (my_image) or Tk garbage-collects it.
    my_image = ImageTk.PhotoImage(Image.open("bgimage.jpg"))
    arkaplan = Label(ekran, image=my_image)
    arkaplan.place(x=0, y=00)
    ekran.iconbitmap("YICO.ico")
    karsilama = Label(ekran)
    karsilama.config(text="Catering Sistemimize Hoş Geldiniz", font="25",width=38 , bg="#2a1415" ,fg="#b89073" )
    karsilama.place(x=430, y=170)
    isimSor = Label(ekran, text="Mail :",font="30", width=15, bg="#2a1415" ,fg="#b89073" )
    isimSor.place(x=430,y=210)
    isim = Entry(ekran ,font="30", bg="#b89073" , border="0")
    isim.place(x=628,y=210)
    sifreSor = Label(ekran, text="Şifreniz(Max:8) : ", font="30", width=15 ,bg="#2a1415" ,fg="#b89073" )
    sifreSor.place(x=430, y=250)
    # Trace callback: clamps the password entry to at most 8 characters.
    def limitSizePas(*args):
        value = sifresinir.get()
        if len(value) > 8: sifresinir.set(value[:8])
    sifresinir = StringVar()
    sifresinir.trace('w', limitSizePas)
    sifre = Entry(ekran, show="*",textvariable=sifresinir ,font="30" , width=20 , bg="#b89073", border="0")
    sifre.place(x=628, y=250)
    buton = Button(ekran)
    buton.config(text="Giriş Yap", command=lambda: girisYap(veritab, mycursor, isim, sifre, ekran), font="30"
                 , width=23 ,bg="#502b26" ,fg="#b89073" )
    buton.place(x=510, y=290)
    uye = Button(ekran)
    uye.config(text="Üye Ol", command=lambda: uyeol(veritab, ekran, my_image),font="30",
               width=23, bg="#502b26" ,fg="#b89073" )
    uye.place(x=510, y=335 )
    personel = Button(ekran)
    personel.config(text="Personel Girişi", command=lambda : personelgirisi(veritab,ekran),font="30",width=23, bg="#502b26" ,fg="#b89073" )
    personel.place(x=1000,y=30)
    ekran.mainloop()
main()
| oguzcanuzunoner/python-tkinter | catering_siparis_bilgi_yonetim_sistemi/catering.py | catering.py | py | 31,212 | python | tr | code | 1 | github-code | 13 |
20346904673 | from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from sdc11073 import loghelper
from .alarmprovider import GenericAlarmProvider
from .audiopauseprovider import AudioPauseProvider
from .clockprovider import GenericSDCClockProvider
from .componentprovider import GenericSetComponentStateOperationProvider
from .contextprovider import EnsembleContextProvider, LocationContextProvider
from .metricprovider import GenericMetricProvider
from .operationprovider import OperationProvider
from .patientcontextprovider import GenericPatientContextProvider
if TYPE_CHECKING:
from sdc11073.mdib import ProviderMdib
from sdc11073.mdib.descriptorcontainers import AbstractOperationDescriptorProtocol
from sdc11073.mdib.transactions import TransactionManagerProtocol
from sdc11073.provider.operations import OperationDefinitionBase
from sdc11073.provider.sco import AbstractScoOperationsRegistry
from .providerbase import OperationClassGetter
class ProviderRoleProtocol(Protocol):
    """A ProviderRole implements operation handlers and can run other jobs that the role requires.

    This Interface is expected by BaseProduct.
    """
    def stop(self):
        """Stop worker threads etc."""
    def init_operations(self, sco: AbstractScoOperationsRegistry):
        """Init instance.

        Method is called on start.
        """
    def make_operation_instance(self,
                                operation_descriptor_container: AbstractOperationDescriptorProtocol,
                                operation_cls_getter: OperationClassGetter) -> OperationDefinitionBase | None:
        """Return a callable for this operation or None.

        If a mdib already has operations defined, this method can connect a handler to a given operation descriptor.
        Use case: initialization from an existing mdib.
        """
    def make_missing_operations(self, sco: AbstractScoOperationsRegistry) -> list[OperationDefinitionBase]:
        """Make_missing_operations is called after all existing operations from mdib have been registered.

        If a role provider needs to add operations beyond that, it can do it here.
        """
    def on_pre_commit(self, mdib: ProviderMdib, transaction: TransactionManagerProtocol):
        """Manipulate the transaction before it is committed (e.g. add more states)."""
    def on_post_commit(self, mdib: ProviderMdib, transaction: TransactionManagerProtocol):
        """Implement actions after the transaction."""
        ...
class BaseProduct:
    """A Product is associated to a single sco.

    It provides the operation handlers for the operations in this sco.
    If a mdib contains multiple sco instances, there must be multiple Products.
    """

    def __init__(self,
                 mdib: ProviderMdib,
                 sco: AbstractScoOperationsRegistry,
                 log_prefix: str | None = None):
        """Create a product for *sco*, logging with an optional *log_prefix*."""
        self._sco = sco
        self._mdib = mdib
        self._model = mdib.data_model
        # order matters, first come, first serve:
        # start with most specific providers, end with most general ones
        self._ordered_providers: list[ProviderRoleProtocol] = []
        self._logger = loghelper.get_logger_adapter(f'sdc.device.{self.__class__.__name__}', log_prefix)

    def _all_providers_sorted(self) -> list[ProviderRoleProtocol]:
        """Return the role providers in dispatch order (most specific first)."""
        return self._ordered_providers

    def init_operations(self):
        """Register all actively provided operations.

        First lets every role provider hook into already existing mdib
        operations, then lets them add any operations still missing, and
        finally logs which sco operations remain without a handler.
        """
        sco_handle = self._sco.sco_descriptor_container.Handle
        self._logger.info('init_operations for sco %s.', sco_handle)
        for role_handler in self._all_providers_sorted():
            role_handler.init_operations(self._sco)
        self._register_existing_mdib_operations(self._sco)
        for role_handler in self._all_providers_sorted():
            operations = role_handler.make_missing_operations(self._sco)
            if operations:
                info = ', '.join([f'{op.OP_DESCR_QNAME.localname} {op.handle}' for op in operations])
                self._logger.info('role handler %s added operations to mdib: %s',
                                  role_handler.__class__.__name__, info)
                for operation in operations:
                    self._sco.register_operation(operation)
        # Sanity check: every operation descriptor under this sco should now have a handler.
        all_sco_operations = self._mdib.descriptions.parent_handle.get(self._sco.sco_descriptor_container.Handle, [])
        all_op_handles = [op.Handle for op in all_sco_operations]
        all_not_registered_op_handles = [op_h for op_h in all_op_handles if
                                         self._sco.get_operation_by_handle(op_h) is None]
        if not all_op_handles:
            self._logger.info('sco %s has no operations in mdib.', sco_handle)
        elif all_not_registered_op_handles:
            self._logger.info('sco %s has operations without handler! handles = %r',
                              sco_handle, all_not_registered_op_handles)
        else:
            self._logger.info('sco %s: all operations have a handler.', sco_handle)
        self._mdib.xtra.mk_state_containers_for_all_descriptors()
        self._mdib.pre_commit_handler = self._on_pre_commit
        self._mdib.post_commit_handler = self._on_post_commit

    def stop(self):
        """Stop all role providers."""
        for role_handler in self._all_providers_sorted():
            role_handler.stop()

    def make_operation_instance(self,
                                operation_descriptor_container: AbstractOperationDescriptorProtocol,
                                operation_cls_getter: OperationClassGetter) -> OperationDefinitionBase | None:
        """Call make_operation_instance of all role providers, until the first returns not None."""
        operation_target_handle = operation_descriptor_container.OperationTarget
        operation_target_descr = self._mdib.descriptions.handle.get_one(operation_target_handle,
                                                                        allow_none=True)  # descriptor container
        if operation_target_descr is None:
            # this operation is incomplete, the operation target does not exist. Registration not possible.
            self._logger.warning('Operation %s: target %s does not exist, will not register operation',
                                 operation_descriptor_container.Handle, operation_target_handle)
            return None
        for role_handler in self._all_providers_sorted():
            operation = role_handler.make_operation_instance(operation_descriptor_container, operation_cls_getter)
            if operation is not None:
                # BUG FIX: the original message mixed f-string braces into a %-style
                # format and printed '{operation_descriptor_container}' literally.
                self._logger.debug('%s provided operation for %s',
                                   role_handler.__class__.__name__, operation_descriptor_container)
                return operation
        # BUG FIX: referenced the loop variable role_handler here, which is unbound
        # when no providers are registered (NameError) and misleading otherwise.
        self._logger.debug('%s: no handler for %s', self.__class__.__name__, operation_descriptor_container)
        return None

    def _register_existing_mdib_operations(self, sco: AbstractScoOperationsRegistry):
        """Attach handlers to operation descriptors that already exist in the mdib."""
        operation_descriptor_containers = self._mdib.descriptions.parent_handle.get(
            self._sco.sco_descriptor_container.Handle, [])
        for descriptor in operation_descriptor_containers:
            registered_op = sco.get_operation_by_handle(descriptor.Handle)
            if registered_op is None:
                self._logger.debug('found unregistered %s in mdib, handle=%s, code=%r target=%s',
                                   descriptor.NODETYPE.localname, descriptor.Handle, descriptor.Type,
                                   descriptor.OperationTarget)
                operation = self.make_operation_instance(descriptor, sco.operation_cls_getter)
                if operation is not None:
                    sco.register_operation(operation)

    def _on_pre_commit(self, mdib: ProviderMdib, transaction: TransactionManagerProtocol):
        """Forward the pre-commit hook to every role provider."""
        for provider in self._all_providers_sorted():
            provider.on_pre_commit(mdib, transaction)

    def _on_post_commit(self, mdib: ProviderMdib, transaction: TransactionManagerProtocol):
        """Forward the post-commit hook to every role provider."""
        for provider in self._all_providers_sorted():
            provider.on_post_commit(mdib, transaction)
class DefaultProduct(BaseProduct):
    """Default Product."""

    def __init__(self,
                 mdib: ProviderMdib,
                 sco: AbstractScoOperationsRegistry,
                 log_prefix: str | None = None):
        super().__init__(mdib, sco, log_prefix)
        # Kept as an attribute because a test accesses it directly.
        self.metric_provider = GenericMetricProvider(mdib, log_prefix=log_prefix)
        default_providers = [
            AudioPauseProvider(mdib, log_prefix=log_prefix),
            GenericSDCClockProvider(mdib, log_prefix=log_prefix),
            GenericPatientContextProvider(mdib, log_prefix=log_prefix),
            GenericAlarmProvider(mdib, log_prefix=log_prefix),
            self.metric_provider,
            OperationProvider(mdib, log_prefix=log_prefix),
            GenericSetComponentStateOperationProvider(mdib, log_prefix=log_prefix),
        ]
        self._ordered_providers.extend(default_providers)
class ExtendedProduct(DefaultProduct):
    """Add EnsembleContextProvider and LocationContextProvider."""

    def __init__(self,
                 mdib: ProviderMdib,
                 sco: AbstractScoOperationsRegistry,
                 log_prefix: str | None = None):
        super().__init__(mdib, sco, log_prefix)
        extra_providers = [
            EnsembleContextProvider(mdib, log_prefix=log_prefix),
            LocationContextProvider(mdib, log_prefix=log_prefix),
        ]
        self._ordered_providers.extend(extra_providers)
| Draegerwerk/sdc11073 | src/sdc11073/roles/product.py | product.py | py | 9,839 | python | en | code | 27 | github-code | 13 |
39714334198 | #Practice
#function:
"""def printme(str):
"this prints a passed string into this function"
print(str)
return;
printme(str = "My string")"""
"""def printinfo(name, age):
"this prints a passed info into this function"
print ("Name: ", name)
print ("Age: ", age)
return;
printinfo(age=50, name="miki")"""
#default arguments:
"""def printinfo(name, age = 35):
"this prints a passed info into this function"
print ("Name: ", name)
print ("Age: ", age)
return;
printinfo(age=50, name="miki")
printinfo(name="miki")"""
#non-keyword variable argument:
"""def printinfo(arg1, *vartuple):
print(arg1)
for var in vartuple:
print(var)
return
print("Output is")
printinfo(10)
printinfo(70, 60, 50)"""
#Anonymus Functions
"""sum = lambda num1, num2: num1 + num2;
print("Value of total : ",sum(10,20))
print("Value of total : ",sum(20,20))"""
# Named function (compare with the anonymous lambda version above)
def add_numbers(num1, num2):
    """Return the sum of *num1* and *num2*, printing it first.

    Renamed from ``sum`` so the Python builtin ``sum`` is no longer shadowed
    (the only caller is the demo line below, so the rename is safe here).
    """
    total = num1 + num2
    print("Inside the Function : ",(total))
    return total


total = add_numbers(10, 20)
print("Outside the function : ",(total))
| johnranyer13/CPE-105 | function.py | function.py | py | 1,143 | python | en | code | 0 | github-code | 13 |
24622777942 | #!/usr/bin/env python
# coding=utf-8
import re
import os
import sys
import time
import codecs
import numpy as np
import pandas as pd
import pickle
from tqdm import tqdm
from itertools import chain
def _read_file(filename = "../data/train"):
file = codecs.open(filename + ".txt", encoding='utf-8')
file_pos = codecs.open(filename + "_fpos.txt", "w", encoding = "utf-8")
for line in file:
#print line
leftB = []
rightB = []
replace_word = []
record_word = []
len_init_word = []
if u'[' in line:
#print "ha"
if u']' in line:
#print "ha"
for i in range(len(line)):
if line[i] == '[':
leftB.append(i)
if line[i] == ']':
rightB.append(i)
#print leftB, rightB
#print line[leftB[2] : rightB[2]]
for i in range(len(leftB)):
new_word = []
word_tag_paireeee = re.split(' ', line[leftB[i]+1:rightB[i]])
# print word_tag_paireeee[1].split("/")[0]
word_tag_paireeee = np.asarray(word_tag_paireeee)
#print np.shape(word_tag_paireeee)
len_init_word.append(len(word_tag_paireeee))
for j in range(len(word_tag_paireeee)):
new_word.append(word_tag_paireeee[j].split("/")[0])
replace_word.append(u"".join(new_word[:]))
#print replace_word[:len(word_tag_paireeee)]
#print len(replace_word[0]), 'I got full words here'
for i in range(len(rightB)):
len_remove = 0
len_replace = 0
for j in range(0, i):
len_remove += rightB[j] - leftB[j] #- 2*(len_init_word[j-1] -1)
len_replace += len(replace_word[j])
leftB[i] = leftB[i] - len_remove + len_replace
rightB[i] = rightB[i] - len_remove + len_replace
#print line[leftB[i]:rightB[i]]
line = line[:leftB[i]] + replace_word[i] + "/" +line[rightB[i]+1:]
file_pos.write(line)
#print line
#print line[330:344]
def main():
    """Entry point: merge bracketed annotations in the training data."""
    _read_file("../data/train")


if __name__ == "__main__":
    main()
| rohithkodali/pos_blstm | data_utils/gen_ner_file.py | gen_ner_file.py | py | 2,284 | python | en | code | 1 | github-code | 13 |
42530557177 | # 2048 Adaptation : Jake Levi : May 2018
# 5 hours (5/11/18 1:00)
# 3 hours (5/11/18 16:18) FINISHED ENGINE
# 4.5 hours (5/12/18 2:16) FINISHED GAME
import pygame
import copy
import random
from roundrects import aa_round_rect, round_rect
# World State
class Game2048:
    """World state: board dimensions (in cells), pixel resolution, cells, screen."""

    def __init__(self, width, height):
        self.width = width
        self.height = height
        # Each cell is rendered as a 128x128 pixel square.
        self.res = (128 * width, 128 * height)
        # board[row][col]; every cell starts empty (0).
        self.board = [[0] * width for _ in range(height)]
        self.screen = None
# ----------------------------GAME FUNCTIONS---------------------------- #
# Board Direction -> Board
# Takes in the new board and returns the board 'tilted' in passed direction
def tilt(board, direction):
    """Return a new board fully 'tilted' in *direction* ("L"/"R"/"U"/"D").

    Repeatedly applies single_tilt to every non-zero cell until a full pass
    changes nothing.  For L/U the board is scanned top-left to bottom-right;
    for R/D it is scanned in reverse so tiles nearest the wall move first.
    NOTE(review): because passes repeat until stable, a tile created by a
    merge can merge again within the same move (e.g. 2,2,4 -> 8), which
    differs from standard 2048 rules -- confirm this is intended.
    """
    # 1. Iterate through board until non-zero value
    # 2. Single tilt the non-zero, continue
    new_board = copy.deepcopy(board)
    has_changed = True
    while has_changed:
        has_changed = False
        if direction == "L" or direction == "U":
            # Forward scan: cells closest to the left/top wall first.
            for i in range(len(board)):
                for j in range(len(board[0])):
                    if new_board[i][j] != 0:
                        updated_board = single_tilt(new_board, direction, i, j)
                        has_changed = has_changed or updated_board != new_board
                        new_board = updated_board
        else:
            # Reverse scan for R/D: cells closest to the right/bottom wall first.
            i = len(board) - 1
            j = len(board[i]) - 1
            while i >= 0:
                while j >= 0:
                    if new_board[i][j] != 0:
                        updated_board = single_tilt(new_board, direction, i, j)
                        has_changed = has_changed or updated_board != new_board
                        new_board = updated_board
                    j -= 1
                j = len(board[i]) - 1
                i -= 1
    return new_board
# Board Direction Number Number -> Game
# Takes in a direction and reference coordinates, changes the game board
# Board Direction Number Number -> Board
def single_tilt(board, direction, i, j):
    """Slide/merge the tile at (i, j) one step toward *direction*.

    Returns a new board (the input is never mutated).  If the neighbouring
    cell is empty the tile slides into it and the slide is repeated
    recursively; if the neighbour holds an equal value the two merge into a
    doubled tile.  Out-of-bounds moves return the board unchanged.
    """
    result = copy.deepcopy(board)
    # Row/column offsets for each tilt direction.
    offsets = {"L": (0, -1), "R": (0, 1), "U": (-1, 0), "D": (1, 0)}
    di, dj = offsets.get(direction, (0, 0))
    ti, tj = i + di, j + dj
    no_move = (
        (di, dj) == (0, 0)
        or not (0 <= ti < len(result))
        or not (0 <= tj < len(result[0]))
    )
    if no_move:
        return result  # wall (or unknown direction): nothing to do
    neighbour = result[ti][tj]
    if neighbour == 0:
        # Empty neighbour: slide one step, then keep sliding.
        result[ti][tj] = result[i][j]
        result[i][j] = 0
        result = single_tilt(result, direction, ti, tj)
    elif neighbour == result[i][j]:
        # Equal neighbour: merge into a doubled tile.
        result[ti][tj] *= 2
        result[i][j] = 0
    return result
# [Maybe Board] -> [Maybe Board]
# Spawns a random tile on the board, or game overs if there are no spaces left
# 1. Find all spaces in array with value 0, return an list of their indexes
# 2. If the game is False or the array is empty, return False
# 3. Randomly choose a value from the array from step 1 DONE
# 4. Place either a 4 (10%) or 2 (90%) at the board index of the result of step 2 DONE
# [Maybe Board] -> [Maybe Board]
def spawn_tile(board):
    """Return a copy of *board* with one new tile added, or False if full.

    The new tile is a 4 with 10% probability and a 2 with 90% probability.
    BUG FIX: the original weights [3, 1] produced 25% fours, contradicting
    the documented (and standard 2048) 90/10 split; [9, 1] matches it.
    """
    free_spaces = index_of(board, 0)
    if len(free_spaces) == 0:
        return False
    # A randomly chosen board space from the list of empty spaces, as (row, col).
    empty = free_spaces[random.randrange(len(free_spaces))]
    new_num = random.choices([2, 4], [9, 1])
    # Takes (y, x) and converts it to board[y][x], sets the value to 2 or 4.
    new_board = copy.deepcopy(board)
    new_board[empty[0]][empty[1]] = new_num[0]
    return new_board
# [Maybe Board] Number -> [List-of Number]
# Takes in a board and a value and returns a list of the indexes where the key appears
# [Maybe Board] Number -> [List-of Number]
def index_of(board, key):
    """Return (row, col) pairs of every cell in *board* that equals *key*.

    A falsy board (None, [], False) yields an empty list.  The board is
    assumed rectangular: every row has len(board[0]) columns.
    """
    if not board:
        return []
    width = len(board[0])
    return [
        (r, c)
        for r in range(len(board))
        for c in range(width)
        if board[r][c] == key
    ]
# Game -> None
# Prints the board to the terminal
def print_board(game):
    """Print each board row on its own line (terminal debug view)."""
    for row in game.board:
        print(row)
# Game -> Boolean
# Determines whether the board has any other moves left
# Game -> Boolean
def is_game_over(game):
    """True when no tilt in any direction would change the board."""
    return all(tilt(game.board, d) == game.board for d in ("L", "U", "R", "D"))
# Game -> None
# Greys out screen, displaying/returning score & board
def grey_screen(game):
    """Dim the whole window with a translucent dark-grey overlay (game over)."""
    grey_out = pygame.Surface(game.res)
    grey_out.fill((47, 79, 79))     # dark slate grey
    grey_out.set_alpha(100)         # translucent, so the final board stays visible
    game.screen.blit(grey_out, (0, 0))
    pygame.display.flip()
def score(game):
    """Score = highest tile; ties get a bonus scaled by board coverage."""
    best = 0
    count = 0
    for value in (v for row in game.board for v in row):
        if value > best:
            best, count = value, 1
        elif value == best:
            count += 1
    if count == 1:
        return best
    # Multiple copies of the highest tile: scale by their share of the board.
    bonus = count / (game.width * game.height)
    return int(best * (1 + bonus))
# ----------------------------GRAPHICS---------------------------- #
# Game -> Surface
# Takes in the width and height (# of squares) of the desired grid
def draw_board(game):
    """Build the static background surface: one rounded grey frame per cell."""
    res = (128 * game.width, 128 * game.height)
    background = pygame.Surface(res)
    for x in range(game.width):
        for y in range(game.height):
            # Rounded fill plus a plain rect border (8 px) for each 128x128 cell.
            aa_round_rect(background, (128 * x, 128 * y, 128, 128), (169, 169, 169), 5, 8)
            pygame.draw.rect(background, (169, 169, 169), (128 * x, 128 * y, 128, 128), 8)
    return background
def write(val):
    """Render string *val* as white bold Arial text, shrinking with length."""
    # Divide 100 by the number of digits to reach the correct size
    # 1 digit = 100
    # 2 digits = 90
    my_font = pygame.font.SysFont("Arial", 100 - (len(val) * 10), True)
    my_text = my_font.render(val, True, (255, 255, 255))
    my_text = my_text.convert_alpha()
    return my_text
# Number -> Surface
# Takes in a value and produces a tile with unique colour
def make_tile(val):
    """Build a 112x112 coloured tile surface with *val* centred on it."""
    tile = pygame.Surface((112, 112))
    round_rect(tile, (0, 0, 112, 112), val_to_colour(val), 4)
    tile.set_colorkey((0, 0, 0))  # black becomes transparent
    text_surface = write(str(val))
    # Hand-tuned text offsets by digit count:
    # 1 digit = 35, 0
    # 2 digits = 20, 5
    # 3 digits = 5, 10
    # 4 digits = 0, 15
    x = 35 - (15 * (len(str(val)) - 1))
    if len(str(val)) >= 4:
        x = 0  # clamp: 4+ digits would otherwise go negative
    y = (len(str(val)) - 1) * 5
    tile.blit(text_surface, (x, y))
    return tile
# Number -> (Number, Number, Number)
# Takes in a value and produces a colour tuple according to the dictionary
# Number -> (Number, Number, Number)
def val_to_colour(val):
    """Map a tile value to its RGB colour; off-palette values get slate grey."""
    palette = {
        2: (173, 216, 230),
        4: (102, 205, 170),
        8: (176, 196, 222),
        16: (60, 179, 113),
        32: (255, 160, 122),
        64: (0, 191, 255),
        128: (244, 164, 96),
        256: (50, 205, 50),
        512: (218, 165, 32),
        1024: (34, 139, 34),
        2048: (30, 144, 255),
        4096: (147, 112, 219),
        8192: (255, 69, 0),
        16384: (220, 20, 60),
        32768: (210, 105, 30),
        65536: (255, 127, 80),
        131072: (124, 252, 0),
    }
    default_colour = (47, 79, 79)  # dark slate grey
    return palette.get(val, default_colour)
# Game -> Image
# Outputs the graphical representation of the board (tiles only)
def tiles_render(game):
    """Render every non-empty cell of the board onto one transparent surface."""
    tiles = pygame.Surface((128 * game.width, 128 * game.height))
    for x in range(game.width):
        for y in range(game.height):
            if game.board[y][x] != 0:
                # +8 px offset centres the 112x112 tile inside its 128x128 cell.
                tiles.blit(make_tile(game.board[y][x]), ((128 * x) + 8, (128 * y) + 8))
    tiles.set_colorkey((0, 0, 0))  # black background becomes transparent
    return tiles
# Number Number [Number] -> Number
# Number Number [Number] -> Number
def main(w=5, h=5, ai=0):
    """Run one game on a w x h board and return the final score.

    ai=0 is interactive (arrow keys); ai=1 plays random moves.
    NOTE(review): with ai != 0 the pygame display is never initialised, yet
    display calls below run unconditionally -- confirm the ai path works.
    """
    # ----------------------------SETUP---------------------------- #
    game = Game2048(w, h)
    if ai == 0:
        pygame.init()
        game.screen = pygame.display.set_mode(game.res)
    max_tile = pow(2, (game.width * game.height + 1))
    pygame.display.set_caption("Max Tile Possible: " + str(max_tile))
    clock = pygame.time.Clock()
    fps = 60  # FPS cap
    background = draw_board(game)
    game.screen.blit(background, (0, 0))
    pygame.display.flip()
    # ----------------------------GAME LOOP---------------------------- #
    # 1. Take in keypress
    # 2. Tilt board
    # 3. Spawn a random tile
    # Initial double spawn_tile() to begin the game with 2 tiles
    game.board = spawn_tile(spawn_tile(game.board))
    print_board(game)
    game.screen.blit(tiles_render(game), (0, 0))
    pygame.display.flip()
    while True:
        old_board = copy.deepcopy(game.board)
        clock.tick(fps)
        if ai == 0:
            # Block until exactly one move (or quit) is received.
            wait_input = True
            while wait_input:
                for event in pygame.event.get():
                    if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT:
                        game.board = tilt(game.board, "L")
                        wait_input = False
                    elif event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT:
                        game.board = tilt(game.board, "R")
                        wait_input = False
                    elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
                        game.board = tilt(game.board, "U")
                        wait_input = False
                    elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
                        game.board = tilt(game.board, "D")
                        wait_input = False
                    if event.type == pygame.QUIT:
                        # Return Score if user quits
                        return score(game)
        elif ai == 1:
            # NOTE(review): the tilt result is discarded, so the board never
            # changes on the AI path -- likely should be
            # game.board = tilt(game.board, ...); confirm intent.
            tilt(game.board, random.choice(["L", "R", "U", "D"]))
        else:
            print("Invalid AI Setting")
            exit(1)
        if game.board != old_board:
            # The move changed something: spawn a tile and redraw.
            game.board = spawn_tile(game.board)
            # Make board animation look more natural in console
            print("\n" * 100)
            game.screen.blit(background, (0, 0))
            game.screen.blit(tiles_render(game), (0, 0))
            pygame.display.flip()
            print_board(game)
        elif is_game_over(game):
            # No move possible: dim the screen and wait for SPACE (restart)
            # or any other key / quit (return the score).
            grey_screen(game)
            wait_input = True
            while wait_input:
                for event in pygame.event.get():
                    if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                        print("Score", score(game))
                        print("\n" * 100)
                        print("New Game")
                        print("\n")
                        main(game.width, game.height)
                    if event.type == pygame.QUIT or event.type == pygame.KEYDOWN:
                        return score(game)
if __name__ == "__main__":
    # Ask for board dimensions, then run the game and report the score.
    w = int(input("Width: "))
    h = int(input("Height: "))
    # BUG FIX: the original called print_board() here with no argument,
    # which raised TypeError before the game could start; the call also
    # served no purpose before a game exists, so it is removed.
    print("score: ", main(w, h))
| towrofterra/2048 | game_2048.py | game_2048.py | py | 11,871 | python | en | code | 0 | github-code | 13 |
21859634160 | import numpy as np
import pretty_midi
import librosa
import scipy.io.wavfile as wavfile
import torch
from torch.utils.data import Dataset, DataLoader
import torch.nn as nn
import torch.nn.functional as F
from torch.optim import Adam
from tqdm import tqdm
import os
# viz
import matplotlib.pyplot as plt
import json
# Anomaly detection helps localise NaN/inf gradients (slows training down).
torch.autograd.set_detect_anomaly(True)
"""
where am i?
"""
# Directory layout relative to the current working directory.
PATH = os.getcwd() + '/'
data_path = PATH + 'data/'
# NOTE(review): data_path already ends with '/', so this yields 'data//nesmdb_midi/...';
# harmless on POSIX but worth normalising.
midi_path = data_path + '/nesmdb_midi/train/'
output_path = data_path + 'output/'
model_path = PATH + 'model/'
print(f'PATH: {PATH}')
print(f'data path: {data_path}')
print(f'midi path: {midi_path}')
print(f'output path: {output_path}')
print(f'model path: {model_path}')
"""
device
"""
device = 'cuda' if torch.cuda.is_available() else 'cpu'
print(f'device: {device}')
"""
global parameters
"""
# Training hyperparameters.
batch_size = 100
lr = 1e-3
latent_dim = 2
"""
load midi files
"""
# Take a random sample of 1000 file names (with replacement) from the training set.
sample_space = sorted(os.listdir( midi_path ))
sample_space = np.random.choice( sample_space, 1000 )
print(f'sample space size: {len(sample_space)}')
"""
load midi files
"""
midi = {}
midi_error = []
for s in tqdm( sample_space ):
try:
# piano roll representation
aux = pretty_midi.PrettyMIDI( midi_path + s ).get_piano_roll( fs=100 )
if aux.shape[1] > 0:
midi[ s ] = aux
else:
midi_error.append(s)
except:
#print(f'error: {s}')
midi_error.append(s)
print(f'sample space: {len(midi)}')
print(f'corrupted files: {len(midi_error)}')
"""
data loader
"""
class AudioDataset(Dataset):
    """Wraps piano-roll arrays; items are normalised log-mel spectrogram tensors."""

    def __init__(self, midi_files):
        self.midi_files = midi_files

    def __len__(self):
        return len(self.midi_files)

    def __getitem__(self, idx):
        roll = self.midi_files[idx]
        # Log-frequency (mel) spectrogram computed directly from the
        # pre-computed piano roll (S=roll.T), not from raw audio (y=None).
        mel = librosa.feature.melspectrogram(
            y=None,
            sr=100,
            S=roll.T,
            n_fft=1024,
            hop_length=512,
            power=2.0,
            n_mels=128)
        log_db = librosa.amplitude_to_db(mel, ref=1.0)
        # Min-max normalise into [0, 1]; eps guards a constant spectrogram.
        eps = 1e-38
        spec = torch.from_numpy(log_db).float()
        lo = torch.min(spec)
        hi = torch.max(spec)
        spec = (spec - lo) / (hi - lo + eps)
        return spec.unsqueeze(0)  # add channel dimension
# Wrap the parsed piano rolls in a Dataset and batch them for training.
# drop_last=True keeps every batch exactly batch_size samples.
dataset = AudioDataset( list(midi.values()) )
dataloader = torch.utils.data.DataLoader(
    dataset,
    batch_size=batch_size,
    shuffle=True,
    drop_last=True
)
# Debug pass: print shape/statistics of every batch once before training.
for i, spec_tensor in enumerate( dataloader ):
    print(f'batch {i}: {spec_tensor.size()}')
    print(f'batch variance: {spec_tensor.var()}')
    print(f'max, min: {spec_tensor.max()}, {spec_tensor.min()}')
    print('\n')
"""
vanilla vae
"""
class Encoder(nn.Module):
    """Two-layer MLP encoder producing the mean and log-variance of q(z|x)."""

    def __init__(self, input_dim, hidden_dim, latent_dim):
        super(Encoder, self).__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, hidden_dim)
        self.fc3 = nn.Linear(hidden_dim, latent_dim)  # mean head
        self.fc4 = nn.Linear(hidden_dim, latent_dim)  # log-variance head
        # BUG FIX: the original nn.ReLU(0.2) passed 0.2 as ReLU's `inplace`
        # flag (truthy -> inplace=True); ReLU takes no slope argument.  A
        # LeakyReLU(0.2) was probably intended, but plain ReLU() preserves
        # the numeric behaviour of the original exactly.
        self.relu = nn.ReLU()
        # (removed `self.training = True`: nn.Module already maintains
        # the `training` flag via train()/eval().)

    def forward(self, x):
        """Return (mu, logvar) for a batch x of shape (N, input_dim)."""
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        mu = self.fc3(x)
        logvar = self.fc4(x)
        return mu, logvar
class Decoder(nn.Module):
    """Two-layer MLP decoder mapping latent z to a sigmoid reconstruction."""

    def __init__(self, latent_dim, hidden_dim, output_dim):
        super(Decoder, self).__init__()
        self.fc1 = nn.Linear(latent_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, hidden_dim)
        self.fc3 = nn.Linear(hidden_dim, output_dim)
        # BUG FIX: nn.ReLU(0.2) set the `inplace` flag, not a slope
        # (see Encoder); plain ReLU() is numerically identical.
        self.relu = nn.ReLU()

    def forward(self, x):
        """Return x_hat in (0, 1) with shape (N, output_dim)."""
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        x = self.fc3(x)
        x_hat = torch.sigmoid(x)
        return x_hat
class VAE(nn.Module):
    """Glue module: encode, sample via the reparametrisation trick, decode."""

    def __init__(self, Encoder, Decoder):
        super(VAE, self).__init__()
        self.encoder = Encoder
        self.decoder = Decoder

    def reparametrization(self, mu, logvar):
        """Sample z = mu + sigma * eps with eps ~ N(0, I)."""
        eps = torch.randn_like(logvar).to(device)
        return mu + eps * torch.exp(0.5 * logvar)

    def forward(self, x):
        """Return (x_hat, mu, logvar) for an input batch x."""
        mu, logvar = self.encoder(x)
        z = self.reparametrization(mu, logvar)
        return self.decoder(z), mu, logvar
"""
model
"""
encoder = Encoder( input_dim=128*128, hidden_dim=512, latent_dim=latent_dim ).to(device)
decoder = Decoder( latent_dim=latent_dim, hidden_dim=512, output_dim=128*128 ).to(device)
model = VAE( encoder, decoder ).to(device)
print(model)
"""
loss function
"""
def loss_function(x_hat, x, mu, logvar):
    """Negative ELBO: summed binary cross-entropy reconstruction loss plus
    the KL divergence between q(z|x) = N(mu, exp(logvar)) and N(0, I)."""
    reconstruction = F.binary_cross_entropy(x_hat, x, reduction='sum')
    kl_divergence = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
    return reconstruction + kl_divergence
BCE_loss = nn.BCELoss() # binary cross entropy loss -- appears unused below (loss_function is used instead); verify before removing
optimizer = Adam( model.parameters(), lr=lr )
"""
train
"""
epochs = 100
best_loss = float('inf')
loss_dict = {}
for epoch in range( epochs ):
    overall_loss = 0
    num_samples = 0
    for i, spec_tensor in enumerate( dataloader ):
        # Flatten each (1, 128, 128) spectrogram into a 16384-dim vector.
        x = spec_tensor.view(-1, 128*128)
        x = x.to(device)
        optimizer.zero_grad()
        x_hat, mean, logvar = model( x )
        loss = loss_function( x_hat, x, mean, logvar )
        loss.backward()
        overall_loss += loss.item()
        optimizer.step()
        num_samples += batch_size
    # BUG FIX: the original divided by (batch_size * i), i.e. by the *index*
    # of the last batch -- off by one, and ZeroDivisionError when the loader
    # yields a single batch.  Count processed samples instead.
    average_loss = overall_loss / num_samples
    print(f'epoch: { epoch }, loss: { average_loss }')
    loss_dict[ epoch ] = average_loss
    # Checkpoint only when the epoch improved on the best loss so far.
    if average_loss < best_loss:
        best_loss = average_loss
        torch.save( model.state_dict(), model_path + 'vae.pth' )
    # Persist the loss history every epoch so a crash keeps partial results.
    loss_dict_file = model_path + 'loss_dict.json'
    with open( loss_dict_file, 'w' ) as fp:
        json.dump( loss_dict, fp )
38714896986 | import sys
import config as cfg
import data
## Inputs:
# PathToData: Training Data
# Num: Number of training samples
# StartNum: Start processing from this sample number
## Outputs:
# voxels_preprocessed.vtu
# Pull run parameters from the project config module.
dataPath = cfg.Data_path_ps
num = cfg.num_simulations_ps
startNum = cfg.startNum_simulations_ps + 1  # samples are 1-indexed
valid = 0
# Preprocess samples startNum .. startNum+num-1, counting the successes.
for i in range( startNum, num+startNum ):
    print(str(i) + "/" + str(num))
    if data.preprocess( dataPath, i ):
        valid += 1
print( "Converted {:d} samples.".format( valid ) )
| Gaurav1302/contour2reg | Generate/preprocessData.py | preprocessData.py | py | 508 | python | en | code | 0 | github-code | 13 |
1199807385 | # 给你二叉树的根节点 root ,返回它节点值的 前序 遍历。
#
# 示例 1:
# 输入:root = [1,null,2,3]
# 输出:[1,2,3]
#
# 示例 2:
# 输入:root = []
# 输出:[]
#
# 示例 3:
# 输入:root = [1]
# 输出:[1]
#
# 示例 4:
# 输入:root = [1,2]
# 输出:[1,2]
#
# 示例 5:
# 输入:root = [1,null,2]
# 输出:[1,2]
#
# 提示:
# 树中节点数目在范围 [0, 100] 内
# -100 <= Node.val <= 100
# Definition for a binary tree node.
from typing import Optional, List
class TreeNode:
    """Binary tree node with a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def f(self, list: List[int], root: Optional[TreeNode]):
        """Append the pre-order sequence rooted at *root* onto *list*."""
        if not root:
            return
        list.append(root.val)
        for child in (root.left, root.right):
            self.f(list, child)

    def preorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
        """Return the node values of *root* in pre-order (root, left, right)."""
        values: List[int] = []
        self.f(values, root)
        return values
# Demo tree matching example 1: root 1, right child 2, whose left child is 3.
node1 = TreeNode(1)
node2 = TreeNode(2)
node3 = TreeNode(3)
node1.right = node2
node2.left = node3
solution = Solution()
print(solution.preorderTraversal(node1))
32859313048 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import itertools
from ...lib.tools import get_terminal_size, lookahead
class HexViewerMixin:
    """Mixin providing CLI options and rendering for a hexdump preview."""

    def hexviewer_interface(self, argp):
        """Add the hexdump-related options to argument parser *argp*."""
        from ...lib.argformats import number
        argp.add_argument('-A', '--no-addr', dest='hexaddr', action='store_false',
            help='Do not show byte offsets in hexdump.')
        argp.add_argument('-W', '--width', metavar='N', type=number, default=0,
            help='Specify the number of hexadecimal characters to use in preview.')
        argp.add_argument('-E', '--expand', action='store_true',
            help='Do not compress sequences of identical lines in hexdump')
        return argp

    def hexaddr_size(self, data):
        """Return the number of hex digits needed to address len(data) bytes.

        Finds the smallest k (1..15) with len(data) < 16**k; falls back to 16.
        """
        addr_width = 16
        for k in range(1, 16):
            # (1 << (k << 2)) == 16**k
            if len(data) < (1 << (k << 2)):
                addr_width = k
                break
        return addr_width

    def hexdump(self, data):
        """Yield hexdump lines for *data*: offset, hex bytes, ASCII preview.

        Column count follows --width, or is derived from the terminal width
        (falling back to 16-byte rows when it cannot be determined).  Unless
        --expand is given, runs of identical rows are collapsed into a
        single "repeats N times" marker line.
        """
        import re
        if self.args.width:
            columns = self.args.width
        else:
            # this will default to 16 byte wide output if
            # stdout is not a terminal or if its width can
            # not be determined for other reasons.
            try:
                columns = get_terminal_size() or 75
            except OSError:
                columns = 16
            else:
                if self.args.hexaddr:
                    columns -= self.hexaddr_size(data)
                    columns -= 1  # for the separator
                # each byte needs 3 chars of hex dump + 1 char of preview
                columns = (columns - 2) // 4
        columns = min(columns, len(data))
        # zip_longest pads the final partial row with None.
        lines = itertools.zip_longest(*([iter(data)] * columns))
        address_width = max(self.hexaddr_size(data), 4)
        previous = None
        prevcount = 0
        for k, (last, line) in enumerate(lookahead(lines)):
            chunk = bytes(b for b in line if b is not None)
            if not self.args.expand:
                # Collapse consecutive identical rows into one marker line.
                if chunk == previous and not last:
                    prevcount += 1
                    continue
                elif prevcount > 0:
                    msg = F' repeats {prevcount} times '
                    yield F'{".":.>{address_width}}: {msg:=^{3*columns-1}} {"":=<{columns}}'
                    prevcount = 0
            dump = ' '.join(F'{b:02X}' for b in chunk)
            # Non-printable bytes are shown as '.'.
            read = re.sub(B'[^!-~]', B'.', chunk).decode('ascii')
            yield F'{k*columns:0{address_width}X}: {dump:<{3*columns}} {read:<{columns}}'
            if not self.args.expand:
                previous = chunk
| chubbymaggie/refinery | refinery/units/sinks/__init__.py | __init__.py | py | 2,530 | python | en | code | null | github-code | 13 |
35173678764 | #!/usr/bin/python3
"""Test module definition for Amenity class"""
import unittest
import json
import pep8
import datetime
from models.amenity import Amenity
from models.base_model import BaseModel
class TestAmenity(unittest.TestCase):
    """Unit tests for the Amenity class (docs and PEP8 conformance)."""
    def test_doc_module(self):
        """Check that models/amenity.py has a module docstring."""
        doc = Amenity.__doc__
        self.assertGreater(len(doc), 1)
    def test_pep8_conformance_amenity(self):
        """Check that models/amenity.py conforms to PEP8."""
        pep8style = pep8.StyleGuide(quiet=True)
        result = pep8style.check_files(['models/amenity.py'])
        self.assertEqual(result.total_errors, 0,
                         "Code style errors and (or warnings) found")
    def test_pep8_conformance_test_amenity(self):
        """Check that this test file itself conforms to PEP8."""
        pep8style = pep8.StyleGuide(quiet=True)
        res = pep8style.check_files(['tests/test_models/test_amenity.py'])
        self.assertEqual(res.total_errors, 0,
                        "Code style errors (and warnings) found")
    def test_doc_constructor(self):
        """Check that Amenity's constructor has a docstring."""
        doc = Amenity.__init__.__doc__
        self.assertGreater(len(doc), 1)
| faithnekesa/AirBnB_clone | tests/test_models/test_amenity.py | test_amenity.py | py | 1,288 | python | en | code | 0 | github-code | 13 |
73998243219 | import ast
import dis
import typing
import asyncio
import os
# Default values for block syntax
# One-time block that is called during .init() call before template is rendered
ONE_TIME_BLOCK_START = '{1{!'
"""
One-time expression that is evaluated during .init() call before template is
rendered. This block has no result and is removed from resulting render.
Defines block start
Example:
```
{1{!
def useful_function(something):
return something + ' is useful'
!}1}
```
"""
ONE_TIME_BLOCK_END = '!}1}'
"""
One-time expression that is evaluated during .init() call before template is
rendered. This block has no result and is removed from resulting render.
Defines block end
Example:
```
{1{!
def useful_function(something):
return something + ' is useful'
!}1}
"""
ONE_TIME_EXPRESSION_START = '{1{%'
"""
One-time expression that is evaluated during .init() call before template is
rendered. Expression result is inserted into the final template instead of this
expression block.
Defines block start
Example:
```
{1{%
useful_function('my shitcode')
%}1}
"""
ONE_TIME_EXPRESSION_END = '%}1}'
"""
One-time expression that is evaluated during .init() call before template is
rendered. Expression result is inserted into the final template instead of this
expression block.
Defines block end
Example:
```
{1{%
useful_function('my shitcode')
%}1}
"""
COMMENT_BLOCK_START = '{{#'
"""
Defines comment block that is entirely deleted from resulting render including
block content.
Defines block start
Example:
```
{{#
def not_useful_function(something):
return something + ' is NOT useful'
log(not_useful_function('my code'))
#}}
"""
COMMENT_BLOCK_END = '#}}'
"""
Defines comment block that is entirely deleted from resulting render including
block content.
Defines block end
Example:
```
{{#
def not_useful_function(something):
return something + ' is NOT useful'
log(not_useful_function('my code'))
#}}
"""
BLOCK_START = '{{!'
"""
Renderable expression that is evaluated during .render() call before template is
rendered. This block has no result and is removed from resulting render.
Defines block start
Example:
```
{1{!
total_usefulness = 0
!}1}
{{!
def useful_function(something):
total_usefulness += 1
return something + ' is useful ' + total_usefulness + ' times
!}}
```
"""
BLOCK_END = '!}}'
"""
Renderable expression that is evaluated during .render() call before template is
rendered. This block has no result and is removed from resulting render.
Defines block end
Example:
```
{1{!
total_usefulness = 0
!}1}
{{!
def useful_function(something):
total_usefulness += 1
return something + ' is useful ' + total_usefulness + ' times
!}}
"""
EXPRESSION_START = '{{%'
"""
Renderable expression that is evaluated during .render() call before template is
rendered. Expression result is inserted into the final result instead of this
expression block.
Defines block start
Example:
```
{{%
useful_function('my shitcode')
%}}
"""
EXPRESSION_END = '%}}'
"""
Renderable expression that is evaluated during .render() call before template is
rendered. Expression result is inserted into the final result instead of this
expression block.
Defines block end
Example:
```
{{%
useful_function('my shitcode')
%}}
"""
def findall(s: str, sub: str, ind: int=0):
    """
    Yield every index at which `sub` occurs in `s`, scanning left to right
    starting from position `ind`. Overlapping occurrences are reported
    because the search resumes one character past each match start.
    """
    position = s.find(sub, ind)
    while position >= 0:
        yield position
        position = s.find(sub, position + 1)
def classenum(l: list, clazz: int):
    """
    Pair every element of `l` with the tag `clazz`.
    Equivalent to list(zip(l, [clazz] * len(l))) but without allocating a
    throwaway list of repeated tags.
    """
    return [(item, clazz) for item in l]
def countsameleft(s: str, c: str):
    """
    Count how many consecutive characters equal to `c` appear at the start
    of `s`.
    Returns len(s) when the entire string consists of `c`; the previous
    implementation raised IndexError in that case (and on empty input)
    because it indexed past the end of the string.
    """
    count = 0
    while count < len(s) and s[count] == c:
        count += 1
    return count
def autotablete(code: str):
    """
    Strip the common leading indentation from a code snippet so it compiles
    as top-level code. The indent of the first non-empty line defines the
    amount removed from every following line; a later non-empty line with
    less indent raises SyntaxError. Each line is right-stripped and the
    snippet's trailing whitespace is removed.
    """
    # Drop trailing whitespace/newlines of the whole snippet first
    code = code.rstrip()
    if not code:
        return code
    # Right-strip every individual line
    lines = [raw.rstrip() for raw in code.split('\n')]
    # Locate the first line that actually has content
    for start, reference in enumerate(lines):
        if len(reference) > 0:
            break
    else:
        return code
    # Already flush left -- nothing to remove
    if reference[0] not in ('\t', ' '):
        return code
    # The first non-empty line fixes both the indent character and width
    indent_char = reference[0]
    indent_width = countsameleft(reference, indent_char)
    # Dedent every non-empty line from the reference line onward
    for index in range(start, len(lines)):
        if len(lines[index]):
            if countsameleft(lines[index], indent_char) < indent_width:
                raise SyntaxError('Ident of the first line does not match ident of the other lines')
            lines[index] = lines[index][indent_width:]
    return '\n'.join(lines)
class TemplateFragment:
    """
    Base class for a single fragment of a parsed template. Concrete
    subclasses hold either plain text or compiled code and override
    render().
    """
    def __init__(self):
        pass
    def is_one_time(self) -> bool:
        """
        True when this fragment must be evaluated exactly once, inside
        Template.init(), rather than on every render. The base class is
        never one-time.
        """
        return False
    async def render(self, context: dict, scope: dict) -> typing.Union[str, typing.Awaitable[str]]:
        """
        Produce this fragment's contribution to the rendered output.
        `context` is shared by the whole template and persists between
        .render() calls; `scope` is a per-call locals dict. Subclasses
        return a string or a coroutine yielding one; the base
        implementation contributes nothing and returns None.
        """
        return None
class StringTemplateFragment(TemplateFragment):
    """
    Fragment holding a literal chunk of template text; rendering returns
    the stored string unchanged.
    """
    # BUG FIX: __slots__ was the bare string 'value' (a parenthesized string,
    # not a tuple). A single string happens to be accepted by CPython, but it
    # is a latent trap for anyone adding a second slot; declare a real tuple.
    __slots__ = (
        'value',
    )
    def __init__(self, value: str):
        super().__init__()
        self.value = value
    def is_one_time(self) -> bool:
        # Plain text never requires a one-time init pass
        return False
    async def render(self, context: dict, scope: dict) -> typing.Union[str, typing.Awaitable[str]]:
        return self.value
    def __str__(self):
        return self.value
    def __repr__(self):
        return self.value
class ExpressionTemplateFragment(TemplateFragment):
    """
    Fragment holding a Python expression compiled in 'eval' mode. The
    expression may be one-time (evaluated during Template.init()) or
    per-render, and may use top-level await.
    """
    __slots__ = (
        'evaluable',
        'one_time',
        'expression_start_tag',
        'expression_end_tag',
        'source_string'
    )
    def __init__(self, source_string: str, one_time: bool = False, expression_start_tag: str=None,
                 expression_end_tag: str=None,
                 save_source: bool=True,
                 _tag_index: int=None):
        """
        Compile `source_string` (dedented via autotablete()) into an
        evaluable code object. `save_source` keeps the raw text for
        __str__/__repr__; `_tag_index` only customizes the pseudo-filename
        shown in tracebacks.
        """
        super().__init__()
        file_name = '<ExpressionTemplateFragment>' if _tag_index is None else f'<ExpressionTemplateFragment_{_tag_index}>'
        self.evaluable = compile(autotablete(source_string), file_name, 'eval', flags=ast.PyCF_ALLOW_TOP_LEVEL_AWAIT)
        self.one_time = one_time
        self.expression_start_tag = expression_start_tag or (ONE_TIME_EXPRESSION_START if self.one_time else EXPRESSION_START)
        # BUG FIX: the default end tag previously fell back to
        # EXPRESSION_START, so __str__/__repr__ printed '{{% ... {{%' for
        # render-time expressions.
        self.expression_end_tag = expression_end_tag or (ONE_TIME_EXPRESSION_END if self.one_time else EXPRESSION_END)
        self.source_string = source_string if save_source else None
    def is_one_time(self) -> bool:
        return self.one_time
    async def render(self, context: dict, scope: dict) -> typing.Union[str, typing.Awaitable[str]]:
        """
        Evaluate the expression with `context` as globals and `scope` as
        locals; await the result when it is a coroutine.
        """
        result = eval(self.evaluable, context, scope)
        if asyncio.iscoroutine(result):
            return await result
        return result
    def __str__(self):
        if self.source_string is None:
            # NOTE(review): dis.dis() prints to stdout and returns None, so
            # this renders the literal 'None' between the tags; kept as in
            # the original since source is normally saved.
            return f'{self.expression_start_tag}\n{dis.dis(self.evaluable)}\n{self.expression_end_tag}'
        return f'{self.expression_start_tag}\n{self.source_string}\n{self.expression_end_tag}'
    def __repr__(self):
        # __repr__ previously duplicated __str__ verbatim; delegate instead
        return self.__str__()
class BlockTemplateFragment(TemplateFragment):
    """
    Fragment holding a Python code block compiled in 'exec' mode. The block
    may be one-time (executed during Template.init()) or per-render; it
    produces no output and may use top-level await.
    """
    __slots__ = (
        'executable',
        'one_time',
        'block_start_tag',
        'block_end_tag',
        'source_string'
    )
    def __init__(self, source_string: str, one_time: bool = False, block_start_tag: str=None,
                 block_end_tag: str=None,
                 save_source: bool=True,
                 _tag_index: int=None):
        """
        Compile `source_string` (dedented via autotablete()) into an
        executable code object. `save_source` keeps the raw text for
        __str__/__repr__; `_tag_index` only customizes the pseudo-filename
        shown in tracebacks.
        """
        super().__init__()
        file_name = '<BlockTemplateFragment>' if _tag_index is None else f'<BlockTemplateFragment{_tag_index}>'
        self.executable = compile(autotablete(source_string), file_name, 'exec', flags=ast.PyCF_ALLOW_TOP_LEVEL_AWAIT)
        self.one_time = one_time
        self.block_start_tag = block_start_tag or (ONE_TIME_BLOCK_START if self.one_time else BLOCK_START)
        # BUG FIX: the default end tag previously fell back to BLOCK_START,
        # so __str__/__repr__ printed '{{! ... {{!' for render-time blocks.
        self.block_end_tag = block_end_tag or (ONE_TIME_BLOCK_END if self.one_time else BLOCK_END)
        self.source_string = source_string if save_source else None
    def is_one_time(self) -> bool:
        return self.one_time
    async def render(self, context: dict, scope: dict) -> typing.Union[str, typing.Awaitable[str]]:
        """
        Execute the block with `context` as globals and `scope` as locals;
        blocks contribute nothing to the output, so None is returned.
        """
        result = eval(self.executable, context, scope)
        if asyncio.iscoroutine(result):
            await result
        return None
    def __str__(self):
        if self.source_string is None:
            # NOTE(review): dis.dis() prints to stdout and returns None;
            # kept as in the original since source is normally saved.
            return f'{self.block_start_tag}\n{dis.dis(self.executable)}\n{self.block_end_tag}'
        return f'{self.block_start_tag}\n{self.source_string}\n{self.block_end_tag}'
    def __repr__(self):
        # __repr__ previously duplicated __str__ verbatim; delegate instead
        return self.__str__()
class TemplateParser:
    """
    Utility class providing template parsing functionality.
    Each tag parameter defaults to the module-level constant of the same
    name and keeps its documented meaning.
    `strip_string` makes the parser strip plain-text fragments found between
    code blocks. `save_source_string` stores the original source inside each
    code fragment so it can be printed later.
    """
    __slots__ = (
        'one_time_block_start',
        'one_time_block_end',
        'one_time_expression_start',
        'one_time_expression_end',
        'comment_block_start',
        'comment_block_end',
        'block_start',
        'block_end',
        'expression_start',
        'expression_end',
        'save_source_string',
        'strip_string'
    )
    def __init__(self, one_time_block_start: str=ONE_TIME_BLOCK_START,
                 one_time_block_end: str=ONE_TIME_BLOCK_END,
                 one_time_expression_start: str=ONE_TIME_EXPRESSION_START,
                 one_time_expression_end: str=ONE_TIME_EXPRESSION_END,
                 comment_block_start: str=COMMENT_BLOCK_START,
                 comment_block_end: str=COMMENT_BLOCK_END,
                 block_start: str=BLOCK_START,
                 block_end: str=BLOCK_END,
                 expression_start: str=EXPRESSION_START,
                 expression_end: str=EXPRESSION_END,
                 strip_string: bool=True,
                 save_source_string: bool=True):
        self.one_time_block_start = one_time_block_start
        self.one_time_block_end = one_time_block_end
        self.one_time_expression_start = one_time_expression_start
        self.one_time_expression_end = one_time_expression_end
        self.comment_block_start = comment_block_start
        self.comment_block_end = comment_block_end
        self.block_start = block_start
        self.block_end = block_end
        self.expression_start = expression_start
        self.expression_end = expression_end
        self.save_source_string = save_source_string
        self.strip_string = strip_string
    def parse(self, source: str):
        """
        Parse `source` and return the list of TemplateFragment instances in
        document order.
        Raises RuntimeError on unbalanced or improperly nested tags.
        """
        # ---> First pass: remove comment blocks entirely
        comment_start = classenum(list(findall(source, self.comment_block_start)), 0)
        comment_end = classenum(list(findall(source, self.comment_block_end)), 1)
        # A surplus of openers is reported later by the removal loop; only a
        # surplus of closers is impossible to recover from here
        if len(comment_start) < len(comment_end):
            raise RuntimeError(f'{self.comment_block_start} and {self.comment_block_end} tags count mismatch: {len(comment_start)} != {len(comment_end)}')
        ordered_comment_tags = sorted(comment_start + comment_end, key=lambda x: x[0])
        # Tag positions were computed on the original string; `offset` tracks
        # how many characters have been cut out so far
        offset = 0
        if len(ordered_comment_tags):
            cursor = 0
            while cursor < len(ordered_comment_tags):
                # Closing tag found before any opening tag
                if ordered_comment_tags[cursor][1] == 1:
                    raise RuntimeError(f'Unmatched {self.comment_block_end} tag')
                comment_start_index = ordered_comment_tags[cursor][0]
                # Find the closest closing tag (nested openers are swallowed)
                while True:
                    if cursor >= len(ordered_comment_tags):
                        raise RuntimeError(f'Unmatched {self.comment_block_start} tag')
                    if ordered_comment_tags[cursor][1] == 1:
                        break
                    cursor += 1
                # BUG FIX: use the configured end tag length instead of the
                # module constant COMMENT_BLOCK_END so custom comment tags work
                source = source[:comment_start_index - offset] + source[ordered_comment_tags[cursor][0] + len(self.comment_block_end) - offset:]
                offset += (ordered_comment_tags[cursor][0] + len(self.comment_block_end)) - comment_start_index
                cursor += 1
        # ---> Second pass: locate all remaining tags and sort them
        # Even ids open a region, id + 1 closes it; the list maps id -> literal
        tag_by_id = [
            self.one_time_block_start,
            self.one_time_block_end,
            self.one_time_expression_start,
            self.one_time_expression_end,
            self.block_start,
            self.block_end,
            self.expression_start,
            self.expression_end
        ]
        one_time_block_start = classenum(list(findall(source, self.one_time_block_start)), 0)
        one_time_block_end = classenum(list(findall(source, self.one_time_block_end)), 1)
        one_time_expression_start = classenum(list(findall(source, self.one_time_expression_start)), 2)
        one_time_expression_end = classenum(list(findall(source, self.one_time_expression_end)), 3)
        block_start = classenum(list(findall(source, self.block_start)), 4)
        block_end = classenum(list(findall(source, self.block_end)), 5)
        expression_start = classenum(list(findall(source, self.expression_start)), 6)
        expression_end = classenum(list(findall(source, self.expression_end)), 7)
        # Complimentary check: every opener must have a closer
        if len(one_time_block_start) != len(one_time_block_end):
            raise RuntimeError(f'{self.one_time_block_start} and {self.one_time_block_end} tags count mismatch: {len(one_time_block_start)} != {len(one_time_block_end)}')
        if len(one_time_expression_start) != len(one_time_expression_end):
            raise RuntimeError(f'{self.one_time_expression_start} and {self.one_time_expression_end} tags count mismatch: {len(one_time_expression_start)} != {len(one_time_expression_end)}')
        if len(block_start) != len(block_end):
            raise RuntimeError(f'{self.block_start} and {self.block_end} tags count mismatch: {len(block_start)} != {len(block_end)}')
        if len(expression_start) != len(expression_end):
            raise RuntimeError(f'{self.expression_start} and {self.expression_end} tags count mismatch: {len(expression_start)} != {len(expression_end)}')
        # Sort all tags by position in the source
        ordered_tags = one_time_block_start + one_time_block_end
        ordered_tags += one_time_expression_start + one_time_expression_end
        ordered_tags += block_start + block_end
        ordered_tags += expression_start + expression_end
        ordered_tags = sorted(ordered_tags, key=lambda x: x[0])
        # Validate strict open/close alternation of matching tag pairs
        if len(ordered_tags):
            cursor = 0
            last_open_tag_id = -1
            while cursor < len(ordered_tags):
                if last_open_tag_id != -1:
                    # Expect the matching closing tag (opener id + 1)
                    if ordered_tags[cursor][1] - 1 != last_open_tag_id:
                        raise RuntimeError(f'Unmatched {tag_by_id[last_open_tag_id]} tag')
                    # else
                    last_open_tag_id = -1
                else:
                    # Expect an opening tag (even id)
                    if ordered_tags[cursor][1] % 2 == 1:
                        raise RuntimeError(f'Unmatched {tag_by_id[ordered_tags[cursor][1]]} tag')
                    # else
                    last_open_tag_id = ordered_tags[cursor][1]
                cursor += 1
            # BUG FIX: a trailing unclosed opener previously indexed
            # ordered_tags[cursor] one past the end (IndexError); report it
            # as an unmatched tag instead
            if last_open_tag_id != -1:
                raise RuntimeError(f'Unmatched {tag_by_id[last_open_tag_id]} tag')
        # ---> Third pass: split source into fragments by tag pair type
        if len(ordered_tags) == 0:
            return [ StringTemplateFragment(source) ]
        template_fragments = []
        # Occurrence counter per fragment kind, used for traceback filenames
        fragment_types_count = [ 0 ] * 4
        last_source_index = 0
        for cursor in range(0, len(ordered_tags), 2):
            # Plain text between the previous tag pair and this one
            if last_source_index < ordered_tags[cursor][0]:
                substring = source[last_source_index : ordered_tags[cursor][0]]
                # Ignore whitespace-only strings for optimization
                stripped = substring.strip()
                if len(stripped):
                    template_fragments.append(StringTemplateFragment(stripped if self.strip_string else substring))
            substring = source[ordered_tags[cursor][0] + len(tag_by_id[ordered_tags[cursor][1]]) : ordered_tags[cursor + 1][0]]
            fragment_types_count[ordered_tags[cursor][1] // 2] += 1
            # Skip empty blocks
            if len(substring.strip()) == 0:
                last_source_index = ordered_tags[cursor + 1][0] + len(tag_by_id[ordered_tags[cursor + 1][1]])
                continue
            if ordered_tags[cursor][1] == 0:
                template_fragments.append(BlockTemplateFragment(substring, one_time=True, block_start_tag=self.one_time_block_start, block_end_tag=self.one_time_block_end, save_source=self.save_source_string, _tag_index=fragment_types_count[ordered_tags[cursor][1] // 2]))
            elif ordered_tags[cursor][1] == 2:
                template_fragments.append(ExpressionTemplateFragment(substring, one_time=True, expression_start_tag=self.one_time_expression_start, expression_end_tag=self.one_time_expression_end, save_source=self.save_source_string, _tag_index=fragment_types_count[ordered_tags[cursor][1] // 2]))
            elif ordered_tags[cursor][1] == 4:
                # BUG FIX: render-time blocks previously received the one-time
                # start tag as block_start_tag, the start tag as block_end_tag
                # and the end tag literal as save_source
                template_fragments.append(BlockTemplateFragment(substring, one_time=False, block_start_tag=self.block_start, block_end_tag=self.block_end, save_source=self.save_source_string, _tag_index=fragment_types_count[ordered_tags[cursor][1] // 2]))
            elif ordered_tags[cursor][1] == 6:
                template_fragments.append(ExpressionTemplateFragment(substring, one_time=False, expression_start_tag=self.expression_start, expression_end_tag=self.expression_end, save_source=self.save_source_string, _tag_index=fragment_types_count[ordered_tags[cursor][1] // 2]))
            last_source_index = ordered_tags[cursor + 1][0] + len(tag_by_id[ordered_tags[cursor + 1][1]])
        # Trailing plain text after the last tag pair
        if last_source_index < len(source):
            substring = source[last_source_index : len(source)]
            # Ignore whitespace-only strings for optimization
            stripped = substring.strip()
            if len(stripped):
                template_fragments.append(StringTemplateFragment(stripped if self.strip_string else substring))
        return template_fragments
class Template:
    """
    Represents a single parsed template that can be loaded from a file or a
    string and rendered via generator, into a string, or into a file.

    Default tag configuration (see TemplateParser):
        one-time block       {1{! ... !}1}   executed once, inside .init()
        one-time expression  {1{% ... %}1}   evaluated once, result inlined
        comment              {{#  ... #}}    removed entirely
        render-time block    {{!  ... !}}    executed on every render
        render-time expr     {{%  ... %}}    evaluated on every render
    Excessive indentation inside code blocks is removed automatically (see
    autotablete()), so blocks may be indented to match surrounding markup.
    Code blocks share globals through the template context; use the `global`
    keyword inside blocks to publish names, as if each block were a function
    body.
    """
    __slots__ = (
        'fragments',
        'context',
        'initialized',
        'template_parser'
    )
    def __init__(self, source: str, template_parser: TemplateParser=None, context: dict=None):
        """
        Parse `source` into fragments. When the template contains one-time
        fragments it must be initialized with .init() before rendering.
        `template_parser` defines the parser to use; None selects the
        default configuration.
        `context` is the globals dict shared by all code fragments; a new
        one is created when None.
        """
        # BUG FIX: the parser is now stored on the instance (and declared in
        # __slots__); __str__/__repr__ previously read self.template_parser,
        # which was never assigned, so printing a Template raised
        # AttributeError.
        self.template_parser = template_parser or TemplateParser()
        self.fragments = self.template_parser.parse(source) if source is not None else []
        self.context = context or {}
        # A template without one-time fragments needs no .init() call
        self.initialized = True
        for fragment in self.fragments:
            if fragment.is_one_time():
                self.initialized = False
                break
    def is_initialized(self) -> bool:
        """
        Returns True if the template was initialized. A template is
        initialized by default when it contains no one-time init fragments.
        """
        return self.initialized
    async def init(self, scope: dict=None, strip_string: bool=True, none_ok: bool=False, init_ok: bool=False, wrap_scope: bool=False) -> 'Template':
        """
        Evaluate all one-time fragments and replace or remove them in place.
        Returns self so the call can be chained inline.
        `scope`        locals dict passed to each fragment (None = no locals).
        `strip_string` strip expression results before inserting them.
        `none_ok`      allow None expression results (the fragment is then
                       dropped); otherwise None raises RuntimeError.
        `init_ok`      make a repeated .init() call a no-op instead of an
                       error.
        `wrap_scope`   copy `scope` per fragment so fragments cannot leak
                       locals into each other.
        After .init(), one-time fragments are replaced by string fragments
        (expressions) or removed (blocks), which also changes str(template).
        """
        if self.initialized:
            if init_ok:
                return self
            raise RuntimeError('Template already initialized')
        to_remove = []
        for i, fragment in enumerate(self.fragments):
            if fragment.is_one_time():
                if isinstance(fragment, BlockTemplateFragment):
                    # Blocks produce no output; drop them after execution
                    to_remove.append(i)
                    await fragment.render(context=self.context, scope=(scope if not wrap_scope else dict(scope or {})))
                elif isinstance(fragment, ExpressionTemplateFragment):
                    value = await fragment.render(context=self.context, scope=(scope if not wrap_scope else dict(scope or {})))
                    if not none_ok and value is None:
                        raise RuntimeError(f'Expression returned None at {fragment.evaluable.co_filename}')
                    # Drop fragments that produced nothing
                    if value is None:
                        to_remove.append(i)
                        continue
                    value = str(value)
                    # Drop fragments that stripped down to nothing
                    if strip_string:
                        value = value.strip()
                        if len(value) == 0:
                            to_remove.append(i)
                            continue
                    # Freeze the result as plain text
                    self.fragments[i] = StringTemplateFragment(value)
                else:
                    raise RuntimeError(f'Unexpected type of one-time init fragment {type(fragment)}')
        to_remove = set(to_remove)
        self.fragments[:] = [ f for i, f in enumerate(self.fragments) if i not in to_remove ]
        self.initialized = True
        return self
    async def render_generator(self, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False) -> typing.AsyncGenerator[str, None]:
        """
        Render the template as an async generator yielding the string
        representation of each fragment in order.
        Parameters have the same meaning as in .init().
        Raises RuntimeError when the template was not initialized.
        """
        if not self.initialized:
            raise RuntimeError('Template not initialized')
        for fragment in self.fragments:
            if isinstance(fragment, BlockTemplateFragment):
                # Blocks run for their side effects only
                await fragment.render(context=self.context, scope=(scope if not wrap_scope else dict(scope or {})))
            else:
                value = await fragment.render(context=self.context, scope=(scope if not wrap_scope else dict(scope or {})))
                if not none_ok and value is None:
                    raise RuntimeError(f'Expression returned None at {fragment.evaluable.co_filename}')
                # Skip fragments that produced nothing
                if value is None:
                    continue
                value = str(value)
                if strip_string:
                    value = value.strip()
                    if len(value) == 0:
                        continue
                yield value
    async def render_string(self, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False) -> str:
        """
        Render the whole template and return it as a single string.
        Parameters have the same meaning as in .init().
        Requires .init() when the template contains one-time fragments.
        """
        return ''.join([ f async for f in self.render_generator(scope, strip_string, none_ok, wrap_scope) ])
    async def render_file(self, filename: str, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False) -> None:
        """
        Render the whole template into the file at `filename` (UTF-8).
        Parameters have the same meaning as in .init().
        Requires .init() when the template contains one-time fragments.
        """
        with open(filename, 'w', encoding='utf-8') as file:
            async for f in self.render_generator(scope, strip_string, none_ok, wrap_scope):
                file.write(f)
    @staticmethod
    def from_file(file: typing.Union[str, typing.TextIO], template_parser: TemplateParser=None, context: dict=None) -> 'Template':
        """
        Load and parse a template from `file`, which is either an open text
        file object or a string file path.
        Example:
        ```
        Template.from_file(open('myfile.thtml'))
        Template.from_file('myfile.thtml')
        ```
        """
        if isinstance(file, str):
            with open(file, 'r', encoding='utf-8') as file_obj:
                return Template(file_obj.read(), template_parser, context)
        return Template(file.read(), template_parser, context)
    @staticmethod
    def from_string(source: str, template_parser: TemplateParser=None, context: dict=None) -> 'Template':
        """
        Load and parse a template from the given `source` string.
        """
        return Template(source, template_parser, context)
    @staticmethod
    def from_fragments(fragments: typing.List[TemplateFragment], context: dict=None) -> 'Template':
        """
        Construct a Template from a pre-built list of fragments and detect
        whether it needs an .init() call.
        """
        template = Template(None, None, context)
        template.fragments = fragments
        template.initialized = True
        for fragment in template.fragments:
            if fragment.is_one_time():
                template.initialized = False
                break
        return template
    def __str__(self):
        return ('\n' if self.template_parser.strip_string else '').join([ str(f) for f in self.fragments ])
    def __repr__(self):
        return self.__str__()
class FileWatcherTemplate:
    """
    Cached Template wrapper that watches the source file on the filesystem.
    The template is (re)loaded and initialized whenever the file's
    modification time is newer than the cached copy.
    Exposes the same render methods as Template plus an `auto_reload` flag.
    """
    __slots__ = (
        'filename',
        'timestamp',
        'template',
        'template_lock',
        'template_parser',
        'context',
        'init_scope',
        'init_strip_string',
        'init_none_ok',
        'init_wrap_scope'
    )
    def __init__(
        self,
        filename: str,
        template_parser: TemplateParser=None,
        context: dict=None,
        init_scope: dict=None,
        init_strip_string: bool=True,
        init_none_ok: bool=False,
        init_wrap_scope: bool=False
    ):
        """
        Create a lazy template shadow without loading it. Loading happens on
        a manual call to .update() or on the first render.
        `init_*` parameters are forwarded to Template.init() on each reload.
        """
        self.filename = filename
        self.timestamp = None
        self.template = None
        # Serializes reloads so concurrent renders cannot parse the file twice
        self.template_lock = asyncio.Lock()
        self.template_parser = template_parser or TemplateParser()
        self.context = context
        self.init_scope = init_scope
        self.init_strip_string = init_strip_string
        self.init_none_ok = init_none_ok
        self.init_wrap_scope = init_wrap_scope
    def is_up_to_date(self):
        """
        True when a template is loaded, its file still exists and the file
        has not been modified since it was loaded.
        """
        return not (self.timestamp is None or not os.path.exists(self.filename) or os.path.getmtime(self.filename) > self.timestamp)
    def get_template(self):
        """
        Returns the current internal Template instance, or None when no
        template has been loaded yet.
        """
        return self.template
    def get_timestamp(self):
        """
        Returns the modification timestamp of the loaded template file, or
        None when no template has been loaded yet.
        """
        return self.timestamp
    async def update(self):
        """
        Reload the template from the file. Does nothing when
        is_up_to_date() returns True; otherwise parses and initializes the
        template under a lock, re-checking freshness after acquiring it to
        avoid redundant reloads.
        Raises whatever the load or init raises on failure.
        """
        if self.is_up_to_date():
            return
        async with self.template_lock:
            # Another coroutine may have reloaded while we waited on the lock
            if self.is_up_to_date():
                return
            # Drop references first so a failed reload does not keep a stale
            # template that pretends to be fresh
            self.template = None
            self.timestamp = None
            self.template = Template.from_file(self.filename, self.template_parser, self.context)
            await self.template.init(
                scope=self.init_scope,
                strip_string=self.init_strip_string,
                none_ok=self.init_none_ok,
                init_ok=True,
                wrap_scope=self.init_wrap_scope
            )
            self.timestamp = os.path.getmtime(self.filename)
    async def render_generator(self, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False, auto_reload: bool=True) -> typing.AsyncGenerator[str, None]:
        """
        Render the template as an async generator over fragment strings.
        Parameters mirror Template.render_generator(); the template is
        reloaded first when `auto_reload` is True and the file changed.
        """
        if auto_reload:
            await self.update()
        # BUG FIX: Template.render_generator is an async generator; the old
        # code did `return await ...`, which raised TypeError because async
        # generators are not awaitable. Delegate by re-yielding instead.
        async for value in self.template.render_generator(scope=scope, strip_string=strip_string, none_ok=none_ok, wrap_scope=wrap_scope):
            yield value
    async def render_string(self, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False, auto_reload: bool=True) -> str:
        """
        Render the whole template and return it as a single string.
        Parameters mirror Template.render_string(); the template is reloaded
        first when `auto_reload` is True and the file changed.
        """
        if auto_reload:
            await self.update()
        return await self.template.render_string(scope=scope, strip_string=strip_string, none_ok=none_ok, wrap_scope=wrap_scope)
    async def render_file(self, filename: str, scope: dict=None, strip_string: bool=True, none_ok: bool=False, wrap_scope: bool=False, auto_reload: bool=True) -> None:
        """
        Render the whole template into the file at `filename`.
        Parameters mirror Template.render_file(); the template is reloaded
        first when `auto_reload` is True and the file changed.
        """
        if auto_reload:
            await self.update()
        return await self.template.render_file(filename=filename, scope=scope, strip_string=strip_string, none_ok=none_ok, wrap_scope=wrap_scope)
    def __str__(self):
        # BUG FIX: __str__ must return a string; returning None raised
        # TypeError before the first load.
        if self.timestamp is None:
            return f'<FileWatcherTemplate {self.filename!r} (not loaded)>'
        return self.template.__str__()
    def __repr__(self):
        return self.__str__()
| bitrate16/yatplt | yatplt.py | yatplt.py | py | 35,892 | python | en | code | 0 | github-code | 13 |
27517027041 | from takecommand import takecommand
from temperaturefile import temp
from remember import rememberfunciton
from translator import translator
from speak import speak
from rememberit import rememberit
from pronounce import pronounce
from spell import spell
from screenshot import screen
import pyaudio
from youtube import youtu
from website import webi
from wikipedia1 import wikip
from Music import music
import pyjokes
from close import closeapp
from open1 import app
##this works for mac but simple version of this won't work on mac
##https://tutor.python.narkive.com/nmyn7Fqh/why-is-startfile-unavailable-on-my-mac
def taskexe():
    """
    Main voice-command loop: repeatedly listen for a spoken query and
    dispatch it to the matching handler until the user says 'gotta go'.
    """
    # BUG FIX: these modules are used below but were never imported at module
    # level, so the corresponding branches raised NameError. Import locally.
    import time
    import pyautogui
    while True:
        query = takecommand()
        if 'hello' in query:
            speak("Hello sir, I am friday")
            speak("Your Personal AI assistant!")
            speak("How may i help you ")
        elif 'are you up' in query:
            speak("for you sir always")
        elif 'gotta go' in query:
            speak("ok sir")
            break
        elif 'youtube' in query:
            youtu(query)
        elif 'google' in query:
            # NOTE(review): neither googlo() nor googli() (used below for
            # 'search') is imported anywhere in this file -- confirm which
            # module provides them and whether one is a typo of the other.
            googlo(query)
        # Always keep the spoken trigger phrase in single quotes
        elif 'website' in query:
            webi(query)
        elif 'play music' in query:
            music()
        elif 'wikipedia' in query:
            wikip(query)
        elif 'whatsapp' in query:
            # NOTE(review): whatsapp() is not imported in this file.
            whatsapp(query)
        elif 'screenshot' in query:
            screen()
        elif 'open application' in query:
            app(query)
        elif 'close application' in query:
            closeapp(query)
        elif 'pause video' in query:
            pyautogui.press('space')
        elif 'play video' in query:
            pyautogui.press('space')
        elif 'restart video' in query:
            pyautogui.hotkey('0')
        elif 'mute video' in query:
            pyautogui.hotkey('mute')
        elif 'skip video' in query:
            pyautogui.hotkey('1')
        elif 'back' in query:
            pyautogui.hotkey('j')
        elif 'full screen' in query:
            pyautogui.hotkey('f')
        elif 'close it' in query:
            # BUG FIX: the previous keyDown/keyUp sequence released 'q'
            # before pressing it and then left 'q' held down; use the
            # combo helper instead.
            pyautogui.hotkey('command', 'q')
        elif 'close tab' in query:
            # BUG FIX: this branch was unreachable because the broader 'tab'
            # test below matched 'close tab' first; it is now checked before
            # the generic 'tab' handler. The old keyDown/keyUp sequence also
            # left 'w' held down.
            pyautogui.hotkey('command', 'w')
        elif 'tab' in query:
            pyautogui.hotkey('command', 'w')
        elif 'joke' in query:
            speak(pyjokes.get_joke())
        elif 'repeat my words' in query:
            speak("After you sire!")
            # NOTE(review): repeatwords() is not imported in this file.
            repeatwords()
        elif 'open library' in query:
            speak('Library is ready')
            # NOTE(review): dic() is not imported in this file.
            dic()
            speak("Exiting library")
        elif 'how to spell' in query:
            spell(query)
        elif 'how to pronounce' in query:
            pronounce(query)
        elif 'set alarm' in query:
            time.sleep(10)
        elif 'open translator' in query:
            translator(query)
        elif 'remember it' in query:
            rememberit(query)
        elif 'what do you remember' in query:
            rememberfunciton()
        elif 'search' in query:
            googli(query)
        elif 'temperature of' in query:
            temp(query)


if __name__ == '__main__':
    # Run the assistant only when executed as a script, not on import
    taskexe()
3345399803 | import ptypes
from ptypes import *
@pbinary.littleendian
class selector(pbinary.struct):
    # x86 segment selector: 13-bit descriptor-table Index, TI table-indicator
    # bit (0 = GDT, 1 = LDT), and the 2-bit Requested Privilege Level.
    _fields_ = [(13, 'Index'), (1, 'TI'), (2, 'RPL')]
class systemtable(pstruct.type):
_fields_ = [(pint.uint32_t, 'base'), (pint.uint16_t, 'limit')]
class systemsegment(pstruct.type):
_fields_ = [(selector, 'selector'), (systemtable, 'address')]
class descriptor(pbinary.struct):
class flags(pbinary.struct):
_fields_ = [(1, 'G'), (1, 'D/B'), (1, 'L'), (1, 'AVL')]
class access(pbinary.struct):
_fields_ = [(1, 'P'), (2, 'DPL'), (1, 'S'), (4, 'Type')]
_fields_ = [
(8, 'Base[3]'),
(flags, 'Flags'),
(4, 'Limit[High]'),
(access, 'Access'),
(8, 'Base[2]'),
(16, 'Base[Low]'),
(16, 'Limit[Low]'),
]
@pbinary.littleendian
class descriptor64(descriptor):
_fields_ = [(32, 'Reserved'), (32, 'Base[High]')] + descriptor._fields_
descriptor = pbinary.littleendian(descriptor)
@pbinary.littleendian
class general(pbinary.struct):
_fields_ = [(32, regname) for regname in ['eax', 'ecx', 'edx', 'ebx', 'esp', 'ebp', 'esi', 'edi']]
@pbinary.littleendian
class rex(pbinary.struct):
_fields_ = [(64, regname) for regname in ['rax', 'rbx', 'rcx', 'rdx', 'rsi', 'rdi', 'rbp', 'rsp']]
_fields_+= [(64, "r{:d}".format(regnum)) for regnum in range(8, 16)]
@pbinary.littleendian
class segment(pbinary.struct):
_fields_ = [(16, regname) for regname in ['cs', 'ds', 'ss', 'es', 'fs', 'gs']]
class flags(pbinary.flags):
_fields_ = [
(1, '0'),
(1, 'NT'), #Nested Task Flag
(2, 'IOPL'), #I/O Privilege Level
(1, 'OF'), #Overflow Flag
(1, 'DF'), #Direction Flag
(1, 'IF'), #Interrupt-enable Flag
(1, 'TF'), #Trap Flag
(1, 'SF'), #Sign Flag
(1, 'ZF'), #Zero Flag
(1, '0'),
(1, 'AF'), #Adjust Flag
(1, '0'),
(1, 'PF'), #Parity Flag
(1, '1'),
(1, 'CF'), #Carry Flag
]
class eflags(pbinary.flags):
_fields_ = [
(10, 'reserved'),
(1, 'ID'), #CPUID-Available
(1, 'VIP'), #Virtual Interrupt Pending
(1, 'VIF'), #Virtual Interrupt Flag
(1, 'AC'), #Alignment Check
(1, 'VM'), #V8086 Mode
(1, 'RF'), #Resume Flag
] + flags._fields_
flags = pbinary.littleendian(flags)
@pbinary.littleendian
class rflags(pbinary.flags):
_fields_ = [(10+32, 'reserved')] + eflags._fields_[1:]
eflags = pbinary.littleendian(eflags)
@pbinary.littleendian
class fpstate(pbinary.struct):
"""
Intel FPU register-space/region
https://software.intel.com/en-us/articles/x87-and-sse-floating-point-assists-in-ia-32-flush-to-zero-ftz-and-denormals-are-zero-daz
"""
default = 0x37f
_fields_ = [
(1, 'B'), # FPU Busy
(1, 'C3'), # condition-code (cc)
(3, 'TOP'), # Top of Stack Pointer (ST*)
(1, 'C2'), # cc
(1, 'C1'), # cc
(1, 'C0'), # cc
(1, 'ES'), # Error Summary
(1, 'SF'), # Fault from Stack
(1, 'PM'), # Precision
(1, 'UM'), # Underflow
(1, 'OM'), # Overflow
(1, 'ZM'), # Divided by Zero
(1, 'DM'), # Denormal(?) Operand
(1, 'IM'), # Invalid Operand
]
@pbinary.littleendian
class sse(pbinary.array):
_object_, length = 32 * 8, 8
@pbinary.littleendian
class mmx(pbinary.array):
_object_, length = 16 * 8, 8
@pbinary.littleendian
class fpu(pbinary.array):
_object_, length = 10 * 8, 8
@pbinary.littleendian
class fpctrl(pbinary.struct):
_fields_ = [
(3, 'reserved0'),
(1, 'X'), # Infinity control (0=Projective,1=Affine)
(2, 'RC'), # Rounding control (00=Round to nearest even,01=Round down towards infinity,10=Round up towards infinity,11=Round towards zero)
(2, 'PC'), # Precision control (00=Single(24),01=Reserved,10=Double(53),11=Extended(64))
(2, 'reserved1'),
(1, 'PM'), # Precision mask
(1, 'UM'), # Underflow mask
(1, 'OM'), # Overflow mask
(1, 'ZM'), # Zero Dvide mask
(1, 'DM'), # Denormalized Operand mask
(1, 'IM'), # Invalid Operation mask
]
@pbinary.littleendian
class frstor(pbinary.struct):
    # x87 FPU state image as saved/restored by FSAVE/FRSTOR.
    # FIXME: this size should be 108 bytes, not 100
    # (the fields below sum to 20 bytes of header + 8 x 80-bit ST registers
    #  = 100 bytes; presumably the real protected-mode layout pads the header
    #  fields to 32 bits each -- confirm against the Intel SDM.)
    _fields_ = [
        (fpctrl, 'ControlWord'),
        (fpstate, 'StatusWord'),
        (16, 'TagWord'),
        (48, 'DataPointer'),
        (48, 'InstructionPointer'),
        (16, 'LastInstructionOpcode'),
        (fpu, 'ST'),
    ]
@pbinary.littleendian
class gdt(pbinary.array):
_object_, length = descriptor, 8192
@pbinary.littleendian
class ldt(pbinary.array):
_object_, length = descriptor, 8192
@pbinary.littleendian
class cr0(pbinary.flags):
    # Control register CR0, listed from bit 31 (PG) down to bit 0 (PE);
    # the '?' entries are reserved gaps. Field widths sum to 32 bits.
    _fields_ = [
        (1, 'PG'), #Paging
        (1, 'CD'), #Cache disable
        (1, 'NW'), #Not-write through
        (10, '??'),
        (1, 'AM'), #Alignment mask
        (1, '0'),
        (1, 'WP'), #Write protect
        (10, '?'),
        (1, 'NE'), #Numeric error
        (1, 'ET'), #Extension Type
        (1, 'TS'), #Task Switched
        (1, 'EM'), #Emulation
        (1, 'MP'), #Monitor co-processor
        (1, 'PE'), #Protected Mode Enable
    ]
@pbinary.littleendian
class cr3(pbinary.flags):
_fields_ = [
(20, 'Directory'),
(7, 'Ignored'),
(1, 'PCD'),
(1, 'PWT'),
(3, 'Ignored'),
]
@pbinary.littleendian
class cr4(pbinary.flags):
_fields_ = [
(10, 'reserved'),
(1, 'SMAP'), #Supervisor Mode Access Protection Enable
(1, 'SMEP'), #Supervisor Mode Execution Protection Enable
(1, '???'),
(1, 'OSXSAVE'), #XSAVE and Processor Extended States Enable
(1, 'PCIDE'), #PCID Enable
(1, 'FSGSBASE'), #FSGSBASE-Enable bit
(1, '??'),
(1, 'SMXE'), #Safer Mode Extensions Enable
(1, 'VMXE'), #Virtual Machine Extensions Enable
(2, '0'),
(1, 'OSXMMEXCPT'), #Operating System Support for Unmasked SIMD Floating-Point Exceptions
(1, 'OSFXSR'), #Operating system support for FXSAVE and FXRSTOR instructions
(1, 'PCE'), #Performance-Monitoring Counter enable
(1, 'PGE'), #Page Global Enabled
(1, 'MCE'), #Machine Check Exception
(1, 'PAE'), #Physical Address Extension
(1, 'PSE'), #Page Size Extension
(1, 'DE'), #Debugging Extensions
(1, 'TSD'), #Time Stamp Disable
(1, 'PVI'), #Protected-mode Virtual Interrupts
(1, 'VME'), #Virtual 8086 Mode Extensions
]
class tss16(pstruct.type):
class SPSS(pstruct.type):
_fields_ = [(pint.uint16_t, 'SP'), (pint.uint16_t, 'SS')]
class general(pstruct.type):
_fields_ = [(pint.uint16_t, regname) for regname in ['AX', 'CX', 'DX', 'BX', 'SP', 'BP', 'SI', 'DI']]
class segment(pstruct.type):
_fields_ = [(pint.uint16_t, regname) for regname in ['ES', 'CS', 'SS', 'DS']]
_fields_ = [
(pint.uint16_t, 'Previous Task Link'),
(dyn.clone(parray.type, length=3, _object_=SPSS), 'SPSS'),
(pint.uint16_t, 'IP'),
(flags, 'FLAG'),
(general, 'general'),
(segment, 'segment'),
(pint.uint16_t, 'LDT'),
]
class align16(pstruct.type):
_fields_ = [(pint.uint16_t, 'reserved'), (pint.uint16_t, 'value')]
class tss32(pstruct.type):
class general(pstruct.type):
_fields_ = [(pint.uint32_t, regname) for regname in ['EAX', 'ECX', 'EDX', 'EBX', 'ESP', 'EBP', 'ESI', 'EDI']]
class segment(pstruct.type):
_fields_ = [(align16, regname) for regname in ['ES', 'CS', 'SS', 'DS', 'FS', 'GS']]
class ESPSS(pstruct.type):
_fields_ = [(align16, 'SS'), (pint.uint32_t, 'ESP')]
_fields_ = [
(align16, 'Previous Task Link'),
(dyn.clone(parray.type, length=3, _object_=ESPSS), 'ESPSS'),
(pint.uint32_t, 'CR3'),
(pint.uint32_t, 'EIP'),
(eflags, 'EFLAGS'),
(general, 'general'),
(segment, 'segment'),
(align16, 'LDT'),
(pint.uint16_t, 'Reserved'),
(pint.uint16_t, 'I/O Map Base Address'),
]
class tss64(pstruct.type):
_fields_ = [
(pint.uint32_t, 'Reserved'),
(dyn.clone(parray.type, length=3, _object_=pint.uint64_t), 'RSP'),
(pint.uint64_t, 'Reserved'),
(dyn.clone(parray.type, length=8, _object_=pint.uint64_t), 'IST'),
(pint.uint64_t, 'Reserved'),
(pint.uint16_t, 'Reserved'),
(pint.uint16_t, 'I/O Map Base Address'),
]
@pbinary.littleendian
class linear32(pbinary.struct):
_fields_ = [
(10, 'directory'),
(10, 'table'),
(12, 'offset'),
]
@pbinary.littleendian
class linear32ps(pbinary.struct):
_fields_ = [
(10, 'directory'),
(22, 'offset'),
]
@pbinary.littleendian
class linear32pae(pbinary.struct):
_fields_ = [
(2, 'directory pointer'),
(9, 'directory'),
(9, 'table'),
(12, 'offset'),
]
@pbinary.littleendian
class linear64(pbinary.struct):
_fields_ = [
(16, 'reserved'),
(9, 'pml4'),
(9, 'directory ptr'),
(9, 'directory'),
(9, 'table'),
(12, 'offset'),
]
@pbinary.littleendian
class linear64ps(pbinary.struct):
_fields_ = [
(16, 'reserved'),
(9, 'pml4'),
(9, 'directory ptr'),
(9, 'directory'),
(21, 'offset'),
]
@pbinary.littleendian
class linear64pae(pbinary.struct):
_fields_ = [
(16, 'reserved'),
(9, 'pml4'),
(9, 'directory ptr'),
(30, 'offset'),
]
@pbinary.littleendian
class pde(pbinary.flags):
_fields_ = [
(3, 'Ignored'),
(1, 'G'),
(1, 'PS'),
(1, 'D'),
(1, 'A'),
(1, 'PCD'),
(1, 'PWT'),
(1, 'U/S'),
(1, 'R/W'),
(1, 'P'),
]
@pbinary.littleendian
class pte(pbinary.flags):
_fields_ = [
(3, 'Ignored'),
(1, 'G'),
(1, 'PAT'),
(1, 'D'),
(1, 'A'),
(1, 'PCD'),
(1, 'PWT'),
(1, 'U/S'),
(1, 'R/W'),
(1, 'P'),
]
@pbinary.littleendian
class pde32(pbinary.struct):
_fields_ = [
(20, 'Address'),
(pde, 'Flags'),
]
@pbinary.littleendian
class pde32ps(pbinary.struct):
_fields_ = [
(10, 'Address(Lo)'),
(9, 'Address(Hi)'),
(1, 'PAT'),
(pde, 'Flags'),
]
@pbinary.littleendian
class pte32(pbinary.struct):
_fields_ = [
(20, 'Address'),
(pte, 'Flags'),
]
@pbinary.littleendian
class pde64(pbinary.struct):
_fields_ = [
(1, 'XD'),
(11, 'M'),
(40, 'Address'),
(pde, 'Flags'),
]
@pbinary.littleendian
class pde64ps(pbinary.struct):
_fields_ = [
(1, 'XD'),
(4, 'PKE'),
(7, 'Ignored'),
(40, 'Address'),
(pde, 'Flags'),
]
@pbinary.littleendian
class pte64(pbinary.flags):
_fields_ = [
(1, 'XD'),
(4, 'PKE'),
(7, 'Ignored'),
(40, 'Address'),
(pte, 'Flags'),
]
| arizvisa/syringe | lib/ndk/cpu.py | cpu.py | py | 11,099 | python | en | code | 35 | github-code | 13 |
3076440241 | import logging
import subprocess
import sys
from .bitwarden import *
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class GitSetup():
    """
    GitHub setup with SSH
    https://docs.github.com/en/authentication/connecting-to-github-with-ssh

    Every method shells out to the ``gh``/``git``/``ssh`` CLI tools and
    exits the process with status 1 on failure.
    """

    @staticmethod
    def get_user(gh_user: str):
        """Look up the GitHub user name stored in the Bitwarden vault."""
        return Bitwarden().rbw_get('github', gh_user)

    @staticmethod
    def auth_login(gh_token: str):
        """Authenticate the gh CLI with a token fetched from Bitwarden.

        The token is piped to ``gh auth login --with-token`` on stdin so it
        never appears on the command line or in the process list.
        """
        cmd = 'gh auth login --with-token'
        token = Bitwarden().rbw_get('github', gh_token)
        try:
            subprocess.run(cmd, shell=True, check=True, input=token.encode())
            logger.info('Auth login')
        except subprocess.CalledProcessError as err:
            logger.error(f'Auth login {err}')
            sys.exit(1)

    @staticmethod
    def auth_status():
        """Check the current gh authentication status; exit on failure."""
        cmd = 'gh auth status'
        try:
            subprocess.run(cmd, shell=True, check=True)
            logger.info('Auth status')
        except subprocess.CalledProcessError as err:
            # Report through the module logger like every other method
            # (previously this used a bare print() and never logged).
            logger.error(f'Auth status {err}')
            sys.exit(1)

    @staticmethod
    def add_pubkey(user: str, gh_pubkey: str):
        """Upload the user's ed25519 public key to GitHub with a title."""
        cmd = f'gh ssh-key add /home/{user}/.ssh/id_ed25519.pub -t {gh_pubkey}'
        try:
            subprocess.run(cmd, shell=True, check=True)
            logger.info('Add public key')
        except subprocess.CalledProcessError as err:
            logger.error(f'Add public key {err}')
            sys.exit(1)

    @staticmethod
    def known_hosts():
        """Append github.com's host keys to ~/.ssh/known_hosts."""
        cmd = 'ssh-keyscan github.com >> ~/.ssh/known_hosts'
        try:
            subprocess.run(cmd, shell=True, check=True)
            logger.info('Known hosts')
        except subprocess.CalledProcessError as err:
            logger.error(f'Known hosts {err}')
            sys.exit(1)

    @staticmethod
    def ssh_test():
        """Test SSH connectivity to GitHub.

        GitHub intentionally refuses shell access, so ``ssh -T`` exits with
        status 1 even on a successful authentication; both 0 and 1 count
        as success here.
        """
        cmd = 'ssh -T git@github.com'
        res = subprocess.run(cmd, shell=True)
        if res.returncode in [0, 1]:
            logger.info('SSH test')
        else:
            logger.error('SSH test')
            sys.exit(res.returncode)

    @staticmethod
    def config(gh_user: str, git_mail: str):
        """Set global git identity (name, email) and default branch."""
        user_mail = Bitwarden().rbw_get('github', git_mail)
        cmd_list = [f'git config --global user.name "{gh_user}"',
                    f'git config --global user.email "{user_mail}"',
                    f'git config --global init.defaultBranch main']
        for cmd in cmd_list:
            try:
                subprocess.run(cmd, shell=True, check=True)
            except subprocess.CalledProcessError as err:
                logger.error(f'Git config {err}')
                sys.exit(1)
        logger.info('Git config')
| marcellbarsony/arch-post | src/post/git_setup.py | git_setup.py | py | 2,683 | python | en | code | 0 | github-code | 13 |
27205013859 | from loader import dp
from aiogram import types
from models import models
from keyboards.inline.user_settings_keyboards import settings_search_keyboard
@dp.message_handler(commands=['settings'])
@dp.message_handler(regexp="^(⚙ Настройки)$")
async def settings_handler(message: types.Message):
    # Show the user's current partner-search filter (gender, age range,
    # children, partner location) as a formatted settings message with an
    # inline keyboard for editing.
    user = await models.UserModel.get(tg_id=message.chat.id)
    # Users who have not finished registration cannot open settings.
    if user.end_registration is False:
        return await message.answer("Вы не закончили регистрацию")
    settings: models.UserSearchSettings = await user.search_settings
    text = "⚙️ Настройки\n\n" \
           "Текущий фильтр по подбору партнеров:\n"
    text += f"Пол: "
    # Tri-state gender filter: True = male, False = female, None = any.
    if settings.male is True:
        text += "Муж.\n"
    elif settings.male is False:
        text += "Жен.\n"
    elif settings.male is None:
        text += "Неважно\n"
    # text += "Муж.\n" if settings.male is True else "Жен.\n"
    # Substitute display defaults (18-99) when no age bounds are stored.
    # NOTE(review): this mutates the in-memory settings object but is
    # presumably never saved back -- confirm no later .save() call relies on it.
    settings.min_age = 18 if settings.min_age is None else settings.min_age
    settings.max_age = 99 if settings.max_age is None else settings.max_age
    text += f"Возр. Диапазон: {settings.min_age}-{settings.max_age} лет\n"
    # Tri-state children filter rendered as check / cross / "any".
    if settings.children is True:
        children_text = "✅"
    elif settings.children is False:
        children_text = "❌"
    else:
        children_text = "Неважно"
    text += f"Наличие детей: {children_text}\n"
    # Children's age range is only shown when the children filter is on.
    if settings.children:
        settings.children_min_age = 0 if settings.children_min_age is None else settings.children_min_age
        settings.children_max_age = 18 if settings.children_max_age is None else settings.children_max_age
        text += f"Возр. Диапазон детей: {settings.children_min_age} - {settings.children_max_age} лет\n"
    # Partner-location filter: comma-separated list of selected places.
    interest_place_user = await user.interest_place_companion.all()
    text_place = ", ".join([i.title_interest for i in interest_place_user])
    text += "Местоположение пары: " + text_place + '\n'
    await message.answer(text=text, reply_markup=await settings_search_keyboard())
| Kyle-krn/DatingBotDubai | handlers/search_settings/view_settings_handler.py | view_settings_handler.py | py | 2,132 | python | en | code | 0 | github-code | 13 |
39814453420 | """Run with a subset of benchmark glaciers"""
from __future__ import division
# Before everythin els for the logger setting
import oggm
# Logger
import logging
log = logging.getLogger(__name__)
# Python imports
import os
import salem
import zipfile
# Libs
import matplotlib.pyplot as plt
# Locals
import oggm.cfg as cfg
from oggm import workflow
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils
# Initialize OGGM
cfg.initialize()
# Local paths (where to write output and where to download input)
WORKING_DIR = '/home/mowglie/disk/OGGM_Runs/BENCHMARK'
PLOTS_DIR = ''
cfg.PATHS['working_dir'] = WORKING_DIR
utils.mkdir(WORKING_DIR)
# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True
# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 60
# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = True
cfg.PARAMS['auto_skip_task'] = True
# Don't use divides for now
cfg.set_divides_db()
# Pre-download other files which will be needed later
_ = utils.get_cru_file(var='tmp')
_ = utils.get_cru_file(var='pre')
# Read in the Benchmark RGI file
rgif = 'https://dl.dropboxusercontent.com/u/20930277/rgi_benchmark.zip'
rgif = utils.file_downloader(rgif)
with zipfile.ZipFile(rgif) as zf:
zf.extractall(WORKING_DIR)
rgif = os.path.join(WORKING_DIR, 'rgi_benchmark.shp')
rgidf = salem.read_shapefile(rgif, cached=True)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
log.info('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf) # reset=True, force=True
# Prepro tasks
task_list = [
tasks.glacier_masks,
tasks.compute_centerlines,
tasks.compute_downstream_line,
tasks.initialize_flowlines,
tasks.compute_downstream_bedshape,
tasks.catchment_area,
tasks.catchment_intersections,
tasks.catchment_width_geom,
tasks.catchment_width_correction,
]
for task in task_list:
execute_entity_task(task, gdirs)
# Climate related tasks - this will download
execute_entity_task(tasks.process_cru_data, gdirs)
# tasks.compute_ref_t_stars(gdirs)
# tasks.distribute_t_stars(gdirs)
# Inversion
execute_entity_task(tasks.prepare_for_inversion, gdirs)
execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=0)
execute_entity_task(tasks.filter_inversion_output, gdirs,)
# Run
execute_entity_task(tasks.init_present_time_glacier, gdirs)
# While the above should work always, this here is no piece of fun
execute_entity_task(tasks.random_glacier_evolution, gdirs)
# Write out glacier statistics
df = utils.glacier_characteristics(gdirs)
fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
df.to_csv(fpath)
# Plots (if you want)
if PLOTS_DIR == '':
exit()
utils.mkdir(PLOTS_DIR)
for gd in gdirs:
bname = os.path.join(PLOTS_DIR, gd.name + '_' + gd.rgi_id + '_')
graphics.plot_googlemap(gd)
plt.savefig(bname + 'ggl.png')
plt.close()
graphics.plot_domain(gd)
plt.savefig(bname + 'dom.png')
plt.close()
graphics.plot_centerlines(gd)
plt.savefig(bname + 'cls.png')
plt.close()
graphics.plot_catchment_width(gd, corrected=True)
plt.savefig(bname + 'w.png')
plt.close()
graphics.plot_inversion(gd)
plt.savefig(bname + 'inv.png')
plt.close()
| Chris35Wills/oggm | oggm/sandbox/run_benchmark.py | run_benchmark.py | py | 3,436 | python | en | code | null | github-code | 13 |
42595792363 | import numpy as np
import matplotlib.pyplot as plt
import ipywidgets as widgets
import os
import tkinter as tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.colors import Normalize
def visualize_volume_SAM(data_dict, show_widget=False, show_tkinter=False, save_path="", axis='z'):
print(data_dict)
images = data_dict["image"]
labels = data_dict["label"]
masks = data_dict['sam_seg'][axis]
if axis == 'x':
max_slice = images.shape[0] - 1
elif axis == 'y':
max_slice = images.shape[1] - 1
elif axis == 'z':
max_slice = images.shape[2] - 1
max_mask_value = np.amax(masks)
# Function to update the plot based on the slider value
def get_plot(fig, slice_idx):
# Clear previous image for the GUI
fig.clear()
if axis == 'x':
image = images[slice_idx, :, :]
gt_label = labels[slice_idx, :, :]
sam_label = masks[slice_idx, :, :]
elif axis == 'y':
image = images[:, slice_idx, :]
gt_label = labels[:, slice_idx, :]
sam_label = masks[:, slice_idx, :]
elif axis == 'z':
image = images[:, :, slice_idx]
gt_label = labels[:, :, slice_idx]
sam_label = masks[:, :, slice_idx]
aspect='auto'
# Set fixed color map range for the labels
vmin, vmax = 0, max_mask_value
norm = Normalize(vmin=vmin, vmax=vmax)
axes = fig.subplots(2, 3)
(ax1, ax2, ax3), (ax4, ax5, ax6) = axes
ax1.imshow(image, cmap="gray", aspect=aspect)
ax1.set_title("Original Image")
ax1.axis("off")
label_img = ax2.imshow(gt_label, cmap="jet", aspect=aspect, norm=norm)
ax2.set_title("Ground Truth Label")
ax2.axis("off")
ax3.imshow(image, cmap="gray", aspect="equal")
ax3.imshow(gt_label, cmap="jet", alpha=0.5, aspect=aspect, norm=norm)
ax3.set_title("Ground Truth Overlay")
ax3.axis("off")
ax4.imshow(image, cmap="gray", aspect=aspect)
ax4.set_title("Original Image")
ax4.axis("off")
label_img = ax5.imshow(sam_label, cmap="jet", aspect=aspect, norm=norm)
ax5.set_title("SAM-Mask Label")
ax5.axis("off")
ax6.imshow(image, cmap="gray", aspect=aspect)
ax6.imshow(sam_label, cmap="jet", alpha=0.5, aspect=aspect, norm=norm)
ax6.set_title("SAM-Mask Overlay")
ax6.axis("off")
fig.subplots_adjust(wspace=0.2, hspace=0.2)
if show_tkinter:
canvas.draw()
# Show the ipy widget (which works in notebook environemnt
if show_widget:
fig = plt.figure(figsize=(15, 15))
slider = widgets.IntSlider(min=0, max=max_slice, step=1, value=0)
widgets.interact(lambda slice_idx: get_plot(fig, slice_idx), slice_idx=slider)
# Show the tinker GUI
if show_tkinter:
window = tk.Tk()
window.title("Volume Visualization")
fig = plt.figure(figsize=(15, 15))
canvas = FigureCanvasTkAgg(fig, master=window)
canvas.get_tk_widget().pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
# Create a frame for the slider and button
control_frame = tk.Frame(window)
control_frame.pack(side=tk.RIGHT, fill=tk.Y)
# Slider
slider = tk.Scale(control_frame, from_=0, to=max_slice, orient=tk.VERTICAL,
command=lambda s: get_plot(fig, int(s)))
slider.pack(side=tk.TOP, pady=10)
# Save Image button
def save_image():
if not os.path.exists(save_path):
os.makedirs(save_path)
filename = os.path.join(save_path, f"Slice_{slider.get()}_visualization.jpg")
print('saving file to {}'.format(filename))
fig.savefig(filename)
save_button = tk.Button(control_frame, text="Save Image", command=save_image)
save_button.pack(side=tk.TOP, pady=10)
# Callback function to handle window close event
def on_close():
window.destroy()
plt.close(fig)
# Bind the callback function to the window close event
window.protocol("WM_DELETE_WINDOW", on_close)
get_plot(fig, 0)
window.mainloop()
# Otherwise just run through the volume and save the images
if not show_tkinter and not show_widget:
fig = plt.figure(figsize=(15, 15))
for i in range(max_slice):
get_plot(fig, i) | AxDante/SAAMI | saami/visualizer.py | visualizer.py | py | 4,523 | python | en | code | 11 | github-code | 13 |
2324074141 | #Вычисление индекса максимального элемента итерационным методом
#Бондаренко Даниил 122В
#функция для вычисления индекса максимального элемента
def max_idx(arr, n):
    """Print and return the (row, col) index of the maximum element of an
    n x n matrix.

    Scans the matrix in row-major order; on ties the first occurrence wins
    (strict > comparison). The printed output is unchanged for existing
    callers; the index tuple is additionally returned so callers can use
    the result programmatically.
    """
    max_i = 0
    max_j = 0
    for i in range(n):
        for j in range(n):
            # strictly greater: earlier (row-major) index wins on equal values
            if arr[i][j] > arr[max_i][max_j]:
                max_i = i
                max_j = j
    print('(', max_i, ',', max_j, ')')
    return (max_i, max_j)
n = int(input('enter size : '))  # read the matrix size
arr = [[0] * n for i in range(n)]  # initialize an n x n matrix of zeros
# read the matrix element by element
for i in range(n):
    for j in range(n):
        arr[i][j] = int(input())
max_idx(arr, n)
# The implementation is verbose but very clear: nothing is more obvious than
# comparing every element of the array in turn while tracking the index of
# the current maximum.
| dzzgnr/lab10 | 1.3.3.py | 1.3.3.py | py | 1,553 | python | ru | code | 0 | github-code | 13 |
31190757165 | import os
from typing import List
import yaml
languages = {}
commands = {}
def get_command(value: str) -> List:
    # Return the command aliases registered under `value` in the "command"
    # strings table (loaded at import time from ./strings -- presumably a
    # command.yml file provides the "command" key; TODO confirm).
    return commands["command"][value]
def get_string(lang: str):
    # Return the full strings mapping for one language code (the file name
    # without its .yml extension under ./strings/langs/). Raises KeyError
    # for unknown languages.
    return languages[lang]
# Load command definitions: each ./strings/*.yml file becomes an entry in
# `commands`, keyed by the file name without its extension.
# NOTE(review): the file objects opened here are never explicitly closed.
for filename in os.listdir(r"./strings"):
    if filename.endswith(".yml"):
        language_name = filename[:-4]
        commands[language_name] = yaml.safe_load(
            open(r"./strings/" + filename, encoding="utf8")
        )

# Load per-language string tables from ./strings/langs/ the same way,
# keyed by language code (file name without .yml).
for filename in os.listdir(r"./strings/langs/"):
    if filename.endswith(".yml"):
        language_name = filename[:-4]
        languages[language_name] = yaml.safe_load(
            open(r"./strings/langs/" + filename, encoding="utf8")
        )
| Ozlembener/AasthaTGMusicBot | strings/__init__.py | __init__.py | py | 695 | python | en | code | 1 | github-code | 13 |
9373458555 | from logging import Logger
from cloudshell.cp.core.cancellation_manager import CancellationContextManager
from cloudshell.cp.core.flows.vm_details import AbstractVMDetailsFlow
from cloudshell.cp.core.request_actions.models import VmDetailsData
from cloudshell.cp.openstack.models import OSNovaImgDeployedApp
from cloudshell.cp.openstack.os_api.api import OsApi
from cloudshell.cp.openstack.os_api.services import vm_details_provider
from cloudshell.cp.openstack.resource_config import OSResourceConfig
class GetVMDetailsFlow(AbstractVMDetailsFlow):
    """Flow that collects VM details for a deployed OpenStack instance."""

    def __init__(
        self,
        resource_config: OSResourceConfig,
        cancellation_manager: CancellationContextManager,
        os_api: OsApi,
        logger: Logger,
    ):
        super().__init__(logger)
        self._resource_config = resource_config
        self._cancellation_manager = cancellation_manager
        self._api = os_api

    def _get_vm_details(self, deployed_app: OSNovaImgDeployedApp) -> VmDetailsData:
        # Look up the Nova instance by the deployed app's VM uid, then build
        # its details. On any failure, log the traceback and return a
        # VmDetailsData carrying the error message instead of raising, so
        # one broken app does not abort the whole details request.
        instance = self._api.Instance.get(deployed_app.vmdetails.uid)
        try:
            result = vm_details_provider.create(
                instance, self._resource_config.os_mgmt_net_id
            )
        except Exception as e:
            self._logger.exception(f"Error getting VM details for {deployed_app.name}")
            result = VmDetailsData(errorMessage=str(e), appName=instance.name)
        return result
| QualiSystems/cloudshell-cp-openstack | cloudshell/cp/openstack/flows/vm_details.py | vm_details.py | py | 1,420 | python | en | code | 0 | github-code | 13 |
23780111134 | import random
def generar_punto():
    """Return a uniformly random point (x, y) in the unit square [0, 1) x [0, 1)."""
    return random.random(), random.random()
def dentro_de_circulo(xy):
    """Return True when the point (xy[0], xy[1]) lies inside (or on) the
    unit circle centered at the origin."""
    x, y = xy[0], xy[1]
    return x ** 2 + y ** 2 <= 1
# Monte Carlo estimate of pi: the fraction of random points in the unit
# square that fall inside the quarter circle approximates pi/4.
N = 10000
G = sum([dentro_de_circulo(generar_punto()) for i in range(N)])
prob = G/N *4
print(f'Pi es más o menos: {prob:.6f}.')
| romanripari/UNSAMPython | Clase05/estimar_pi.py | estimar_pi.py | py | 324 | python | es | code | 0 | github-code | 13 |
15733737510 | # ============ IsBranch: 0 -> pc + 4, 1 -> BranchTargetAddress, 2 -> ALUResult ==============
import decode as de
import memory_access as ma
import instruction_fetch as fi
# ===========GLOBAL VARIABLES===============
aluResult: int = None
isBranch: int = None
def srl(a: int, b: int) -> int:
    """Logical (zero-fill) right shift of a 32-bit value.

    Negative inputs are first reinterpreted as their unsigned 32-bit
    two's-complement encoding, so zeros -- not copies of the sign bit --
    are shifted in from the left.
    """
    unsigned = a if a >= 0 else a + 0x100000000
    return unsigned >> b
def sll(
    a: int, b: int
) -> (
    int
):  # Python ints are unbounded, so a plain << would grow past 32 bits;
    # this routine instead truncates the result to a 32-bit register width.
    """Shift-left-logical of `a` by `b` within a 32-bit register.

    Shifts of 32 or more clear the register. Implemented on the binary
    string helpers: drop the top `b` bits and zero-pad on the right.
    # assumes ma.dec_to_bin returns a 32-character two's-complement bit
    # string (MSB first) and de.bin_to_dec inverts it -- TODO confirm in
    # memory_access/decode.
    """
    if b >= 32:
        return 0
    else:
        a = ma.dec_to_bin(a)
        a = a[b:].ljust(32, "0")
        return de.bin_to_dec(a)
def execute() -> int:
    """Execute stage of the pipeline.

    Reads the decode-stage outputs (module `de`), performs the selected ALU
    operation or branch comparison, stores the result in the module globals
    `aluResult`/`isBranch`, updates the fetch-stage PC (`fi.pc`) for taken
    branches and jumps, and appends a human-readable trace line to
    output.txt.
    """
    global aluResult, isBranch
    op1: int = de.op1
    op2: int = None
    # Selecting op2: 0 -> register operand, 1 -> I-type immediate,
    # 2 -> S-type immediate.
    if de.OP2Select == 0:
        op2 = de.op2
    elif de.OP2Select == 1:
        op2 = de.imm
    elif de.OP2Select == 2:
        op2 = de.immS
    # opening file to write output (trace is appended across stages)
    f = open("output.txt", "a")
    # ALUOperation dispatch: 0..7 arithmetic/logic, 8..11 branch compares.
    if de.ALUOperation == 0:
        aluResult = op1 + op2
        f.write(f"EXECUTE: ADD {op1} and {op2}\n")
    elif de.ALUOperation == 1:
        aluResult = op1 - op2
        f.write(f"EXECUTE: SUB {op1} from {op2}\n")
    elif de.ALUOperation == 2:
        aluResult = op1 ^ op2
        f.write(f"EXECUTE: {op1} XOR {op2}\n")
    elif de.ALUOperation == 3:
        aluResult = op1 | op2
        f.write(f"EXECUTE: {op1} OR {op2}\n")
    elif de.ALUOperation == 4:
        aluResult = op1 & op2
        f.write(f"EXECUTE: {op1} AND {op2}\n")
    elif de.ALUOperation == 5:
        # shift-left-logical, truncated to 32 bits by the sll() helper
        aluResult = sll(op1, op2)
        f.write(f"EXECUTE: {op1} << {op2}\n")
    elif de.ALUOperation == 6:
        # shift-right-logical: shift amount reduced modulo 32
        if op2 >= 32:
            op2 = op2 % 32
        aluResult = srl(op1, op2)
        f.write(f"EXECUTE: {op1} >>> {op2}\n")
    elif de.ALUOperation == 7:
        # shift-right-arithmetic
        # NOTE(review): this branch uses `> 32` where op 6 uses `>= 32`,
        # so a shift amount of exactly 32 is not reduced here -- confirm
        # whether that asymmetry is intentional.
        if op2 > 32:
            op2 = op2 % 32
        aluResult = op1 >> op2
        f.write(f"EXECUTE: {op1} >> {op2}\n")
    elif de.ALUOperation == 8:
        # BEQ: branch when equal
        # NOTE(review): "Brach" typo below is preserved -- it is runtime
        # trace output, so fixing it changes the emitted trace.
        if op1 == op2:
            isBranch = 1
            fi.pc = de.BranchTargetAddress
            f.write(f"EXECUTE:BEQ PC set to {de.BranchTargetAddress}\n")
        else:
            isBranch = 0
            f.write("EXECUTE: Brach not taken\n")
    elif de.ALUOperation == 9:
        # BNE: branch when not equal
        if op1 != op2:
            isBranch = 1
            fi.pc = de.BranchTargetAddress
            f.write(f"EXECUTE:BNE PC set to {de.BranchTargetAddress}\n")
        else:
            isBranch = 0
            f.write("EXECUTE: Brach not taken\n")
    elif de.ALUOperation == 10:
        # BGE: branch when greater or equal (signed compare on Python ints)
        if op1 >= op2:
            isBranch = 1
            fi.pc = de.BranchTargetAddress
            f.write(f"EXECUTE:BGE PC set to {de.BranchTargetAddress}\n")
        else:
            isBranch = 0
            f.write("EXECUTE: Brach not taken\n")
    elif de.ALUOperation == 11:
        # BLT: branch when less than
        if op1 < op2:
            isBranch = 1
            fi.pc = de.BranchTargetAddress
            f.write(f"EXECUTE:BLT PC set to {de.BranchTargetAddress}\n")
        else:
            isBranch = 0
            f.write("EXECUTE: Brach not taken\n")
    # for JAL: unconditional jump to the branch target
    if de.opcode == "1101111":
        fi.pc = de.BranchTargetAddress
        f.write(f"EXECUTE: PC set to {de.BranchTargetAddress}\n")
    elif de.opcode == "1100111":  # for JALR: jump to the computed ALU result
        fi.pc = aluResult
        f.write(f"EXECUTE: PC set to {aluResult}\n")
    f.close()
def init() -> None:
    """Reset the execute-stage outputs (ALU result and branch flag) to None."""
    global aluResult, isBranch
    aluResult = isBranch = None
| Karanraj06/riscv-32i-simulator | single_cycle/execute.py | execute.py | py | 3,577 | python | en | code | 5 | github-code | 13 |
5499368972 | import pytest
from queryset_serializer.db.models import SerializerPrefetch
from queryset_serializer.serializers import (DefaultMetaQuerySetSerializer,
QuerySetMetaSerializer,
QuerySetSerializer, get_meta,
get_meta_val)
class MockClass:
    """Minimal stand-in object: every keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
class MockListSerializer:
    """Fake DRF ListSerializer: wraps a child serializer and resolves
    `source` from the child when not given (or falsy), mirroring it into
    the captured kwargs."""

    def __init__(self, child=None, source=None, **kwargs):
        self.child = child
        # Fall back to the child's source when no truthy source is supplied.
        if source:
            self.source = source
        else:
            self.source = child.source
        self._kwargs = kwargs
        self._kwargs.setdefault('source', self.source)
class MockBaseSerializer:
    """Fake base serializer exposing `database_relations` (select/prefetch
    field-name lists), a `source`, and the captured constructor kwargs."""

    def __init__(self, select=None, prefetch=None, source=None, **kwargs):
        # Use None sentinels instead of mutable default arguments: the
        # original `select=[]`/`prefetch=[]` defaults would be shared by
        # every instance constructed without explicit lists, so mutating
        # one instance's relations would leak into all the others.
        select = [] if select is None else select
        prefetch = [] if prefetch is None else prefetch
        self.database_relations = {'prefetch': prefetch, 'select': select}
        self.source = source
        self._kwargs = kwargs
        self._kwargs.setdefault('source', self.source)
class TestFunctions:
    # get_meta must read the `Meta` member from either an object attribute
    # or a dict key; each case is (input, expected Meta).
    test_data_get_meta = [
        (MockClass(Meta=object), object),
        (MockClass(Meta='MetaString'), 'MetaString'),
        ({'Meta': object}, object),
        ({'Meta': 'MetaString'}, 'MetaString'),
    ]

    @pytest.mark.parametrize('input,output', test_data_get_meta)
    def test_get_meta(self, input, output):
        assert get_meta(input) == output

    # get_meta_val must prefer the attribute present on the given Meta and
    # fall back to DefaultMetaQuerySetSerializer when it is absent; each
    # case is (meta object, attribute name, expected value).
    test_data_get_meta_val = [
        (MockClass(prefetch_to_attr_prefix='P_'), 'prefetch_to_attr_prefix', 'P_'),
        (MockClass(), 'prefetch_to_attr_prefix', DefaultMetaQuerySetSerializer.prefetch_to_attr_prefix),
        (MockClass(prefetch_class='prefetch_class'), 'prefetch_class', 'prefetch_class'),
        (MockClass(), 'prefetch_class', DefaultMetaQuerySetSerializer.prefetch_class),
        (MockClass(list_serializer_class='list_serializer_class'), 'list_serializer_class', 'list_serializer_class'),
        (MockClass(), 'list_serializer_class', DefaultMetaQuerySetSerializer.list_serializer_class),
        (MockClass(base_serializer_class='base_serializer_class'), 'base_serializer_class', 'base_serializer_class'),
        (MockClass(), 'base_serializer_class', DefaultMetaQuerySetSerializer.base_serializer_class),
        (MockClass(prefetch_listing='prefetch_listing'), 'prefetch_listing', 'prefetch_listing'),
        (MockClass(), 'prefetch_listing', DefaultMetaQuerySetSerializer.prefetch_listing),
    ]

    @pytest.mark.parametrize('meta,value,output', test_data_get_meta_val)
    def test_get_meta_val(self, meta, value, output):
        assert get_meta_val(meta, value) == output
class TestQuerySetMetaSerializer:
mock_meta = MockClass(
list_serializer_class=MockListSerializer,
base_serializer_class=MockBaseSerializer,
prefetch_to_attr_prefix='P_'
)
test_data__set_prefetch_fields = [
(
{
'a': MockBaseSerializer(
select=['x'],
prefetch=['y', 'z']
),
'b': MockListSerializer(child=MockBaseSerializer(
select=['x'],
prefetch=['y']
)),
'c': MockBaseSerializer(),
'd': MockListSerializer(child=MockBaseSerializer()),
'some_field': None
},
['a__y', 'a__z', 'b', 'b__x', 'b__y', 'd'],
['a', 'a__x', 'c']
),
(
{},
[],
[]
),
(
{'some_field': None},
[],
[]
)
]
@pytest.mark.parametrize('declared_fields,expected_prefetch,expected_select',
test_data__set_prefetch_fields)
def test__set_prefetch_fields(self, declared_fields, expected_prefetch, expected_select):
attrs = {
'Meta': self.mock_meta,
'_declared_fields': declared_fields,
'database_relations': {'prefetch': [], 'select': []}
}
rels = QuerySetMetaSerializer._set_prefetch_fields(attrs)
assert len(rels['prefetch']) == len(expected_prefetch) == len(set(rels['prefetch']) & set(expected_prefetch))
assert len(rels['select']) == len(expected_select) == len(set(rels['select']) & set(expected_select))
test_data__set_source_prefetch_serializers = [
(
{
'a': MockBaseSerializer(),
'b': MockListSerializer(child=MockBaseSerializer()),
'c': MockBaseSerializer(source='SomeSource'),
'd': MockListSerializer(child=MockBaseSerializer(source='SomeSource')),
'e': MockListSerializer(child=MockBaseSerializer(), source='SomeSource'),
'some_field': None
},
['a', 'b', 'c', 'd', 'e'],
{
'a': 'P_a',
'b': 'P_b',
'c': 'SomeSource',
'd': 'SomeSource',
'e': 'SomeSource'
}
),
(
{},
[],
{}
),
(
{'some_field': None},
[],
{}
)
]
@pytest.mark.parametrize('declared_fields,prefetch,sources', test_data__set_source_prefetch_serializers)
def test__set_source_prefetch_serializers(self, declared_fields, prefetch, sources):
attrs = {
'Meta': self.mock_meta,
'_declared_fields': declared_fields,
'database_relations': {'prefetch': prefetch, 'select': []}
}
QuerySetMetaSerializer._set_source_prefetch_serializers(attrs)
for key, value in sources.items():
assert attrs['_declared_fields'][key].source == value
assert attrs['_declared_fields'][key]._kwargs['source'] == value
class TestQuerySetSerializer:
    # Each case: (prefetch relation paths, queryset, expected
    # SerializerPrefetch objects keyed by their prefetch_through path).
    test_data__prepare_prefetch_list = [
        (['a', 'b', 'c', 'd'], None, {
            'a': SerializerPrefetch('a', prefix='PREF_'),
            'b': SerializerPrefetch('b', prefix='PREF_'),
            'c': SerializerPrefetch('c', prefix='PREF_'),
            'd': SerializerPrefetch('d', prefix='PREF_')
        }),
        (['a', 'a__b', 'a__b__c'], None, {
            'a': SerializerPrefetch('a', prefix='PREF_'),
            'a__b': SerializerPrefetch('a__b', prefix='PREF_'),
            'a__b__c': SerializerPrefetch('a__b__c', prefix='PREF_'),
        })
    ]

    @pytest.mark.parametrize('prefetch,queryset,result_prefetches', test_data__prepare_prefetch_list)
    def test__prepare_prefetch_list(self, prefetch, queryset, result_prefetches):
        # NOTE(review): this mutates a class attribute on the shared
        # QuerySetSerializer class rather than an instance -- state leaks
        # across parametrized runs; confirm that is intentional.
        serializer = QuerySetSerializer
        serializer.database_relations = {'prefetch': prefetch, 'select': []}
        prefetch_list = serializer._prepare_prefetch_list(queryset)
        # Every produced prefetch must match the expected one for its path.
        for item in prefetch_list:
            result_prefetch = result_prefetches[item.prefetch_through]
            assert result_prefetch.to_attr == item.to_attr
            assert result_prefetch.prefetch_to == item.prefetch_to
| MauriceBenink/python_queryset_serializer | tests/serializer/test_queryset_serializer.py | test_queryset_serializer.py | py | 6,986 | python | en | code | 2 | github-code | 13 |
11960896730 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import re
import nltk
from nltk.corpus import stopwords
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
import seaborn as sns
from sklearn.metrics import accuracy_score
# In[2]:
# Load the labelled training data (text column + binary `bully` label).
df = pd.read_csv("TrainData.csv")
df.head(300)

# In[3]:
# Class balance of the target.
df['bully'].value_counts()

# In[4]:
sns.countplot(df['bully'])

# In[5]:
df["bully"].value_counts().plot(kind='pie', autopct='%1.1f%%', shadow=True, startangle=140)

# In[6]:
df["bully"].hist(bins=3)

# In[7]:
df.describe()

# In[8]:
df.corr()

# In[9]:
sns.heatmap(df.corr(), cmap ='RdYlGn', linewidths = 0.30, annot = True)

# In[10]:
# Inputs are the raw text column; target is the bully flag.
X = df['Level_Text'].values
Y = df['bully'].values

# In[11]:
print(X)

# In[12]:
print(Y)

# In[13]:
# Vectorize the text with TF-IDF (X becomes a sparse matrix).
vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(X)
print(X)

# In[ ]:

# In[14]:
# Stratified 80/20 train/test split with a fixed seed.
X_train,X_test,Y_train,Y_test = train_test_split(X,Y , test_size = 0.2 , stratify = Y,random_state = 2 )

# In[15]:
#from sklearn.metrics import confusion_matrix
#confusion_matrix(Y_train,Y_test)

# In[16]:
# Logistic-regression baseline.
model = LogisticRegression()

# In[17]:
model.fit(X_train,Y_train)

# In[18]:
# Accuracy on the training data.
X_train_prediction = model.predict(X_train)
training_data_accuracy = accuracy_score(X_train_prediction ,Y_train)

# In[19]:
print('Accuracy of training data : ',training_data_accuracy)

# In[20]:
# Accuracy on the held-out test data.
X_test_prediction = model.predict(X_test)
test_data_accuracy = accuracy_score(X_test_prediction ,Y_test)

# In[21]:
print('Accuracy of test data : ',test_data_accuracy)

# In[22]:
# Score of LogisticRegression.
model.score(X_test,Y_test)

# In[23]:
# PERFORMANCE

# In[24]:
pred = model.predict(X_test)

# In[25]:
pred

# In[26]:
ac = accuracy_score(Y_test,pred)

# In[27]:
from sklearn.metrics import confusion_matrix
cf = confusion_matrix(Y_test,pred)
print('Accuracy Score is :', ac)
print('Confusion Matrix')
print('\tPredictions')
print('\t{:>5}\t{:>5}'.format(0,1))
# One row per true class: counts predicted as 0 and as 1.
for row_id, real_row in enumerate(cf):
    print('{}\t{:>5}\t{:>5}'.format(row_id, real_row[0], real_row[1]))
# In[28]:
# Confusion-matrix heatmap
sns.set(font_scale=1.5)


def plot_conf_mat(Y_test, pred):
    """Plot the confusion matrix of (Y_test, pred) as an annotated heatmap.

    sklearn's confusion_matrix puts true labels on the rows and predicted
    labels on the columns, so in the heatmap the x-axis is predictions and
    the y-axis is true labels. (The original version had the two axis
    labels swapped, and silently used the module-level ``cf`` instead of
    its own arguments.)
    """
    cm = confusion_matrix(Y_test, pred)  # use the arguments, not the global `cf`
    fig, ax = plt.subplots(figsize=(3, 3))
    ax = sns.heatmap(cm, annot=True, cbar=False)
    plt.xlabel("Pred Label")   # columns = predicted labels
    plt.ylabel("True Label")   # rows = true labels


plot_conf_mat(Y_test, pred)
# In[29]:
# RandomForestClassifier .............................
from sklearn.ensemble import RandomForestClassifier

# In[30]:
Rclf=RandomForestClassifier()

# In[31]:
Rclf.fit(X_train,Y_train)

# In[32]:
# Training accuracy.
X_train_predi = Rclf.predict(X_train)
training_data_accu = accuracy_score(X_train_predi ,Y_train)

# In[33]:
print('Accuracy of training data : ',training_data_accu)

# In[34]:
# Test accuracy.
X_test_predi = Rclf.predict(X_test)
test_data_accu = accuracy_score(X_test_predi ,Y_test)

# In[35]:
print('Accuracy of test data : ',test_data_accu)

# In[36]:
# Score RandomForestClassifier
Rclf.score(X_test,Y_test)

# In[ ]:

# In[37]:
# DecisionTreeClassifier
from sklearn.tree import DecisionTreeClassifier

# In[38]:
Clf=DecisionTreeClassifier()

# In[39]:
Clf.fit(X_train,Y_train)

# In[40]:
Clf.score(X_test,Y_test)

# In[ ]:

# In[41]:
from sklearn.svm import SVC # "Support vector classifier"
classifier = SVC(kernel='linear', random_state=0)
classifier.fit(X_train,Y_train)

# In[42]:
classifier.score(X_test,Y_test)

# In[43]:
from sklearn.neighbors import KNeighborsClassifier
knn= KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2 )
knn.fit(X_train,Y_train)

# In[44]:
knn.score(X_test,Y_test)

# In[45]:
# Multi-layer Perceptron Classifier
from sklearn.neural_network import MLPClassifier
ml = MLPClassifier(solver='lbfgs', alpha=1e-5,
hidden_layer_sizes=(5, 2), random_state=1)
ml.fit(X_train,Y_train)

# In[46]:
ml.score(X_test,Y_test)

# In[47]:
# Collect the test score of every model into one table.
ndf = pd.DataFrame({
'name': ['LogisticRegression', 'RandomForest','DecisionTree','SVC', 'KNeighbors','MLPClassifier'],
'score': [model.score(X_test,Y_test),Rclf.score(X_test,Y_test), Clf.score(X_test,Y_test),classifier.score(X_test,Y_test),knn.score(X_test,Y_test),ml.score(X_test,Y_test)]
})

# In[48]:
# Round-trip the score table through a CSV on disk.
ndf.to_csv('score.csv')
ndf = pd.read_csv('score.csv')

# In[ ]:

# In[49]:
# Helper that writes each bar's numeric value just above the bar.
def addlabels(x, y):
    """Annotate bar i with y[i] rounded to two decimals, slightly above it."""
    UPPER_OFFSET = 0.005  # vertical gap between bar top and label
    for i in range(len(x)):
        label_value = float('%.2f' % y[i])
        plt.text(i, label_value + UPPER_OFFSET, label_value)
# Bar chart comparing every classifier's test-set accuracy.
scores = [model.score(X_test,Y_test),Rclf.score(X_test,Y_test), Clf.score(X_test,Y_test),classifier.score(X_test,Y_test),knn.score(X_test,Y_test),ml.score(X_test,Y_test)]
names = ['Logistic Regression', 'Random Forest','DecisionTree','SVC', 'KNeighbors','MLP Classifier']
fig = plt.figure(figsize =(10,5))
plt.bar(names, scores)
# Add the value labels above each bar.
addlabels(names, scores)
# giving title to the plot
plt.title("Accuracy Score Test")
# Axis labels intentionally disabled.
# plt.xlabel("Courses")
# plt.ylabel("Number of Admissions")
# visualizing the plot
plt.show()

# In[ ]:

# In[ ]:

# In[ ]:

# In[ ]:

# In[ ]:
| robelhossain783/Online_Cyberbullying_Detection | ClassificationAndPrediction.py | ClassificationAndPrediction.py | py | 5,547 | python | en | code | 0 | github-code | 13 |
6601744229 | # -*- coding: utf-8 -*-
from odoo import fields, models, api, _
from odoo.exceptions import ValidationError
# Selection options for Material.type: (stored value, display label).
TYPE_SELECTION = [
    ('fabric', 'Fabric'),
    ('jeans', 'Jeans'),
    ('cotton', 'Cotton')
]


class Supplier(models.Model):
    """Supplier master data: name and contact details for material vendors."""
    _name = 'supplier.supplier'
    _description = 'Supplier Information'

    name = fields.Char(string='Supplier Name', required=True)
    address = fields.Char(string='Address')
    email = fields.Char(string='Email')
    phone_number = fields.Char(string='Phone Number')
class Material(models.Model):
    """Material master data, linked to a supplier, with a minimum-price rule."""
    _name = 'material.material'
    _description = 'Material Information'

    name = fields.Char(string='Material Name', required=True)
    code = fields.Char(string='Material Code', required=True)
    type = fields.Selection(selection=TYPE_SELECTION, string='Material Type', required=True)
    buy_price = fields.Float(string='Material Buy Price', required=True, default=0.0)
    supplier_id = fields.Many2one(comodel_name='supplier.supplier', string='Supplier', required=True)

    @api.constrains('buy_price')
    def _buy_price_validation(self):
        """Reject any record whose buy price is below the 100 minimum."""
        for record in self:
            if record.buy_price < 100:
                raise ValidationError(_('Buy price should not be less than 100.'))
3250163042 | import socket
import struct
import dpkt
from threading import Thread, Lock
mutex = Lock()  # serializes console/file output between the two sniffing threads
PORT = 25565
HOST = socket.gethostbyname(socket.gethostname())  # IP of the local machine
socketInf = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_IP)  # raw IPv4 socket
socketInf.bind((HOST,PORT))
# SIO_RCVALL / RCVALL_ON is Windows-specific: receive ALL packets on the interface.
socketInf.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
def sniffing(threadNum, f):
    """Capture 100 raw IPv4 packets and log labelled hex/dec dumps to `f`.

    Args:
        threadNum: Identifier printed with each packet (which thread saw it).
        f: Open text file shared between threads for the log output.

    The global ``mutex`` guards the shared file/console while one packet is
    being logged. Fixes vs. the original: the lock is now held via a
    ``with`` block (the bare acquire/release leaked the lock on any
    exception, deadlocking the other thread), and the dead ``n = 0`` /
    ``n = n + 1`` statements around the ``for n in range(100)`` loop
    (the loop rebinds ``n`` itself) were removed.
    """
    for n in range(100):
        data = socketInf.recvfrom(65535)  # 65535 = max IP packet size
        packet = data[0]
        address = data[1]
        # Raw IPv4 header: byte 9 is the protocol number (6=TCP, 17=UDP, 1=ICMP).
        header = struct.unpack('BBBBBBBBBBBBBBBBBBBB', packet[:20])
        with mutex:  # hold the lock only while printing/recording this packet
            print('Packet Number: ', n, ' in thread ', threadNum)
            if header[9] == 6:
                f.write("ID = " + str(n) + ' in thread = ' + str(threadNum) + " TCP:\n ")
                print("Protocol = TCP")
            elif header[9] == 17:
                f.write("ID = " + str(n) + ' in thread = ' + str(threadNum) + " UDP:\n ")
                print("Protocol = UDP")
                f.write("ACII 8-bit:\n " + str(dpkt.udp.UDP(packet)) + "\n HEX:\n ")
            elif header[9] == 1:
                f.write("ID = " + str(n) + ' in thread = ' + str(threadNum) + " ICMP:\n ")
                print("Protocol = ICMP")
                print(address)
            # Hex and decimal dumps are written for every packet, labelled or not.
            f.write("{}".format(''.join(' {:02x}'.format(b) for b in packet)))
            f.write("\n DEC:\n " + "{}".format(''.join(' {:3d}'.format(b) for b in packet)) + '\n\n')
            print("{}".format(''.join(' {:02x}'.format(b) for b in packet)))
def main():
    """Run two concurrent sniffing loops sharing one log file.

    Fix: the log file was opened but never closed; a ``with`` block now
    guarantees it is flushed and closed even if a sniffer raises.
    """
    with open("log.txt", 'w') as f:
        th = Thread(target=sniffing, args=(0, f,))  # worker sniffer
        th.start()
        sniffing(1, f)  # second sniffer runs on the main thread
        th.join()  # wait for the worker before the file is closed


if __name__ == "__main__":
    main()
3192457058 | import argparse
from os import mkdir
from os.path import isdir, isfile, join
import matplotlib.patches as patches
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.collections import LineCollection
from mpl_toolkits.axes_grid1 import make_axes_locatable
from cooperative_transport.gym_table.envs.utils import (WINDOW_H, WINDOW_W,
obstacle_size, L)
from libs.sim_utils import make_video
def compute_interaction_forces(table_state, f1, f2):
    """Project the force difference onto the axis between the two grip points.

    ``table_state`` holds [x, y, cos(theta), sin(theta), ...]; the players
    grip the table at +/- L/2 along its orientation axis. Returns the scalar
    (f1 - f2) . (p1 - p2).
    """
    x, y = table_state[0], table_state[1]
    cos_t, sin_t = table_state[2], table_state[3]
    half_len = L / 2
    grip1 = np.array([x + half_len * cos_t, y + half_len * sin_t])
    grip2 = np.array([x - half_len * cos_t, y - half_len * sin_t])
    return (f1 - f2) @ (grip1 - grip2)
def vis():
    """ Visualize a HIL or standard trajectory.

    NOTE: You must have a ground truth demo trajectory to compare against,
    to see potential deviations in behavior for the given
    map config (initial pose, goal, and obstacle layout).

    To elaborate, the workflow might go like this:
    1. Collect a trajectory using the HIL or running robot-robot co-policy.
    During this collection, a ground truth map config is loaded, so the
    trajectory rollout is given the same map config as the ground truth.
    2. Run this script to visualize the trajectory rollout, and compare it
    to the ground truth trajectory. Multimodal behavior might occur.

    Renders one PNG per time step (trajectory colored by interaction force,
    plus obstacles and goal region), then stitches them into a video.
    """
    # ------------------------ Directories Setup ------------------------
    # Setup directories
    if not isdir("results"):
        mkdir("results")
    if not isdir(join("results", "plots")):
        mkdir(join("results", "plots"))

    # ------------------------ Plotting Setup ------------------------
    # INFO: provide trajectory info (hard-coded episode/subject/planner choice)
    subject = 2
    ep = 602
    planner_type = "diffusion_policy"
    planner_type2 = "diffusion_policy"
    planner_type3 = "bc_lstm_gmm"
    planner_type4 = "cogail"
    planner_type5 = "vrnn"
    full_path_to_vid_save = "ep-{}-{}".format(ep, planner_type5)

    # INFO: provide info for the traj from the H-H dataset
    og = "datasets/rnd_obstacle_v3/random_run_name_3/ep_602.npz"
    map_cfg = "demo/rnd_obstacle_v3/random_run_name_3/map_cfg/ep_{}.npz".format(ep)
    map_yml = "cooperative_transport/gym_table/config/maps/rnd_obstacle_v3.yml"

    # INFO: provide info for the traj from the HIL data collected starting with f, f2, f3 ...
    f = "results/hil-dp/subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-True_subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-True_subject-{}-ep_{}.npz".format(subject, planner_type, subject, planner_type, subject, ep)
    f2 = "results/hil-dp/subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}-ep_{}.npz".format(subject, planner_type2, subject, planner_type2, subject, ep)
    f3 = "results/hil-dp/subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}-ep_{}.npz".format(subject, planner_type3, subject, planner_type3, subject, ep)
    f4 = "results/hil-dp/subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}-ep_{}.npz".format(subject, planner_type4, subject, planner_type4, subject, ep)
    f5 = "results/hil-dp/subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}/eval_hil_seed-88_R-planner-{}_H-real-joystick_exp-random_run_name_3_hact-False_subject-{}-ep_{}.npz".format(subject, planner_type5, subject, planner_type5, subject, ep)

    skip = 1  # skip every n frames in the trajectory
    max_inter_f = 101  # in data
    min_inter_f = -101

    # Ground-truth (human-human) run: xy positions, full states and actions.
    # NOTE(review): `states`, `actions`, `full_path_to_vid_save` and `map_yml`
    # are assigned but never used below — confirm they can be dropped.
    trajog = dict(np.load(og, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    states = dict(np.load(og, allow_pickle=True))["states"][::skip, :].reshape(-1, 1, 2)
    actions = dict(np.load(og, allow_pickle=True))["actions"][::skip, :2].reshape(-1, 1, 2)
    # Recompute |interaction force| per time step for the ground-truth run
    # (actions hold the two players' forces: cols 0:2 and 2:4).
    inter_fog = np.zeros((trajog.shape[0], 1))
    for t in range(trajog.shape[0]):
        inter_f_t = np.abs(compute_interaction_forces(dict(np.load(og, allow_pickle=True))["states"][t], dict(np.load(og, allow_pickle=True))["actions"][t, :2], dict(np.load(og, allow_pickle=True))["actions"][t, 2:]))
        inter_fog[t] = inter_f_t
    # Rescale to [-1, 1] over the global force range, then back to force units.
    inter_fog = list(inter_fog[::skip][1:-1].flatten())
    inter_fog = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_fog]) * 101

    # HIL rollouts for each planner, with their stored fluency/interaction forces.
    # NOTE(review): f/f2 index fluency with [0] while f3-f5 use .item() —
    # presumably the npz layouts differ per planner; confirm.
    traj = dict(np.load(f, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    inter_f = dict(np.load(f, allow_pickle=True))["fluency"][0]["inter_f"][::skip][1:-1]
    inter_f = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_f]) * 101

    # # load traj2 traj
    # traj2 = dict(np.load(traj2_f, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    # traj2_inter_f = dict(np.load(traj2_f, allow_pickle=True))["fluency"]["inter_f"][::skip][1:-1]
    # traj2_inter_f = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) for x in inter_f])

    traj2 = dict(np.load(f2, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    inter_f2 = dict(np.load(f2, allow_pickle=True))["fluency"][0]["inter_f"][::skip][1:-1]
    inter_f2 = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_f2]) * 101

    traj3 = dict(np.load(f3, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    inter_f3 = dict(np.load(f3, allow_pickle=True))["fluency"].item()["inter_f"][::skip][1:-1]
    inter_f3 = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_f3]) * 101

    traj4 = dict(np.load(f4, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    inter_f4 = dict(np.load(f4, allow_pickle=True))["fluency"].item()["inter_f"][::skip][1:-1]
    inter_f4 = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_f4]) * 101

    traj5 = dict(np.load(f5, allow_pickle=True))["states"][::skip, :2].reshape(-1, 1, 2)
    inter_f5 = dict(np.load(f5, allow_pickle=True))["fluency"].item()["inter_f"][::skip][1:-1]
    inter_f5 = np.array([(x - min_inter_f) / (max_inter_f - min_inter_f) * (1 - (-1)) + (-1) for x in inter_f5]) * 101

    # Render one frame per time step, starting at t=5, out to the longest run.
    max_l = max([len(trajog), len(traj), len(traj2), len(traj3), len(traj4), len(traj5)])
    for t in range(5, max_l):
        hspace, vspace = (WINDOW_W / 100, WINDOW_H / 100)
        fig, ax = plt.subplots(figsize=(hspace, vspace), dpi=200)
        plt.rcParams["figure.figsize"] = (hspace, vspace)
        plt.rcParams["axes.edgecolor"] = "black"
        plt.rcParams["axes.linewidth"] = 2.5

        # Create a continuous norm to map from data points to colors.
        # Each trajectory becomes consecutive point-pair segments, truncated
        # at frame t (or the full run once it is shorter than t).
        if traj.shape[0] < t:
            segments = np.concatenate([traj[:-1, :, :2], traj[1:, :, :2]], axis=1)
        else:
            segments = np.concatenate([traj[:t-1, :, :2], traj[1:t, :, :2]], axis=1)
        if traj2.shape[0] < t:
            traj2_segments = np.concatenate([traj2[:-1, :, :2], traj2[1:, :, :2]], axis=1)
        else:
            traj2_segments = np.concatenate([traj2[:t-1, :, :2], traj2[1:t, :, :2]], axis=1)
        if traj3.shape[0] < t:
            traj3_segments = np.concatenate([traj3[:-1, :, :2], traj3[1:, :, :2]], axis=1)
        else:
            traj3_segments = np.concatenate([traj3[:t-1, :, :2], traj3[1:t, :, :2]], axis=1)
        if traj4.shape[0] < t:
            traj4_segments = np.concatenate([traj4[:-1, :, :2], traj4[1:, :, :2]], axis=1)
        else:
            traj4_segments = np.concatenate([traj4[:t-1, :, :2], traj4[1:t, :, :2]], axis=1)
        if traj5.shape[0] < t:
            traj5_segments = np.concatenate([traj5[:-1, :, :2], traj5[1:, :, :2]], axis=1)
        else:
            traj5_segments = np.concatenate([traj5[:t-1, :, :2], traj5[1:t, :, :2]], axis=1)
        if trajog.shape[0] < t:
            trajog_segments = np.concatenate([trajog[:-1, :, :2], trajog[1:, :, :2]], axis=1)
        else:
            trajog_segments = np.concatenate([trajog[:t-1, :, :2], trajog[1:t, :, :2]], axis=1)

        # Color each segment by its interaction force via the 'turbo' colormap.
        norm = plt.Normalize(min_inter_f, max_inter_f)
        lc = LineCollection(segments, cmap='turbo', norm=norm)
        traj2_lc = LineCollection(traj2_segments, cmap='turbo', norm=norm)
        traj3_lc = LineCollection(traj3_segments, cmap='turbo', norm=norm)
        traj4_lc = LineCollection(traj4_segments, cmap='turbo', norm=norm)
        traj5_lc = LineCollection(traj5_segments, cmap='turbo', norm=norm)
        trajoglc = LineCollection(trajog_segments, cmap='turbo', norm=norm)

        ### INFO: Can plot ALL inter_f segments simultaneously by uncommenting the following lines; otherwise, just uncomment the LineCollection for the method of interest
        # Set the values used for colormapping
        # lc.set_array(inter_f)
        # lc.set_linewidth(2)
        # line = ax.add_collection(lc)
        # traj2_lc.set_array(inter_f2)
        # traj2_lc.set_linewidth(2)
        # line = ax.add_collection(traj2_lc)
        # traj3_lc.set_array(inter_f3)
        # traj3_lc.set_linewidth(2)
        # line = ax.add_collection(traj3_lc)
        # traj4_lc.set_array(inter_f4)
        # traj4_lc.set_linewidth(2)
        # line = ax.add_collection(traj4_lc)
        # traj5_lc.set_array(inter_f5)
        # traj5_lc.set_linewidth(2)
        # line = ax.add_collection(traj5_lc)

        # Currently only the ground-truth (H-H) trajectory is drawn.
        trajoglc.set_array(inter_fog)
        trajoglc.set_linewidth(2)
        line = ax.add_collection(trajoglc)

        # gt = dict(np.load(gt_f, allow_pickle=True))["states"][::skip]

        # load map info (initial pose, goal, obstacles) for this episode
        map_run = dict(np.load(map_cfg, allow_pickle=True))
        # table initial pose
        table_init = np.zeros(2)
        table_init[0] = map_run["table"].item()["x"]
        table_init[1] = map_run["table"].item()["y"]
        # table goal pose
        table_goal = np.zeros(2)
        table_goal[0] = map_run["goal"].item()["goal"][0]
        table_goal[1] = map_run["goal"].item()["goal"][1]
        # table obstacles as encoding
        num_obs = map_run["obstacles"].item()["num_obstacles"]
        obs = np.zeros((num_obs, 2))
        obstacles = map_run["obstacles"].item()["obstacles"]

        # colorbar
        # divider = make_axes_locatable(ax)
        im_ratio = WINDOW_H / WINDOW_W
        fig.colorbar(line, ax=ax, orientation='vertical', label='Scaled Interaction Forces') #, fraction=0.1*im_ratio)
        # ax.annotate('Dec-traj2', xy=(80, 80), xycoords='figure points')
        # ax.annotate('VRNN', xy=(180, 80), xycoords='figure points')
        ax.set_xlim(0, WINDOW_W)
        ax.set_ylim(0, WINDOW_H)

        # plot map: start marker (grey circle at the trajectory's first point)
        ca = plt.gca()
        ca.add_patch(
            patches.Circle(
                (traj[0, :, 0], traj[0, :, 1]),
                radius=obstacle_size,
                facecolor=(175 / 255, 175 / 255, 175 / 255, 1.0),  # grey
                zorder=0,
            )
        )
        # Obstacles as squares; rows with a zero coordinate are padding.
        for i in range(obstacles.shape[0]):
            obstacle_w = obstacle_size
            obstacle_h = obstacle_size
            obstacle_x = obstacles[i, 0]  # - obstacle_w / 2.0
            obstacle_y = obstacles[i, 1]  # - obstacle_h / 2.0
            if obstacle_x == 0 or obstacle_y == 0:
                continue
            # NOTE(review): the anchor uses `obstacle_y + obstacle_h / 2` while
            # the goal rectangle below uses `- height / 2` — verify whether the
            # obstacle squares are meant to be offset upward.
            ca.add_patch(
                patches.Rectangle(
                    (obstacle_x - obstacle_w / 2, obstacle_y + obstacle_h / 2),
                    obstacle_w,
                    obstacle_h,
                    facecolor=(230 / 255, 111 / 255, 81 / 255, 1.0),
                    zorder=0,
                )
            )
        # Goal region (200x250 rectangle centered on the goal pose).
        ca.add_patch(
            patches.Rectangle(
                (table_goal[0] - 200 / 2, table_goal[1] - 250 / 2),
                200,
                250,
                facecolor=(242 / 255, 220 / 255, 107 / 255, 1.0),  # gold
                zorder=0,
            )
        )
        plt.gca().set_aspect("equal")
        plt.axis("off")
        # Save this frame as results/plots/ep_<ep>-inter_f/<t>.png
        plot_dir = join(
            "results",
            "plots",
            "ep_{}-inter_f".format(ep)
        )
        if not isdir(plot_dir):
            mkdir(plot_dir)
        plot_name = join(
            "results",
            "plots",
            "ep_" + str(ep) + "-inter_f",
            "{0}".format(t),
        )
        plt.xlabel("xlabel", fontsize=18)
        plt.ylabel("ylabel", fontsize=16)
        fig.set_size_inches(10, 5)
        plt.savefig(plot_name, dpi=200)
        # plt.show()
        plt.close('all')

    # ------------------------ Video --------------------------------
    # Stitch the saved frames into a video (plot_dir was set inside the loop).
    make_video(plot_dir, "ep_{}-inter_f".format(ep))
def main():
    """Script entry point: render the interaction-force trajectory video."""
    vis()


if __name__ == "__main__":
    main()
# Read one integer depth value (metres below the surface) per line.
melysegek=[]
db=0
with open("melyseg.txt","r") as bemenet:
    for sor in bemenet:
        melysegek.append(int(sor.strip()))
        db+=1

# Task 1: number of samples in the file.
print("1. feladat")
print("A fájl adatainak száma: ",len(melysegek))

# Task 2: depth at a user-given 1-based distance index.
print("2. feladat")
hely=int(input("Adjon meg egy távolságértéket! "))
print("Ezen a helyen a felszín",melysegek[hely-1],"méter mélyen van.")

# Task 3: percentage of untouched (zero-depth) positions.
print("3. feladat")
erintetlen=0
for i in melysegek:
    if i==0:
        erintetlen+=1;
print("Az érintetlen terület aránya: ","{:.2f}%".format((100*erintetlen)/db))

# Task 4: write each pit (maximal run of positive depths) on its own line,
# and count the pits while doing so.
kimenet=open("godrok.txt",'w')
elozo=0;
egysor=[]
sorok=[]
godhosz=0
for ertek in melysegek:
    if ertek>0:
        egysor.append(str(ertek))
    if ertek==0 and elozo>0:  # a pit just ended
        sorok.append(egysor)
        egysor=[]
        godhosz+=1
    elozo=ertek
# NOTE(review): a pit reaching the end of the data (last value > 0) is never
# appended or counted — confirm whether trailing pits can occur in the input.
for egysor in sorok:
    print(" ".join(egysor), file=kimenet)
kimenet.close()

print("5. feladat")
print("A gödrök száma: ",godhosz)

# Task 6: analyse the pit containing the position from task 2 (if any).
print("6. feladat")
if melysegek[hely-1]==0:
    print("Az adott helyen nincs gödör.")
else:
    print("a)")
    # Scan left/right from `hely` for the pit boundaries.
    # NOTE(review): this mixes the 1-based `hely` with 0-based list indexing
    # (`melysegek[poz]` here vs `melysegek[hely-1]` above), and the `poz<=veg`
    # bound below is checked AFTER indexing — verify boundaries / IndexError
    # risk at the ends of the data.
    poz=hely
    while melysegek[poz]>0:
        poz-=1
    kezdo=poz+2
    poz=hely
    while melysegek[poz]>0:
        poz+=1
    veg=poz
    print("A godor kezdete:",kezdo,"meter, a godor vege:",veg,"meter")
    print("b)")
    # Walk the pit in two monotone phases to decide whether it changes
    # depth in one direction and then the other without oscillating.
    poz=kezdo+1
    while melysegek[poz]<=melysegek[poz-1] and poz<=veg:
        poz+=1;
    while melysegek[poz] >= melysegek[poz - 1] and poz <= veg:
        poz += 1;
    if poz>veg:
        print("Folyamatosan melyul")
    else:
        print("Nem melyul folyamatosan")
    print("c)")
    # Deepest point of the pit.
    print("A legnagyobb melysege",max(melysegek[kezdo-1:veg]),"meter")
    print("d)")
    # Each sample spans 10 m, so volume = 10 * sum of depths.
    terfogat=10*sum(melysegek[kezdo-1:veg])
    print("A terfogata",terfogat,"m^3")
    print("f)")
    # Water volume if filled to 1 m below the rim: subtract 10 * pit length.
    hossz=10*(veg-kezdo+1)
    print("A vízmennyiség",terfogat-hossz,"m^3")
| Froxy555/informatika-erettsegi-python | 2021majus/godor.py | godor.py | py | 1,815 | python | hu | code | 1 | github-code | 13 |
18704204977 | """
Vamos agora retirar a concatenção e separar completamente as bases
"""
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder, StandardScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv1D, MaxPooling1D, Flatten, Dense
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
# Load the dataset.
df = pd.read_csv('../DADOS/Adendo A.2_Conjunto de Dados_DataSet.csv')

# Split into input features (X) and target (y).
X = df.drop(columns=['role'])
y = df['role']

# Encode the output classes as integers.
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(y)

# Standardize the input features.
scaler = StandardScaler()
X = scaler.fit_transform(X)

# Split train/test by class: class 0 ("normal") for training, class 1
# ("test-0") for testing.
# NOTE(review): this gives the training set only label 0 and the test set
# only label 1 — a supervised binary classifier trained this way is
# degenerate. Confirm intent (for anomaly detection a one-class approach
# may be what was meant).
X_train = X[y == 0]  # Classe "normal" para treinamento
y_train = y[y == 0]
X_test = X[y == 1]  # Classe "test-0" para teste/validação
y_test = y[y == 1]

# One-hot encoding intentionally disabled (binary output uses a single unit).
# y_train = to_categorical(y_train)
# y_test = to_categorical(y_test)

# Reshape inputs to (samples, features, 1) as required by Conv1D.
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)

# Build the network: Conv1D -> MaxPool -> Flatten -> Dense -> sigmoid.
model = Sequential()
model.add(Conv1D(128, kernel_size=7, activation='relu',
input_shape=(X_train.shape[1], 1)))
model.add(MaxPooling1D(pool_size=2))
model.add(Flatten())
model.add(Dense(64, activation='relu'))
# Single neuron for binary classification.
model.add(Dense(1, activation='sigmoid'))

# Compile with binary cross-entropy for the single sigmoid output.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=[
'accuracy'])

# Train the model.
model.fit(X_train, y_train, epochs=10, batch_size=32, verbose=1)

# Evaluate on the test set.
y_pred_proba = model.predict(X_test)
# Round probabilities to hard 0/1 predictions.
y_pred_classes = np.round(y_pred_proba).flatten()
y_true_classes = y_test

# Accuracy.
accuracy = np.mean(y_pred_classes == y_true_classes)
print(f"Accuracy: {accuracy}")

# Classification report.
print("Classification Report:")
print(classification_report(y_true_classes, y_pred_classes))

# Probability threshold above which a sample is flagged as an anomaly.
anomaly_threshold = 0.5

# Convert prediction probabilities into classes (0/1) using the threshold.
y_pred_classes = (y_pred_proba > anomaly_threshold).astype(int)

# Accuracy at the explicit threshold.
accuracy = np.mean(y_pred_classes == y_true_classes)
print(f"Accuracy: {accuracy}")

# Classification report (zero_division=1 avoids warnings on empty classes).
print("Classification Report:")
print(classification_report(y_true_classes, y_pred_classes, zero_division=1))
| accolombini/PROSPECACAO | TESTE_PY/teste29.py | teste29.py | py | 2,881 | python | pt | code | 0 | github-code | 13 |
36594566453 | from train import Cartoon, DataLoader, T, classification_report, initialize_model, load_model
import torch
from tqdm import tqdm
def get_loader(image_dir, attr_path, selected_attrs, crop_size=378, image_size=224,
batch_size=16):
    """Build and return a data loader.

    Center-crops to ``crop_size``, resizes to ``image_size`` and normalizes
    with the standard ImageNet mean/std, then wraps the Cartoon test split
    in a DataLoader under the key 'test'.

    NOTE(review): the ``batch_size`` parameter is NOT used — the DataLoader
    below hard-codes batch_size=1, and the evaluation loop in __main__
    relies on that (it squeezes per-sample outputs). Confirm before wiring
    the parameter through.
    """
    dataloader = {}
    transform = []
    transform.append(T.CenterCrop(crop_size))
    transform.append(T.Resize(image_size))
    transform.append(T.ToTensor())
    # ImageNet channel statistics.
    transform.append(T.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)))
    test_transform = T.Compose(transform)
    test_dataset = Cartoon(image_dir, attr_path, selected_attrs, test_transform, mode='test')
    dataloader['test'] = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=1)
    return dataloader
def set_crop_size(dataset):
    """Return the center-crop size (pixels) used for the given dataset name.

    CelebA images are cropped to 178; every other dataset uses 400.
    """
    return 178 if dataset == 'celeba' else 400
def test(model, test_loader):
    """Run `model` over `test_loader` and print a classification report.

    Collects rounded sigmoid outputs (0/1) and true labels per sample, then
    prints sklearn's report for labels [0, 1].

    NOTE(review): relies on the module-level `device`, which is only defined
    inside the `if __name__ == '__main__'` block — calling this from an
    importing module would raise NameError. Also assumes batch size 1
    (`.squeeze().tolist()` yields scalars). Confirm intended usage.
    """
    model = model.to(device)
    pred_list = []
    true_list = []
    model.eval()  # disable dropout/batch-norm updates
    with torch.no_grad():  # inference only; skip autograd bookkeeping
        for imgs, labels in tqdm(test_loader):
            imgs, labels = imgs.to(device), labels.to(device)
            pred = model(imgs)
            pred = torch.round(torch.sigmoid(pred))  # logits -> hard {0, 1}
            pred_list.append(pred.cpu().int().numpy())
            true_list.append(labels.cpu().int().numpy())
    pred_list = [a.squeeze().tolist() for a in pred_list]
    true_list = [b.squeeze().tolist() for b in true_list]
    print(classification_report(true_list, pred_list, labels=[0, 1]))
if __name__ == '__main__':
    # Evaluation configuration.
    dataset = 'celeba'
    crop_size = set_crop_size(dataset)
    attr = 'Brown_Hair'  # attribute being classified
    model_name = 'AlexNet'
    num_classes = 1  # single-attribute binary classification
    feature_extract = False
    pretrained = False
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Build the model, the test loader, and load the trained weights.
    model, input_size = initialize_model(model_name, num_classes, feature_extract, use_pretrained=pretrained)
    dataloaders_dict = get_loader(f'../datasets/{dataset}/images', f'../datasets/{dataset}/list_attr_{dataset}.txt', [attr], crop_size, 224)
    load_model(model, '../output_pth/{}_{}-{}(F).pkl'.format(attr, dataset, model_name))
    model = model.to(device)
    model.eval()
    # Inline evaluation loop.
    # NOTE(review): this duplicates test() above except for using .cuda()
    # directly (it will fail on CPU-only machines) — consider calling test().
    pred_list = []
    true_list = []
    with torch.no_grad():
        for x, c in dataloaders_dict['test']:
            x = x.cuda()
            c = c.cuda()
            pred = model(x)
            pred = torch.round(torch.sigmoid(pred))  # logits -> hard {0, 1}
            pred_list.append(pred.cpu().int().numpy())
            true_list.append(c.cpu().int().numpy())
    pred_list = [a.squeeze().tolist() for a in pred_list]
    true_list = [b.squeeze().tolist() for b in true_list]
    print(classification_report(true_list, pred_list, labels=[0, 1]))
# Runtime: Aim for linear
def two_sum(srt_lst, target):
    """Find indices of two DISTINCT elements of a sorted list summing to target.

    Classic two-pointer scan, O(n) time / O(1) space.

    Args:
        srt_lst: List of numbers sorted in ascending order.
        target: Desired sum.

    Returns:
        Tuple (low, high) with low < high such that
        srt_lst[low] + srt_lst[high] == target, or None if no such pair
        exists (including for empty or single-element lists).

    Fix: the original looped `while low <= high`, so when the pointers met
    it could pair an element with itself (e.g. two_sum([5], 10) -> (0, 0)).
    """
    low = 0
    high = len(srt_lst) - 1
    while low < high:  # strict: the two indices must be distinct
        pair_sum = srt_lst[low] + srt_lst[high]
        if pair_sum == target:  # found a matching pair
            return (low, high)
        elif pair_sum > target:  # too big: shrink from the right
            high -= 1
        else:  # too small: grow from the left
            low += 1
    return None
13313184047 | import weakref
from . attributes import AttributeHandler
from . exception import GameObjectException
class GameObject:
    """A named, tagged, attribute-bearing object in the game database.

    ``asset`` marks template/asset objects; ``relations`` holds weak
    references so related objects can be garbage-collected independently.
    """

    # Shared back-reference to the engine core (provides get_tag()).
    # NOTE(review): presumably assigned by the core module at startup — confirm.
    core = None

    __slots__ = ["name", "objid", "tags", "attributes", "initial_data", "aliases", "asset", 'relations', 'namespace']

    def __init__(self, objid: str, name: str, initial_data=None, asset: bool = False):
        """Create a game object.

        Args:
            objid: Unique identifier string.
            name: Display name.
            initial_data: Optional dict of "tags"/"attributes" applied later
                by load_initial(); defaults to an empty dict.
            asset: Whether this object is an asset/template object.
        """
        self.name = name
        self.aliases = list()
        self.objid = objid
        self.attributes = AttributeHandler(self)
        self.tags = dict()
        self.asset = asset
        self.relations = weakref.WeakValueDictionary()  # does not keep targets alive
        self.namespace = None
        if initial_data is not None:
            self.initial_data = initial_data
        else:
            self.initial_data = dict()

    def __str__(self):
        # String form is the object id, not the display name.
        return self.objid

    def __repr__(self):
        return f"<{self.__class__.__name__}: {self.objid} - {self.name}>"

    def load_initial(self):
        """Apply the tags and attribute categories recorded in initial_data."""
        # load tags - but don't set reverse on the Tag object just yet.
        for tagname in self.initial_data.get("tags", []):
            tag = self.core.get_tag(tagname)
            self.tags[tagname] = tag
            tag.objects.add(self)
        for k, v in self.initial_data.get("attributes", dict()).items():
            self.attributes.load_category(k, v)

    def dump_gameobject(self):
        """Return (serialized state dict, initial_data).

        NOTE(review): "tags" is a dict keys *view*, not a list — confirm the
        consumer materializes it before persisting.
        """
        return ({
            "name": self.name,
            "attributes": self.attributes.dump(),
            "tags": self.tags.keys()
        }, self.initial_data)

    def delete(self):
        """Unregister this object from every tag it carries."""
        for tag in self.tags.values():
            tag.objects.remove(self)
| volundmush/shinma | shinma/modules/core/gamedb/objects.py | objects.py | py | 1,591 | python | en | code | 0 | github-code | 13 |
1581408183 | import numpy as np
import scipy.ndimage as ndi
def seedGrowingSeg(I,sigma,seed_x,seed_y,thresh,n_rows,n_cols,slice_num):
    """Segment a 2D image with a seeded region-growing algorithm.

    1) Gaussian-blurs the slice to suppress noise.
    2) Starting from (seed_x, seed_y), repeatedly collects all non-segmented
       pixels 4-connected to the current region.
    3) Adds the candidate whose blurred intensity is closest to the current
       region mean, provided the absolute difference is below ``thresh``.
    4) Otherwise -- or after ~10% of the image area in iterations -- the
       algorithm has converged.

    INPUTS:
        I         - 2D image normalised between 0 and 1 (float array)
        sigma     - width of the Gaussian blurring kernel (float)
        seed_x    - x coordinate of the initial seed point (int)
        seed_y    - y coordinate of the initial seed point (int)
        thresh    - convergence threshold on intensity difference (float)
        n_rows    - number of rows in I (int)
        n_cols    - number of columns in I (int)
        slice_num - slice number of this slice in the whole dataset (int)

    OUTPUTS:
        Im_blur     - Gaussian-blurred copy of I (float array)
        seg         - boolean segmentation mask (bool array)
        region_size - number of pixels in the segmented region (int)

    FIX vs. original: region_size was computed as ``sum(sum(region != 0))``,
    but ``region`` is only assigned inside the growth branch, so converging on
    the very first iteration raised NameError; it also undercounted segmented
    pixels whose blurred intensity is exactly 0.  The count is now taken
    directly from the mask.

    AUTHOR: Edward James, June 2020
    """
    # Pre-processing: 2D Gaussian blurring to smooth out noise.
    Im_blur = ndi.gaussian_filter(I, sigma, order=0, mode='reflect')
    # Segmentation mask; cap the growth at ~10% of the image area.
    seg = np.zeros([n_rows, n_cols], bool)
    max_num_iterations = round(n_rows*n_cols/10)
    iterations = np.arange(1, max_num_iterations)
    # Coordinates of every pixel accepted into the segmentation so far.
    all_x_coords = np.zeros(max_num_iterations, int)
    all_y_coords = np.zeros(max_num_iterations, int)
    # Initialise with the seed pixel.
    seg[seed_y, seed_x] = 1
    region_mean = Im_blur[seed_y, seed_x]
    all_x_coords[0] = seed_x
    all_y_coords[0] = seed_y
    # In-plane 4-connectivity offsets (edge neighbours) of a pixel at (0, 0).
    kernel = np.zeros([4, 2], int)
    kernel[0, 0] = 1
    kernel[1, 1] = -1
    kernel[2, 0] = -1
    kernel[3, 1] = 1
    connectivity = np.arange(4)
    # Iterative region-growing loop.
    for num_iter in iterations:
        # Logical mask of all unassigned pixels connected to the region.
        connected_pix = np.zeros([n_rows, n_cols])
        for i in connectivity:
            conn_y_coord = all_y_coords[0:num_iter] + kernel[i, 0]
            conn_x_coord = all_x_coords[0:num_iter] + kernel[i, 1]
            n_coords = np.arange(len(conn_x_coord))
            for j in n_coords:
                # Keep only neighbours that lie inside the image ...
                if 0 <= conn_y_coord[j] and conn_y_coord[j] < n_rows \
                        and 0 <= conn_x_coord[j] and conn_x_coord[j] < n_cols:
                    # ... and are not already part of the segmentation.
                    if seg[conn_y_coord[j], conn_x_coord[j]] != 1:
                        connected_pix[conn_y_coord[j], conn_x_coord[j]] = 1
        # Intensities of the candidate pixels (zero elsewhere).
        connected_pix_intensities = Im_blur*connected_pix
        # Absolute intensity difference to the current region mean, flattened.
        # NOTE(review): non-candidate pixels contribute |region_mean - 0| here,
        # so if region_mean < thresh a non-connected pixel could be selected;
        # this quirk is preserved from the original algorithm.
        sim_metric_all = np.reshape(abs(region_mean - connected_pix_intensities), n_cols*n_rows)
        sim_metric = min(sim_metric_all)
        ind = np.argmin(sim_metric_all)
        if sim_metric < thresh:
            # Accept the closest candidate pixel into the region.
            [new_y_idx, new_x_idx] = np.unravel_index(ind, (n_rows, n_cols))
            all_x_coords[num_iter] = new_x_idx
            all_y_coords[num_iter] = new_y_idx
            seg[new_y_idx, new_x_idx] = 1
            # Update the region mean over the non-zero segmented intensities.
            region = Im_blur*seg
            region_mask = region[region != 0]
            region_mean = np.mean(region_mask)
        else:
            # No candidate close enough: segmentation has converged.
            print("Slice {} converged".format(slice_num+1))
            break
    # Count pixels directly from the mask; safe even when the loop broke on
    # the first iteration (when `region` was never assigned).
    region_size = int(np.count_nonzero(seg))
    return [Im_blur, seg, region_size]
3219811863 | from django.conf.urls import url
from . import views
# Namespace used by {% url 'search:...' %} reversing.
app_name='search'
# Route table for the "search" app.
urlpatterns=[
    url(r'^$',views.index,name='index'),  # landing / query page
    url(r'^result/$',views.search_result,name='search_result'),  # search results
    url(r'^upload/$',views.upload,name='upload'),  # image upload form/handler
    # image_address is restricted to [0-9a-zA-Z_] by the capture group below.
    url(r'^add_to_favorite/(?P<image_address>[0-9a-zA-Z\_]+)/$'\
        ,views.add_to_favorite,name='add_to_favorite'),
    url(r'^show_favorite/$',views.show_favorite,name='show_favorite'),  # favourites list
]
72966438739 | import numpy as np
from deLavalNozzleModel import rocketEngine_deLaval
from perfectlyExpandedNozzleModel import rocketEngine_perfectlyExpanded
from aerospikeNozzleModel import rocketEngine_aerospike
from atmosphericModels import atmosPress_earth
# Propellant Generation
# NOTE(review): c_p/c_v give gamma = 10/9 ~ 1.11 and the magnitudes do not
# look like SI values for kerolox -- confirm the units the engine models expect.
keroLox = {
    'T_1': 4000, # combustion temp
    'c_p': 10, # Specific heat at const press.
    'c_v': 9.0, # Specific heat at const Vol.
    'R': 287.0, # Molar mass of fuels. NOTE(review): 287.0 matches a specific gas constant [J/(kg*K)], not a molar mass -- confirm
}
rutherFord_mdot = 7.8 # kg/s, mass flow of one Rutherford-class engine
# Rocket Generation
# Shared chamber conditions for each stage; engine-specific dicts extend these.
engineDict_stage1_general = {
    'propellantProps': keroLox,
    'P_1': 100e5, # Pa, 20 bar chamber pressure (NOTE(review): 100e5 Pa = 100 bar, comment says 20 -- confirm)
    'v_1': 0.0, # Initial velocity of the propellant gas
    # 'designAlt': 15e3, # m, nozzle exit design altitude
}
engineDict_stage2_general = {
    'propellantProps': keroLox,
    'P_1': 100e5, # Pa, 20 bar chamber pressure (NOTE(review): 100e5 Pa = 100 bar, comment says 20 -- confirm)
    'v_1': 0.0, # Initial velocity of the propellant gas
}
engineDict_stage1_delaval = {
    **engineDict_stage1_general,
    'engine_mdot': rutherFord_mdot,
    'designAlt': 15e3, # m, nozzle exit design altitude
    'overExpSeparation': 0.25
}
# NOTE(review): spreads engineDict_stage1_general, not stage2_general -- the
# two currently hold identical values, but confirm this is intentional.
engineDict_stage2_delaval = {
    **engineDict_stage1_general,
    'engine_mdot': rutherFord_mdot, # kg/s
    'designAlt': 25e3, # m, nozzle exit design altitude, # 25e3m produces an engine w/ exit diam of 1.2m
    'overExpSeparation': 0.25
}
engineDict_stage1_aerospike = {
    **engineDict_stage1_general,
    'engine_mdot': 9*rutherFord_mdot, # I want one engine with 9x the flow rate of equiv rocket lab vehicle.
    'designAlt': 25e3, # m, nozzle exit design altitude
}
engineDict_stage2_aerospike = {
    **engineDict_stage2_general,
    'engine_mdot': 1*rutherFord_mdot, # kg/s
    'designAlt': 25e3, # m, nozzle exit design altitude
}
###### Assign Engine Properties ########
engine1_obj_deLaval = rocketEngine_deLaval(**engineDict_stage1_delaval)
engine2_obj_deLaval = rocketEngine_deLaval(**engineDict_stage2_delaval)

engine1_obj_perfectNozzle = rocketEngine_perfectlyExpanded(**engineDict_stage1_delaval)
engine2_obj_perfectNozzle = rocketEngine_perfectlyExpanded(**engineDict_stage2_delaval)


def _make_constant_thrust(design_thrust):
    """Build a totalThrust replacement that always reports *design_thrust*.

    Binding the value through the factory argument fixes a late-binding bug in
    the previous code: two module-level ``constThrust`` functions both read the
    module global ``designThrust`` at call time, so after the second assignment
    BOTH constant-thrust engines reported stage 2's design-altitude thrust.
    """
    def constThrust(altitudes):
        # `altitudes` is accepted (and ignored) to keep the
        # totalThrust(altitudes=...) call signature used elsewhere.
        return design_thrust
    return constThrust


# Constant-thrust variants: deLaval engines whose reported thrust is frozen at
# the value produced at their own design altitude.
engine1_obj_constThrust = rocketEngine_deLaval(**engineDict_stage1_delaval)
engine1_obj_constThrust.totalThrust = _make_constant_thrust(
    engine1_obj_constThrust.totalThrust(altitudes=[engine1_obj_constThrust.designAlt]))

engine2_obj_constThrust = rocketEngine_deLaval(**engineDict_stage2_delaval)
engine2_obj_constThrust.totalThrust = _make_constant_thrust(
    engine2_obj_constThrust.totalThrust(altitudes=[engine2_obj_constThrust.designAlt]))

engine1_obj_aerospike = rocketEngine_aerospike(**engineDict_stage1_aerospike)
engine2_obj_aerospike = rocketEngine_aerospike(**engineDict_stage2_aerospike)
###### Stage Definitions ######
# Generic per-stage parameters; the engine object is attached per variant below.
stage1 = {
    'stage': 1,
    # 'engineObj': None,
    'accel0': 1.4*9.81, # Initial acceleration of the stage.
    'theta0': np.pi/2.0,
    'ode_t_span': [0, 20e3],
    'structural_ratio': 0.08,
    'g0': 9.81, # m/s**2
}
stage2 = {
    'stage': 2,
    # 'engineObj': None,
    'accel0': 2.0 * 9.81, # Initial acceleration of the stage.
    'ode_t_span': [0, 20e3],
    # 'theta': np.pi/2.0,
    'structural_ratio': 0.08,
    'g0': 9.81, # m/s**2
}
################## Stage properties assignment ##################
# One (stage dict + engine object) pairing per nozzle model.
stage1_deLaval = {
    **stage1,
    'engineObj': engine1_obj_deLaval,
}
stage2_deLaval = {
    **stage2,
    'engineObj': engine2_obj_deLaval,
}
stage1_perfectThrust = {
    **stage1,
    'engineObj': engine1_obj_perfectNozzle,
}
stage2_perfectThrust = {
    **stage2,
    'engineObj': engine2_obj_perfectNozzle,
}
stage1_constThrust = {
    **stage1,
    'engineObj': engine1_obj_constThrust,
}
stage2_constThrust = {
    **stage2,
    'engineObj': engine2_obj_constThrust,
}
stage1_aerospike = {
    **stage1,
    'engineObj': engine1_obj_aerospike,
}
stage2_aerospike = {
    **stage2,
    'engineObj': engine2_obj_aerospike,
}
###### Stage Definitions ######
# Two-stage stacks, one per nozzle model.
stages_deLaval = [stage1_deLaval, stage2_deLaval]
stages_perfectNozzle = [stage1_perfectThrust, stage2_perfectThrust]
stages_constThrust = [stage1_constThrust, stage2_constThrust]
stages_aerospike = [stage1_aerospike, stage2_aerospike]
# Vehicle-level parameters shared by every variant.
electronRocket = {
    'u0': 50.0,
    'x0': 100.0, # 0.0,
    'theta0': np.deg2rad(0.001), # Theta is measured from the vertical, not horizontal.
    'atmos_model_func': atmosPress_earth,
    'mass_payload': 150.0,
    'Isp_generic': 311.0, # s # Need to update this to do it implicitly in the class. Sorry Dom.
}
electronRocket_deLaval = {
    **electronRocket,
    'stages': stages_deLaval
}
electronRocket_perfectNozzle = {
    **electronRocket,
    'stages': stages_perfectNozzle
}
electronRocket_constThrust = {
    **electronRocket,
    'stages': stages_constThrust
}
electronRocket_aerospike = {
    **electronRocket,
    'stages': stages_aerospike
}
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    # Quick visual comparison of thrust vs altitude for the four stage-1 engines.
    if True:
        rocketEngines = {
            'deLaval': engine1_obj_deLaval,
            'constant thrust': engine1_obj_constThrust,
            'Perfect nozzle': engine1_obj_perfectNozzle,
            'aerospike': engine1_obj_aerospike,
        }
        for engineName, engineObj in rocketEngines.items():
            print(engineName)
            alts = np.linspace(100, 50e3, 100)
            thrust = engineObj.totalThrust(altitudes=alts)
            plt.plot(alts, thrust, label=engineName)
        plt.xlabel('Altitude (m)')
        plt.ylabel('Thrust (N)')
        plt.legend()
        plt.show()
    # Disabled staging-optimisation demo; flip the guard to True to run it.
    # Note the exit(0) below stops it after the first (deLaval) case.
    if False:
        from rocketShip_class import rocketShip
        rocket1 = rocketShip(**electronRocket_deLaval)
        rocket1.deltaV_total = 9.0e3 # m/s
        rocket1.optimalStaging_restricted(structural_ratio=0.13)
        # rocket1.optimalStaging_unrestricted()
        exit(0)
        print('#'*60)
        rocket1 = rocketShip(**electronRocket_aerospike)
        rocket1.deltaV_total = 9000.0 # m/s
        rocket1.optimalStaging_restricted()
        rocket1.optimalStaging_unrestricted()
14646571315 | from sqlalchemy import Column, ForeignKey, Identity, Integer, Table
from . import metadata
# NOTE(review): this module appears auto-generated.  The column "type" names
# (PortalCustomerUpdate, PortalInvoiceList, ...) are referenced but never
# imported or defined here, so importing this module raises NameError until
# they are provided -- confirm against the generator's output for sibling tables.
PortalFeaturesJson = Table(
    "portal_featuresjson",
    metadata,
    Column("customer_update", PortalCustomerUpdate, ForeignKey("PortalCustomerUpdate")),
    Column("invoice_history", PortalInvoiceList, ForeignKey("PortalInvoiceList")),
    Column(
        "payment_method_update",
        PortalPaymentMethodUpdate,
        ForeignKey("PortalPaymentMethodUpdate"),
    ),
    Column(
        "subscription_cancel",
        PortalSubscriptionCancel,
        ForeignKey("PortalSubscriptionCancel"),
    ),
    Column(
        "subscription_pause",
        PortalSubscriptionPause,
        ForeignKey("PortalSubscriptionPause"),
    ),
    Column(
        "subscription_update",
        PortalSubscriptionUpdate,
        ForeignKey("PortalSubscriptionUpdate"),
    ),
    Column("id", Integer, primary_key=True, server_default=Identity()),
)

# Export the table object itself.  The previous value, "portal_features.json",
# is not a valid Python identifier and does not exist in this module, so
# star-imports and tooling relying on __all__ would fail.
__all__ = ["PortalFeaturesJson"]
| offscale/stripe-sql | stripe_openapi/portal_features.py | portal_features.py | py | 972 | python | en | code | 1 | github-code | 13 |
39772747654 | import re
# Sample addresses for exercising check_email below.
email1 = 'mir.moheuddin$student.sgh.waw.pl'  # invalid: '$' instead of '@'
email2 = 'mir.moheuddin@'  # invalid: missing domain
email3 = 'mm120110@student.sgh.waw.pl'  # valid
email4 = '@student.sgh.waw.pl'  # invalid: missing local part
email5 = 'mir.moheuddin@.pl'  # invalid: empty domain label
email6 = 'mir@moheuddin@.pl'  # invalid: two '@' signs
def check_email(email):
    """Return True if *email* looks like a syntactically valid address.

    Fixes vs. the original:
    - the TLD class ``[A-Z|a-z]`` accepted a literal '|' character -> [A-Za-z]
    - ``re.match`` only anchors the start, so trailing garbage after a valid
      address was accepted -> ``re.fullmatch`` requires the whole string.
    """
    pattern = r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}'
    return re.fullmatch(pattern, email) is not None
# Demo: print the validation verdict for each sample address.
for em in [email1, email2, email3, email4, email5, email6]:
    print('{}: {}'.format(em, check_email(em)))
# An older commented-out draft of the validator (a re.fullmatch variant plus
# its own test calls) previously lived here; removed as dead code.
| mirmoheuddin22/Python_Winter_2022_3 | Email Checking.py | Email Checking.py | py | 1,143 | python | en | code | 2 | github-code | 13 |
74370842898 | # Adapted from https://github.com/BichenWuUCB/SqueezeSeg
from typing import Dict
import torch
import torch.nn as nn
import torch.nn.functional as F
class Fire(nn.Module):
    """SqueezeNet Fire module: a 1x1 "squeeze" convolution followed by two
    parallel "expand" branches (1x1 and 3x3) concatenated on the channel axis.
    Every convolution is followed by an in-place ReLU."""

    def __init__(self,
                 inplanes: int,
                 squeeze_planes: int,
                 expand1x1_planes: int,
                 expand3x3_planes: int):
        super(Fire, self).__init__()
        self.inplanes = inplanes
        self.activation = nn.ReLU(inplace=True)
        # Channel bottleneck.
        self.squeeze = nn.Conv2d(inplanes, squeeze_planes, kernel_size=1)
        # Parallel expand branches; 3x3 is padded so spatial size is preserved.
        self.expand1x1 = nn.Conv2d(squeeze_planes, expand1x1_planes,
                                   kernel_size=1)
        self.expand3x3 = nn.Conv2d(squeeze_planes, expand3x3_planes,
                                   kernel_size=3, padding=1)

    def forward(self, x: torch.Tensor):
        squeezed = self.activation(self.squeeze(x))
        branch1 = self.activation(self.expand1x1(squeezed))
        branch3 = self.activation(self.expand3x3(squeezed))
        # Output channels = expand1x1_planes + expand3x3_planes.
        return torch.cat([branch1, branch3], 1)
# ******************************************************************************
class SqueezeNet(nn.Module):
    """
    SqueezeSeg-style encoder backbone built from Fire modules.

    Input channels are selected from the 5-channel range image
    (range, x, y, z, remission) according to the use_* flags; all
    downsampling strides are applied as [1, s], i.e. along dim 3 only.
    forward() returns the final feature map plus a dict of skip tensors
    keyed by the output stride at which they were captured.
    """
    def __init__(self,
                 use_range: bool=True,
                 use_xyz: bool=True,
                 use_remission: bool=True,
                 dropout: float=0.01,
                 output_stride: int=32,
                 **kwargs):
        # Call the super constructor
        super().__init__()
        print("Using SqueezeNet Backbone")
        self.use_range = use_range
        self.use_xyz = use_xyz
        self.use_remission = use_remission
        self.drop_prob = dropout
        self.output_stride = output_stride
        # input depth calc: channel indices follow the fixed layout
        # 0=range, 1..3=xyz, 4=remission.
        self.input_depth = 0
        self.input_idxs = []
        if self.use_range:
            self.input_depth += 1
            self.input_idxs.append(0)
        if self.use_xyz:
            self.input_depth += 3
            self.input_idxs.extend([1, 2, 3])
        if self.use_remission:
            self.input_depth += 1
            self.input_idxs.append(4)
        print("Depth of backbone input = ", self.input_depth)
        # stride play: four potential x2 downsamples (max output stride 16
        # from these strides; requested output_stride is clamped to what the
        # product of self.strides can reach).
        self.strides = [2, 2, 2, 2]
        # check current stride
        current_output_stride = 1
        for s in self.strides:
            current_output_stride *= s
        print("Original output_stride: ", current_output_stride)
        # make the new stride
        if self.output_stride > current_output_stride:
            print("Can't do OS, ", self.output_stride,
                  " because it is bigger than original ", current_output_stride)
        else:
            # redo strides according to needed stride: disable x2 strides
            # from the LAST layer backwards until the target stride is met.
            for i, stride in enumerate(reversed(self.strides), 0):
                if int(current_output_stride) != self.output_stride:
                    if stride == 2:
                        current_output_stride /= 2
                        self.strides[-1 - i] = 1
                if int(current_output_stride) == self.output_stride:
                    break
            print("New output_stride: ", int(current_output_stride))
            print("Strides: ", self.strides)
        # encoder: conv1a is the strided stem; conv1b is a 1x1 projection of
        # the raw input used only as the stride-1 skip tensor.
        self.conv1a = nn.Sequential(nn.Conv2d(self.input_depth, 64, kernel_size=3,
                                              stride=[1, self.strides[0]],
                                              padding=1),
                                    nn.ReLU(inplace=True))
        self.conv1b = nn.Conv2d(self.input_depth, 64, kernel_size=1,
                                stride=1, padding=0)
        self.fire23 = nn.Sequential(nn.MaxPool2d(kernel_size=3,
                                                 stride=[1, self.strides[1]],
                                                 padding=1),
                                    Fire(64, 16, 64, 64),
                                    Fire(128, 16, 64, 64))
        self.fire45 = nn.Sequential(nn.MaxPool2d(kernel_size=3,
                                                 stride=[1, self.strides[2]],
                                                 padding=1),
                                    Fire(128, 32, 128, 128),
                                    Fire(256, 32, 128, 128))
        self.fire6789 = nn.Sequential(nn.MaxPool2d(kernel_size=3,
                                                   stride=[1, self.strides[3]],
                                                   padding=1),
                                      Fire(256, 48, 192, 192),
                                      Fire(384, 48, 192, 192),
                                      Fire(384, 64, 256, 256),
                                      Fire(512, 64, 256, 256))
        # output
        self.dropout = nn.Dropout2d(self.drop_prob)
        # last channels
        self.last_channels = 512
    def run_layer(self,
                  x: torch.Tensor,
                  layer: nn.Module,
                  skips: Dict,
                  output_stride: int):
        """Apply *layer*; if it shrank the spatial size, stash the pre-layer
        tensor (detached) in *skips* keyed by the current output stride and
        double the stride counter.  Returns (output, skips, output_stride)."""
        y = layer(x)
        if y.shape[2] < x.shape[2] or y.shape[3] < x.shape[3]:
            skips[output_stride] = x.detach()
            output_stride *= 2
        x = y
        return x, skips, output_stride
    def forward(self, x):
        """Run the encoder.  Returns (features, skips) where skips maps an
        output stride to the detached activation captured at that stride."""
        # filter input: keep only the channels enabled in __init__.
        x = x[:, self.input_idxs]
        # run cnn
        # store for skip connections
        skips = {}
        output_stride = 1
        # encoder
        skip_in = self.conv1b(x)
        x = self.conv1a(x)
        # first skip done manually (conv1a always strides, see self.strides[0])
        skips[1] = skip_in.detach()
        output_stride *= 2
        x, skips, output_stride = self.run_layer(x, self.fire23, skips, output_stride)
        x, skips, output_stride = self.run_layer(x, self.dropout, skips, output_stride)
        x, skips, output_stride = self.run_layer(x, self.fire45, skips, output_stride)
        x, skips, output_stride = self.run_layer(x, self.dropout, skips, output_stride)
        x, skips, output_stride = self.run_layer(x, self.fire6789, skips, output_stride)
        x, skips, output_stride = self.run_layer(x, self.dropout, skips, output_stride)
        return x, skips
    def get_last_depth(self):
        """Channel count of the final feature map (512)."""
        return self.last_channels
    def get_input_depth(self):
        """Channel count the backbone expects at its input."""
        return self.input_depth
| Gorilla-Lab-SCUT/gorilla-3d | projects/SalsaNext/salsa/backbone/squeezenet.py | squeezenet.py | py | 5,754 | python | en | code | 7 | github-code | 13 |
35785557113 |
import time
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import threading
import random
from scipy import interpolate
import numpy as np
import csv
from DigitalFilter import *
#Global variables
WINDOWSIZE = 10  # samples per filtering window (see Plotter.addData)
PERIOD = 0.125  # seconds between samples; presumably 8 Hz sampling -- confirm
Colour = {"SCR":"k","RR":"b", "HR":"r", "SST":"g"}  # matplotlib colour per channel
GRAPHWIDTH = 300  # x-axis span (samples) before the plot window scrolls
#plt is a global stuff
#plotter manual:
# init with modes
class Plotter():
    """Live four-channel (SCR, RR, HR, SST) polygraph plotter.

    addData() feeds one raw sample per channel and, depending on the modes
    configured per channel ("FILTER", "CHANGERATE", "INTERPOLATE"), derives
    filtered / rate-of-change / spline-interpolated series before redrawing.
    stopPlotting() records the stop timestamp and dumps everything to CSV.

    NOTE(review): this code is Python-2 era (csv.writer over a file opened
    "wb", Axes.hold()); on Python 3 / modern matplotlib both would need
    updating -- confirm the target interpreter before changing.
    """
    def __init__(self, modes):
        """modes: dict mapping channel name -> list of mode strings."""
        #plt.title("Polygraph")
        #plt.axis([0, 300, -4096, 4096])
        self._windowSize = WINDOWSIZE
        self._currentWidth = GRAPHWIDTH
        #Set modes
        self._modes = modes
        #Plot switch
        self._doPlotting = False
        #Data storage: per-channel Y values, X indices and wall-clock stamps
        #for each derived series.
        self._dataSegment = {"SCR":[],"RR":[], "HR":[], "SST":[]} #raw data
        self._filteredData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #filtered data
        self._changeRateData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #change rate data
        self._interpolatedData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #interpolated data
        self._dataSegmentX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #raw data X axis
        self._filteredDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #filtered data X axis
        self._changeRateDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #change rate data X axis
        self._interpolatedDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #interpolated data X axis
        self._dataSegmentTime = {"SCR":[],"RR":[], "HR":[], "SST":[]} #raw data timestamps
        self._filteredDataTime = {"SCR":[],"RR":[], "HR":[], "SST":[]} #filtered data timestamps
        self._changeRateDataTime = {"SCR":[],"RR":[], "HR":[], "SST":[]} #change rate data timestamps
        self._interpolatedDataTime = {"SCR":[],"RR":[], "HR":[], "SST":[]} #interpolated data timestamps
        self._usedData = {"SCR":[],"RR":[], "HR":[], "SST":[]}
        self._usedDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]}
        #Event timestamps: red = first sample, yellow = stop.
        self._yellowButton = []
        self._redButton = []
        self._firstPlot = True
        plt.ion()
        self._SCR, self._axSCR = plt.subplots(1,1)
        self._axSCR.axis([0, 300, -4096, 4096])
        SCRLine = self._axSCR.plot([],[],"k",label="SCR")
        RRLine = self._axSCR.plot([],[],"b",label="RR")
        HRLine = self._axSCR.plot([],[],"r",label="HR")
        SSTLine = self._axSCR.plot([],[],"g",label="SST")
        self._axSCR.legend(["SCR","RR","HR","SST"])
        self._axSCR.hold(True)
        plt.draw()
        # Cached clean canvas region used for fast blitting in _plotGraph().
        self._background = self._SCR.canvas.copy_from_bbox(self._axSCR.bbox)
    def stopPlotting(self):
        """Record the stop time, draw the HR spline (if enabled) and dump all
        collected series plus button timestamps to ../polygraph.csv.

        For each channel the series actually written is the most derived one
        its modes produce (CHANGERATE > FILTER > raw; HR uses INTERPOLATE).
        The while loop walks row index i until all four channels are
        exhausted (iteration reaches 4)."""
        self._yellowButton.append(str(time.localtime().tm_hour)+":"+
                                  str(time.localtime().tm_min)+":"+
                                  str(time.localtime().tm_sec))
        #Recording to file
        row = []
        if "INTERPOLATE" in self._modes["HR"]:
            points = self._axSCR.plot(self._interpolatedDataX["HR"], self._interpolatedData["HR"], "m")[0]
            # redraw just the points
            self._axSCR.draw_artist(points)
            # fill in the axes rectangle
            self._SCR.canvas.blit(self._axSCR.bbox)
        # NOTE(review): "wb" mode with csv.writer is Python 2; on Python 3 use
        # open(..., "w", newline='').
        writer = csv.writer(open("../polygraph.csv", "wb"))
        writer.writerow(["SCR","Time","X","Y","RR","Time","X","Y","HR","Time","X","Y","SST","Time","X","Y","Yellow Button", "Red Button"])
        i=0
        iteration = 0
        while iteration<4:
            iteration = 0
            #SCR
            if "CHANGERATE" in self._modes["SCR"]:
                if i < len(self._changeRateDataX["SCR"]):
                    row.append("")
                    row.append(self._changeRateDataTime["SCR"][i])
                    row.append(self._changeRateDataX["SCR"][i])
                    row.append(self._changeRateData["SCR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            elif "FILTER" in self._modes["SCR"]:
                if i < len(self._filteredDataX["SCR"]):
                    row.append("")
                    row.append(self._filteredDataTime["SCR"][i])
                    row.append(self._filteredDataX["SCR"][i])
                    row.append(self._filteredData["SCR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            else:
                if i < len(self._dataSegmentX["SCR"]):
                    row.append("")
                    row.append(self._dataSegmentTime["SCR"][i])
                    row.append(self._dataSegmentX["SCR"][i])
                    row.append(self._dataSegment["SCR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            #RR
            if "FILTER" in self._modes["RR"]:
                if i < len(self._filteredDataX["RR"]):
                    row.append("")
                    row.append(self._filteredDataTime["RR"][i])
                    row.append(self._filteredDataX["RR"][i])
                    row.append(self._filteredData["RR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            else:
                if i < len(self._dataSegmentX["RR"]):
                    row.append("")
                    row.append(self._dataSegmentTime["RR"][i])
                    row.append(self._dataSegmentX["RR"][i])
                    row.append(self._dataSegment["RR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            #HR
            if "INTERPOLATE" in self._modes["HR"]:
                if i < len(self._interpolatedDataX["HR"]):
                    row.append("")
                    row.append("")
                    row.append(self._interpolatedDataX["HR"][i])
                    row.append(self._interpolatedData["HR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            else:
                if i < len(self._dataSegmentX["HR"]):
                    row.append("")
                    row.append(self._dataSegmentTime["HR"][i])
                    row.append(self._dataSegmentX["HR"][i])
                    row.append(self._dataSegment["HR"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            #SST
            if "FILTER" in self._modes["SST"]:
                if i < len(self._filteredDataX["SST"]):
                    row.append("")
                    row.append(self._filteredDataTime["SST"][i])
                    row.append(self._filteredDataX["SST"][i])
                    row.append(self._filteredData["SST"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            else:
                if i < len(self._dataSegmentX["SST"]):
                    row.append("")
                    row.append(self._dataSegmentTime["SST"][i])
                    row.append(self._dataSegmentX["SST"][i])
                    row.append(self._dataSegment["SST"][i])
                else:
                    iteration += 1
                    row.append("")
                    row.append("")
                    row.append("")
                    row.append("")
            #Buttons
            if i < len(self._yellowButton):
                row.append(self._yellowButton[i])
            else:
                row.append("")
            if i < len(self._redButton):
                row.append(self._redButton[i])
            else:
                row.append("")
            writer.writerow(row)
            row = []
            i += 1
        plt.show(block=True)
    def addData(self, data, title):
        """Append one raw sample for channel *title*, update every derived
        series enabled in self._modes[title], then redraw that channel."""
        if self._firstPlot:
            # Record the start ("red button") timestamp on the first sample.
            self._redButton.append(str(time.localtime().tm_hour)+":"+
                                   str(time.localtime().tm_min)+":"+
                                   str(time.localtime().tm_sec))
            self._firstPlot = False
        #Take raw data
        self._dataSegment[title].append(data)
        self._dataSegmentX[title].append(len(self._dataSegment[title]))
        self._dataSegmentTime[title].append(str(time.localtime().tm_hour)+":"+
                                            str(time.localtime().tm_min)+":"+
                                            str(time.localtime().tm_sec))
        #filter mode: one filtered point per full window of raw samples.
        if "FILTER" in self._modes[title] and len(self._dataSegment[title]) % self._windowSize == 0:
            self._filteredDataX[title].append(len(self._filteredData[title]))
            self._filteredData[title].append(processFilter(self._dataSegment[title][-10:]))
            self._filteredDataTime[title].append(str(time.localtime().tm_hour)+":"+
                                                 str(time.localtime().tm_min)+":"+
                                                 str(time.localtime().tm_sec))
        if "CHANGERATE" in self._modes[title]:
            # Differentiate the filtered series when available, else the raw one.
            if "FILTER" in self._modes[title]:
                dataX = self._filteredDataX[title]
                data = self._filteredData[title]
            else:
                dataX = self._dataSegmentX[title]
                data = self._dataSegment[title]
            if len(data)>5 and (("FILTER" in self._modes[title] and len(self._dataSegment[title]) % self._windowSize == 0) or not "FILTER" in self._modes[title]):
                currentIndex = dataX[-1]-3
                self._changeRateDataX[title].append(len(self._changeRateData[title]))
                # Five-point central difference:
                # f'(x) ~ (-f[i+2] + 8f[i+1] - 8f[i-1] + f[i-2]) / (12h)
                self._changeRateData[title].append((-1*data[currentIndex+2]+
                                                    8*data[currentIndex+1]-
                                                    8*data[currentIndex-1]+
                                                    data[currentIndex-2])/(12*PERIOD))
                self._changeRateDataTime[title].append(str(time.localtime().tm_hour)+":"+
                                                       str(time.localtime().tm_min)+":"+
                                                       str(time.localtime().tm_sec))
        if "INTERPOLATE" in self._modes[title] and len(self._dataSegment[title]) > 5:
            # Cubic B-spline through the raw points, resampled at 12.5x density.
            tck = interpolate.splrep(self._dataSegmentX[title], self._dataSegment[title], s=0)
            self._interpolatedDataX[title] = np.arange(0,len(self._dataSegment[title]),len(self._dataSegment[title])/(len(self._dataSegment[title])*12.5))
            self._interpolatedData[title] = interpolate.splev(self._interpolatedDataX[title], tck, der=0)
        self._plotGraph(title)
    def _clearData(self):
        """Reset every stored series (button timestamps are preserved)."""
        self._dataSegment = {"SCR":[],"RR":[], "HR":[], "SST":[]} #raw data
        self._filteredData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #filtered data
        self._changeRateData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #change rate data
        self._interpolatedData = {"SCR":[],"RR":[], "HR":[], "SST":[]} #interpolated data
        self._dataSegmentX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #raw data X axis
        self._filteredDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #filtered data X axis
        self._changeRateDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #change rate data X axis
        self._interpolatedDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]} #interpolated data X axis
        self._usedData = {"SCR":[],"RR":[], "HR":[], "SST":[]}
        self._usedDataX = {"SCR":[],"RR":[], "HR":[], "SST":[]}
    def _plotGraph(self,title):
        """Redraw channel *title* using the most derived series its modes
        produce, scrolling the x-axis by GRAPHWIDTH when the data outgrows it."""
        #choose what data to use
        if self._modes[title] == [] or "INTERPOLATE" in self._modes[title]:
            self._usedDataX[title] = self._dataSegmentX[title]
            self._usedData[title] = self._dataSegment[title]
        elif "CHANGERATE" in self._modes[title]:
            self._usedDataX[title] = self._changeRateDataX[title]
            self._usedData[title] = self._changeRateData[title]
        elif "FILTER" in self._modes[title]:
            self._usedDataX[title] = self._filteredDataX[title]
            self._usedData[title] = self._filteredData[title]
        #update graph width
        if len(self._usedData[title])>0 and self._usedDataX[title][-1] >= self._currentWidth:
            widthBefore = self._currentWidth
            self._currentWidth += GRAPHWIDTH
            self._axSCR.axis([widthBefore, self._currentWidth, -4096, 4096])
        #plotting
        points = self._axSCR.plot(self._usedDataX[title], self._usedData[title], Colour[title])[0]
        # restore background
        self._SCR.canvas.restore_region(self._background)
        # redraw just the points
        self._axSCR.draw_artist(points)
        # fill in the axes rectangle
        self._SCR.canvas.blit(self._axSCR.bbox)
        self._background = self._SCR.canvas
'''
#test
p = Plotter({"SCR":["FILTER","CHANGERATE"],"RR":["FILTER"], "HR":["INTERPOLATE"], "SST":["FILTER"]})
print "Initialised"
for i in range(100):
tstart = time.time()
p.addData(10*i+100*random.random(), "SCR")
p.addData(i+100*random.random(), "RR")
p.addData(40*i+100*random.random(), "HR")
p.addData(1000-50*i+100*random.random(), "SST")
#plt.pause(PERIOD)
print time.time()-tstart
p.stopPlotting()
'''
| 0xsuu/Pi_Polygraph | PlotGraph.py | PlotGraph.py | py | 14,362 | python | en | code | 0 | github-code | 13 |
9524359393 | # 导包
from typing import List
import sortingx
# Tests: verify each sortingx method sorts correctly with reverse=True.
class Reverser:
    """Exercises sortingx's descending-sort (reverse=True) methods against
    Python's sorted() over a shared test-data file and reports per-family
    pass/fail via collect()."""
    def __init__(self, genre: str, method: List[str], part: List[bool]=None, ret: List[bool]=None):
        '''
        genre: algorithm family name (an attribute of sortingx, e.g. "bubble");
        method: list of method names within that family;
        part: per-method flag, True when the method takes l/r bound arguments;
        ret: per-method flag, True when the method returns the sorted list.
        '''
        if part != None:
            assert len(part) == len(method)
        if ret != None:
            assert len(ret) == len(method)
        self.method = 'sortingx.' + genre
        self.call = method # method names resolved later as genre.def
        self.part = part
        self.ret = ret
    def __data(self):
        # Each line of the file is the repr of one test list; one copy of the
        # case list is kept per method under test.
        with open('./data/test.txt', 'rt', encoding='utf-8') as fout:
            strings = fout.readlines()
        self.data = [string.strip('\n') for string in strings]
        self.testing = [self.data for _ in range(len(self.call))]
    def __compare(self, func, index, nopart, noret):
        '''
        Run *func* over test set *index*, counting cases that match
        sorted(value, reverse=True).  NOTE: uses eval() on file content --
        only run against trusted test data.
        '''
        self.count = 0
        for value in self.testing[index]:
            value = eval(value)
            result = sorted(value, reverse=True)
            if nopart and noret:
                func(value, reverse=True)
                self.count += (value == result)
            if not nopart and noret:
                func(value, 0, len(value) - 1, reverse=True)
                self.count += (value == result)
            if nopart and not noret:
                value = func(value, reverse=True)
                self.count += (value == result)
            if not nopart and not noret:
                value = func(value, 0, len(value) - 1, reverse=True)
                self.count += (value == result)
    def collect(self) -> tuple:
        '''
        Collect the results: returns (qualified family path, all_passed) where
        all_passed is True only if every method sorted every test case.
        '''
        self.success = 0
        self.__data()
        for index, value in enumerate(self.call):
            func = eval(self.method + '.' + value)
            nopart = self.part == None or self.part[index] == False
            noret = self.ret == None or self.ret[index] == False
            self.__compare(func, index, nopart, noret)
            if self.count == len(self.data):
                self.success += 1
        succ_fal = self.success == len(self.call)
        return self.method, succ_fal
# 测试实例1
'''
Bubble = Reverser('bubble', ['normal', 'flag', 'bidirect'])
print(Bubble.collect())
'''
# 测试实例2
'''
Counting = Reverser('counting', ['whilediv', 'forenum', 'reverfill'], ret=[False, False, True])
print(Counting.collect())
'''
# 测试实例3
'''
Bucket = Reverser('bucket', ['numeric', 'mobase'])
print(Bucket.collect())
'''
# 测试实例4
'''
Insertion = Reverser('insertion', ['direct', 'binary'])
print(Insertion.collect())
'''
# 测试实例5
'''
Merge = Reverser('merge', ['recur', 'stack'], ret=[True, False])
print(Merge.collect())
'''
# 测试实例6
'''
Quick = Reverser('quick', ['lamb', 'recur', 'stack'], part=[False, True, True], ret=[True, False, False])
print(Quick.collect())
'''
# 测试实例7
'''
Selection = Reverser('selection', ['normal', 'withmax'])
print(Selection.collect())
'''
# 测试实例8
'''
Shell = Reverser('shell', ['donald', 'knuth', 'hibbard', 'sedgewick'])
print(Shell.collect())
'''
# 测试实例9
'''
Radix = Reverser('radix', ['lsd', 'msd'], ret=[True, True])
print(Radix.collect())
'''
# 测试实例10
'''
Heap = Reverser('heap', ['normal', 'recur'])
print(Heap.collect())
'''
# 测试实例11
'''
Tim = Reverser('tim', ['normal', 'binary'])
print(Tim.collect())
'''
# 测试实例12
'''
Tree = Reverser('tree', ['tree'], ret=[True])
print(Tree.collect())
''' | linjing-lab/sorting-algorithms | test_reverse.py | test_reverse.py | py | 3,585 | python | en | code | 4 | github-code | 13 |
41256124036 | n = 0
cont = 0  # how many numbers were entered
soma = 0  # running total
# When the variables share the same initial value they can all go on one line:
# n = cont = suma... i.e.: n = cont = soma = 0
n = int(input('Digite um número [999 para parar] :')) #flag: 999 is the sentinel value
while n != 999:
    soma += n
    cont += 1
    n = int(input('Digite um número [999 para parar] :'))#flag: the sentinel itself is not added to the sum
print('Vc digitou {} números e a soma é {}'.format(cont, soma))
| pfontaneda/Exercicios-Python | ex python/pythonProject1/ex064.py | ex064.py | py | 369 | python | pt | code | 0 | github-code | 13 |
74319488659 | import httpx
from ..exceptions import InfoGatherError
class PagureClient:
    """Thin async wrapper around the Pagure REST API (v0)."""

    def __init__(self, baseurl):
        self.baseurl = f"{baseurl}/api/0/"

    async def _get(self, endpoint, **kwargs):
        # One short-lived client per request keeps usage simple.
        async with httpx.AsyncClient() as client:
            return await client.get(self.baseurl + endpoint, **kwargs)

    def _check_errors(self, response):
        """Raise InfoGatherError for any non-200 response; no-op on success."""
        if response.status_code == 404:
            raise InfoGatherError(f"Issue querying Pagure: {response.json().get('error')}")
        if response.status_code != 200:
            raise InfoGatherError(
                f"Issue querying Pagure: {response.status_code}: {response.reason_phrase}"
            )

    async def get_issue(self, project, issue_id, namespace=None, params=None):
        """Fetch a single issue as a dict; raises InfoGatherError on failure."""
        path = "/".join(filter(None, [namespace, project, "issue", issue_id]))
        response = await self._get(path, params=params)
        self._check_errors(response)
        return response.json()

    async def get_project(self, project, namespace=None, params=None):
        """Fetch project metadata as a dict; raises InfoGatherError on failure."""
        path = "/".join(filter(None, [namespace, project]))
        response = await self._get(path, params=params)
        self._check_errors(response)
        return response.json()
| fedora-infra/maubot-fedora | fedora/clients/pagure.py | pagure.py | py | 1,272 | python | en | code | 0 | github-code | 13 |
17034002894 | from __future__ import print_function
import functools
import json
import os
import sys
from os.path import expanduser
from typing import Any
import six
from pathlib2 import Path
from pyparsing import (
ParseFatalException,
ParseException,
RecursiveGrammarException,
ParseSyntaxException,
)
from clearml_agent.external import pyhocon
from clearml_agent.external.pyhocon import ConfigTree, ConfigFactory
from .defs import (
Environment,
DEFAULT_CONFIG_FOLDER,
LOCAL_CONFIG_PATHS,
ENV_CONFIG_PATHS,
LOCAL_CONFIG_FILES,
LOCAL_CONFIG_FILE_OVERRIDE_VAR,
ENV_CONFIG_PATH_OVERRIDE_VAR,
)
from .defs import is_config_file
from .entry import Entry, NotSet
from .errors import ConfigurationError
from .log import initialize as initialize_log, logger
from .utils import get_options
try:
from typing import Text
except ImportError:
# windows conda-less hack
Text = Any
log = logger(__file__)
class ConfigEntry(Entry):
    """An Entry whose values are resolved against a Config instance."""

    logger = None

    def __init__(self, config, *keys, **kwargs):
        # type: (Config, Text, Any) -> None
        super(ConfigEntry, self).__init__(*keys, **kwargs)
        self.config = config

    def _get(self, key):
        # type: (Text) -> Any
        # NotSet marks a missing key so None stays a legal stored value.
        value = self.config.get(key, NotSet)
        return value

    def error(self, message):
        # type: (Text) -> None
        # Capitalize to match the log style used by the rest of the module.
        log.error(message.capitalize())
class Config(object):
    """
    Represents a server configuration.
    If watch=True, will watch configuration folders for changes and reload itself.
    NOTE: will not watch folders that were created after initialization.
    """

    # used in place of None in Config.get as default value because None is a valid value
    _MISSING = object()

    extra_config_values_env_key_sep = "__"
    extra_config_values_env_key_prefix = [
        "CLEARML_AGENT" + extra_config_values_env_key_sep,
    ]

    def __init__(
        self,
        config_folder=None,
        env=None,
        verbose=True,
        relative_to=None,
        app=None,
        is_server=False,
        **_
    ):
        """Create a configuration container.

        :param config_folder: name of the per-root config folder (defaults to
            DEFAULT_CONFIG_FOLDER)
        :param env: environment name; falls back to the TRAINS_ENV variable
        :param relative_to: module path(s) to resolve config roots against
        :param app: application name added to log records
        :param is_server: when True, also merge server-side env config paths
        :raises ValueError: for a missing or unknown environment name
        """
        self._app = app
        self._verbose = verbose
        self._folder_name = config_folder or DEFAULT_CONFIG_FOLDER
        self._roots = []
        self._config = ConfigTree()
        self._env = env or os.environ.get("TRAINS_ENV", Environment.default)
        self.config_paths = set()
        self.is_server = is_server
        self._overrides_configs = None

        if self._verbose:
            print("Config env:%s" % str(self._env))

        if not self._env:
            raise ValueError(
                "Missing environment in either init of environment variable"
            )
        if self._env not in get_options(Environment):
            raise ValueError("Invalid environment %s" % env)
        if relative_to is not None:
            self.load_relative_to(relative_to)

    @property
    def root(self):
        """First configuration root, or None when no roots are set."""
        return self.roots[0] if self.roots else None

    @property
    def roots(self):
        return self._roots

    @roots.setter
    def roots(self, value):
        self._roots = value

    @property
    def env(self):
        return self._env

    def logger(self, path=None):
        """Return a logger named after ``path`` (module factory passthrough)."""
        return logger(path)

    def load_relative_to(self, *module_paths):
        """Set config roots next to the given module paths and reload."""
        def normalize(p):
            # sibling folder of the module file, named after the config folder
            return Path(os.path.abspath(str(p))).with_name(self._folder_name)

        self.roots = list(map(normalize, module_paths))
        self.reload()

    def _reload(self):
        """Rebuild the merged configuration tree.

        Merge order (later wins): current config -> per-env root configs ->
        local config paths -> local config files -> env-injected values ->
        explicit overrides. Finally stamps the active env under "env".
        """
        env = self._env
        config = self._config.copy()

        if self.is_server:
            env_config_paths = ENV_CONFIG_PATHS
        else:
            env_config_paths = []

        env_config_path_override = ENV_CONFIG_PATH_OVERRIDE_VAR.get()
        if env_config_path_override:
            env_config_paths = [expanduser(env_config_path_override)]

        # merge configuration from root and other environment config paths
        if self.roots or env_config_paths:
            config = functools.reduce(
                lambda cfg, path: ConfigTree.merge_configs(
                    cfg,
                    self._read_recursive_for_env(path, env, verbose=self._verbose),
                    copy_trees=True,
                ),
                self.roots + env_config_paths,
                config,
            )

        # merge configuration from local configuration paths
        if LOCAL_CONFIG_PATHS:
            config = functools.reduce(
                lambda cfg, path: ConfigTree.merge_configs(
                    cfg,
                    self._read_recursive(path, verbose=self._verbose),
                    copy_trees=True,
                ),
                LOCAL_CONFIG_PATHS,
                config,
            )

        local_config_files = LOCAL_CONFIG_FILES
        local_config_override = LOCAL_CONFIG_FILE_OVERRIDE_VAR.get()
        if local_config_override:
            local_config_files = [expanduser(local_config_override)]

        # merge configuration from local configuration files
        if local_config_files:
            config = functools.reduce(
                lambda cfg, file_path: ConfigTree.merge_configs(
                    cfg,
                    self._read_single_file(file_path, verbose=self._verbose),
                    copy_trees=True,
                ),
                local_config_files,
                config,
            )

        config = ConfigTree.merge_configs(
            config, self._read_extra_env_config_values(), copy_trees=True
        )

        config = self.resolve_override_configs(config)

        config["env"] = env
        return config

    def resolve_override_configs(self, initial=None):
        """Merge any override trees registered via set_overrides() on top of ``initial``."""
        if not self._overrides_configs:
            return initial
        return functools.reduce(
            lambda cfg, override: ConfigTree.merge_configs(cfg, override, copy_trees=True),
            self._overrides_configs,
            initial or ConfigTree(),
        )

    def _read_extra_env_config_values(self) -> ConfigTree:
        """ Loads extra configuration from environment-injected values """
        result = ConfigTree()
        for prefix in self.extra_config_values_env_key_prefix:
            keys = sorted(k for k in os.environ if k.startswith(prefix))
            for key in keys:
                # CLEARML_AGENT__SECTION__KEY -> section.key (lower-cased)
                path = (
                    key[len(prefix) :]
                    .replace(self.extra_config_values_env_key_sep, ".")
                    .lower()
                )
                result = ConfigTree.merge_configs(
                    result, ConfigFactory.parse_string("{}: {}".format(path, os.environ[key]))
                )
        return result

    def replace(self, config):
        """Swap the internal configuration tree for ``config``."""
        self._config = config

    def reload(self):
        """Recompute the merged configuration and install it."""
        self.replace(self._reload())

    def initialize_logging(self, debug=False):
        """Configure the ``logging`` module from the "logging" config section.

        Drops incomplete handlers (no class, or file handler without a file
        name), creates missing log files, and strips references to dropped
        handlers from every logger. Returns True when logging was configured.
        """
        logging_config = self._config.get("logging", None)
        if not logging_config:
            return False

        # handle incomplete file handlers
        deleted = []
        handlers = logging_config.get("handlers", {})
        for name, handler in list(handlers.items()):
            cls = handler.get("class", None)
            is_file = cls and "FileHandler" in cls
            if cls is None or (is_file and "filename" not in handler):
                deleted.append(name)
                del handlers[name]
            elif is_file:
                file = Path(handler.get("filename"))
                if not file.is_file():
                    file.parent.mkdir(parents=True, exist_ok=True)
                    file.touch()

        # remove dependency in deleted handlers
        root_logger = logging_config.get("root", None)
        loggers = list(logging_config.get("loggers", {}).values()) + (
            [root_logger] if root_logger else []
        )
        # loop variable renamed from "logger", which shadowed the module-level
        # logger() factory within this method
        for logger_config in loggers:
            logger_handlers = logger_config.get("handlers", None)
            if debug:
                logger_config['level'] = 'DEBUG'
            if not logger_handlers:
                continue
            logger_config["handlers"] = [h for h in logger_handlers if h not in deleted]

        extra = None
        if self._app:
            extra = {"app": self._app}
        initialize_log(logging_config, extra=extra)
        return True

    def __getitem__(self, key):
        # Missing keys resolve to None instead of raising.
        try:
            return self._config[key]
        except Exception:  # was a bare except, which also swallowed KeyboardInterrupt
            return None

    def __getattr__(self, key):
        # Delegate unknown attributes to the config tree; top-level keys
        # resolve to their values (or None on lookup failure).
        c = self.__getattribute__('_config')
        if key.split('.')[0] in c:
            try:
                return c[key]
            except Exception:
                return None
        return getattr(c, key)

    def get(self, key, default=_MISSING):
        """Return the value for ``key``.

        :raises KeyError: when the key is missing and no default was given.
        """
        value = self._config.get(key, default)
        # Fixed: the previous guard ("value is self._MISSING and not default")
        # could never fire because the _MISSING sentinel object is truthy,
        # so the sentinel itself leaked out instead of raising.
        if value is self._MISSING:
            raise KeyError(
                "Unable to find value for key '{}' and default value was not provided.".format(
                    key
                )
            )
        return value

    def put(self, key, value):
        """Set ``key`` to ``value`` in the underlying tree."""
        self._config.put(key, value)

    def pop(self, key, default=None):
        """Remove ``key`` and return its value (or ``default``)."""
        return self._config.pop(key, default=default)

    def to_dict(self):
        """Return the configuration as a plain (ordered) dictionary."""
        return self._config.as_plain_ordered_dict()

    def as_json(self):
        """Return the configuration serialized as indented JSON."""
        return json.dumps(self.to_dict(), indent=2)

    def _read_recursive_for_env(self, root_path_str, env, verbose=True):
        """Read <root>/default and overlay <root>/<env> on top of it."""
        root_path = Path(root_path_str)
        if root_path.exists():
            default_config = self._read_recursive(
                root_path / Environment.default, verbose=verbose
            )
            if (root_path / env) != (root_path / Environment.default):
                env_config = self._read_recursive(
                    root_path / env, verbose=verbose
                )  # None is ok, will return empty config
                config = ConfigTree.merge_configs(default_config, env_config, True)
            else:
                config = default_config
        else:
            config = ConfigTree()

        return config

    def _read_recursive(self, conf_root, verbose=True):
        """Walk ``conf_root`` and load every config file into a tree.

        Sub-directories become dotted key prefixes; each file's stem becomes
        the key holding that file's parsed contents.
        """
        conf = ConfigTree()
        if not conf_root:
            return conf
        conf_root = Path(conf_root)

        if not conf_root.exists():
            if verbose:
                print("No config in %s" % str(conf_root))
            return conf

        if verbose:
            print("Loading config from %s" % str(conf_root))
        for root, dirs, files in os.walk(str(conf_root)):
            rel_dir = str(Path(root).relative_to(conf_root))
            if rel_dir == ".":
                rel_dir = ""
            prefix = rel_dir.replace("/", ".")

            for filename in files:
                if not is_config_file(filename):
                    continue

                if prefix != "":
                    key = prefix + "." + Path(filename).stem
                else:
                    key = Path(filename).stem

                file_path = str(Path(root) / filename)

                conf.put(key, self._read_single_file(file_path, verbose=verbose))

        return conf

    @staticmethod
    def _read_single_file(file_path, verbose=True):
        """Parse one HOCON file, re-raising parse failures as ConfigurationError."""
        if not file_path or not Path(file_path).is_file():
            return ConfigTree()

        if verbose:
            print("Loading config from file %s" % file_path)

        try:
            return pyhocon.ConfigFactory.parse_file(file_path)
        except ParseSyntaxException as ex:
            msg = "Failed parsing {0} ({1.__class__.__name__}): (at char {1.loc}, line:{1.lineno}, col:{1.column})".format(
                file_path, ex
            )
            six.reraise(
                ConfigurationError,
                ConfigurationError(msg, file_path=file_path),
                sys.exc_info()[2],
            )
        except (ParseException, ParseFatalException, RecursiveGrammarException) as ex:
            msg = "Failed parsing {0} ({1.__class__.__name__}): {1}".format(
                file_path, ex
            )
            six.reraise(ConfigurationError, ConfigurationError(msg), sys.exc_info()[2])
        except Exception as ex:
            print("Failed loading %s: %s" % (file_path, ex))
            raise

    def set_overrides(self, *dicts):
        """ Set several override dictionaries or ConfigTree objects which should be merged onto the configuration """
        self._overrides_configs = [
            d if isinstance(d, ConfigTree) else pyhocon.ConfigFactory.from_dict(d) for d in dicts
        ]
        self.reload()
| allegroai/clearml-agent | clearml_agent/backend_config/config.py | config.py | py | 12,445 | python | en | code | 205 | github-code | 13 |
3659583629 | with open("input1.txt") as f:
lines = f.readlines();
lo = 1
hi = 4
chars = lines[0]
# Count the characters of the initial 4-wide window chars[0:4].
# (The old hard-coded dict {chars[0]:1, chars[1]:2, chars[2]:1} was tuned to
# one specific puzzle input and miscounts any other initial window.)
d = {}
for ch in chars[:hi]:
    d[ch] = d.get(ch, 0) + 1
if len(d) == 4:
    # Edge case the original missed: the very first window is already
    # all-distinct; the final print(hi + 1) then reports position 4.
    hi = 3
else:
    # Slide the window one character at a time until it holds 4 distinct ones.
    while hi < len(chars):
        # character entering on the right
        d[chars[hi]] = d.get(chars[hi], 0) + 1
        # character leaving on the left; drop its key once exhausted
        d[chars[lo - 1]] -= 1
        if d[chars[lo - 1]] == 0:
            del d[chars[lo - 1]]
        if len(d) == 4:
            break
        lo += 1
        hi += 1
print(hi + 1) | felixab/AoC2022 | day_6/p_1.py | p_1.py | py | 553 | python | en | code | 0 | github-code | 13 |
2692692723 | import os
import requests
import dicttoxml
from collections import OrderedDict
from xml.dom.minidom import parseString
from bs4 import BeautifulSoup
from operator import itemgetter
from msedge.selenium_tools import Edge, EdgeOptions
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
#get university data from website (4icu.org rankings, scraped with Edge/Selenium)
class SampleICU:
    """Scrape ranked universities from 4icu.org and export them to XML."""
    #initialize class: opens an Edge browser session on the listing page
    def __init__(self):
        self.link = 'https://www.4icu.org/cd/' #web page link
        self.options = EdgeOptions()
        self.options.use_chromium = True
        self.driver = Edge(options=self.options)
        self.driver.get(self.link)
        self.page = None
        self.soup = None
        self.faculties = list()
        self.universities = list()
        # order must match the per-university value list built in get_university_data
        self.keys = ["rank","french_name","location","faculties","postal_code","telephone","description","name","acronym","founded"]
    #web scrape html and return the list of faculty names, or None
    def extract_faculties(self):
        try:
            table = self.soup.find_all('table')
            paragraphs = table[len(table) - 1].find_all('p')
            if len(paragraphs) != 2: return None #number of elements in faculties div
            else: return [x.getText() for x in paragraphs[1].find_all('strong')]
        except:
            # NOTE(review): `url` is not defined in this scope, so this error
            # path itself raises NameError — should likely use self.driver.current_url.
            print("Error occured, Could not extract faculty at " + url)
            return None
    #extract acronym, university name and year founded from table. Index identifies data to be retrieved
    def extract_university_information(self,index):
        try:
            tables = self.soup.find_all(class_='table')
            items = tables[0].find_all('tr')
            name = items[index].find('strong').getText()
            return name
        except:
            print("Error occured, extract_university_acronym()")
            return None
    #extract postal code, telephone and description; None if any field is absent
    def extract_university_location(self):
        postal_code = self.soup.find('span',itemprop='streetAddress')
        telephone = self.soup.find('span',itemprop='telephone')
        description = self.soup.find('p',itemprop='description')
        try: return [postal_code.getText(),telephone.getText(),description.getText()]
        except: return None
    #find element by link name, click on link and change pages; False on failure
    def navigate_to_page(self,name):
        timeout = 10 #seconds
        try:
            element = WebDriverWait(self.driver,timeout).until(
                EC.presence_of_element_located((By.LINK_TEXT,name))
            )
            element.click()
            return True
        except:
            print("Error occured, driver quit. Could not navigate to " + name + " web page")
            return False
    #navigate to uni page and append scraped detail fields to the university row
    def get_university_data(self,university):
        if not self.navigate_to_page(university[1]): self.driver.quit()
        else:
            self.page = requests.get(self.driver.current_url)
            self.soup = BeautifulSoup(self.page.content,'html.parser')
            university.append(self.extract_faculties())
            university.extend(self.extract_university_location())
            university.extend([self.extract_university_information(x) for x in range(1,4)]) #web scrape table information
            self.driver.back()
        return university
    #check if university is ranked
    # NOTE(review): always returns True — the numeric-rank check below is disabled.
    def is_ranked(self,uni):
        return True
        #if uni[0].getText().isnumeric(): return True
        #else: return False
    #get text of ranked university row cells
    def ranked(self,rankings):
        return [rankings[t].getText() for t in range(len(rankings))]
    #for all ranked rows in table getText
    # NOTE(review): range(2,10) hard-codes 8 rows; the commented line below was the general version.
    def get_table_ranks(self,table_rank):
        return [self.ranked(table_rank[x].find_all('td')) for x in range(2,10) if self.is_ranked(table_rank[x].find_all('td'))]
        #return [self.ranked(table_rank[x].find_all('td')) for x in range(2,len(table_rank) - 1) if self.is_ranked(table_rank[x].find_all('td'))]
    #web scrape universities rank data from the listing page
    def extract_ranked_universities(self,link):
        self.page = requests.get(link)
        self.soup = BeautifulSoup(self.page.content,'html.parser')
        table_rank = self.soup.find_all('tr')
        ranked = self.get_table_ranks(table_rank)
        return ranked
    #match value in array with key (zips self.keys with one university row)
    def match_key_value(self,unis):
        data = {}
        for count,value in enumerate(self.keys): data[value] = unis[count]
        return data
    #accumulate the faculties of one university dict into self.faculties
    def add_faculties(self,faculties):
        try:
            facts = faculties['faculties']
            if facts:
                for f in list(facts): self.faculties.append(f)
        except:
            print("Error occured, could not extract faculties.")
    #write all data to xml files (pretty-printed)
    def write_file_xml(self,data,filename):
        xml = dicttoxml.dicttoxml(data)
        dom = parseString(xml).toprettyxml()
        try:
            with open(filename,'w') as f:
                f.write(dom)
        except: print("Error occured, failed to write file " + filename)
    #get all ranked universities as dicts sorted by rank
    def extract_uni_data(self):
        data = [self.get_university_data(x) for x in self.extract_ranked_universities(self.link)]
        return sorted([self.match_key_value(x) for x in data],key=itemgetter('rank'))
    #extract all universities data from website and write both XML files
    def extract_uni_data_to_xml(self,university,faculty):
        self.universities = self.extract_uni_data()
        [self.add_faculties(self.universities[x]) for x in range(len(self.universities))]
        self.write_file_xml(self.universities,university)
        self.write_file_xml({"faculties": sorted(set(self.faculties))},faculty)
#program entry point: scrape everything and write the two XML outputs
if __name__ == "__main__":
    university = 'universities.xml'
    faculty = 'faculties.xml'
    sample = SampleICU()
    sample.extract_uni_data_to_xml(university,faculty)
    # close the Selenium browser session
    sample.driver.quit()
| kingIzi/Syllabus_001 | SyllabusWebSampleData/universities_data.py | universities_data.py | py | 5,851 | python | en | code | 0 | github-code | 13 |
72397736657 | import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
import math
from typing import List
class DataScaler():
    """Scale a 1-D time series and build sliding-window train/test tensors.

    The series is scaled with the supplied (sklearn-style) scaler, split
    80/20 into train/test, and cut into windows of ``interval`` past values
    (x) predicting the next value (y) — the usual LSTM input layout.

    Instance attributes: model (unused placeholder), scaler, scaled_data
    (shape (n, 1)), train_data (first 80%), train_data_len, dataset (the
    original Series), x_train/y_train, x_test/y_test, interval.
    """

    # Annotations are quoted forward references so the class no longer
    # requires sklearn/pandas at class-definition time.
    def __init__(self, dataset: "pd.Series", scaler: "MinMaxScaler", interval=6) -> None:
        """Fit-transform ``dataset`` with ``scaler`` and prepare the 80/20 split.

        Note: these attributes were previously declared as class-level mutable
        defaults (shared across all instances); they are now per-instance only.
        """
        self.model = None  # placeholder, not used by this class
        self.x_train = []
        self.x_test = []
        self.y_test = []
        self.y_train = []
        scaled_data = scaler.fit_transform(dataset.values.reshape(-1, 1))
        self.scaler = scaler
        self.dataset = dataset
        # 80% of the samples (rounded up) form the training split.
        self.train_data_len = math.ceil(len(dataset.values) * 0.8)
        self.scaled_data = scaled_data
        self.train_data = scaled_data[0: self.train_data_len, :]
        self.interval = interval

    def createTrainingData(self):
        """Build x_train (windows) and y_train (next values) from train_data."""
        x_train = []
        y_train = []
        for i in range(self.interval, len(self.train_data)):
            x_train.append(self.train_data[i - self.interval:i, 0])
            y_train.append(self.train_data[i, 0])
        x_train = np.array(x_train)
        y_train = np.array(y_train)
        # (samples, timesteps, features=1) — the layout sequence models expect.
        x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
        self.x_train = x_train
        self.y_train = y_train

    def createTestData(self):
        """Build x_test windows; y_test keeps the *unscaled* tail of the series."""
        # Include the last `interval` training points so the first test
        # window is complete.
        test_data = self.scaled_data[self.train_data_len - self.interval:, :]
        self.y_test = self.dataset[self.train_data_len:]
        x_test = []
        for i in range(self.interval, len(test_data)):
            x_test.append(test_data[i - self.interval:i, 0])
        x_test = np.array(x_test)
        x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
        self.x_test = x_test
| bjorger/master-thesis | Models/helper/DataScaler.py | DataScaler.py | py | 1,862 | python | en | code | 0 | github-code | 13 |
72014395859 | from rest_framework import generics, status
from rest_framework.response import Response
from .serializers import SignUpSerializer
from .utils import get_user_token
class SignUpApiView(generics.GenericAPIView):
    serializer_class = SignUpSerializer
    __doc__ = """
    SignUp Api view this api is used to create parcel owner, train operator and post master
    params:
    username: CharField
    password: CharField
    first_name: CharField
    last_name: CharField
    email: CharField
    user_type: PARCEL_OWNER/TRAIN_OPERATOR/POST_MASTER
    """

    def post(self, request, *args, **kwargs):
        # Validate the incoming payload; raise_exception=True makes DRF
        # return a 400 automatically on invalid data.
        serializer = self.get_serializer(data=request.data)
        is_valid = serializer.is_valid(raise_exception=True)
        if is_valid:
            serializer.save()
            payload = get_user_token(request)
            http_status = status.HTTP_201_CREATED
        else:
            payload = {"message": "Unable to create user"}
            http_status = status.HTTP_500_INTERNAL_SERVER_ERROR
        return Response(payload, status=http_status)
| viprathore/Python | mail_service/service/core/views.py | views.py | py | 1,188 | python | en | code | 0 | github-code | 13 |
31910934829 | import streamlit as st
import pandas
# Use the full browser width instead of Streamlit's centered default.
st.set_page_config(layout="wide")

# Two content columns with a narrow spacer column between them.
col1, empty_col, col2 = st.columns([1.5,.5,1.5])

with col1:
    # profile picture on the left
    st.image("images/profile_photo.png", width=450)

with col2:
    # name and introduction on the right
    st.title("Scott Caruso")
    content = """
One of my greatest passions lies in the realm of technology and software development. As an avid Python enthusiast, I thrive on the art of crafting innovative applications. Whether it's building intuitive user interfaces or implementing intricate algorithms, I immerse myself in the world of coding. The process of transforming ideas into tangible software brings me immense joy and a sense of accomplishment.
    """
    st.info(content)
    content2 = """
Below you can find some of the apps I have built in Python. Feel free to contact me!"""
    st.write(content2)
# Two more columns (plus a spacer) for the project gallery.
col3, empty_col, col4 = st.columns([1.5, .5, 1.5])

# Each row of data.csv describes one project card.
df = pandas.read_csv("data.csv", sep=";")


def _render_projects(column, rows):
    """Render one project card (title, blurb, screenshot, source link)
    per DataFrame row inside the given layout column."""
    with column:
        for index, row in rows.iterrows():
            st.header(row["title"])
            st.write(row["description"])
            st.image("images/" + row["image"])
            st.write("[Source Code](github_link)")


# Split the catalogue between the two columns; the two loop bodies were
# previously duplicated verbatim.
_render_projects(col3, df[:10])
_render_projects(col4, df[10:])
40439149325 | # Definition for a binary tree node.
class TreeNode(object):
    """A binary-tree node: a value plus left/right child links."""

    def __init__(self, x):
        self.val = x        # payload
        self.left = None    # left child (TreeNode or None)
        self.right = None   # right child (TreeNode or None)
class Tree(object):
    """Binary tree filled by level-order (breadth-first) insertion."""

    def __init__(self):
        self.root = None

    def add(self, item):
        """Insert *item* at the first free child slot found in BFS order."""
        # local import: this file has no top-level import section
        from collections import deque

        node = TreeNode(item)
        if self.root is None:
            self.root = node
            return
        queue = deque([self.root])  # deque.popleft() is O(1); list.pop(0) was O(n)
        while queue:
            cur_node = queue.popleft()
            if cur_node.left is None:
                cur_node.left = node
                return
            queue.append(cur_node.left)
            if cur_node.right is None:
                cur_node.right = node
                return
            queue.append(cur_node.right)
class BSTIterator(object):
    """Lazy in-order BST iterator.

    Holds only the current left spine on a stack, so memory stays O(h)
    for a tree of height h instead of O(n).
    """

    def __init__(self, root):
        """
        :type root: TreeNode
        """
        self.nodeStack = []
        self.leftBranchIntoStack(root)

    def leftBranchIntoStack(self, node):
        """Push *node* and every left descendant onto the stack."""
        while node is not None:
            self.nodeStack.append(node)
            node = node.left

    def next(self):
        """
        @return the next smallest number
        :rtype: int
        """
        smallest = self.nodeStack.pop()
        # The popped node's right subtree holds the next candidates.
        self.leftBranchIntoStack(smallest.right)
        return smallest.val

    def hasNext(self):
        """
        @return whether we have a next smallest number
        :rtype: bool
        """
        return bool(self.nodeStack)
# class BSTIterator(object):
#
# def __init__(self, root):
# self.stack = []
# while root:
# self.stack.append(root)
# root = root.left
#
# def next(self):
# node = self.stack.pop()
# r = node.right
# while r:
# self.stack.append(r)
# r = r.left
# return node.val
#
# def hasNext(self):
# return len(self.stack) != 0
class BSTIterator:
    """Eager in-order BST iterator: flattens the whole tree up front.

    Uses O(n) memory; next()/hasNext() run in O(1). A cursor replaces the
    original ``self.stack.pop(0)``, which was O(n) per call (and mutated
    the stored list); the next()/hasNext() interface is unchanged.
    """

    def __init__(self, root):
        self.stack = []   # in-order values, smallest first
        self._pos = 0     # index of the next value to hand out
        self.inorder(root)

    def inorder(self, node):
        """Append *node*'s subtree values to self.stack in sorted order."""
        if node is None:
            return
        self.inorder(node.left)
        self.stack.append(node.val)
        self.inorder(node.right)

    def next(self):
        """
        @return the next smallest number
        """
        value = self.stack[self._pos]
        self._pos += 1
        return value

    def hasNext(self):
        """
        @return whether we have a next smallest number
        """
        return self._pos < len(self.stack)
if __name__ == "__main__":
    # Build a small level-order tree: 15 at the root, 2 left, 16 right.
    root = Tree()
    root.add(15)
    root.add(2)
    root.add(16)
    # root.add(4)
    # root.add(5)
    print(root.root)
    # Iterate the tree in sorted order (uses the last BSTIterator defined above).
    obj = BSTIterator(root.root)
    param_1 = obj.next()
    print(param_1)
    print(obj.next())
    param_2 = obj.hasNext()
print(param_2) | parkerdu/leetcode | 173.py | 173.py | py | 2,874 | python | en | code | 0 | github-code | 13 |
4374278969 | import RPi.GPIO as GPIO
import time
from threading import Timer
# Pin configuration
GPIO.setwarnings(False) # suppress warnings
GPIO.setmode(GPIO.BOARD) # pin numbers refer to the physical (board) layout
GPIO.setup(10, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) # pin 10: input with internal pull-down
GPIO.setup(8,GPIO.OUT, initial=GPIO.HIGH) # pin 8: output, initial level HIGH
# Turn the LED off (HIGH switches it off, i.e. the LED is wired active-low)
def led_off():
    GPIO.output(8, GPIO.HIGH)
# Edge-detect callback: flash the LED for one second on each button press.
def button_callback(channel):
    # Only react to the button wired to physical pin 10.
    if channel != 10:
        return
    print("Button was pushed!")
    GPIO.output(8, GPIO.LOW)
    # Turn the LED back off after one second without blocking the callback.
    Timer(1, led_off).start()
# Fire an event on the rising edge of the input pin (1000 ms debounce)
GPIO.add_event_detect(10,GPIO.RISING,callback=button_callback, bouncetime=1000)
try:
    # block waiting for keyboard input
    message = input("Press enter to quit\n\n")
except KeyboardInterrupt:
    print('Keyboard Interrupted')
finally:
    # reset the GPIO configuration on exit
    GPIO.cleanup()
| nmiri-nagoya-nsaito/iot_training_20181113 | gpio_event.py | gpio_event.py | py | 1,087 | python | ja | code | 0 | github-code | 13 |
22118250648 | """
HOME-MADE TIGHT-BINDING CODE FOR TWISTED BILAYER GRAPHENE
Contains all relevant constants, functions and classes.
Is meant to be imported as a module in other python scripts.
"""
import numpy as np #arrays
import matplotlib.pyplot as plt #plotting
import matplotlib #plot settings
from numba import jit #just-in-time compiled code (faster code).
from scipy import sparse #sparse matrices (efficient data storage)
from scipy import linalg #linear algebra routines for small matrices
from scipy import constants #convenient shortcut for physical constants (hbar, e, c ...)
from scipy.sparse import linalg as sparse_linalg #linear algebra for big sparse matrices
np.set_printoptions(linewidth=200,suppress=True) #increase "print" linewidth and suppress scientific notation

#plot settings (LaTeX text, serif fonts, compact figure defaults)
matplotlib.rc('text',usetex=True)
plt.rcParams['figure.figsize'] = (4.9,3.5)
plt.rcParams['font.size'] = 11.0
plt.rcParams['font.family'] = 'serif'
plt.rcParams['font.serif'] = 'Palatino'
plt.rcParams['axes.titlesize'] = 'medium'
plt.rcParams['figure.titlesize'] = 'medium'
plt.rcParams['text.usetex'] = True
plt.rcParams['figure.dpi'] = 200 #to preview plot in jupyter
plt.rcParams['figure.autolayout'] = True
#twist angle (radians) of commensurable structures, as a function of a pair of integers p,q
twist_angle=lambda p,q: np.arccos((3*p**2+3*p*q+q**2/2)/(3*p**2+3*p*q+q**2))
#number of positions inside unit cell per sublayer: N_\ell (notation I used in my TG);
#the q%3==0 case has a smaller commensurate cell
number_of_sublattice_positions=lambda p,q: p**2+p*q+q**2//3 if q%3==0 else 3*p**2+3*p*q+q**2
#3x3 rotation matrix about z, i.e. in the xy plane (the plane of the material)
rot=lambda theta: np.array([[np.cos(theta),-np.sin(theta),0.0],[np.sin(theta),np.cos(theta),0.0],[0.0,0.0,1.0]])
#superconducting magnetic flux quantum h/(2e), ~2.07e-15 Wb
phi0=constants.h/(2*constants.e)
def superlattice(p,q):
    """
    Given the pair of integers p,q, return the correspondent crystal (basis r + primitive vectors L1,L2):
    r, L1, L2 = superlattice(p,q)

    r has shape (4N, 3): layer-1 positions first, then layer-2 positions
    rotated by the twist angle and offset by the interlayer distance 3.35 A.
    """
    a=2.46 #Angstrom, graphene lattice constant
    s3=np.sqrt(3)
    # monolayer primitive vectors
    a1=a*np.array([s3/2,-1/2,0.0])
    a2=a*np.array([s3/2,1/2,0.0])
    def positions(i11,i12,i21,i22):
        # All A-sublattice sites m*a1+n*a2 that fall inside the supercell
        # spanned by (i11*a1+i12*a2, i21*a1+i22*a2), found by fractional
        # coordinates (mm, nn) in [0, 1).
        A=[]
        D=i11*i22-i21*i12
        ms=(0,i11,i21,i11+i21)
        ns=(0,i12,i22,i12+i22)
        for m in range(min(ms),max(ms)+1):
            for n in range(min(ns),max(ns)+1):
                mm=(m*i22-i21*n)/D
                nn=(i11*n-m*i12)/D
                if 0<=mm<1 and 0<=nn<1:
                    A.append(m*a1+n*a2)
        # B sublattice is the A sublattice shifted by (a1+a2)/3
        # (NOTE: the comprehension variable shadows the outer argument p)
        B=[p+(a1+a2)/3 for p in A]
        return A+B
    # integer supercell matrix; the q%3==0 commensuration has a smaller cell
    if q%3==0:
        i11=p+q//3
        i12=q//3
        i21=-q//3
        i22=p+2*q//3
    else:
        i11=p
        i12=p+q
        i21=-p-q
        i22=2*p+q
    # superlattice primitive vectors
    L1=i11*a1+i12*a2
    L2=i21*a1+i22*a2
    r1=positions(i11,i12,i21,i22)
    # change-of-basis matrices between Cartesian and (a1, a2) coordinates,
    # used to express the rotated supercell in integer indices again
    Mac=np.array([[a1[0],a2[0],0.0],[a1[1],a2[1],0.0],[0.0,0.0,1.0]])
    Mca=np.linalg.inv(Mac)
    i11r,i12r,_=np.round(Mca@rot(-twist_angle(p,q))@Mac@[i11,i12,0]).astype('int')
    i21r,i22r,_=np.round(Mca@rot(-twist_angle(p,q))@Mac@[i21,i22,0]).astype('int')
    # layer 2: same construction, rotated by the twist angle and lifted by 3.35 A
    r2=positions(i11r,i12r,i21r,i22r)@rot(twist_angle(p,q)).T + np.array([0.0,0.0,3.35])
    return np.concatenate((r1,r2)),L1,L2 #r,L1,L2
@jit(nopython=True)
def hopping_max_index(L1,L2,max_distance):
    """
    This function uses some analytic geometry to calculate the maximum index necessary for iteration
    in the hoppings function below, for a diamond shape ("losango regular") unit cell.

    Grows M until every corner of the home cell is farther than max_distance
    from the boundary of the (2M+1)x(2M+1) block of neighboring cells.
    """
    M=-1 #maximum index of iteration
    min_dist=-1.
    while min_dist<max_distance:
        M+=1
        min_dist=np.inf
        # O runs over the corners of the enlarged cell block; v over its edge
        # directions; P over the corners of the home cell. min point-line
        # distance decides whether M is large enough.
        for O in [-M*(L1+L2),-M*L1+(M+1)*L2,(M+1)*L1-M*L2,(M+1)*(L1+L2)]:
            for v in [L1,L2]:
                for P in [0*L1,L1,L2,L1+L2]:
                    # foot of the perpendicular from P onto the line O + t*v
                    parameter=np.dot(v,P-O)/np.dot(v,v)
                    dist=np.linalg.norm(O+parameter*v-P)
                    if min_dist>dist:
                        min_dist=dist
    return M
@jit(nopython=True)
def hoppings(r,L1,L2,nhops,max_distance):
    """
    Only looks for i<j hoppings. Returns (:,8) array.
    hops = hoppings(r,L1,L2,nhops,max_distance)
    hops[:,0] #rows
    hops[:,1] #columns
    hops[:,2:5] #initial position vectors
    hops[:,5::] #final position vectors

    nhops must be an upper bound on the number of hoppings; the returned
    array is trimmed to the count actually found.
    """
    hops=np.zeros((nhops,8))
    ix=0 #keep track of the number of hoppings
    # search window of lattice translations guaranteed to contain all
    # neighbors within max_distance
    M=hopping_max_index(L1,L2,max_distance)
    for i in range(r.shape[0]):
        for j in range(i+1,r.shape[0]):
            for m in range(-M,M+1):
                for n in range(-M,M+1):
                    # periodic image of site j shifted by m*L1+n*L2
                    if np.linalg.norm(r[j,:]-r[i,:]+m*L1+n*L2)<max_distance:
                        hops[ix,0] = i
                        hops[ix,1] = j
                        hops[ix,2:5] = r[i,:]
                        hops[ix,5:8] = r[j,:] + m*L1+n*L2
                        ix+=1
    return hops[0:ix]
def hoppings_onsite(L1, L2, max_distance):
    """Generate hopping from a site to itself and its periodic repetitions.

    Returns an (nhops, 3) array of lattice translation vectors m*L1 + n*L2
    whose length is below max_distance (includes the zero vector).
    """
    M = hopping_max_index(L1, L2, max_distance)
    offsets = []
    for m in range(-M, M + 1):
        for n in range(-M, M + 1):
            shift = m * L1 + n * L2
            if np.linalg.norm(shift) < max_distance:
                offsets.append(shift)
    return np.array(offsets)
# intralayer neighbor-shell distances (Angstrom): dimensionless shell radii times a=2.46
distances_intra=[x*2.46 for x in [0.01,0.58,1.01,1.16,1.53,1.74,2.01,2.09,2.31]]
def t_intra(x1, y1, x2, y2):
    """
    Intralayer hoppings of graphene.
    (x1,y1) and (x2,y2) are the initial and final positions, respectively.
    Returns the hopping energy (eV) per pair; zero outside the tabulated shells.
    """
    a = 2.46  # Angstrom. Lattice constant of graphene
    rn = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / a  # distance in units of a
    t = np.zeros_like(rn)
    # (lower, upper) windows of r/a -> hopping energy, 1st..8th neighbor shells
    shells = (
        ((0.3, 0.8), -2.8922),   # 1st neighbors = 0.5774
        ((0.8, 1.1), 0.2425),    # 2nd neighbors = 1.0
        ((1.1, 1.3), -0.2656),   # 3rd neighbors = 1.1547
        ((1.3, 1.6), 0.0235),    # 4th neighbors = 1.5275
        ((1.6, 1.8), 0.0524),    # 5th neighbors = 1.7321
        ((1.8, 2.05), -0.0209),  # 6th neighbors = 2.0
        ((2.05, 2.1), -0.0148),  # 7th neighbors = 2.0817
        ((2.1, 2.35), -0.0211),  # 8th neighbors = 2.3094
    )
    for (lower, upper), energy in shells:
        t[(lower < rn) * (rn < upper)] = energy
    return t
def t_inter(x1,y1,x2,y2,l1,l2,theta):
    """
    Interlayer hoppings of graphene.
    l1 and l2 are the sublayers associated to the initial (x1,y1) and final (x2,y2) positions, respectively.
    theta is the twist angle.

    Built from three fitted radial profiles V0, V3, V6 modulated by the
    triple/sextuple-angle cosines of the in-plane direction.
    """
    a=2.46 #Angstrom. Lattice constant of graphene
    x=x2-x1
    y=y2-y1
    r=np.sqrt(x**2+y**2)
    # fitted model parameters (amplitudes, decay widths, offsets, wavenumbers)
    l0, l3, l6 = 0.3155, -0.0688, -0.0083
    xi0, xi3, xi6 = 1.7543, 3.4692, 2.8764
    x3, x6 = 0.5212, 1.5206
    k0, k6 = 2.0010, 1.5731
    rn=r/a
    V0=l0 * np.exp(-xi0 * rn**2) * np.cos(k0*rn)
    V3=l3 * rn**2 * np.exp(-xi3 * (rn-x3)**2)
    V6=l6 * np.exp(-xi6 * (rn-x6)**2) * np.sin(k6 * rn)
    c3 = lambda x: 4*x**3-3*x # cos(3*arccos(x)): triple-angle cosine
    c6 = lambda x: 32*x**6-48*x**4+18*x**2-1 # cos(6*arccos(x)): sextuple-angle cosine
    xx=np.nan_to_num(x/r) #directional cosine along x (nan_to_num guards r == 0)
    yy=np.nan_to_num(y/r) #directional cosine along y
    cos3=l1*c3(xx)-l2*c3(xx*np.cos(theta)+yy*np.sin(theta)) #awkward term whose sign depends on the sublattice
    cos6=c6(xx)+c6(xx*np.cos(theta)+yy*np.sin(theta)) #even term, independent of the sublattice
    return V0+V3*cos3+V6*cos6
def hopping_parameters(hops, hops_onsite, N, theta, t_intra, t_inter):
    """
    Calculates off-site (t) and on-site (t_onsite) hopping parameters t(r).

    Hoppings whose endpoints share the same z coordinate are treated with
    t_intra; the rest with t_inter, which also needs the sublattice signs.
    """
    intra = np.abs(hops[:, 4] - hops[:, 7]) == 0.0
    inter = ~intra
    # sublattice signs (+1/-1) for the interlayer pairs
    l1 = np.array([1 if i < N else -1 for i in hops[inter, 0]])
    l2 = np.array([1 if 2 * N <= j < 3 * N else -1 for j in hops[inter, 1]])
    t = np.zeros(hops.shape[0])
    t[intra] = t_intra(hops[intra, 2], hops[intra, 3], hops[intra, 5], hops[intra, 6])
    t[inter] = t_inter(hops[inter, 2], hops[inter, 3], hops[inter, 5], hops[inter, 6], l1, l2, theta)
    t_onsite = t_intra(0, 0, hops_onsite[:, 0], hops_onsite[:, 1])
    return t, t_onsite
def hamiltonian(hops,hops_onsite,t,t_onsite,N,interlayer=0.0,V=0.0):
    """
    Hamiltonian matrix H(k) as a function of a point k=(kx,ky,kz) in reciprocal space.

    interlayer scales the interlayer hoppings (0 decouples the layers);
    V is the bias between layers, applied as +V/2 / -V/2 on the diagonal.
    Returns a closure H(k) producing a sparse (4N, 4N) matrix.
    """
    #off-diag hoppings
    i=hops[:,0].astype('int')
    j=hops[:,1].astype('int')
    r1=hops[:,2:5]
    r2=hops[:,5:8]
    tt=np.copy(t)
    # scale interlayer amplitudes (endpoints at different z) by `interlayer`
    inter=np.abs(hops[:,4]-hops[:,7])!=0.0
    tt[inter]=tt[inter]*interlayer
    #onsite hoppings
    I4N=sparse.eye(4*N)
    #electric-field: first two sublayers get +V, last two -V (halved below)
    Vdiag=V*np.concatenate((np.ones(2*N),-np.ones(2*N)))
    Vmat=sparse.diags(Vdiag)
    #full matrix
    def H(k):
        # Bloch phases on the upper-triangle hoppings (i<j)
        data=tt*np.exp(1j*(r2-r1)@k)
        off_diag=sparse.coo_matrix((data, (i, j)),shape=(4*N,4*N))
        off_diag.eliminate_zeros()
        # identical on-site Bloch sum on every diagonal entry
        onsite=I4N * np.sum(t_onsite * np.exp(1j*hops_onsite@k))
        # Hermitian conjugate restores the lower triangle
        return onsite + off_diag + off_diag.getH() + Vmat/2
    return H
def eigenenergies(Hk):
    """
    Only eigenenergies, for small (dense Hermitian) matrices,
    in ascending order.
    """
    energies = linalg.eigh(Hk, eigvals_only=True)
    return energies
def eigenenergies_sparse(Hk, nbands, Ef):
    """
    Only eigenenergies, for big sparse matrices (scipy.sparse).

    Returns the nbands eigenvalues closest to Ef (shift-invert mode),
    sorted in ascending order.
    """
    energies = sparse_linalg.eigsh(
        Hk, k=nbands, sigma=Ef, return_eigenvectors=False
    )
    energies.sort()
    return energies
def eigenstates(Hk):
    """
    Calculates eigenstates, for small matrices.

    Returns (energies, eigenvectors) with eigenvectors in the columns.
    """
    result = linalg.eigh(Hk, eigvals_only=False)
    return result
def eigenstates_sparse(Hk, nbands, Ef):
    """
    Calculates eigenstates, for big sparse matrices (scipy.sparse).

    Returns (energies, eigenvectors): the nbands solutions closest to Ef,
    sorted by ascending energy, with eigenvectors in the matching columns.
    """
    # eigsh with return_eigenvectors=True returns a (values, vectors) tuple;
    # the previous code called .sort() on that tuple, which raised
    # AttributeError. Sort the energies and reorder the columns together.
    energies, states = sparse_linalg.eigsh(
        Hk, k=nbands, sigma=Ef, return_eigenvectors=True
    )
    order = np.argsort(energies)
    return energies[order], states[:, order]
def kticks(pts_per_line_segment):
    """
    K-point path index ticks for plots (might be better understood by seeing the code for band structure plots in other scripts).
    """
    total = sum(pts_per_line_segment)
    cumulative = []
    for i in range(len(pts_per_line_segment) + 1):
        cumulative.append(sum(pts_per_line_segment[0:i]) % total)
    ticks = np.array(cumulative)
    # the modulo wraps the last tick back to 0; shift it to the final index
    ticks[-1] += total - 1
    return ticks
def kpath(kpts,pts_per_line_segment,endpoint=False):
    """
    Parameterized path (MAT-36, remember?) of k-points in reciprocal space.
    gamma,ell,ticks=kpath(kpts,pts_per_line_segment)
    plt.xticks(ell[ticks],['G','K','M' ...])

    kpts: list of high-symmetry points; pts_per_line_segment: samples per
    straight segment; endpoint decides whether the last segment includes
    its final point.
    """
    gamma=[] #path of kpoints in R² space (remember MAT-36)
    ell=[] #lengths of the length parameterized path (again, remember MAT-36)
    for n in range(len(kpts)-1):
        # only the final segment may include its endpoint
        if n==len(kpts)-2:
            t=np.linspace(0,1,pts_per_line_segment[n],endpoint=endpoint).reshape(-1,1)
        else:
            t=np.linspace(0,1,pts_per_line_segment[n],endpoint=False).reshape(-1,1)
        # straight line from kpts[n] to kpts[n+1]
        gamma_n=kpts[n]+t*(kpts[n+1]-kpts[n])
        ell_n=np.linalg.norm(gamma_n-kpts[n],axis=-1)
        if n>=1:
            # offset by the accumulated length so ell is monotonic across segments
            ell_n+=ell[-1][-1]+(ell[-1][-1]-ell[-1][-2])
        gamma.append(gamma_n)
        ell.append(ell_n)
    gamma=np.concatenate(gamma)
    ell=np.concatenate(ell)
    return gamma,ell,kticks(pts_per_line_segment) #gamma,ell,index ticks
def bands(H, gamma):
    """
    Electronic bands, for small matrices.
    Also works for big matrices, but may be too slow.
    """
    spectra = []
    for k in gamma:
        spectra.append(eigenenergies(H(k).toarray()))
    return np.array(spectra)
def bands_sparse(H, gamma, nbands, Ef):
    """
    Electronic bands, for big matrices (sparse solver around Ef).
    """
    spectra = []
    for k in gamma:
        spectra.append(eigenenergies_sparse(H(k), nbands, Ef))
    return np.array(spectra)
def bands_with_layer_character(H,gamma):
    """
    Electronic bands, for small matrices. Calculates the layer character for colormap plots.
    Also works for big matrices, but may be too slow.

    Returns (e, c1, c2): eigenenergies plus, for each band, the total
    eigenvector weight on layer 1 and on layer 2.
    """
    e=[]
    c1=[]
    c2=[]
    # Basis layout assumption: with 4N orbitals in total, rows [0, 2N) are
    # layer-1 orbitals and rows [2N, 4N) layer-2 -- TODO confirm against
    # the Hamiltonian builder.
    N=H(np.zeros(3)).shape[0]//4
    for k in gamma:
        ek,psik=eigenstates(H(k).toarray())
        psik1=psik[0:2*N,:]
        psik2=psik[2*N::,:]
        c1k=(np.abs(psik1)**2).sum(axis=0)  # sum over components: one weight per band
        c2k=(np.abs(psik2)**2).sum(axis=0)
        e.append(ek)
        c1.append(c1k)
        c2.append(c2k)
    e=np.array(e)
    c1=np.array(c1)
    c2=np.array(c2)
    return e,c1,c2
def bands_with_layer_character_sparse(H,gamma,nbands,Ef):
    """
    Electronic bands, for big matrices. Calculates the layer character for colormap plots.

    Fix: the layer weights must be summed over the orbital components of
    each eigenvector (axis 0), not over the bands (axis 1); the dense
    counterpart ``bands_with_layer_character`` already does this.  With
    ``axis=1`` each per-k array had length 2*N instead of ``nbands`` and
    did not represent a per-band layer character.
    """
    e=[]
    c1=[]
    c2=[]
    # Same basis layout assumption as the dense version: rows [0, 2N) are
    # layer-1 orbitals, rows [2N, 4N) layer-2 -- TODO confirm.
    N=H(np.zeros(3)).shape[0]//4
    for k in gamma:
        ek,psik=eigenstates_sparse(H(k),nbands,Ef)
        psik1=psik[0:2*N,:]
        psik2=psik[2*N::,:]
        c1k=(np.abs(psik1)**2).sum(axis=0)  # weight of each band on layer 1
        c2k=(np.abs(psik2)**2).sum(axis=0)  # weight of each band on layer 2
        e.append(ek)
        c1.append(c1k)
        c2.append(c2k)
    e=np.array(e)
    c1=np.array(c1)
    c2=np.array(c2)
    return e,c1,c2
class TwistedBilayerGraphene:
    """Tight-binding model of twisted bilayer graphene on a commensurate
    (p, q) superlattice.

    Typical workflow: construct, then ``calc_hops`` -> ``set_hamiltonian``
    -> ``set_kpath`` -> ``calc_bands`` (or
    ``calc_bands_and_layer_characters``).
    """
    def __init__(self,p,q,sparse=None):
        """
        Initializes all relevant constant parameters.

        p, q: integers selecting the commensurate twist angle;
        sparse: truthy forces the sparse band solvers even for small cells.
        """
        theta=twist_angle(p,q)
        N=number_of_sublattice_positions(p,q)
        r,L1,L2=superlattice(p,q)
        # Reciprocal vectors satisfying G_i . L_j = 2*pi*delta_ij (L3 = z-hat).
        L3=np.array([0,0,1])
        G1=2*np.pi*np.cross(L2,L3)/np.dot(L1,np.cross(L2,L3))
        G2=2*np.pi*np.cross(L3,L1)/np.dot(L1,np.cross(L2,L3))
        #K[0],M[0],Kp[0],Mp[0],K[1],...
        kpt=lambda m,n: m*G1+n*G2
        Gamma=kpt(0,0)
        # Three equivalent Brillouin-zone copies of each high-symmetry point.
        K=kpt(1/3,-1/3),kpt(1/3,2/3),kpt(-2/3,-1/3)
        Kp=kpt(2/3,1/3),kpt(-1/3,1/3),kpt(-1/3,-2/3)
        M=kpt(1/2,0),kpt(0,1/2),kpt(-1/2,-1/2)
        Mp=kpt(1/2,1/2),kpt(-1/2,0),kpt(0,-1/2)
        self.path_GMKG=[Gamma,M[0],K[0],Gamma]
        self.path_KGMKp=[K[0],Gamma,M[1],Kp[1]]
        # Small cells can afford full dense diagonalization; the lambdas
        # discard nbands/Ef so both branches share one call signature.
        if N<=100 and not sparse: #rough estimate
            self._bands=lambda H,gamma,nbands,Ef: bands(H,gamma)
            self._bands_with_layer_character=lambda H,gamma,nbands,Ef: bands_with_layer_character(H,gamma)
        else:
            self._bands=bands_sparse
            self._bands_with_layer_character=bands_with_layer_character_sparse
        self.theta=theta
        self.N=N
        self.L1=L1
        self.L2=L2
        self.r=r
        self.p=p
        self.q=q
        self.G1=G1
        self.G2=G2
        self.kpt=kpt
        self.Gamma=Gamma
        self.K=K
        self.Kp=Kp
        self.M=M
        self.Mp=Mp
    def generate_magnetic_unit_cell(self,q): #this is "irreversible"
        """
        (Experimental) Extended unit cell for inclusion of magnetic field. This is "irreversible".

        Replicates the basis q times along L1 and rotates the lattice so L2
        points along y, then rebuilds all reciprocal-space data for the
        enlarged cell.  Mutates the instance in place; there is no way back
        short of re-constructing the object.
        """
        self.qmag=q
        x,y,_=self.L2
        theta=np.arctan2(y,x)
        # Rotation that brings L2 onto the y axis; applied to basis and cell.
        self.r=np.concatenate([self.r+n*self.L1 for n in range(q)])@rot(np.pi/2-theta).T
        self.L2=rot(np.pi/2-theta)@self.L2
        self.L1=rot(np.pi/2-theta)@(q*self.L1)
        self.N=q*self.N
        #K[0],M[0],Kp[0],Mp[0],K[1],...
        L1,L2=self.L1,self.L2
        L3=np.array([0,0,1])
        G1=2*np.pi*np.cross(L2,L3)/np.dot(L1,np.cross(L2,L3))
        G2=2*np.pi*np.cross(L3,L1)/np.dot(L1,np.cross(L2,L3))
        kpt=lambda m,n: m*G1+n*G2
        Gamma=kpt(0,0)
        K=kpt(1/3,-1/3),kpt(1/3,2/3),kpt(-2/3,-1/3)
        Kp=kpt(2/3,1/3),kpt(-1/3,1/3),kpt(-1/3,-2/3)
        M=kpt(1/2,0),kpt(0,1/2),kpt(-1/2,-1/2)
        Mp=kpt(1/2,1/2),kpt(-1/2,0),kpt(0,-1/2)
        self.path_GMKG=[Gamma,M[0],K[0],Gamma]
        self.path_KGMKp=[K[0],Gamma,M[1],Kp[1]]
        self.G1=G1
        self.G2=G2
        self.kpt=kpt
        self.Gamma=Gamma
        self.K=K
        self.Kp=Kp
        self.M=M
        self.Mp=Mp
        #bands calc
        # NOTE(review): __init__ switches on N<=100, but here only N==1
        # selects the dense solver -- confirm this asymmetry is intended.
        if self.N==1:
            self._bands=lambda H,gamma,nbands,Ef: bands(H,gamma)
        else:
            self._bands=bands_sparse
    def calc_hops(self,max_distance,t_intra=t_intra,t_inter=t_inter):
        """
        Calculate the hoppings.

        max_distance: hopping cutoff radius; t_intra/t_inter: intra- and
        interlayer hopping amplitudes (module-level defaults).
        Stores hops, hops_onsite, t and t_onsite on the instance.
        """
        self.hops=hoppings(self.r,self.L1,self.L2,400*self.N,max_distance)
        self.hops_onsite=hoppings_onsite(self.L1,self.L2,max_distance)
        self.t,self.t_onsite=hopping_parameters(self.hops,self.hops_onsite,self.N,self.theta,t_intra,t_inter)
    def include_peierls_substitution(self): #this is "irreversible"
        """
        (Experimental) Change hopping parameters for inclusion of magnetic field. This is irreversible.

        Sets B so that one flux quantum phi0 threads the unit cell and
        multiplies each hopping amplitude by the corresponding Peierls phase.
        """
        S=np.linalg.norm(np.cross(self.L1,self.L2))
        B=phi0/S
        # NOTE(review): `peierls` is defined but never used -- the same
        # expression is inlined on the next line.
        peierls=lambda hops: (2*np.pi/phi0)*(B/2)*(hops[:,5]+hops[:,2])*(hops[:,6]-hops[:,3])
        self.t=self.t*np.exp(1j*(2*np.pi/phi0)*(B/2)*(self.hops[:,5]+self.hops[:,2])*(self.hops[:,6]-self.hops[:,3]))
        #for now, we ignore changes at periodic repetitions ("on-site")
        self.B=B
    def set_hamiltonian(self,interlayer=0.0,V=0.0):
        """
        Define the hamiltonian matrix H(k) as a function of a point k=(kx,ky,kz) in reciprocal space.

        interlayer: interlayer coupling parameter; V: bias parameter
        (both forwarded verbatim to `hamiltonian`).
        """
        self.H=hamiltonian(self.hops,self.hops_onsite,self.t,self.t_onsite,self.N,interlayer,V)
    def set_kpath(self,kpts,pts_per_line_segment,endpoint=False):
        """
        Define the parametrized path (MAT-36, remember?) of k-points in reciprocal space.
        Some convenient constants can be used (kpts=self.path_GMKG, self.path_KGMKp).
        """
        self.gamma,self.ell,self.kticks=kpath(kpts,pts_per_line_segment,endpoint)
    def calc_bands(self,nbands,Ef):
        """
        Calcule the electronic bands (eigenenergies). Must set hamiltonian (self.set_hamiltonian) and kpath (self.set_kpath) before.
        """
        self.bands=self._bands(self.H,self.gamma,nbands,Ef)
    def calc_bands_and_layer_characters(self,nbands,Ef):
        """
        Calcule the electronic bands (eigenenergies) *and* eigenfunctions. Must set hamiltonian (self.set_hamiltonian) and kpath (self.set_kpath) before.
        """
        self.bands,self.layer1_character,self.layer2_character=self._bands_with_layer_character(self.H,self.gamma,nbands,Ef)
| vgmduarte/twisted_graphene_TG | twisted_graphene.py | twisted_graphene.py | py | 17,517 | python | en | code | 1 | github-code | 13 |
4022717687 | import cv2
import numpy as np
# Load the input image as grayscale
image = cv2.imread('images/5.jpg', cv2.IMREAD_GRAYSCALE)
# Apply a Gaussian blur to the image to suppress noise
# before edge detection
blurred = cv2.GaussianBlur(image, (5, 5), 0)
# Canny edge detection: pixels whose gradient lies between the low and
# high thresholds (50, 150) are treated as edges
edges = cv2.Canny(blurred, 50, 150)
# Display the detected edges
cv2.imshow('Edges', edges)
# Line detection (probabilistic Hough transform on the edge map)
lines = cv2.HoughLinesP(edges, 1, np.pi/180, threshold=10, minLineLength=10, maxLineGap=5)
# Draw the detected segments
# and collect a point per segment
vertices = []
if lines is not None:
    for line in lines:
        x1, y1, x2, y2 = line[0]
        vertices.append((x1, y1)) # keep the segment's start point
        # vertices.append((x2, y2)) # end point intentionally skipped
        cv2.line(image, (x1, y1), (x2, y2), (0, 255, 0), 2)
cv2.imshow('circle', image)
# Visualize the collected coordinates
# NOTE(review): IndexError here if no lines were detected (vertices empty)
print(type(vertices[0])) # check
# Font and text attributes for the coordinate labels
font = cv2.FONT_HERSHEY_SIMPLEX
font_scale = 0.3
color = (255, 0, 0) # blue (OpenCV uses BGR channel order)
thickness = 1
for vertex in vertices:
    position = vertex
    text = str(vertex) # the coordinate value rendered on screen
    cv2.putText(image, text, position, font, font_scale, color, thickness)
    cv2.circle(image, vertex, 2, (0, 255, 0), 0)
# (earlier experiment kept for reference: collect both endpoints)
# vertices = []
# for line in lines:
# x1, y1, x2, y2 = line[0]
# vertices.append((x1, y1))
# vertices.append((x2, y2))
# (earlier experiment: mark each collected point)
# draw the vertices
# for vertex in vertices:
# cv2.circle(image, vertex, 5, (0, 255, 0), -1)
# Show the final annotated image
cv2.imshow('Final', image)
cv2.waitKey(0)
cv2.destroyAllWindows()
| DeveloperSeJin/StudyImageProcessing | ProjectYYJ.py | ProjectYYJ.py | py | 1,763 | python | ko | code | 1 | github-code | 13 |
1797366577 | from pydantic import BaseModel, validator
from pathfinder_network.datamodel.company_id import CompanyId
class CompanyIdSet(BaseModel):
    """
    A set of CompanyIds of size 1 or larger.

    Attributes:
        company_ids (list[CompanyId | str]): CompanyId objects (plain
            strings are coerced during validation); at least one entry is
            required.
    """

    company_ids: list[CompanyId | str]

    @validator("company_ids")
    def check_company_ids(cls, v: list[CompanyId | str]) -> list[CompanyId]:
        if len(v) < 1:
            raise ValueError("CompanyIdSet must contain at least one CompanyId")
        coerced: list[CompanyId] = []
        for entry in v:
            if isinstance(entry, str):
                coerced.append(CompanyId(value=entry))
            elif isinstance(entry, CompanyId):
                coerced.append(entry)
            else:
                raise ValueError(f"{entry} is not a valid CompanyId")
        return coerced

    def __str__(self) -> str:
        inner = ", ".join(str(cid) for cid in self.company_ids)
        return f"[{inner}]"

    def __repr__(self) -> str:
        return f"CompanyIdSet(company_ids={self.company_ids})"

    def __eq__(self, other: object) -> bool:
        # Equal to another CompanyIdSet with the same ids, or to a plain
        # list of their string forms; anything else defers to the runtime.
        if isinstance(other, CompanyIdSet):
            return self.company_ids == other.company_ids
        if isinstance(other, list):
            return other == [str(cid) for cid in self.company_ids]
        return NotImplemented
| JohnVonNeumann/pathfinder_network | pathfinder_network/datamodel/company_id_set.py | company_id_set.py | py | 1,564 | python | en | code | 2 | github-code | 13 |
13140009811 | import sys
import time
import multiprocessing
from tradingkit.data.feed.feeder import Feeder
from tradingkit.pubsub.core.event import Event
from tradingkit.pubsub.core.publisher import Publisher
from tradingkit.pubsub.core.subscriber import Subscriber
from tradingkit.pubsub.event.book import Book
from tradingkit.pubsub.event.candle import Candle
from tradingkit.pubsub.event.order import Order
from tradingkit.pubsub.event.position import Position
from tradingkit.pubsub.event.trade import Trade
class AggregatorFeeder(Feeder, Subscriber, Publisher):
    """Fans in events from several feeders, each run in its own process,
    and re-publishes them from the parent process.

    Children forward events through a multiprocessing pipe; the parent
    drains the pipe and dispatches to its own subscribers.
    NOTE(review): relies on the pipe connections being shared with the
    children (fork start method) -- confirm behaviour under 'spawn'.
    """
    def __init__(self, feeders):
        super().__init__()
        self.feeders = feeders
        # pr: read end, used by the parent; pw: write end, used by children.
        self.pr, self.pw = multiprocessing.Pipe()
    def subscribed_events(self) -> list:
        # Event types this aggregator relays.
        return [Order, Book, Trade, Candle, Position]
    def on_event(self, event: Event):
        """Dispatch directly in the parent process; in a child, forward the
        event to the parent over the pipe (or die if the parent is gone)."""
        parent = multiprocessing.parent_process()
        if parent is None: # main process, dispatch event
            self.dispatch(event)
        elif parent.is_alive(): # child process and parent alive, send by pipe to parent
            self.pw.send(event)
        else: # parent is dead, error
            raise ValueError("Parent process ended, ending child too.")
    def feed(self):
        """Start one child process per feeder, then pump events from the
        pipe until a falsy event arrives or a child dies (always raises)."""
        children = []
        for feeder in self.feeders:
            feeder.register(self)
            child = multiprocessing.Process(target=feeder.feed)
            child.start()
            children.append(child)
        event = self.pr.recv()
        while event:
            self.dispatch(event)
            event = self.pr.recv()
            if len(multiprocessing.active_children()) < len(self.feeders):
                raise ValueError("Feeder process ended, ending main process too")
        raise ValueError("All feeders ended")
| logictraders/tradingkit | src/tradingkit/data/feed/aggregator_feeder.py | aggregator_feeder.py | py | 1,771 | python | en | code | 3 | github-code | 13 |
3129863640 | from django.shortcuts import redirect, render, get_object_or_404
from . import models
from .forms import UploadImageForm
# Create your views here.
def SubirImage(request):
    """Image-upload view: on a valid POST, saves the image and redirects to
    its detail page; otherwise renders the upload form plus every stored
    image.  An invalid POST falls through with the bound form so field
    errors are shown."""
    if request.method == 'POST':
        form = UploadImageForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            id = form.instance.pk
            return redirect("imagenlinks", id)
    else:
        form = UploadImageForm()
    imagenes = models.Imagenes.objects.all()
    return render (request, "imagenes/imagenes.html", {'form':form, 'imagenes':imagenes})
def imagelinks(request, id):
    """Detail view for one uploaded image: 404s when the id is unknown and
    renders its edit form plus a one-element queryset for the template."""
    instancia = get_object_or_404(models.Imagenes, id=id)
    descargas = models.Imagenes.objects.filter(id=id)
    contexto = {
        "formu": UploadImageForm(instance=instancia),
        'descargas': descargas,
    }
    return render(request, 'imagenes/linkimagenes.html', contexto)
| JsonAndrx/archinonproyecto | imagenes/views.py | views.py | py | 858 | python | en | code | 0 | github-code | 13 |
26377015608 | #!python
from utils import *
def _f(x, y, z):
    """MD4 round-1 'choice': bits of y where x is set, else bits of z."""
    return (x & y) | (~x & z)


def _g(x, y, z):
    """MD4 round-2 'majority': each bit is the majority vote of x, y, z."""
    return (x & y) | (x & z) | (y & z)


def _h(x, y, z):
    """MD4 round-3 parity: XOR of the three words."""
    return x ^ y ^ z
def _f1(a, b, c, d, k, s, X):
    """One round-1 step: rotate (a + F(b,c,d) + X[k]) left by s (mod 2**32)."""
    mixed = a + _f(b, c, d) + X[k]
    return rol(mixed, s, 32)


def _f2(a, b, c, d, k, s, X):
    """One round-2 step, including the round-2 additive constant."""
    mixed = a + _g(b, c, d) + X[k] + 0x5a827999
    return rol(mixed, s, 32)


def _f3(a, b, c, d, k, s, X):
    """One round-3 step, including the round-3 additive constant."""
    mixed = a + _h(b, c, d) + X[k] + 0x6ed9eba1
    return rol(mixed, s, 32)
def padding(msg, forged_len=None):
    """Return the MD4 padding for *msg*: a 0x80 byte, zero fill up to 56
    bytes mod 64, then the bit length as a 64-bit little-endian integer.

    forged_len lets the caller pretend the message had a different byte
    length (used for length-extension).
    """
    byte_len = len(msg) if forged_len is None else forged_len
    bit_len = byte_len * 8
    index = byte_len & 0x3f  # position inside the current 64-byte block
    if index < 56:
        pad_len = 56 - index
    else:
        pad_len = 120 - index  # not enough room: pad into the next block
    filler = b'\x80' + b'\x00' * 63
    return filler[:pad_len] + int_bytes(bit_len, 'little', size=8)
def digest(message_string, A=0x67452301, B=0xefcdab89, C=0x98badcfe, D=0x10325476, forged_size=None):
    """Compute the 16-byte MD4 digest of *message_string*.

    A-D default to the standard MD4 initial state; supplying a previous
    digest's words together with ``forged_size`` makes the padding act as
    if the message were that many bytes long (length-extension).
    """
    msg = bytes(message_string)
    msg_bytes = msg + padding(msg, forged_size)
    # Process the padded message in 64-byte blocks.
    for i in range(0, len(msg_bytes), 64):
        block = msg_bytes[i:i+64]
        a, b, c, d = A, B, C, D
        # Split the block into sixteen little-endian 32-bit words.
        x = []
        for j in range(0, 64, 4):
            x.append(bytes_int(block[j:j+4],'little'))
        # Round 1: words 0..15 in order, shifts 3/7/11/19.
        a = _f1(a,b,c,d, 0, 3, x)
        d = _f1(d,a,b,c, 1, 7, x)
        c = _f1(c,d,a,b, 2,11, x)
        b = _f1(b,c,d,a, 3,19, x)
        a = _f1(a,b,c,d, 4, 3, x)
        d = _f1(d,a,b,c, 5, 7, x)
        c = _f1(c,d,a,b, 6,11, x)
        b = _f1(b,c,d,a, 7,19, x)
        a = _f1(a,b,c,d, 8, 3, x)
        d = _f1(d,a,b,c, 9, 7, x)
        c = _f1(c,d,a,b,10,11, x)
        b = _f1(b,c,d,a,11,19, x)
        a = _f1(a,b,c,d,12, 3, x)
        d = _f1(d,a,b,c,13, 7, x)
        c = _f1(c,d,a,b,14,11, x)
        b = _f1(b,c,d,a,15,19, x)
        # Round 2: column-major word order, shifts 3/5/9/13.
        a = _f2(a,b,c,d, 0, 3, x)
        d = _f2(d,a,b,c, 4, 5, x)
        c = _f2(c,d,a,b, 8, 9, x)
        b = _f2(b,c,d,a,12,13, x)
        a = _f2(a,b,c,d, 1, 3, x)
        d = _f2(d,a,b,c, 5, 5, x)
        c = _f2(c,d,a,b, 9, 9, x)
        b = _f2(b,c,d,a,13,13, x)
        a = _f2(a,b,c,d, 2, 3, x)
        d = _f2(d,a,b,c, 6, 5, x)
        c = _f2(c,d,a,b,10, 9, x)
        b = _f2(b,c,d,a,14,13, x)
        a = _f2(a,b,c,d, 3, 3, x)
        d = _f2(d,a,b,c, 7, 5, x)
        c = _f2(c,d,a,b,11, 9, x)
        b = _f2(b,c,d,a,15,13, x)
        # Round 3: bit-reversed word order, shifts 3/9/11/15.
        a = _f3(a,b,c,d, 0, 3, x)
        d = _f3(d,a,b,c, 8, 9, x)
        c = _f3(c,d,a,b, 4,11, x)
        b = _f3(b,c,d,a,12,15, x)
        a = _f3(a,b,c,d, 2, 3, x)
        d = _f3(d,a,b,c,10, 9, x)
        c = _f3(c,d,a,b, 6,11, x)
        b = _f3(b,c,d,a,14,15, x)
        a = _f3(a,b,c,d, 1, 3, x)
        d = _f3(d,a,b,c, 9, 9, x)
        c = _f3(c,d,a,b, 5,11, x)
        b = _f3(b,c,d,a,13,15, x)
        a = _f3(a,b,c,d, 3, 3, x)
        d = _f3(d,a,b,c,11, 9, x)
        c = _f3(c,d,a,b, 7,11, x)
        b = _f3(b,c,d,a,15,15, x)
        # update state
        A = (A + a) & 0xffffffff
        B = (B + b) & 0xffffffff
        C = (C + c) & 0xffffffff
        D = (D + d) & 0xffffffff
    # Digest is the state words serialized little-endian.
    return int_bytes(A, endianness='little',size=4) + \
        int_bytes(B, endianness='little',size=4) + \
        int_bytes(C, endianness='little',size=4) + \
        int_bytes(D, endianness='little',size=4)
def keyed_hash(key, msg):
    """Naive secret-prefix MAC: MD4(key || msg).

    NOTE(review): as a Merkle-Damgard construction this is vulnerable to
    length-extension -- presumably the point of the exercise; never use it
    as a real MAC.
    """
    return digest(key + msg)
if __name__ == '__main__':
    def Check(msg, sig):
        """Print whether digest(msg) matches the expected RFC 1320 hex digest."""
        #print (msg, digest(msg),sig)
        print (bytes_hexstr(digest(msg))==sig)
    Check(b'', '31d6cfe0d16ae931b73c59d7e0c089c0')
    Check(b'a', 'bde52cb31de33e46245e05fbdbd6fb24')
    Check(b'abc', 'a448017aaf21d8525fc10ae87aa6729d')
    Check(b'message digest',
          'd9130a8164549fe818874806e1c7014b')
    Check(b'abcdefghijklmnopqrstuvwxyz',
          'd79e1c308aa5bbcdeea8ed63df412da9')
    # Fixed: the RFC 1320 vector below was truncated to 31 hex chars
    # (its leading '0' was missing), so this check always printed False.
    Check(b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
          '043f8582f241db351ce627e153e7f0e4')
    Check(b'12345678901234567890123456789012345678901234567890123456789012345678901234567890',
          'e33b4ddc9c38f2199c3e7b164fcc0536')
| MarkLuk/cryptopals | MD4.py | MD4.py | py | 3,961 | python | en | code | 0 | github-code | 13 |
43084011122 | #
# @lc app=leetcode.cn id=2124 lang=python3
#
# [2124] 检查是否所有 A 都在 B 之前
#
# @lc code=start
class Solution:
    def checkString(self, s: str) -> bool:
        """Return True iff every 'a' in s appears before every non-'a' char.

        Simplification: the original also tracked an ``a_status`` flag that
        was written but never affected the result; only "have we passed a
        non-'a' character yet" matters.
        """
        seen_other = False
        for ch in s:
            if ch == "a":
                if seen_other:
                    # an 'a' after some 'b' -> order violated
                    return False
            else:
                seen_other = True
        return True
# @lc code=end
| Guo-xuejian/leetcode-practice | 2124.检查是否所有A都在B之前.py | 2124.检查是否所有A都在B之前.py | py | 536 | python | en | code | 1 | github-code | 13 |
13603790025 | from sprite_object import * #108
from npc import *
from player import * #remove
class ObjectHandler: #108
    """Owns and updates every static sprite, animated sprite and NPC.

    The hard-coded positions below are the level layout (the map grid is
    18 x 30, per the author's note).  The trailing #108/#109/#110 markers
    look like tutorial-step references -- left as-is.
    """
    def __init__(self, game):
        self.game = game
        # NOTE(review): Player receives the handler (self), not `game`, and
        # the "#remove" markers suggest this line is slated for deletion --
        # confirm before relying on it.
        self.player = Player(self) #remove
        self.sprite_list = []
        self.npc_list = []
        self.npc_sprite_path = 'resources/sprites/npc/'
        self.static_sprite_path = 'resources/sprites/static_sprites/'
        self.anim_sprite_path = 'resources/sprites/animated_sprites/' #108
        # Local aliases shorten the long placement list below.
        add_sprite = self.add_sprite #109
        add_npc = self.add_npc
        #sprite map ADD AROUND SPRITES AND WHAT NOT, grid is 18 by 30
        #add_sprite(SpriteObject(game, path=self.static_sprite_path + 'candlebra.png', pos=(1,1))) #109
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '1.png', pos=(18.5,2))) #109 GOOD DONT LOOK OR TAKES EYES
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '2.png', pos=(19.5,8.5))) #109 good always watches no eyes
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '3.png', pos=(18.5,14))) #109 good leave me alone
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '4.png', pos=(2.75,22.5))) #109 good trees picture
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '5.png', pos=(14,14))) #109 good help me
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '6.png', pos=(5,30))) #109 cant run
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '7.png', pos=(7,1.5))) #109 GOOD NO NO NO
        add_sprite(SpriteObject(game, path=self.static_sprite_path + '8.png', pos=(18,30))) #109 GOOD IT FOLLOWS
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(1.25,1.25), scale=2, shift = -0.15)) # SMALL TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(3,3.25), scale=4, shift = -0.30)) # BIG TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(10.5,10), scale=2, shift = -0.15)) # SMALL TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(18.5 ,10), scale=2, shift = -0.15)) # SMALL TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(2.75,18), scale=2, shift = -0.15)) # SMALL TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(3,20.25), scale=4, shift = -0.30)) #109 BIG TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(18.5,28.25), scale=4, shift = -0.30)) #109 BIG TREE GOOD
        add_sprite(SpriteObject(game, path=self.static_sprite_path + 'tree.png', pos=(8,13), scale=4, shift = -0.30)) #109 BIG TREE
        # Animated lights (default green, then red variants).
        add_sprite(AnimatedSprite(game)) #109
        add_sprite(AnimatedSprite(game, pos=(1.5, 1.5)))
        add_sprite(AnimatedSprite(game, pos=(1.5, 7.5)))
        add_sprite(AnimatedSprite(game, pos=(5.5, 3.25)))
        add_sprite(AnimatedSprite(game, pos=(5.5, 4.75)))
        add_sprite(AnimatedSprite(game, pos=(7.5, 2.5)))
        add_sprite(AnimatedSprite(game, pos=(7.5, 5.5)))
        add_sprite(AnimatedSprite(game, pos=(14.5, 1.5)))
        add_sprite(AnimatedSprite(game, pos=(14.5, 4.5)))
        add_sprite(AnimatedSprite(game, path=self.anim_sprite_path + 'red_light/0.png', pos=(18.5, 5.5))) #WAS 14.5
        add_sprite(AnimatedSprite(game, path=self.anim_sprite_path + 'red_light/0.png', pos=(18.5, 7.5))) #14.5
        add_sprite(AnimatedSprite(game, path=self.anim_sprite_path + 'red_light/0.png', pos=(12.5, 7.5)))
        add_sprite(AnimatedSprite(game, path=self.anim_sprite_path + 'red_light/0.png', pos=(9.5, 7.5)))
        #npc map
        add_npc(NPC(game))
    def update(self):
        """Advance every sprite and NPC one frame."""
        [sprite.update() for sprite in self.sprite_list] #110
        [npc.update() for npc in self.npc_list]
    def add_npc(self,npc):
        """Register an NPC with the handler."""
        self.npc_list.append(npc)
    def add_sprite(self, sprite): #109
        """Register a sprite with the handler."""
        self.sprite_list.append(sprite) #109
| mnothman/pygameHackathon | object_handler.py | object_handler.py | py | 4,139 | python | en | code | 0 | github-code | 13 |
43238688912 | import subprocess
from path import path
from datetime import datetime
import json
import bson
from pyquery import PyQuery
from urlparse import urlparse
import logging
from sensitive import grab, UPLOAD_COMMAND
from db import DB
FRONT_PAGE = 'http://news.ycombinator.com/'
SECOND_PAGE = 'http://news.ycombinator.com/news2'
PAGES = (FRONT_PAGE, SECOND_PAGE)
DUMP_PATH = path('rewindhn-dump.json')
class MongoEncoder(json.JSONEncoder):
    """JSON encoder that additionally serializes datetimes (as ISO 8601
    strings) and Mongo ObjectIds (as their string form)."""

    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, bson.objectid.ObjectId):
            return str(obj)
        return super().default(obj)
def parse(html):
    '''return a list of dictionaries describing the stories on the front page'''
    # NOTE(review): this function targets Python 2 -- `children[0]` below
    # indexes the result of map(), which only works on py2 where map
    # returns a list; `urlparse` is also imported from the py2 module.
    elements = []
    p = PyQuery(html)
    # 90s markup woohoo!
    anchors = p('.title:nth-child(3) a:nth-child(1)')
    for a in anchors:
        # have to re-wrap here, because PyQuery just exposes internal lxml objects upon getting iterated
        a = PyQuery(a)
        # The metadata row ("points by user | N comments") follows the
        # title row in HN's table layout.
        subtext = a.closest('tr').next().find('.subtext')
        if not subtext:
            # More link
            continue
        children = map(PyQuery, subtext.children())
        try:
            span, submitted, comments = children[0], children[1], children[-1]
        except IndexError:
            # filter out ads
            continue
        # "N comments" -> N; the "discuss" link yields '' -> 0 comments.
        comments = comments.text().rpartition(' ')[0]
        comments = int(comments) if comments else 0
        url = a.attr('href')
        elements.append({
            'pos': len(elements) + 1,
            'title': a.text(),
            'url': url,
            'domain': urlparse(url).netloc.rpartition('www.')[2],
            'comments': comments,
            'submitter': submitted.text(),
            'points': int(span.text().split()[0]),
            'id': int(span.attr('id').split('_', 1)[1]),
            'ago': submitted[0].tail.split('ago')[0].strip(),
        })
    logging.warning('parsed %s elements', len(elements))
    return elements
def do_parse():
    '''go through everything and see if it's been inserted into cleaned'''
    grabbed = list(DB.grabbed.find().sort('created_at', 1))
    already_done = {(doc['idx'], doc['page']) for doc in DB.cleaned.find()}
    for page_no in range(len(PAGES)):
        snapshots = (doc for doc in grabbed if doc['page'] == page_no)
        for idx, snapshot in enumerate(snapshots):
            if (idx, page_no) in already_done:
                continue
            parsed = clean(snapshot)
            parsed['idx'] = idx
            DB.cleaned.insert(parsed)
def clean(page):
    """Strip Mongo internals from a grabbed snapshot and attach the parsed
    story list.  Mutates *page* in place and returns it."""
    page.pop('_id', None)                  # drop Mongo's internal id, if any
    page['posts'] = parse(page['html'])    # extract the story dictionaries
    page['created_at'] = page['created_at'].isoformat()  # JSON-friendly timestamp
    return page
def upload():
    '''upload an entire dump to S3'''
    all_ = DB.cleaned.find()
    j = json.dumps(list(all_), cls=MongoEncoder)
    DUMP_PATH.write_text(j)
    # SECURITY NOTE(review): shell=True with a string built from module
    # constants (DUMP_PATH, UPLOAD_COMMAND).  Acceptable only because no
    # user input reaches these strings -- never interpolate untrusted data
    # here.
    subprocess.call('gzip -9 -c %s > rewindhn.gz' % DUMP_PATH, shell=True)
    subprocess.call(UPLOAD_COMMAND, shell=True)
def grab_pages():
    """Fetch every configured HN page and store the raw HTML snapshots,
    timestamped, in the `grabbed` collection."""
    DB.grabbed.ensure_index('created_at')
    grabbed = [{'html': grab(page), 'created_at': datetime.utcnow(), 'page':i} for i, page in enumerate(PAGES)]
    logging.warning('grabbed %s pages', len(grabbed))
    # Logs the insert result (the new document ids) at warning level.
    logging.warning(DB.grabbed.insert(grabbed))
def main():
    """Run the full pipeline: snapshot the pages, parse any new snapshots,
    then upload the cleaned dump."""
    grab_pages()
    do_parse()
    upload()
if __name__ == '__main__':
    main()
| doda-zz/rewindhn | scrape.py | scrape.py | py | 3,480 | python | en | code | 61 | github-code | 13 |
22386997979 | import pickle
from os import listdir
import os
from os.path import isfile, join
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
import re
def smooth(x, N):
    """Moving average with window N that keeps the input's length by
    padding the tail with N-1 slightly jittered copies of the last sample."""
    jitter = np.ones(N - 1) + 0.05 * (np.random.rand(N - 1) - 0.5)
    padded = np.insert(x, len(x), x[len(x) - 1] * jitter)
    sums = np.cumsum(np.insert(padded, 0, 0))
    return (sums[N:] - sums[:-N]) / float(N)
# Build one 2x2 comparison figure per environment, averaging the pickled
# runs in each CP*/MC* results directory and shading +/- one std-dev band.
sns.set()
envs = ['CartPole.png', 'MountainCar.png']
dirs_CP = [d[0] for d in os.walk('./') if d[0].startswith('./CP')]
dirs_MC = [d[0] for d in os.walk('./') if d[0].startswith('./MC')]
axis_ranges = [[-5, 205],[-0.55,0.75]]  # y-limits per environment
smooth_values= [10,100]  # smoothing window per environment
for env, directories in enumerate([dirs_CP, dirs_MC]):
    f, axes = plt.subplots(2, 2, figsize=(10, 10))
    # f.set_figheight(5)
    # f.set_figwidth(5)
    # Map each experience-replay variant to its subplot.
    exp_replay = {'off': axes[0, 0], 'prioritized': axes[0,1], 'uniform': axes[1,env]}
    plot_titles = {'off': 'no experience replay', 'prioritized': 'prioritized experience replay', 'uniform': 'uniform experience replay'}
    coloring = {'off': 'r', 'soft': 'b', 'hard': 'g'}
    lines = {}
    errors_upper = {}
    errors_lower = {}
    for directory in directories:
        # NOTE(review): `f` here shadows the figure bound above.
        files = [f for f in listdir(directory) if isfile(join(directory, f)) and f.startswith('steps_run')]
        if directory.startswith('./MC'):
            files = [f for f in listdir(directory) if isfile(join(directory, f)) and f.startswith('maxdistance')]
        runs = []
        for file in files:
            runs.append(pickle.load(open(directory + '/' + file, "rb")))
        stderr = [np.std(x) for x in zip(*runs)]
        # NOTE(review): `a` is computed but never used.
        a = [x[0] for x in runs]
        runs = [sum(x) for x in zip(*runs)]
        # NOTE(review): assumes exactly 25 runs per directory -- dividing
        # by len(files) would be safer; confirm the experiment setup.
        runs = [x / 25 for x in runs]
        runs_lower = np.array(smooth([x - y for x, y in zip(runs, stderr)], smooth_values[env]))
        runs_upper = np.array(smooth([x + y for x, y in zip(runs, stderr)], smooth_values[env]))
        runs = np.array(smooth(runs, smooth_values[env]))
        lines[directory] = runs
        errors_upper[directory] = runs_upper
        errors_lower[directory] = runs_lower
    # Plot each directory's curve on the subplot whose variant name ends
    # the directory name; the '_<target-mode>_' infix picks the color.
    for x in lines.keys():
        for item in exp_replay.keys():
            if x.endswith(item):
                result = re.search('_(.*)_', x).group(1)
                exp_replay[item].plot(lines[x], label=result, color=coloring[result])
                exp_replay[item].set_title(plot_titles[item])
                # print(runs_lower)
                exp_replay[item].fill_between(range(len(lines[x])), errors_lower[x], errors_upper[x], alpha=0.15, color=coloring[result])
                exp_replay[item].legend(loc=2)
                exp_replay[item].set_ylim(axis_ranges[env][0], axis_ranges[env][1])
                exp_replay[item].legend()
                handles, labels = exp_replay[item].get_legend_handles_labels()
                # sort both labels and handles by labels
                labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: t[0]))
                labels = [label if label != 'off' else 'no target network' for label in labels]
                exp_replay[item].legend(handles, labels, loc=2)
    # Hide the unused fourth subplot, then save the figure.
    axes[1, 1 - env].set_visible(False)
    if not os.path.exists('./images'):
        os.makedirs('./images')
    plt.savefig('./images/' + envs[env], dpi=300)
| OscarLigthart/RL-Project | Project/plot.py | plot.py | py | 3,282 | python | en | code | 0 | github-code | 13 |
40397852445 | family = {"rick": 43, 'beth': 13, 'morty': 5, 'summer': 8}
def movie_tickets(family):
    """Total ticket price for a family given as {name: age}.

    Pricing: under 3 is free, ages 3-12 pay $10, everyone else pays $15.
    """
    def ticket_price(age):
        if age < 3:
            return 0
        if age <= 12:
            return 10
        return 15
    return sum(ticket_price(age) for age in family.values())
# Price the sample family defined above and report the total.
final_cost = movie_tickets(family)
print(f"Family's total cost: {final_cost}$")
# Bonus: build the family dict interactively, then price the tickets.
# Fixes over the original: the exit sentinel is checked before parsing
# (the old code indexed split_input[-1] first, so an empty line raised
# IndexError); the `remove`-if-'' line was dead code, since str.split()
# with no argument never yields empty fields; malformed lines now prompt
# again instead of raising ValueError.
family = {}
while True:
    value_input = input("Enter family member's name and age separated by space ('e' for exit): ")
    if value_input.strip() == 'e':
        break
    parts = value_input.split()
    if len(parts) != 2 or not parts[1].isdigit():
        print("Please enter a name and a non-negative age, e.g. 'rick 43'.")
        continue
    name, age = parts
    family[name] = int(age)
final_cost = movie_tickets(family)
print(f"Family's total cost: {final_cost}$")
17499090249 | n = int(input())
# Sum amounts per name from n "<name> <amount>" input lines (n was read
# above), then report who holds the maximum total ("lucky") and how much
# everyone else is short of it.
y = 0           # NOTE(review): y, list_keys, k and max_val are all
list_keys = []  # reassigned before first use -- dead initializations.
k = 0
max_val = 0
d1 = dict()  # name -> accumulated amount
for i in range (n):
    s = input().split()
    x = s[0]
    y = int(s[1])
    if x not in d1:
        d1[x]=y
    else:
        d1[x]+=y
# NOTE(review): max() raises ValueError when n == 0 -- confirm the
# exercise guarantees n >= 1.
max_val = max(d1.values())
list_keys = list(d1.keys())
list_keys.sort()  # report in alphabetical order
for i in list_keys:
    if max_val == d1[i]:
        print(i ,"is", "lucky!")
    else:
        k = max_val - d1[i]
        print(i, "has to receive" , k , "tenge")
| 21B030702/PP2 | lab 2/6.py | 6.py | py | 447 | python | ko | code | 0 | github-code | 13 |
277075411 | import catalog
import schedule
def update(update_courses=True, update_degrees=True, update_schedule=True):
    """Refresh the local database tables.

    Each flag lets callers skip the corresponding refresh: the course
    catalog, the degree catalog, and the class schedule (restricted to the
    two newest terms).
    """
    if update_courses:
        catalog.courses.update_db()
    if update_degrees:
        catalog.degrees.update_db()
    if update_schedule:
        schedule.update_db(newest_terms=2)
| CDavantzis/SSW690-Project | src/app/db/__init__.py | __init__.py | py | 296 | python | fa | code | 1 | github-code | 13 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.