seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
12117516603 | """
有道翻译
"""
from selenium.webdriver import Chrome, ChromeOptions
from selenium.webdriver.common.by import By
import time

option = ChromeOptions()
option.add_argument("--headless")  # run Chrome without a visible window

with Chrome(options=option) as driver:
    driver.get("https://fanyi.youdao.com/")
    # Input box of the Youdao web translator.
    input_txt = driver.find_element(By.XPATH,'//div[@id="js_fanyi_input"]')
    while True:
        # Prompt (Chinese): "Enter the text to translate:"
        input_txt.send_keys(input("请输入需要翻译的内容:"))
        # Forced wait so the translation has time to render;
        # increase on slow networks.
        time.sleep(1)
        result = driver.find_element(By.XPATH, '//div[@id="js_fanyi_output"]//span[@class="tgt color_text_1"]').text
        print(result)
        input_txt.clear()
| 15149295552/Code | Month07/day17_python/exercise04.py | exercise04.py | py | 676 | python | en | code | 1 | github-code | 13 |
30604937036 | import random
from pcom.commands.base_command import BaseCommand
from pcom.errors import PComError
from typing import List
class EthernetCommandWrapper(BaseCommand):
    """Wraps a PCOM command in the 6-byte Ethernet transport header.

    Header layout: [id0, id1, protocol, 0, size_lo, size_hi] where size is
    the wrapped command's byte length in little-endian order.
    """

    def __init__(self, base_command: BaseCommand):
        super().__init__(plc_id=base_command._plc_id, protocol=base_command.protocol)
        self.base_command = base_command
        # Two random header bytes; the PLC echoes them back, which is what
        # _validate_reply uses to match a reply to this request.
        self.command_id = self._create_command_id()

    def _create_command_id(self) -> List[int]:
        # Each id byte is restricted to 0-99 rather than 0-255 — presumably a
        # protocol convention; TODO confirm the allowed range.
        return [random.randint(0, 99), random.randint(0, 99)]

    def get_bytes(self) -> bytearray:
        """Return the full frame to transmit: header + wrapped command bytes."""
        frame = self.__get_header()
        base_command_bytes = self.base_command.get_bytes()
        frame.extend(base_command_bytes)
        return frame

    def __get_header(self) -> bytearray:
        # Note: serializes the wrapped command here only to measure its
        # length, so get_bytes() ends up serializing it twice.
        base_command_bytes = self.base_command.get_bytes()
        header = list(self.command_id)
        header.extend([self.base_command.protocol, 0])
        command_size = self._to_little_endian(len(base_command_bytes))
        header.extend(command_size)
        return bytearray(header)

    def _to_little_endian(self, word) -> List[int]:
        # 16-bit word -> [low byte, high byte].
        return [word & 255, word >> 8]

    def parse_reply(self, buffer: bytearray) -> bytearray:
        """Validate the transport header, then hand the payload (everything
        after the 6 header bytes) to the wrapped command's parser."""
        super().parse_reply(buffer)
        command_buffer = buffer[6:]
        return self.base_command.parse_reply(command_buffer)

    def _validate_reply(self, buffer: bytearray) -> None:
        # Only the first 4 header bytes (id pair, protocol, 0) must match;
        # the 2 size bytes may legitimately differ in the reply.
        header = buffer[:6]
        header_bytes = self.__get_header()
        expected = header_bytes[:4]
        actual = header[:4]
        if actual != expected:
            raise PComError("Ethernet header mismatch. Expected: '%s' found: '%s'" % (expected, actual), buffer)
| metalsartigan/pypcom | src/pcom/plc/ethernet_command_wrapper.py | ethernet_command_wrapper.py | py | 1,663 | python | en | code | 1 | github-code | 13 |
5871993071 | import numpy as np
def solve(grid : np.array) -> None:
    """Fill every '?' cell of ``grid`` in place.

    Pass 1 — per column: propagate the nearest known value downward, then
    upward (so cells above the first known value are filled too).
    Pass 2 — columns that are still entirely '?' copy their left neighbour;
    a final right-to-left sweep fills blank columns left of all data.
    """
    n_rows, n_cols = grid.shape

    for col in range(n_cols):
        # Downward sweep: carry the last non-'?' value seen so far.
        carry = '?'
        for row in range(n_rows):
            if grid[row, col] != '?':
                carry = grid[row, col]
            elif carry != '?':
                grid[row, col] = carry
        # Upward sweep with a fresh carry.
        carry = '?'
        for row in reversed(range(n_rows)):
            if grid[row, col] != '?':
                carry = grid[row, col]
            elif carry != '?':
                grid[row, col] = carry

    # A column is still blank iff its top cell is '?' after pass 1.
    # Copy data rightward from the left neighbour...
    for col in range(1, n_cols):
        if grid[0, col] == '?':
            grid[:, col] = grid[:, col - 1]
    # ...then leftward from the right neighbour.
    for col in range(n_cols - 2, -1, -1):
        if grid[0, col] == '?':
            grid[:, col] = grid[:, col + 1]
    return
def main():
    """Read Code-Jam-style cases from A-<size>-practice.in, fill each grid
    with solve(), and write the answers to the matching .out file."""
    file_base = "small"
    ext = ""
    # NOTE: this reassignment switches the run to the large dataset;
    # comment it out to process the small one.
    file_base = "large"
    input_file_name = f"A-{file_base}-practice{ext}.in"
    output_file_name = f"A-{file_base}-practice{ext}.out"
    with open(output_file_name, "w") as output_file,open(input_file_name) as input_file:
        n_cases = int(input_file.readline())
        for i in range( n_cases):
            # Case header: grid dimensions R C.
            R, C = map(int, input_file.readline().split(" "))
            # Cells hold single-character strings ('?' means unknown).
            grid = np.empty( shape=(R,C), dtype=np.chararray)
            for r in range(0,R):
                for col,ch in enumerate(input_file.readline().strip()):
                    grid[r][col] = ch
            solve(grid)
            output_file.write(f"Case #{i+1}:\n")
            for r in range(0, R):
                output_file.write("".join(grid[r]))
                output_file.write("\n")

if __name__ == "__main__":
    main()
from Engine import *

# Last known cursor position, updated by the MousePos event handler and read
# by the render loop below.
mouseX = 0
mouseY = 0

def CloseFunc():
    """Window-close event handler."""
    print("Closing...")

def MousePos(pos):
    """Mouse-move handler: record the cursor position.

    Bug fix: without the ``global`` declaration the assignments created
    function-local variables, so the module-level mouseX/mouseY read by the
    render loop never changed.
    """
    global mouseX, mouseY
    print(pos)
    mouseX = pos[0]
    mouseY = pos[1]

def KeyDown(key, mods):
    """Key-press handler (currently unused)."""
    pass

# --- window / renderer setup ---
mainWindow = Window()
mainWindow.OpenWindow(480, 480, "Hello PyGame!")
renderer.OpenSurface(480, 480, mainWindow)

splash = LoadImage("Splash_480.png")
assets.LoadImage("Splash_480.png", "Splash")

# Wire event handlers into the engine's event system.
GetEventSystem().SetCloseHandler(CloseFunc)
EventSys.SetMousePosHandler(MousePos)
EventSys.SetKeyDownHandler(KeyDown)

box = Sprite((255, 0, 255), Vector2(0, 0), (480, 480), "Splash")

# Main loop: clear the frame and draw a rectangle that tracks the cursor.
while mainWindow.UpdateWindow():
    renderer.Background((255, 255, 255))
    # renderer.DrawImage(assets.GetAsset("Splash"), (0,0))
    renderer.DrawFilledRect((255, 0, 255), 0, 0, mouseX, mouseY)
    print(mouseX, mouseY)
    # box.DrawSelf()

mainWindow.CloseWindow()
| NPEX42/GamePy | Game.py | Game.py | py | 833 | python | en | code | 0 | github-code | 13 |
11276636159 | import scrapy
import csv
class routeSpider(scrapy.Spider):
    """Spider that visits Mountain Project area pages listed in data2.csv and
    yields one item per climbing route found in the left-nav route table."""
    name = "areaScrape"
    allowed_domains = ['www.mountainproject.com']

    def start_requests(self):
        """Yield a request for each URL in data2.csv (one URL per row)."""
        with open('data2.csv', 'rt') as allLinks:
            allLinks = csv.reader(allLinks)
            for link in allLinks:
                # Each CSV row is a one-element list; *link unpacks it.
                url = str(*link)
                yield scrapy.Request(url, self.parse)

    def parse(self, response):
        """Yield {area_name, route_name, route_link} for every route row.

        Rows are walked from the last table row down to 1, so items are
        emitted in reverse page order (XPath indices are 1-based).
        """
        count_routes= len(response.xpath('//*[@id="left-nav-route-table"]/tr'))
        while count_routes > 0:
            yield {
                'area_name': response.xpath('//*[@id="single-area-picker-name"]/text()').extract_first(),
                'route_name': response.xpath('//*[@id="left-nav-route-table"]/tr[' + str(count_routes) + ']/td[2]/a/text()').extract(),
                'route_link': response.xpath('//*[@id="left-nav-route-table"]/tr[' + str(count_routes) + ']/td[2]/a/@href').extract_first()
            }
            count_routes = count_routes - 1
| swanjson/mountainSpider | area2routeNameLink.py | area2routeNameLink.py | py | 863 | python | en | code | 0 | github-code | 13 |
15639216283 | #! /usr/bin/env python
import os
# don't use gpu
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import numpy as np
import numpy as np
import matplotlib.pyplot as plt
from scaledgp import ScaledGP
from scipy import signal
from progress.bar import Bar
import random
from utils import *
BASE_PATH = os.path.expanduser('~/Documents')
class ModelService(object):
    """Base class for model services.

    Holds the state/observation dimensions plus runtime configuration and
    provides shared (de)normalisation helpers; predict/train/add_data are
    overridden by concrete subclasses.
    """

    def __init__(self, xdim, odim, use_obs, use_service=True):
        self.xdim = xdim
        self.odim = odim
        self.use_obs = use_obs
        self.verbose = True
        self.config = {}

    def reconfigure_cb(self, config):
        """Copy tunable parameters from a dynamic-reconfigure style dict."""
        self.N_data = config["N_data"]
        self.verbose = config["learning_verbose"]
        self.N_updates = config["N_updates"]
        for key in ("meta_batch_size", "data_horizon", "test_horizon",
                    "learning_rate", "min_datapoints", "save_data_interval"):
            self.config[key] = config[key]

    def predict(self, req):
        """Overridden by subclasses."""
        return None

    def train(self, goal):
        """Overridden by subclasses."""
        return None

    def add_data(self, req):
        """Overridden by subclasses."""
        return None

    def scale_data(self, x, xmean, xstd):
        """Normalise x; if any std component is zero, only centre it."""
        return (x - xmean) if (xstd == 0).any() else (x - xmean) / xstd

    def unscale_data(self, x, xmean, xstd):
        """Inverse of scale_data (same zero-std special case)."""
        return (x + xmean) if (xstd == 0).any() else (x * xstd + xmean)
class ModelGPService(ModelService):
    """Model service backed by a scaled Gaussian Process.

    Learns the residual between the observed state derivative and the
    nominal model (mu_model), expressed in a heading-aligned body frame
    (heading taken from obs[0]).
    """

    def __init__(self,xdim,odim,use_obs=False,use_service=True):
        ModelService.__init__(self,xdim,odim,use_obs,use_service)
        # note: use use_obs and observations with caution. model may overfit to this input.
        model_xdim=self.xdim//2
        if self.use_obs:
            model_xdim += self.odim
        model_ydim=self.xdim//2
        self.m = ScaledGP(xdim=model_xdim,ydim=model_ydim)
        # Training buffers: targets y and inputs Z, grown by add_data and
        # capped at N_data samples (oldest discarded first).
        self.y = np.zeros((0,model_ydim))
        self.Z = np.zeros((0,model_xdim))
        self.N_data = 400

    def rotate(self,x,theta):
        """World->body rotation of a 2-vector: [cos sin; -sin cos] @ x."""
        x_body = np.zeros((2,1))
        x_body[0] = x[0] * np.cos(theta) + x[1] * np.sin(theta)
        x_body[1] = -x[0] * np.sin(theta) + x[1] * np.cos(theta)
        return x_body

    def make_input(self,x,obs):
        # format input vector
        # obs[0] is the heading; x[2:-1] holds the world-frame components
        # that get re-expressed in the body frame.
        theta = obs[0]
        x_body = self.rotate(x[2:-1,:],theta)
        if self.use_obs:
            Z = np.concatenate((x_body,obs[1:,:])).T
        else:
            Z = np.concatenate((x_body)).T
        #normalize input by mean and variance
        # Z = (Z - self.Zmean) / self.Zvar
        return Z

    def predict(self,req):
        """Predict the model residual for req.x / req.obs.

        Returns a Predict_Model response (presumably a service/message type
        from utils — TODO confirm) carrying the mean rotated back to the
        world frame plus the predictive variance.
        """
        x = np.expand_dims(req.x, axis=0).T
        obs = np.expand_dims(req.obs, axis=0).T
        # format the input and use the model to make a prediction.
        Z = self.make_input(x,obs)
        y, var = self.m.predict(Z)
        # theta = np.arctan2(x[3]*x[4],x[2]*x[4])
        theta=obs[0]
        # Rotate the GP output back from body to world frame (-theta).
        y_out = self.rotate(y.T,-theta)
        resp = Predict_Model(y_out.flatten(),var.T.flatten())
        return resp

    def train(self, goal=None):
        """(Re)fit the GP on the buffered data.

        When called with an action ``goal``, honours preemption and reports
        the outcome through the action server result.
        """
        success = True
        if goal is not None:
            # goal was cancelled
            if self._action_service.is_preempt_requested():
                print("Preempt training request")
                self._action_service.set_preempted()
                success = False
        # train model. this gets called by the training thread on timer_cb() in adaptive_clbf_node.
        if success and self.Z.shape[0] > 0 and self.Z.shape[0] == self.y.shape[0]:
            self.m.optimize(self.Z,self.y)
            if goal is not None:
                self._train_result.model_trained = True
                self._action_service.set_succeeded(self._train_result)
        else:
            if goal is not None:
                self._train_result.model_trained = False
                self._action_service.set_succeeded(self._train_result)

    def add_data(self,req):
        """Append one training sample.

        Target is the residual (finite-difference state derivative minus
        mu_model) rotated into the body frame; input is make_input(x, obs).
        """
        x_next = np.expand_dims(req.x_next, axis=0).T
        x = np.expand_dims(req.x, axis=0).T
        mu_model = np.expand_dims(req.mu_model, axis=0).T
        obs = np.expand_dims(req.obs, axis=0).T
        dt = req.dt
        # add a sample to the history of data
        x_dot = (x_next[2:-1,:]-x[2:-1,:])/dt
        ynew = x_dot - mu_model
        Znew = self.make_input(x,obs)
        # theta = np.arctan2(x[3]*x[4],x[2]*x[4])
        theta=obs[0]
        ynew_rotated = self.rotate(ynew,theta)
        self.y = np.concatenate((self.y,ynew_rotated.T))
        self.Z = np.concatenate((self.Z,Znew))
        # throw away old samples if too many samples collected.
        if self.y.shape[0] > self.N_data:
            self.y = self.y[-self.N_data:,:]
            self.Z = self.Z[-self.N_data:,:]
            # self.y = np.delete(self.y,random.randint(0,self.N_data-1),axis=0)
            # self.Z = np.delete(self.Z,random.randint(0,self.N_data-1),axis=0)
        if self.verbose:
            print("obs", obs)
            print("ynew",ynew)
            print("ynew_rotated", ynew_rotated)
            print("Znew",Znew)
            print("x_dot",x_dot)
            print("mu_model",mu_model)
            print("dt",dt)
            print("n data:", self.y.shape[0])
            # print("prediction error:", self.predict_error)
            # print("predict var:", self.predict_var)
| YimingShu-teay/balsa-reproduction | balsa_reproduction/model_service.py | model_service.py | py | 5,532 | python | en | code | 2 | github-code | 13 |
13366176946 | from django.http.response import HttpResponse
from django.shortcuts import redirect, render
from .forms import *
from django.views import View
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request,'index.html')
def inventoryHome(request):
    """Inventory list page: POST saves a new item, GET shows the add-item
    form alongside all existing inventory records."""
    if request.method=='POST':
        form=InventoryForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('inventory')
        else:
            # NOTE(review): invalid submissions discard the user's input;
            # consider re-rendering the form with its errors instead.
            return HttpResponse('not valid')
    else:
        form=InventoryForm()
        inventoryData=Inventory.objects.all()
        return render(request,'inventory.html',{'form':form,'inventoryData':inventoryData})
def inventoryUpdate(request,id):
    """Edit the inventory record with primary key ``id``.

    POST: save the bound form and return to the list; GET: render the edit
    form pre-filled from the existing record.
    """
    if request.method=='POST':
        form=InventoryForm(request.POST,instance=Inventory.objects.get(pk=id))
        if form.is_valid():
            form.save()
            return redirect('inventory')
    else:
        form=InventoryForm(instance=Inventory.objects.get(pk=id))
        return render(request,'inventoryUpdate.html',{'form':form})
def inventoryDelete(req,id):
    """Delete the inventory record with primary key ``id`` and return to
    the inventory list."""
    Inventory.objects.get(pk=id).delete()
    return redirect('inventory')
def purchaseItem(request,id):
    """Purchase units of an inventory item.

    POST: decrement the stored quantity by the requested amount and redirect
    to the inventory list; reject requests exceeding the available stock.
    GET: render the purchase form pre-filled with the item's data.
    """
    if request.method=='POST':
        quantityPost=request.POST['quantity']
        print(quantityPost)
        obj=Inventory.objects.filter(id=id)
        obj1=obj[0]
        # Bug fix: the original used '>' which wrongly rejected purchases
        # that would consume exactly the remaining stock.
        if obj1.quantity >= int(quantityPost):
            obj1.quantity -= int(quantityPost)
            obj1.save()
            return redirect('inventory')
        else:
            return HttpResponse('not valid')
    else:
        form=InventoryForm(instance=Inventory.objects.get(pk=id))
        return render(request,'purchase.html',{'form':form})
# class inventoryHome(View):
# form_class = InventoryForm
# model = Inventory
# template = 'inventory.html'
# def get(self, request, *args, **kwargs):
# form = self.form_class()
# inventoryData = self.model.objects.select_related().all()
# context = {'form': form,'inventoryData':inventoryData}
# return render(request,'inventory.html',context)
# def post(self, request, *args, **kwargs):
# form = self.form_class(request.POST)
# if form.is_valid():
# form.save()
# form = self.form_class()
# return redirect('inventory')
# else:
# return HttpResponse('not valid')
| jithinvv4u/DRF2 | inventory/views.py | views.py | py | 2,410 | python | en | code | 0 | github-code | 13 |
8869511265 | import random
class wordjumblegame(object):
    """Console word-jumble game.

    Loads the word list for a level from '<level>.txt', presents each word
    shuffled, and scores the player one point per correct guess.
    """

    def __init__(self, name, level) -> None:
        self.name = name
        self.points = 0  # one point per correct guess
        self.level = level
        self.words = self.loadwords(self.level)

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    def banner(self):
        """Print the welcome banner with the player's name and level."""
        print("-------------------------------------------------------")
        print("               WELCOME TO WORD JUMBLE GAME             ")
        print("-------------------------------------------------------")
        print("          The computer presents a jumbled word.        ")
        print("                 You need to guess it                  ")
        print("-------------------------------------------------------")
        print("NAME  :", self.name)
        print("LEVEL :", self.level)
        print("*******************************************************")
        print("\n")

    def loadwords(self, level):
        """Read the level's word list from '<level>.txt' (one word per line)."""
        path = str(level) + ".txt"
        with open(path) as file:
            temp = file.readlines()
        return [s.strip() for s in temp]

    def jumble(self, word):
        # NOTE(review): random.shuffle can occasionally return the letters in
        # their original order, presenting the word unjumbled.
        temp = list(word)
        random.shuffle(temp)
        return ''.join(temp)

    def run(self):
        """Play one round over all words (in shuffled order), then print the
        final scoreboard."""
        self.banner()
        random.shuffle(self.words)
        for word in self.words:
            jword = self.jumble(word)
            print("Jumbled Word -> ", jword)
            uword = input("Can you guess? ")
            if(uword == word):
                self.points += 1
                print("Correct!")
            else:
                print("Incorrect.")
            print("\n")
        self.printinfo()

    def score(self):
        """Return the current number of points."""
        return self.points

    def printinfo(self):
        """Print name, level, score and a qualitative verdict."""
        print("*******************************************************")
        print("NAME  :", self.name)
        print("LEVEL :", self.level)
        print("SCORE :", self.points)
        if(self.points > 6):
            print("RESULT : Excellent Playing")
        elif(3 <= self.points <= 6):
            print("RESULT : Good Playing")
        else:
            print("RESULT : Needs Improvement")
        print("-------------------------------------------------------")
# --------------------------------------------------------
if __name__ == "__main__":
    # Inital test
    '''
    p = wordjumblegame("Anil", 2)
    print(getattr(p, "words"))
    p.run()
    print(p.score())
    '''
    # Multiplayer test: map of player name -> difficulty level.
    D = {"Anil": 2, "Sunil": 1, "Ram": 1}
    players = [wordjumblegame(player, level) for player, level in D.items()]
    for player in players:
        player.run()
    # Collect the results into {name: {"score": ..., "level": ...}}.
    results = {}
    for player in players:
        key = getattr(player, "name")
        score = player.score()
        level = getattr(player, "level")
        results[key] = {"score":score, "level":level}
    print(results)
| mindful-ai/oracle-python-feb2023 | 15_example_03/15_example_03/wordjumblegame_oop.py | wordjumblegame_oop.py | py | 2,998 | python | en | code | 0 | github-code | 13 |
9440095039 |
import json, time
from sys import argv
import bs4
from tqdm import tqdm
from utils import save_obj
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
"""
This parameter has been changed in the past. Check Google images result page source code if this doesn't work.
"""
html_string = "div[class='rg_meta notranslate']"
"""
Function to create headless webdriver
"""
def init_driver():
    """Create a headless Chrome webdriver with a 5-second explicit wait
    attached as ``driver.wait``."""
    options = webdriver.ChromeOptions()
    options.add_argument('headless')
    # ``chrome_options`` was deprecated in Selenium 3.8 in favour of
    # ``options`` (and removed in Selenium 4).
    driver = webdriver.Chrome(options=options)
    driver.wait = WebDriverWait(driver, 5)
    return driver
"""
Google images lookup
"""
def lookup(driver, query):
    """Open Google Images, type ``query`` into the search box and submit."""
    driver.get('https://www.google.ie/imghp')
    try:
        # driver.wait is the 5 s WebDriverWait attached in init_driver.
        box = driver.wait.until(EC.presence_of_element_located((By.NAME, "q")))
        button = driver.wait.until(EC.element_to_be_clickable((By.NAME, "btnG")))
        box.send_keys(query)
        button.click()
    except TimeoutException:
        print('Box or button not found on Google Images')
"""
Uses BeautifulSoup to parse the urls of images
"""
def urls_list(driver, string):
    """Parse the loaded results page and return the original image URLs.

    Each element matched by CSS selector ``string`` contains a JSON blob
    whose 'ou' key is the full-size image URL.
    """
    src = driver.page_source
    soup = bs4.BeautifulSoup(src, 'html.parser')
    raw = soup.select(string)
    urls = [json.loads(row.contents[0])['ou'] for row in raw]
    if len(urls) == 0:
        # Google changes its result markup from time to time; an empty list
        # usually means the selector is stale.
        print("Need to change html_string parameter in google_images_scraper.py")
    return urls
"""
Uses the other functions to return a list of urls
"""
def url_extractor(animal):
    """Search Google Images for '<animal> flag' and return the result URLs."""
    driver = init_driver()
    lookup(driver, animal + ' flag')
    time.sleep(1)  # give the results page a moment to render
    found = urls_list(driver, html_string)
    driver.quit()
    return found
def scraper(file):
    """For every name in ``file`` (one per line), collect its image URLs and
    pickle the {name: urls} dict next to the input file."""
    with open(file) as f:
        lst = f.read().splitlines()
    urls_of_imgs = {}
    for i in tqdm(lst):
        urls_of_imgs[i] = url_extractor(i)
    # save_obj comes from utils; file[:-4] strips the '.txt' extension.
    save_obj(urls_of_imgs, file[:-4] + '_dict')

if __name__ == "__main__":
    scraper(argv[1])
| piyushjaingoda/National-Flag-Recognition-using-Machine-Learning-Techniques | web_scraping/google_images_scraper.py | google_images_scraper.py | py | 2,128 | python | en | code | 3 | github-code | 13 |
74879554577 | # coding=utf-8
import logging
from collections import defaultdict
from django.core.management import BaseCommand
from django.db.models import Count
from benchmark.models import ResultAuthor, Result
class Command(BaseCommand):
    """Merge duplicate ResultAuthor rows that share the same login.

    All results belonging to a duplicated login are re-pointed at that
    login's earliest-created author; orphaned authors are then removed.
    """

    def handle(self, *args, **options):
        try:
            return self._process(*args, **options)
        except Exception as e:
            # Bug fix: the original format string used '()' instead of '{}',
            # so the exception text was never interpolated into the message.
            self.stderr.write('Command failed: {}.'.format(e))
            logging.exception(e)

    def _process(self, *args, **options):
        # Group authors by login; any login with more than one author row
        # is a duplicate set that needs merging.
        duplicities = defaultdict(set)
        for author in ResultAuthor.objects.all():
            duplicities[author.login].add(author)
        for login, authors in filter(lambda pair: len(pair[1]) > 1, duplicities.items()):
            # Keep the earliest-created author as the canonical one.
            target = ResultAuthor.objects.filter(login=login).earliest('x_created')
            Result.objects.filter(author__login=login).update(author=target)
        # Drop authors that no longer own any result.
        ResultAuthor.objects.filter(result_author__isnull=True).delete()
| thejoeejoee/VUT-FIT-IFJ-2017-server | benchmark/management/commands/merge_authors.py | merge_authors.py | py | 976 | python | en | code | 0 | github-code | 13 |
72259460818 | #! /usr/bin/env python
# -*- coding: utf-8
import time
import cv2
import numpy as np
from math import pi, sin, cos, asin, acos
import csv
from perception.wedge.gelsight.util.Vis3D import ClassVis3D
from perception.wedge.gelsight.gelsight_driver import GelSight
from controller.gripper.gripper_control import Gripper_Controller
from controller.ur5.ur_controller import UR_Controller
from controller.mini_robot_arm.RX150_driver import RX150_Driver
import collections
urc = UR_Controller()
grc = Gripper_Controller()
urc.start()
grc.start()
# pose0 = np.array([-0.51, 0.376, 0.409, -1.416, -1.480, -1.031])
# pose0 = np.array([-0.539, 0.312, 0.29, -1.787, -1.604, -0.691])
pose0 = np.array([-0.505, -0.219, 0.235, -1.129, -1.226, 1.326])
grc.gripper_helper.set_gripper_current_limit(0.6)
rx150 = RX150_Driver(port="/dev/ttyACM0", baudrate=1000000)
rx150.torque(enable=1)
print(rx150.readpos())
def rx_move(g_open):
    """Command the RX150 arm to a fixed pose with gripper opening ``g_open``.

    The joint ``values`` and the x/y/end-angle targets are hard-coded for
    this experiment; only the last servo value (the gripper) varies.
    """
    values = [2048, 2549, 1110, 1400, 3072, g_open]
    x = 320
    y = 90
    end_angle = -30. / 180. * np.pi  # -30 degrees in radians
    # timestamp=30 controls the motion duration per the RX150 driver's
    # convention — TODO confirm units.
    rx150.gogo(values, x, y, end_angle, 320, 90, end_angle, 3072, timestamp=30)
# rx_move(2000)
# sensor_id = "W03"
sensor_id = "Fabric0"
IP = "http://rpigelsightfabric.local"
# N M fps x0 y0 dx dy
tracking_setting = (10, 14, 5, 16, 41, 27, 27)
n, m = 150, 200
# Vis3D = ClassVis3D(m, n)
def read_csv(filename=f"config_{sensor_id}.csv"):
    """Load sensor corner coordinates from a CSV file.

    Skips the header row and returns a list of (col1, col2) integer pairs
    taken from columns 1 and 2 of each remaining row.
    """
    with open(filename, "r") as fh:
        reader = csv.reader(fh)
        next(reader)  # discard the header line
        return [(int(line[1]), int(line[2])) for line in reader]
corners = tuple(read_csv())
# corners=((252, 137), (429, 135), (197, 374), (500, 380))
gs = GelSight(
IP=IP,
corners=corners,
tracking_setting=tracking_setting,
output_sz=(400, 300),
id="right",
)
gs.start()
def test_combined():
    """Cable/fabric-following demo loop.

    Moves the UR arm to the start pose, closes the gripper, then servos the
    arm with a proportional controller on the GelSight pose estimate until
    'q' is pressed or a workspace limit would be exceeded.
    """
    # grc.follow_gripper_pos = 0.8
    # grc.follow_gripper_pos = 1
    a = 0.15
    v = 0.08
    urc.movel_wait(pose0, a=a, v=v)
    rx_move(1200)
    # Wait for operator confirmation before gripping.
    c = input()
    rx_move(810)
    grc.follow_gripper_pos = 1
    time.sleep(0.5)
    depth_queue = []
    cnt = 0
    dt = 0.05
    pos_x = 0.5
    # dt = 0.5
    # dy = 0.002
    # dz = 0.004
    # th = np.arctan(dy/dz)\
    vel = [0.00, 0.008, 0, 0, 0, 0]
    while True:
        img = gs.stream.image
        # get pose image
        pose_img = gs.pc.pose_img
        # pose_img = gs.pc.frame_large
        if pose_img is None:
            continue
        # depth_current = gs.pc.depth.max()
        # depth_queue.append(depth_current)
        #
        # if len(depth_queue) > 2:
        #     depth_queue = depth_queue[1:]
        #
        # if depth_current == np.max(depth_queue):
        pose = gs.pc.pose
        cv2.imshow("pose", pose_img)
        if gs.pc.inContact:
            # if cnt % 4 < 2:
            #     # grc.follow_gripper_pos = 1
            #     rx_move(810)
            # else:
            a = 0.02
            v = 0.02
            kp = .03
            # kp_rot = .2
            # Projected contact position along the sensor x axis; drives the
            # proportional term below.
            # pos_x = (2*pose[0] + (1 - pose[1])*np.tan(pose[2]))/2
            pos_x = (pose[0] + (1 - pose[1])*np.tan(pose[2]))
            # pos_x = pose[0]
            # e = (pos_x-0.5)*kp
            # vel = [0, (pos_x-0.3)*kp, -0.008, 0, 0, 0]
            # vel = [0, (pos_x-0.6)*kp, -0.008, kp_rot*gs.pc.pose[2], 0, 0]
            # vel = [0, e*np.cos(th) - dy, -e*np.sin(th) - dz, kp_rot*gs.pc.pose[2], 0, 0]
            vel = [(pos_x-0.2)*kp, 0.008, -(pos_x-0.2)*kp*.2, 0, 0, 0]
            vel = np.array(vel)
            # grc.follow_gripper_pos = .885
            # grc.follow_gripper_pos = .88
            # rx_move(830)
            # urc.speedl([(pose[0]-0.2)*kp, 0.008, 0, 0, 0, 0], a=a, t=dt*2)
            ur_pose = urc.getl_rt()
            # Workspace guards: clamp the velocity so the arm stays inside
            # the safe x/y/z envelope.
            if ur_pose[0] < -0.7:
                vel[0] = max(vel[0], 0.)
            if ur_pose[0] > -0.3:
                vel[0] = min(vel[0], 0.)
            if ur_pose[2] < .08:
                vel[2] = 0.
            if ur_pose[1] > .3:
                vel[0] = min(vel[0], 0.)
                vel[1] = 0.
            print("sliding vel ", vel[0], "posx ", pos_x)
            vel = np.array(vel)
            urc.speedl(vel, a=a, t=dt*2)
            time.sleep(dt)
        # # get tracking image
        # tracking_img = gs.tc.tracking_img
        # if tracking_img is None:
        #     continue
        # slip_index_realtime = gs.tc.slip_index_realtime
        # print("slip_index_realtime", slip_index_realtime)
        # cv2.imshow("marker", tracking_img[:, ::-1])
        # cv2.imshow("diff", gs.tc.diff_raw[:, ::-1] / 255)
        # if urc.getl_rt()[0] < -.45:
        #     break
        # cnt += 1
        c = cv2.waitKey(1) & 0xFF
        if c == ord("q"):
            break
if __name__ == "__main__":
    try:
        test_combined()
    finally:
        # Always release the GelSight stream, even on error or Ctrl-C.
        del gs
| nehasunil/deformable_following | following.py | following.py | py | 4,892 | python | en | code | 0 | github-code | 13 |
20809119299 | import pytest
import redis
import time
from crypto_tulips.dal.services.contract_service import ContractService, ContractFilter
from crypto_tulips.dal.objects.contract import Contract
# Shared fixtures: five contracts with staggered creation times, amounts and
# rates (argument order follows the Contract constructor — hash, signature,
# owner, amount, rate, ..., duration, created, expiry; confirm against the
# Contract class).
now = int(time.time())
c1 = Contract('tcs_hash1', 'tcs_sig1', 'tc_matt', 100, 0.5, 1, 1000, now, now + 900)
c2 = Contract('tcs_hash2', 'tcs_sig2', 'tc_matt', 50, 0.3, 1, 10000, (now + 1000), now + 9000)
c3 = Contract('tcs_hash3', 'tcs_sig3', 'tc_denys', 1239, 0.7, 1, 100040, (now + 2000), now + 90040)
c4 = Contract('tcs_hash4', 'tcs_sig4', 'tc_naween', 10430, 0.1, 1, 13040, (now + 3000), now + 12040)
c5 = Contract('tcs_hash5', 'tcs_sig5', 'tc_william', 54, 0.4, 1, 15404, (now + 4000), now + 14404)

@pytest.mark.first
def setup():
    # Runs first: persist all fixtures so the remaining tests can query them.
    ContractService.store_contract(c1)
    ContractService.store_contract(c2)
    ContractService.store_contract(c3)
    ContractService.store_contract(c4)
    ContractService.store_contract(c5)

def test_get_contract_by_hash():
    # Round-trip: a stored contract is retrievable by its hash.
    contract = ContractService.get_contract_by_hash(c1._hash)
    assert c1.get_sendable() == contract.get_sendable()
# FILTER TESTS
def test_get_contracts_by_amount_range():
    # Inclusive range filter on contract amount.
    cf = ContractFilter(ContractFilter.AMOUNT, 100, 1000)
    contracts = ContractService.get_contracts_by_filter([cf], True)
    assert len(contracts) == 1
    assert contracts[0].get_sendable() == c1.get_sendable()
    # Widening the maximum should pick up c2 as well.
    cf.maximum = 10000
    contracts = ContractService.get_contracts_by_filter([cf], True)
    assert len(contracts) == 2

def test_get_contracts_by_rate_range():
    cf = ContractFilter(ContractFilter.RATE, 0.5, 0.7)
    contracts = ContractService.get_contracts_by_filter([cf], True)
    assert len(contracts) == 2

def test_get_contracts_by_created_range():
    cf = ContractFilter(ContractFilter.CREATED, now, now + 3000)
    contracts = ContractService.get_contracts_by_filter([cf], True)
    assert len(contracts) == 4

def test_get_contracts_by_duration_range():
    cf = ContractFilter(ContractFilter.DURATION, 1000, 13040)
    contracts = ContractService.get_contracts_by_filter([cf], True)
    assert len(contracts) == 3

def test_get_contracts_by_multiple():
    # Second argument toggles AND (True = intersection) vs OR semantics.
    cf1 = ContractFilter(ContractFilter.AMOUNT, 50, 1000)  # c2, c5, c1
    contracts = ContractService.get_contracts_by_filter([cf1], True)
    assert len(contracts) == 3
    cf2 = ContractFilter(ContractFilter.RATE, 0.4, 0.5)  # c1, c5
    contracts = ContractService.get_contracts_by_filter([cf1, cf2], True)
    assert len(contracts) == 2
    cf3 = ContractFilter(ContractFilter.DURATION, 15404, 15404)  # c5
    contracts = ContractService.get_contracts_by_filter([cf1, cf2, cf3], True)
    assert len(contracts) == 1
    contracts = ContractService.get_contracts_by_filter([cf1, cf2, cf3], False)  # None
    assert len(contracts) == 0
# END OF FILTER TESTS
def test_get_all_contracts_by_owner():
    # Every returned contract must belong to the queried owner; an unknown
    # owner yields an empty list.
    contracts = ContractService.get_all_contracts_by_owner('tc_matt')
    assert len(contracts) == 2
    for contract in contracts:
        assert contract.owner == 'tc_matt'
    contracts = ContractService.get_all_contracts_by_owner('tc_denys')
    assert len(contracts) == 1
    for contract in contracts:
        assert contract.owner == 'tc_denys'
    contracts = ContractService.get_all_contracts_by_owner('bad_key')
    assert len(contracts) == 0

@pytest.mark.last
def end_closing():
    # Runs last: wipe the test Redis database.
    r = redis.Redis()
    r.flushdb()
40840615783 | # -*- coding: utf-8 -*-
#
# This software is licensed under
# CeCILL FREE SOFTWARE LICENSE AGREEMENT
# This software comes in hope that it will be useful but
# without any warranty to the extent permitted by applicable law.
# (C) M. Couprie <coupriem@esiee.fr>, 2011
# Université Paris-Est, Laboratoire d'Informatique Gaspard-Monge, Equipe A3SI, ESIEE Paris, 93162, Noisy le Grand CEDEX
# arrays, histograms
from pink import python_component_missing # this is an exception class
from pink import imview as imview
from pink import cpp as pink
import pink.windowing as wd
try:
import numpy as np
except:
print("error: could not import numpy, try to install python-numpy")
raise python_component_missing
try:
import matplotlib.pyplot as plt
except:
print("error: could not import matplotlib, try to install python-numpy and python-matplotlib")
raise python_component_missing
## Uncomment for debugging
wd.options.debug=True
# conversion : from Pink image to numpy array
# conversion : from Pink image to numpy array
def image_2_array(img):
    """Copy a Pink image into a float numpy array shaped like ``img.size``."""
    total = img.size.prod()
    flat = np.array([img[k] for k in range(total)], dtype=float)
    return flat.reshape(img.size)
# conversion : from numpy array to Pink image
# conversion : from numpy array to Pink image
def array_2_image(arr):
    """Convert a numpy array to a Pink char image of the same shape.

    Bug fix: the original called ``arr.resize(N)``, flattening the caller's
    array in place as a side effect; we now iterate a flat view instead and
    leave ``arr`` untouched.
    """
    S = list(arr.shape)
    img = pink.char_image(S)
    # ravel() gives C-order traversal, matching the original flat indexing.
    for i, v in enumerate(arr.ravel()):
        img[i] = int(v)
    return img
# histogram
# histogram
def pink_histogram(img,mask=None):
    """Return a 256-bin grey-level histogram (numpy array) of an 8-bit image.

    If ``mask`` is given, only pixels where mask[i] != 0 are counted.
    Raises for non-uint8 images (only 256 grey levels are supported).
    """
    if img.imtype() != 'uint8_t':
        print("histogram: error")
        raise BaseException
    h = np.zeros(256)
    N = img.size.prod()
    # Bug fix: 'mask == None' replaced by 'mask is None' — with array-like
    # masks '==' compares element-wise and its truth value is ambiguous.
    if mask is None:
        for i in range(N):
            h[img[i]] = h[img[i]] + 1
    else:
        for i in range(N):
            if mask[i] != 0:
                h[img[i]] = h[img[i]] + 1
    return h
# show histogram
# show histogram
def show_histo(img):
    """Plot the grey-level histogram of ``img`` as vertical lines.

    Blocks until the matplotlib window is closed.
    """
    # Removed leftover debug prints ("ici01"/"ici02") from the original.
    h = pink_histogram(img)
    x = np.array(range(256))
    y = np.zeros(256)
    fig = plt.figure()
    ax1 = fig.add_subplot(111)  # 1 row, 1 column, first (upper) plot
    ax1.vlines(x, y, h, lw=2)
    ax1.grid(True)
    ax1.axhline(0, color='black', lw=1)
    plt.show()
    return
# Round-trip demo: Pink image -> numpy array -> Pink image, then histogram.
im = pink.readimage("../images/uo.pgm")
imview(im)
ar = image_2_array(im)
im2 = array_2_image(ar)
imview(im2)
if im==im2:
    print("Test succeded; the two files are equal")
show_histo(im)
| technolapin/sable | pink/tutorial/python/MC-TP6/solution/test_arrays.py | test_arrays.py | py | 2,336 | python | en | code | 2 | github-code | 13 |
37159835074 | from customtkinter import *
import tkinter
from tkinter import messagebox
import sqlite3
from PIL import Image,ImageTk
import time
loginP = CTk()
loginP.title("patient registration page")
loginP.resizable(0, 0)
loginP.state('zoomed')
loginP.iconbitmap("istock.ico")
set_default_color_theme('green')
#####creating databaseTable for patient records
try:
conn = sqlite3.connect('admins2.db') #Creates table
c = conn.cursor() #Query lai excute garxa
c.execute("""CREATE TABLE patient_records(
first_name text,
last_name text,
patient_id integer,
address text,
contact integer
)""")
conn.commit()
conn.close()
except:
pass
def save():
    """Persist the patient registration form into patient_records, clear the
    form widgets, and refresh the 'last saved patient' panel on frame3."""
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    # Bug fix: the original INSERT omitted the column list, so the named
    # parameters were bound positionally and address/contact/patient_id were
    # written into the wrong columns (table order is first_name, last_name,
    # patient_id, address, contact).
    c.execute(
        "INSERT INTO patient_records (first_name, last_name, patient_id, address, contact) "
        "VALUES (:f_name, :l_name, :patient_id, :address, :contact)",
        {'f_name': f_name_box.get(), 'l_name': l_name_box.get(),
         'address': address_box.get(), 'contact': cont_box.get(),
         'patient_id': patient_id_box.get()})
    messagebox.showinfo("Patient Details", "Saved Sucessfully")
    conn.commit()
    conn.close()
    # Reset the form for the next entry.
    f_name_box.delete(0, END)
    l_name_box.delete(0, END)
    patient_id_box.delete(0, END)
    address_box.delete(0, END)
    dob_box.delete(0, END)
    cont_box.delete(0, END)
    sex_combobox.set("")
    # Re-read the table and display the most recently inserted record
    # (column indices follow the corrected storage order; oid is appended
    # last by 'SELECT *,oid').
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    c.execute("SELECT *,oid FROM patient_records")
    rec = c.fetchall()
    i = len(rec) - 1
    name_label = CTkLabel(frame3, text=str(rec[i][0]) + ' ' + str(rec[i][1]),
                          font=("century gothic bold", 15), text_color='royalblue')
    name_label.place(x=270, y=90)
    address_label = CTkLabel(frame3,
                             text=str(rec[i][3]) + '\n' + str(rec[i][4]) + '\n' + "Record ID. : " + str(rec[i][5]),
                             font=("century gothic", 12), text_color='royalblue')
    address_label.place(x=270, y=112)
    conn.commit()
    conn.close()
######### Creating database for medicine records ########
try:
conn = sqlite3.connect('admins2.db') #Creates table
c = conn.cursor() #Query lai excute garxa
c.execute("""CREATE TABLE medicine_records(
medicine_name text,
quantity integer,
rate integer,
total integer)""")
conn.commit()
conn.close()
except:
pass
def add():
    """Insert one medicine record (total = quantity * rate) and clear the
    medicine form widgets."""
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    # The named parameters appear in the table's declared column order
    # (medicine_name, quantity, rate, total), so the positional binding of
    # a column-less INSERT lines up correctly here.
    c.execute("INSERT INTO medicine_records VALUES (:med_name, :quant, :rate,:total)",{'med_name':med_name_box.get(),'quant':quantity_box.get(),'rate':rate_name_box.get(),'total':int(quantity_box.get())*int(rate_name_box.get())})
    # messagebox.showinfo("Medicine Details","Added Sucessfully")
    conn.commit()
    conn.close()
    # Reset the form (some widgets are defined further down the file).
    med_type_box.set('')
    rate_name_box.delete(0,END)
    quantity_box.delete(0, END)
    med_name_box.delete(0, END)
    exp_date_box.delete(0, END)
    issued_date_box.delete(0, END)
    dose_box.delete(0, END)
dose_box.delete(0, END)
def tbl():
    """Render the medicine_records table as a grid of read-only entries
    placed over the main window."""
    table = CTkFrame(loginP, height=580, width=1200, bg_color='white')
    table.place(x=815, y=280)
    try:
        # Try fetching datas from database
        conn = sqlite3.connect('admins2.db')
        c = conn.cursor()
        c.execute("SELECT oid, medicine_name, quantity, rate,total from medicine_records")
        lst = c.fetchall()
        conn.commit()
        conn.close()
    except:
        # Empty list if list doesn't Exist
        # NOTE(review): the bare except also hides real DB errors.
        lst = []
    finally:
        # Table headings
        lst.insert(0, ('ID', 'Medicine Name', 'Quantity', 'Rate', 'Total'))
        # creating a Table
        total_rows = len(lst)
        total_columns = len(lst[0])
        for i in range(total_rows):
            if i == 0:
                # table headings
                fontt = ('Century Gothic', 15, 'bold')
                jus = CENTER
                bgc = 'white'  # NOTE(review): bgc is computed but never used
            else:
                # table datas
                fontt = ('Century Gothic', 13)
                jus = LEFT
                bgc = 'yellow'
            for j in range(total_columns):
                # width for all columns
                if j == 0:
                    wid = 40
                elif j == 1:
                    wid = 150
                elif j == 2:
                    wid = 100
                elif j == 3:
                    wid = 70
                elif j == 4:
                    wid = 80
                else:
                    wid = 8
                # Disabled entries act as read-only cells.
                e = CTkEntry(table, width=wid, font=fontt, justify=jus)
                e.grid(row=i, column=j)
                e.insert(0, lst[i][j])
                e.configure(state=DISABLED)
####calling table function
tbl()
login_head = CTkLabel(loginP, text="Patient Registration Form", text_color="black", font=("times", 34),bg_color="cyan4",width=50, height=50)
login_head.pack(fill="both")
my_image2 = Image.open("health2.png")
resized_image = my_image2.resize((2600, 1420))
converted_image = ImageTk.PhotoImage(resized_image)
mylable3 = tkinter.Label(loginP, image=converted_image, width=2700, height=1390)
mylable3.pack()
frame2 = CTkFrame(master=loginP ,width=760, height=200, corner_radius=11, border_width=3, border_color='cyan4',bg_color='paleturquoise')
frame2.place(relx=0.31, rely=0.25, anchor=tkinter.CENTER)
name = CTkLabel(frame2, text=" Patient Name :", font=("century gothic", 15))
name.place(x=5, y=20)
f_name_box = CTkEntry(frame2, border_width=2, placeholder_text='First Name', width=100, height=21)
f_name_box.place(x=145, y=25)
l_name_box = CTkEntry(frame2, border_width=2, placeholder_text="Last Name", width=100, height=21)
l_name_box.place(x=255, y=25)
patient_id = CTkLabel(frame2,text="Patient ID :",font=("century gothic", 15))
patient_id.place(x=408,y=20)
patient_id_box = CTkEntry(frame2,border_width=2,placeholder_text='Enter ID of patient',width=213,height=21)
patient_id_box.place(x=522,y=23)
address = CTkLabel(frame2,text=" Address : ",font=("century gothic", 15))
address.place(x=5,y=60)
address_box = CTkEntry(frame2,border_width=2,placeholder_text="Enter the address",width=210,height=21)
address_box.place(x=145,y=65)
dob = CTkLabel(frame2, text="Date of Birth :", font=("century gothic", 15))
dob.place(x=408, y=60)
dob_box = CTkEntry(frame2, border_width=2, placeholder_text='Enter the date of birth', width=213, height=21)
dob_box.place(x=522, y=65)
sex_label = CTkLabel(frame2, text=" Sex :", font=("century gothic", 15))
sex_label.place(x=5, y=140)
def optionmenu_callback(choice):
    # NOTE(review): this only rebinds a local name, so the selected value is
    # discarded when the callback returns — presumably the sex value is read
    # elsewhere via sex_combobox.get(); confirm before relying on sex_box.
    sex_box = choice
sex_combobox = CTkOptionMenu(frame2,values=["Male", 'Female'],command=optionmenu_callback,bg_color='transparent',height=21,width=213)
sex_combobox.place(x=145,y=145)
sex_combobox.set("") # set initial value
cont = CTkLabel(frame2, text=" Contact No. :", font=("century gothic", 15))
cont.place(x=5, y=100)
cont_box = CTkEntry(frame2, border_width=2, placeholder_text='Enter the Contact No.', width=213, height=21)
cont_box.place(x=145, y=105)
save_btn = CTkButton(frame2,text='Save',fg_color='teal',font=('century gothic bold',20),text_color='black',height=35,width=325,command=save)
save_btn.place(x=410,y=140)
frame4 = CTkFrame(master=loginP, width=760, height=250, corner_radius=11, border_width=3, border_color='cyan4',bg_color='paleturquoise')
frame4.place(relx=0.31, rely=0.6, anchor=tkinter.CENTER)
med_type = CTkLabel(frame4, text=" Medicine Type :", font=("century gothic", 15))
med_type.place(x=5, y=20)
def optionmenu_callback(choice2):
    # NOTE(review): assigns to a local that is shadowed right after this def,
    # where med_type_box is rebound to the CTkOptionMenu widget — the chosen
    # value is effectively discarded here; presumably read via
    # med_type_box.get() elsewhere. Confirm.
    med_type_box = choice2
med_type_box = CTkOptionMenu(frame4,values=["Liquid", 'Tablets','Semi-Solid'],command=optionmenu_callback,height=21,width=213)
med_type_box.place(x=145,y=25)
med_type_box.set("")
med_name = CTkLabel(frame4, text=" Medicine Name :", font=("century gothic", 15))
med_name.place(x=5, y=60)
med_name_box = CTkEntry(frame4, border_width=2, placeholder_text='Enter the name of Medicine', width=213, height=21)
med_name_box.place(x=145, y=65)
rate_name = CTkLabel(frame4, text=" Rate :", font=("century gothic", 15))
rate_name.place(x=5, y=100)
rate_name_box = CTkEntry(frame4, border_width=2, placeholder_text='Enter the Rate', width=213, height=21)
rate_name_box.place(x=145, y=105)
quantity = CTkLabel(frame4, text=" Quantity :", font=("century gothic", 15))
quantity.place(x=5, y=140)
quantity_box = CTkEntry(frame4, border_width=2, placeholder_text='Enter the Quantity', width=213, height=21)
quantity_box.place(x=145, y=145)
dose = CTkLabel(frame4, text=" Dosage :", font=("century gothic", 15))
dose.place(x=5, y=180)
dose_box = CTkEntry(frame4, border_width=2, placeholder_text='Enter the Dosage', width=213, height=21)
dose_box.place(x=145, y=185)
issued_date = CTkLabel(frame4, text=" Issued Date :", font=("century gothic", 15))
issued_date.place(x=408, y=20)
issued_date_box = CTkEntry(frame4, border_width=2, placeholder_text='Issued Date', width=213, height=21)
issued_date_box.place(x=522, y=25)
exp_date = CTkLabel(frame4, text=" Expiry Date :", font=("century gothic", 15))
exp_date.place(x=405, y=60)
exp_date_box = CTkEntry(frame4, border_width=2, placeholder_text='Expiry Date', width=213, height=21)
exp_date_box.place(x=522, y=65)
add_btn = CTkButton(frame4,text='Add',fg_color='teal',font=('century gothic bold',20),text_color='black',height=35,width=325,command=add)
add_btn.place(x=410,y=175)
##########FRAME3########
frame3 = CTkFrame(master=loginP, width=460, height=500, corner_radius=11, border_width=3, border_color='cyan4',bg_color='paleturquoise')
frame3.place(relx=0.81, rely=0.44, anchor=tkinter.CENTER)
pharmacy_head1 = CTkLabel(master=frame3, text="Pharmacy", text_color="royalblue", font=("century gothic bold", 13))
pharmacy_head1.place(x=10,y=60)
pharmacy_head2 = CTkLabel(master=frame3, text="Name and Details", text_color="royalblue", font=("century gothic bold", 13))
pharmacy_head2.place(x=270,y=60)
date_head3 = CTkLabel(master=frame3, text="Date & Time", text_color="royalblue", font=("century gothic bold", 13))
date_head3.place(x=10,y=5)
date_head4 = CTkLabel(frame3,text=(time.asctime()),text_color=('royalblue'),font=("century gothic",11))
date_head4.place(x=10,y=25)
pharmacy_name = CTkLabel(master=frame3, text="My Local Pharmacy", text_color="royalblue", font=("century gothic bold", 15))
pharmacy_name.place(x=15,y=90)
pharmacy_details = CTkLabel(master=frame3, text=" 345 Main Street,\n Maitidevi,\n Kathmandu", text_color="royalblue", font=("century gothic",12))
pharmacy_details.place(x=15,y=112)
def delete():
    """Delete the medicine record whose rowid matches the ID entry.

    Reads the record id from the global ``empty_id_box`` entry widget,
    removes the matching row from ``medicine_records`` in admins2.db,
    then clears the entry. Shows a confirmation dialog after the commit.
    """
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    # Parameterized query: the previous string concatenation
    # ("... WHERE oid = " + empty_id_box.get()) was open to SQL injection
    # and produced malformed SQL on empty/non-numeric input.
    c.execute("DELETE FROM medicine_records WHERE oid = ?", (empty_id_box.get(),))
    conn.commit()
    conn.close()
    # Confirm only after the commit actually happened (the old code showed
    # the dialog before committing).
    messagebox.showinfo("DELETE RECORDS", "Deleted Successfully")
    empty_id_box.delete(0, END)
def closetab():
    """Wipe every row from medicine_records, then close the login window."""
    connection = sqlite3.connect('admins2.db')
    cursor = connection.cursor()
    cursor.execute("DELETE FROM medicine_records")
    connection.commit()
    connection.close()
    loginP.destroy()
def update():
    """Write the editor-window values back into the selected record.

    Reads the target rowid from the global ``empty_id_box`` and the new
    field values from the global editor entries created by edit(), then
    closes the editor. Uses named-placeholder binding, so no injection risk.
    """
    record_id = empty_id_box.get()
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    c.execute("""UPDATE medicine_records SET
        medicine_name = :medi,
        quantity= :quant,
        rate = :rat,
        total = :tot
        WHERE oid = :oid""",
        {
            'medi':med_name_editor.get(),
            'quant': quantity_editor.get(),
            'rat':rate_editor.get(),
            # NOTE(review): int() raises ValueError on non-numeric entry text
            # — confirm inputs are validated upstream.
            'tot': int(quantity_editor.get())*int(rate_editor.get()),
            'oid':record_id
        }
        )
    conn.commit()
    conn.close()
    editor.destroy()
    empty_id_box.delete(0, END)
def edit():
    """Open a small editor window pre-filled with the record matching the ID entry.

    Reads the rowid from the global ``empty_id_box``, loads the matching
    row from ``medicine_records`` and exposes the editable fields through
    module-level globals so that update() can write them back.
    """
    global editor
    editor = CTk()
    editor.title('Update Data')
    editor.geometry('300x400')
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    record_id = empty_id_box.get()
    # Parameterized query: the previous string concatenation
    # ("... WHERE oid=" + record_id) was open to SQL injection and broke on
    # empty input.
    c.execute("SELECT * FROM medicine_records WHERE oid = ?", (record_id,))
    records = c.fetchall()
    # Shared with update(), which reads these entries back.
    global med_name_editor
    global quantity_editor
    global rate_editor
    med_name_label = CTkLabel(editor, text="Medicine Name :", font=("century gothic", 15))
    med_name_label.grid(row=0, column=0, pady=(10, 0))
    quantity_label = CTkLabel(editor, text="Quantity :", font=("century gothic", 15))
    quantity_label.grid(row=1, column=0)
    rate_label = CTkLabel(editor,text="Rate :", font=("century gothi", 15))
    rate_label.grid(row=2, column=0)
    total_label = CTkLabel(editor, text="Total :", font=("century gothic", 15))
    total_label.grid(row=3, column=0)
    med_name_editor = CTkEntry(editor, width=100)
    med_name_editor.grid(row=0,column=1,padx=20,pady=(10,0))
    quantity_editor= CTkEntry(editor, width=100)
    quantity_editor.grid(row=1,column=1)
    rate_editor = CTkEntry(editor, width=100)
    rate_editor.grid(row=2, column=1)
    # total is display-only; update() recomputes it from quantity * rate.
    total_editor = CTkEntry(editor, width=100)
    total_editor.grid(row=3, column=1)
    for record in records:
        med_name_editor.insert(0, record[0])
        quantity_editor.insert(0, record[1])
        rate_editor.insert(0, record[2])
        total_editor.insert(0, record[3])
    edit_btn = CTkButton(editor, text='UPDATE', command=update,height=40, font=("century gothic bold", 17),text_color='black')
    edit_btn.grid(row=6, column=0, columnspan=2, pady=10, padx=10, ipadx=125)
    conn.commit()
    conn.close()
    editor.mainloop()
##########FRAME5 ########
frame5 = CTkFrame(master=loginP, width=1250, height=100, corner_radius=11, border_width=3, border_color='cyan4',bg_color='paleturquoise')
frame5.place(relx=0.5, rely=0.87, anchor=tkinter.CENTER)
delete_btn = CTkButton(frame5,text='DELETE',fg_color='teal',font=('century goyhic',20),text_color='black',height=50,width=315,command=delete)
delete_btn.place(x=25,y=26)
empty_id_box = CTkEntry(frame5,border_width=3,border_color='teal',placeholder_text=' Enter ID',width=90,height=50)
empty_id_box.place(x=345,y=26)
update_btn = CTkButton(frame5,text='UPDATE',fg_color='teal',font=('century goyhic',20),text_color='black',height=50,width=310,command=edit)
update_btn.place(x=440,y=26)
def query():
    """Open a window listing every row of medicine_records.

    Builds one tab-separated text blob from all rows (plus rowid) and shows
    it in a single label under fixed column headers.
    """
    P = CTk()
    P.title("Medicine Records")
    P.geometry('470x400')
    set_default_color_theme('green')
    med_name_label = CTkLabel(P, text="Medicine Name", font=("century gothic bold", 15), text_color='royalblue')
    med_name_label.place(x=20, y=4)
    quantity_label = CTkLabel(P, text="Quantity", font=("century gothic bold", 15), text_color='royalblue')
    quantity_label.place(x=155, y=4)
    rate_label = CTkLabel(P, text="Rate", font=("century gothic bold", 15), text_color='royalblue')
    rate_label.place(x=240, y=4)
    total_label = CTkLabel(P, text="Total", font=("century gothic bold", 15), text_color='royalblue')
    total_label.place(x=305, y=4)
    id_label = CTkLabel(P, text="ID", font=("century gothic bold", 15), text_color='royalblue')
    id_label.place(x=380, y=4)
    conn = sqlite3.connect('admins2.db')
    c = conn.cursor()
    # oid is SQLite's implicit rowid — selected explicitly as the last column.
    c.execute("SELECT *,oid FROM medicine_records")
    records = c.fetchall()
    print_record=''
    for record in records:
        print_record += str(record[0])+'\t\t'+str(record[1])+ '\t' +str(record[2])+ ' \t' +str(record[3])+ ' \t' +str(record[4])+"\n\n"
    query_label = CTkLabel(P,text=print_record,font=("century gothic",15))
    query_label.place(x=40,y=40)
    # query_label.grid(row=14,column=0,columnspan=2)
    # commit is a no-op after a read-only SELECT, but harmless.
    conn.commit()
    conn.close()
    P.mainloop()
show_all_btn = CTkButton(frame5,text='SHOW ALL',fg_color='teal',hover_color="royalblue",font=('century goyhic',20),text_color='black',height=50,width=200,command=query)
show_all_btn.place(x=805,y=26)
closetab_btn = CTkButton(frame5,text='CLOSE',fg_color='teal',hover_color="firebrick",font=('century goyhic',20),text_color='black',height=50,width=200,command=closetab)
closetab_btn.place(x=1030,y=26)
switch_var = StringVar(value="light")
def switch_event():
    # Flip the CustomTkinter appearance between "light" and "dark",
    # driven by the switch's StringVar (onvalue/offvalue set on the widget).
    set_appearance_mode(switch_var.get())
dark_switch = CTkSwitch(loginP, text="Dark mode",font=("Century Gothic",15),button_color='lightgrey',progress_color='dimgrey', command=switch_event,variable=switch_var, onvalue="dark", offvalue="light")
dark_switch.pack(anchor='s',side='left',padx=15)
tail1 = CTkLabel(loginP, text=(time.asctime()),text_color=('black',"white"),font=("century gothic",11))
tail1.pack(side=RIGHT,anchor='n',padx=10)
loginP.mainloop()
| NirajanMahato/Pharmacy-Management-System-Gen-IV- | phar_login.py | phar_login.py | py | 16,366 | python | en | code | 0 | github-code | 13 |
15071349300 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Mapping fastq to reference genome
1. rRNA, spikein, optional
2. genome
"""
import os
import sys
import re
import io
import glob
import json
import tempfile
import shlex
import subprocess
import logging
import pandas as pd
import pysam
import pybedtools
from utils_parser import *
from helper import *
class Alignment(object):
    """Run alignment using bowtie/bowtie2/STAR, SE reads.

    Maps each FASTQ in ``fqs`` sequentially against one or more indexes
    (optional spike-in, optional rRNA group, then the genome); the reads
    that fail one index are fed to the next. Per-replicate BAM/BED files
    and JSON mapping stats are written under ``path_out``; replicates are
    merged under ``smp_name``.
    """
    def __init__(self, fqs, path_out, smp_name, genome, spikein=None,
        multi_cores=1, unique_only=False, aligner='bowtie',
        align_to_rRNA=False, path_data=None, overwrite=False):
        # fqs: list of FASTQ paths; smp_name: merged-sample name;
        # spikein/genome: index names resolved via idx_picker/idx_grouper.
        self.fqs = fqs
        self.path_out = path_out
        self.smp_name = smp_name
        self.genome = genome
        self.spikein = spikein
        self.multi_cores = multi_cores
        self.unique_only = unique_only
        self.aligner = aligner
        self.align_to_rRNA = align_to_rRNA
        self.path_data = path_data
        self.overwrite = overwrite
        ## wrapper parameters
        self.args = self.get_args()
    def get_args(self):
        """Parse parameters into a dict and validate their types."""
        args = {'fqs' : self.fqs,
                'path_out' : self.path_out,
                'smp_name' : self.smp_name,
                'genome' : self.genome,
                'spikein' : self.spikein,
                'multi_cores' : self.multi_cores,
                'unique_only' : self.unique_only,
                'aligner' : self.aligner,
                'align_to_rRNA' : self.align_to_rRNA,
                'path_data' : self.path_data,
                'overwrite' : self.overwrite
                }
        assert isinstance(args['fqs'], list)
        assert is_path(args['path_out'])
        assert isinstance(args['smp_name'], str)
        assert isinstance(args['genome'], str)
        assert isinstance(args['multi_cores'], int)
        assert isinstance(args['unique_only'], bool)
        assert isinstance(args['aligner'], str)
        assert isinstance(args['align_to_rRNA'], bool)
        assert isinstance(args['overwrite'], bool)
        return args
    def get_align_index(self):
        """Build the ordered list of index paths: [spikein?, rRNA?..., genome]."""
        args = self.args
        # spikein index (skipped when absent or identical to the genome)
        if args['spikein'] == args['genome'] or args['spikein'] is None:
            idxes = []
        else:
            sp_idx = idx_picker(args['spikein'], path_data=args['path_data'],
                aligner=args['aligner'])
            idxes = [sp_idx, ]
        # genome index
        if args['align_to_rRNA'] is True:
            # idx_grouper returns a list (rRNA first, then genome)
            sg_idx = idx_grouper(args['genome'], path_data=args['path_data'],
                aligner=args['aligner'])
            idxes.extend(sg_idx) # add genome list
        else:
            sg_idx = idx_picker(args['genome'], path_data=args['path_data'],
                aligner=args['aligner'])
            idxes.append(sg_idx) # add genome item
        # remove possible duplicates, zero-records
        idxes = list(filter(None.__ne__, idxes)) # remove None
        idxes = list(dict.fromkeys(idxes)) # keep orders
        if len(idxes) == 0:
            raise ValueError('genome index not exists: %s' % args['path_data'])
        return idxes
    def init_dir(self, fq, idx):
        """Prepare directory and file names for one fastq/index pair.

        Returns [prefix, map_bam, map_bed, map_log, unmap_fq].
        """
        assert os.path.exists(fq)
        assert isinstance(idx, str)
        args = self.args
        fq_prefix = file_prefix(fq)[0] #
        # strip pipeline suffixes so replicates share a clean base name
        fq_prefix = re.sub('\.clean|\.nodup|\.cut', '', fq_prefix)
        fq_type = seq_type(fq)
        idx_name = os.path.basename(idx)
        fq_path = os.path.join(args['path_out'], fq_prefix)
        assert is_path(fq_path) # create sub-directory
        map_suffix = os.path.join(fq_path, '%s.map_%s' % (fq_prefix, idx_name))
        unmap_suffix = os.path.join(fq_path, '%s.not_%s' % (fq_prefix, idx_name))
        map_bam = map_suffix + '.bam'
        map_bed = map_suffix + '.bed'
        map_log = map_suffix + '.%s.log' % args['aligner']
        unmap_fq = unmap_suffix + '.%s' % fq_type
        return [fq_prefix, map_bam, map_bed, map_log, unmap_fq]
    def bam_index(self, bam):
        """Create (or reuse) the .bai index for a sorted BAM."""
        assert isinstance(bam, str)
        assert os.path.exists(bam)
        bai = bam + '.bai'
        if self.overwrite is False and os.path.exists(bai):
            pass
        else:
            pysam.index(bam)
        return bai
    def bam_to_bed(self, bam):
        """Convert bam to bed (skipped when the bed exists and overwrite is off)."""
        assert isinstance(bam, str)
        assert os.path.exists(bam)
        bed = os.path.splitext(bam)[0] + '.bed'
        if self.overwrite is False and os.path.exists(bed):
            pass
        else:
            pybedtools.BedTool(bam).bam_to_bed().saveas(bed)
        return bed
    def wrap_log(self, log):
        """Parse an aligner log file and save the stats next to it as JSON."""
        assert isinstance(log, str)
        assert os.path.getsize(log) > 0
        args = self.args
        j_file = Alignment_log(log, args['unique_only']).saveas() # save as json
    def bowtie_se(self, fq, idx):
        """Run bowtie (SE); returns [map_bam, unmap_fq].

        Pipeline: bowtie -> samtools view (mapped only) -> samtools sort.
        """
        assert os.path.exists(fq)
        assert isinstance(idx, str)
        args = self.args
        prefix, map_bam, map_bed, map_log, unmap_fq = self.init_dir(fq, idx)
        para_fq = '-f' if seq_type(fq) == 'fasta' else '-q'
        # -m 1 discards multi-mappers; -k 1 reports one hit per read
        para_bowtie = '-v 2 -m 1' if args['unique_only'] is True else '-v 2 -k 1'
        if os.path.exists(map_bam) and args['overwrite'] is False:
            logging.info('file exists, alignment skipped: %s' % map_bam)
        else:
            # NOTE(review): this command string appears to contain five '%s'
            # placeholders for six values (idx and fq) — confirm against the
            # original source; the index placeholder may have been lost.
            c1 = 'bowtie %s %s -p %s --mm --best --sam --no-unal --un %s \
                %s' % (para_fq, para_bowtie, args['multi_cores'], unmap_fq,
                idx, fq)
            c2 = 'samtools view -bhS -F 0x4 -@ %s -' % args['multi_cores']
            c3 = 'samtools sort -@ %s -o %s -' % (args['multi_cores'], map_bam)
            with open(map_log, 'wt') as ff:
                p1 = subprocess.Popen(shlex.split(c1), stdout=subprocess.PIPE,
                    stderr=ff)
                p2 = subprocess.Popen(shlex.split(c2), stdin=p1.stdout,
                    stdout=subprocess.PIPE)
                p3 = subprocess.Popen(shlex.split(c3), stdin=p2.stdout)
                px = p3.communicate()
            # process log file
            self.wrap_log(map_log)
        return [map_bam, unmap_fq]
    def bowtie2_se(self, fq, idx):
        """Run bowtie2 (SE); returns [map_bam, unmap_fq]."""
        assert os.path.exists(fq)
        assert isinstance(idx, str)
        args = self.args
        prefix, map_bam, map_bed, map_log, unmap_fq = self.init_dir(fq, idx)
        para_fq = '-f' if seq_type(fq) == 'fasta' else '-q'
        # uniqueness enforced downstream via samtools MAPQ filter (-q 10)
        para_bowtie2 = '-q 10' if args['unique_only'] is True else ''
        if os.path.exists(map_bam) and args['overwrite'] is False:
            logging.info('file exists, alignemnt skipped: %s' % map_bam)
        else:
            c1 = 'bowtie2 %s -p %s --mm --no-unal --un %s \
                -x %s %s' % (para_fq, args['multi_cores'], unmap_fq, idx, fq)
            c2 = 'samtools view -bhS -F 0x4 -@ %s %s -' % (args['multi_cores'],
                para_bowtie2)
            c3 = 'samtools sort -@ %s -o %s -' % (args['multi_cores'], map_bam)
            with open(map_log, 'wt') as ff:
                p1 = subprocess.Popen(shlex.split(c1), stdout=subprocess.PIPE,
                    stderr=ff)
                p2 = subprocess.Popen(shlex.split(c2), stdin=p1.stdout,
                    stdout=subprocess.PIPE)
                p3 = subprocess.Popen(shlex.split(c3), stdin=p2.stdout)
                px = p3.communicate()
            # process log file
            self.wrap_log(map_log)
        return [map_bam, unmap_fq]
    def star_se(self, fq, idx):
        """Run STAR (SE); returns [map_bam, unmap_fq].

        STAR writes its own sorted BAM / unmapped reads / log, which are
        renamed to this class's naming scheme afterwards.
        """
        assert os.path.exists(fq)
        assert isinstance(idx, str)
        args = self.args
        prefix, map_bam, map_bed, map_log, unmap_fq = self.init_dir(fq, idx)
        para_star = '--outFilterMismatchNoverLmax 0.05 --seedSearchStartLmax 20'
        if args['unique_only'] is True:
            # replaces (not extends) the default parameter set above
            para_star = '--outFilterMismatchNoverLmax 0.07 --outFilterMultimapNmax 1'
        freader = 'zcat' if is_gz(fq) else '-'
        map_prefix = os.path.splitext(map_bam)[0]
        if os.path.exists(map_bam) and args['overwrite'] is False:
            logging.info('file exists, alignemnt skipped: %s' % map_bam)
        else:
            c1 = 'STAR --runMode alignReads \
                --genomeDir %s \
                --readFilesIn %s \
                --readFilesCommand %s \
                --outFileNamePrefix %s \
                --runThreadN %s \
                --limitOutSAMoneReadBytes 1000000 \
                --genomeLoad LoadAndRemove \
                --limitBAMsortRAM 10000000000 \
                --outSAMtype BAM SortedByCoordinate \
                --outReadsUnmapped Fastx %s' % (idx, fq, freader, map_prefix,
                args['multi_cores'], para_star)
            p1 = subprocess.run(shlex.split(c1))
            # rename exists file
            os.rename(map_prefix + 'Aligned.sortedByCoord.out.bam', map_bam)
            os.rename(map_prefix + 'Unmapped.out.mate1', unmap_fq)
            os.rename(map_prefix + 'Log.final.out', map_log)
            # process log file
            self.wrap_log(map_log)
        return [map_bam, unmap_fq]
    def align_se_batch(self, fq, idxes):
        """Run alignment in batch mode, for multiple genome indexes
        one fastq file to mulitple indexes

        Reads unmapped by index i become the input for index i+1.
        Returns the list of BAM files, one per index.
        """
        assert os.path.exists(fq)
        assert isinstance(idxes, list)
        args = self.args
        # choose aligner
        if args['aligner'].lower() == 'star':
            align_se = self.star_se
        elif args['aligner'].lower() == 'bowtie2':
            align_se = self.bowtie2_se
        elif args['aligner'].lower() == 'bowtie':
            align_se = self.bowtie_se
        else:
            raise ValueError('unknown aligner: %s' % args['aligner'])
        # iterate indexes
        bam_files = []
        fq_input = fq
        for idx in idxes:
            bam_map, fq_unmap = align_se(fq_input, idx)
            fq_input = fq_unmap
            bam_files.append(bam_map)
        # output
        return bam_files
    def run(self):
        """Run alignments for all FASTQs, merge replicates, and return the
        per-sample genome BAM files (symlinked under short names)."""
        args = self.args
        fqs = args['fqs']
        idxes = self.get_align_index()
        out_bam_files = []
        for fq in fqs:
            logging.info('mapping: %s ' % fq)
            fq_prefix = file_prefix(fq)[0]
            fq_prefix = re.sub('\.clean|\.nodup|\.cut', '', fq_prefix)
            bam_files = self.align_se_batch(fq, idxes)
            out_bam_files.append(bam_files) # bam files
            # rep path
            bam_path_out = os.path.dirname(bam_files[0])
            Alignment_stat(bam_path_out).saveas()
        # merge bam files
        merged_path_out = os.path.join(args['path_out'], args['smp_name'])
        merged_bam_files = [] # map to multiple indexes
        if len(out_bam_files) > 1: #
            assert is_path(merged_path_out)
            # merge the i-th BAM (same index) across all replicates
            for i in range(len(out_bam_files[0])): #
                se_bam_files = [b[i] for b in out_bam_files]
                merged_suffix = str_common(se_bam_files, suffix=True)
                # drop replicate tags (_1/_2/_R1/_R2) from the shared suffix
                merged_suffix = re.sub('^_[12]|_R[12]', '', merged_suffix)
                merged_bam_name = args['smp_name'] + merged_suffix
                merged_bam_file = os.path.join(merged_path_out, merged_bam_name)
                merged_bed_file = re.sub('.bam$', '.bed', merged_bam_file)
                if os.path.exists(merged_bam_file) and args['overwrite'] is False:
                    logging.info('file exists: %s' % merged_bam_name)
                else:
                    tmp = bam_merge(se_bam_files, merged_bam_file)
                    pybedtools.BedTool(merged_bam_file).bam_to_bed().saveas(merged_bed_file)
                merged_bam_files.append(merged_bam_file)
            # merge_map_wrapper(path_out_merge)
            Alignment_stat(merged_path_out).saveas()
            out_bam_files.append(merged_bam_files)
        # create short names for each genome bam
        genome_bam_files = [f[-1] for f in out_bam_files] # last one
        genome_bed_files = []
        for bam_from in genome_bam_files:
            bam_to = os.path.join(os.path.dirname(bam_from),
                filename_shorter(bam_from))
            if not os.path.exists(bam_to):
                os.symlink(os.path.basename(bam_from), bam_to)
            if not os.path.exists(bam_to + '.bai'):
                if not os.path.exists(bam_from + '.bai'):
                    pysam.index(bam_from)
                os.symlink(os.path.basename(bam_from) + '.bai',
                    bam_to + '.bai')
            # rename *.bed
            bed_from = os.path.splitext(bam_from)[0] + '.bed'
            bed_to = os.path.splitext(bam_to)[0] + '.bed'
            if os.path.exists(bed_from) and not os.path.exists(bed_to):
                os.symlink(os.path.basename(bed_from), bed_to)
            genome_bed_files.append(bed_to)
        return genome_bam_files
class Alignment_log(object):
    """Wrapper log file of aligner, bowtie, bowtie2, STAR
    report: total reads, unique mapped reads, multiple mapped reads

    The parsed statistics are exposed as the ``stat`` dict with keys
    'total', 'map', 'unique', 'multiple', 'unmap'.

    Bowtie2:

    10000 reads; of these:
      10000 (100.00%) were unpaired; of these:
        166 (1.66%) aligned 0 times
        2815 (28.15%) aligned exactly 1 time
        7019 (70.19%) aligned >1 times
    98.34% overall alignment rate

    Bowtie:

    # reads processed: 10000
    # reads with at least one reported alignment: 3332 (33.32%)
    # reads that failed to align: 457 (4.57%)
    # reads with alignments suppressed due to -m: 6211 (62.11%)

    or:

    # reads processed: 10000
    # reads with at least one reported alignment: 9543 (95.43%)
    # reads that failed to align: 457 (4.57%)

    STAR: the *Log.final.out file ("field | value" lines), of which
    'Number of input reads', 'Uniquely mapped reads number' and
    'Number of reads mapped to multiple loci' are used.
    """
    def __init__(self, log, unique_only=False):
        # log: path to an aligner log file, an existing stat dict, or another
        # Alignment_log instance. unique_only: count only uniquely-mapped
        # reads as 'map'.
        self.log = log
        self.unique_only = unique_only
        # stat
        if isinstance(log, Alignment_log):
            self.stat = log.stat
        elif isinstance(log, dict):
            self.stat = log
        elif isinstance(log, io.TextIOWrapper):
            # NOTE(review): the per-format parsers re-open self.log, so a
            # TextIOWrapper input only works for the line-dispatch step —
            # confirm before passing open file handles.
            self.stat = self._log_parser()
        elif os.path.isfile(log):
            self.stat = self._log_parser()
        else:
            raise ValueError('not supported file')
    def _is_file(self):
        """Check the log file is exists, not empty
        """
        if os.path.isfile(self.log):
            return True
        else:
            return False
    def _is_non_empty(self):
        """Check if log file is empty"""
        if os.path.getsize(self.log) > 0:
            return True
        else:
            return False
    def _bowtie_log(self):
        """Parse a bowtie log into the stat dict."""
        dd = {}
        with open(self.log, 'rt') as ff:
            for line in ff:
                if not ':' in line or line.startswith('Warning'):
                    continue
                num = line.strip().split(':')[1]
                value = num.strip().split(' ')[0]
                value = int(value)
                if 'reads processed' in line:
                    dd['total'] = value
                elif 'at least one reported alignment' in line:
                    dd['map'] = value
                elif 'failed to align' in line:
                    dd['unmap'] = value
                elif 'alignments suppressed due to -m' in line:
                    dd['multiple'] = value
                else:
                    pass
        # bowtie's "map" count already reflects the -v/-m options used, so it
        # is reported as 'unique' and 'multiple' is zeroed out.
        dd['unique'] = dd['map']
        dd['multiple'] = 0 # no multiple
        dd['unmap'] = dd['total'] - dd['map']
        return dd
    def _bowtie2_log(self):
        """Parse a bowtie2 log into the stat dict."""
        dd = {}
        with open(self.log, 'rt') as ff:
            for line in ff:
                value = line.strip().split(' ')[0]
                if '%' in value:
                    continue
                value = int(value)
                if 'reads; of these' in line:
                    dd['total'] = value
                elif 'aligned 0 times' in line:
                    dd['unmap'] = value
                elif 'aligned exactly 1 time' in line:
                    dd['unique'] = value
                elif 'aligned >1 times' in line:
                    dd['multiple'] = value
                else:
                    pass
        if self.unique_only is True:
            dd['map'] = dd['unique']
        else:
            # unique and multiple
            dd['map'] = dd['unique'] + dd['multiple']
        dd['unmap'] = dd['total'] - dd['map']
        return dd
    def _star_log(self):
        """Parse a STAR *Log.final.out into the stat dict."""
        dd = {}
        with open(self.log, 'rt') as ff:
            for line in ff:
                value = line.strip().split('|')
                if not len(value) == 2:
                    continue
                value = value[1].strip()
                if 'Number of input reads' in line:
                    dd['total'] = int(value)
                elif 'Uniquely mapped reads number' in line:
                    dd['unique'] = int(value)
                elif 'Number of reads mapped to multiple loci' in line:
                    dd['multiple'] = int(value)
                else:
                    pass
        if self.unique_only is True:
            dd['map'] = dd['unique']
        else:
            # unique and multiple
            dd['map'] = dd['unique'] + dd['multiple']
        dd['unmap'] = dd['total'] - dd['map']
        return dd
    def _log_parser(self):
        """Read log file as dict
        delimiter:
        bowtie:  ":"
        bowtie2: ":"
        STAR:    "|"

        The first non-warning line decides which format parser runs.
        """
        log = self.log
        log_lines = []
        if isinstance(log, dict):
            # already parsed — return as-is
            # (was: return Alignment_log(log), which wrongly produced an
            # Alignment_log instance where a plain dict is expected)
            return log
        elif isinstance(log, io.TextIOWrapper):
            for r in log:
                if r.startswith('Warning'): # skip warnings
                    continue
                log_lines.append(r.strip())
        elif os.path.exists(log):
            with open(self.log, 'rt') as ff:
                for r in ff:
                    if r.startswith('Warning'):
                        continue
                    log_lines.append(r.strip())
        else:
            raise ValueError('unknown file format')
        # parsing log file
        line = log_lines[0] # the first line
        if line.startswith('#'):
            dd = self._bowtie_log()
        elif 'reads; of these' in line:
            dd = self._bowtie2_log()
        elif '|' in line:
            dd = self._star_log()
        else:
            raise ValueError('unknown file format')
        return dd
    def _tmp(self):
        """Create a temp file"""
        tmpfn = tempfile.NamedTemporaryFile(prefix='tmp',
                                            suffix='.json',
                                            delete=False)
        return tmpfn.name
    def saveas(self, _out=None):
        """Save the stat dict as JSON next to the log file (default) or to
        the given path; returns the output path."""
        log = self.log
        if _out is None:
            _out = os.path.splitext(log)[0] + '.json'
        dd = self.stat
        with open(_out, 'wt') as fo:
            json.dump(dd, fo, indent=4, sort_keys=True)
        return _out
class Alignment_stat(object):
    """Parse mapping reads in directory
    1. for each rep bam, parse json files,
    2. merged bam files, count bam lines

    ``stat`` is a DataFrame with columns ['name', 'group', 'count'] (or the
    dict passed in directly).
    """
    def __init__(self, path):
        # path: sample directory, an existing stat dict, or another
        # Alignment_stat instance.
        self.path = path
        if isinstance(path, Alignment_stat):
            self.stat = path.stat
        elif isinstance(path, dict):
            self.stat = path
        elif os.path.exists(path):
            if not self._get_json_files() is False:
                self.stat = self.count_json_files()
            elif not self._get_bam_files() is False:
                self.stat = self.count_bam_files()
            else:
                raise ValueError('No bam and json files found: %s' % path)
        else:
            raise ValueError('unknown format')
    def _is_non_empty(self, fn):
        """Check if log file is empty"""
        if os.path.getsize(fn) > 0:
            return True
        else:
            return False
    def _get_json_files(self):
        """Return the non-empty *.json files in the directory, or False."""
        path = self.path
        j_files = sorted(glob.glob(path + '/*.json'), key=len)
        j_files = [f for f in j_files if self._is_non_empty(f)] # not empty files
        if len(j_files) > 0:
            return j_files
        else:
            return False
    # parse *.json files
    def count_json_files(self):
        """Build the stat DataFrame from the JSON mapping logs."""
        path = self.path
        prefix = os.path.basename(path) # sample name
        j_files = self._get_json_files() # each group
        rows = []
        for j in j_files:
            dd = Json_file(j).stat # count
            # group name: *.map_<group>.<aligner>.json
            group = j.split('.')[-3]
            group = group.split('_')[1]
            # the first index is the spike-in when more indexes follow
            if j_files.index(j) == 0 and group == 'genome' and len(j_files) > 1:
                group = 'spikein'
            rows.append([prefix, group, dd['map']])
        # unmapped reads come from the last (genome) log
        dd = Json_file(j_files[-1]).stat
        rows.append([prefix, 'unmap', dd['unmap']])
        # Build the frame in one go; DataFrame.append was removed in pandas 2.0.
        return pd.DataFrame(rows, columns=['name', 'group', 'count'])
    def _get_bam_files(self):
        """Return the non-empty, non-symlink *.bam files, or False.

        Returning False (instead of raising here) lets __init__ report its
        combined 'No bam and json files found' error.
        """
        path = self.path
        bam_files = sorted(glob.glob(path + '/*.bam'), key=len)
        bam_files = [f for f in bam_files if self._is_non_empty(f)
                     and not os.path.islink(f)] # not empty files
        if len(bam_files) > 0:
            return bam_files
        else:
            return False
    # count bam files
    def count_bam_files(self):
        """Build the stat DataFrame by counting records in each BAM."""
        path = self.path
        prefix = os.path.basename(path)
        bam_files = self._get_bam_files()
        rows = []
        for b in bam_files:
            b_cnt = pysam.AlignmentFile(b, 'rb').count()
            # group name: *.map_<group>.bam
            group = b.split('.')[-2]
            group = group.split('_')[1] # reference name
            if bam_files.index(b) == 0 and group == 'genome' and len(bam_files) > 1:
                group = 'spikein'
            rows.append([prefix, group, b_cnt])
        # output (see count_json_files for why append is avoided)
        return pd.DataFrame(rows, columns=['name', 'group', 'count'])
    def _tmp(self):
        """Create a temp file"""
        tmpfn = tempfile.NamedTemporaryFile(prefix='tmp',
                                            suffix='.csv',
                                            delete=False)
        return tmpfn.name
    def saveas(self, _out=None):
        """Write the stat table to <dir>.mapping_stat.csv (tab-separated,
        despite the extension) or print it for a stream; returns the path."""
        path = self.path
        if _out is None:
            prefix = os.path.basename(path)
            _out = os.path.join(os.path.dirname(path),
                                prefix + '.mapping_stat.csv')
        df = self.stat
        default_kwargs = dict(sep='\t', header=False, index=False)
        if isinstance(_out, io.TextIOWrapper):
            print(df.to_string(index=False, header=False, justify='left'))
        else:
            df.to_csv(_out, **default_kwargs)
        return _out
## EOF
| bakerwm/goldclip | goldclip/log_parser/alignment.py | alignment.py | py | 26,410 | python | en | code | 0 | github-code | 13 |
268989372 | import numpy as np
import vidProc
import cellSum
import grid_map
import videoRecord
import math
import cv2
def printCalibrationShape():
    """Stub: will send the G-code that prints the calibration shape."""
    # call the function that sends the G-code printing the calibration shape
    # then raise the print head
    return None
def movePrintCore(time, name, celLen):
    """Record the print-head move and track the grid across video frames.

    Records ``time`` seconds of video to ``name``, detects the calibration
    grid on the first analysed frame, then follows the grid intersections
    frame by frame to accumulate the head displacement.

    Returns (d, d_total, last_frame_img, intersectionPoints, p_after,
    cel_last_frame, sidePx): per-axis and total distance in mm, the last
    analysed frame, its intersection points, and the cell coordinates/side
    used for the final position.
    """
    # issue the move instruction and record the corresponding video
    videoRecord.recordVid(time, name)
    cap = cv2.VideoCapture(name)
    length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    # drop the last few frames, which may be truncated
    last_frame = length-3
    #print(last_frame)
    #last_frame = 295
    fps_count = 1
    inital_frame = 5
    if cap.isOpened():
        ret, frame = cap.read()
        # NOTE(review): cols is only set when the capture opens — the code
        # below relies on it unconditionally; confirm failure handling.
        cols = frame.shape[1]
    else:
        ret = False
    while ret:
        ret, frame = cap.read()
        # Make the code only start getting points after intial_frame
        # NOTE(review): 'fps_count % 1 == 0' is always true — presumably a
        # leftover frame-skipping knob; confirm intended stride.
        if fps_count >= inital_frame and fps_count <= last_frame and fps_count % 1 == 0:
            # find points of the first frame
            if fps_count == inital_frame:
                centerPoint = np.array([]).reshape(0, 2)
                # (x, y) of the image centre; shape[:2] is (rows, cols)
                midFrame = np.flip(np.asarray(frame.shape[:2])/2)
                centerPoint = np.vstack((centerPoint, midFrame))
                #print(centerPoint[-1,:])
                intersectionPoints, totalGrid, img = vidProc.findInitPoints(frame, midFrame)
                # thresholds scale with frame width (0.5% and 3.6% of cols)
                n_rows_i, n_cols_i = grid_map.nRowsCols(intersectionPoints, cols*0.036)
                D_xy_mean_total = [0, 0]
                rect_init, cel_init, sidePx = cellSum.fetchCellPoints(midFrame, intersectionPoints, cols*0.005, cols*0.036)
                cimg, cel_cord_init = vidProc.four_point_transform(img, rect_init, midFrame)
                #print(cel_cord_init)
                #vidProc.show_wait_destroy('teste', cimg)
                #vidProc.show_wait_destroy('teste', img)
                #cellSum.plot_a_b(intersectionPoints, midFrame, '+b', 'xr')
                #lines,cimg=vidProc.binaryGridDetection(frame)
                #vidProc.show_wait_destroy('posição do centro na celula',cimg)
            intersectionPoints1, cimg, horizontal, vertical, img_bwa = vidProc.findIntPoints(frame, midFrame)
            # reject frames where too many intersections were lost
            if len(intersectionPoints1) < len(intersectionPoints)-5:
                print('frame bugado')
            else:
                totalGrid, oldPoints, newPoints, D_xy_mean_total, centerPoint = cellSum.continuousGrid(intersectionPoints, np.asarray(intersectionPoints1), totalGrid,totalGrid, intersectionPoints,np.asarray(intersectionPoints1),0,0,D_xy_mean_total, centerPoint, midFrame, cols*0.036)
                # make the new frame the old one for the next iteration
                intersectionPoints = np.asarray(intersectionPoints1)
            #cimg = vidProc.findCircles(frame)
            #vidProc.show_wait_destroy("frame", cimg)
            # uncomment to preview the tracking
            #cv2.imshow('preview', cimg)
            #cellSum.plotab(totalGrid, centerPoint, '+b', 'xr')
            if fps_count == last_frame:
                #vidProc.show_wait_destroy("last Frame", cimg)
                #cellSum.plot_a_b(intersectionPoints, midFrame, '+b', 'xr')
                last_frame_img = frame
            if cv2.waitKey(1) == 27:
                break
        #print(fps_count)
        fps_count += 1
    #print(fps_count)
    cv2.destroyAllWindows()
    cap.release()
    #cellSum.plotab(totalGrid, centerPoint, '+b', 'xr')
    n_rows_tg, n_cols_tg = grid_map.nRowsCols(totalGrid, cols*0.036)
    # TODO: use the real centre point instead of the approximation
    rect, cel_last_frame, _ = cellSum.fetchCellPoints(midFrame, intersectionPoints, cols*0.005, cols*0.036)
    cimg1, p_after = vidProc.four_point_transform(cimg, rect, midFrame)
    #print(p_after)
    #vidProc.show_wait_destroy("last frame cell", cimg1)
    start_cel, end_cel = grid_map.global_cel_location(midFrame, intersectionPoints, n_rows_i, n_cols_i, totalGrid, cel_init, n_rows_tg, n_cols_tg, D_xy_mean_total, cols*0.036, cols*0.005)
    # uncomment to display the displacement plot
    #grid_map.createMesh(n_rows_tg, n_cols_tg, 2,np.asarray(cel_cord_init), np.asarray(p_after), np.asarray(start_cel), np.asarray(end_cel))
    d, d_total = grid_map.dist_calc(np.asarray(cel_cord_init), np.asarray(p_after), np.asarray(start_cel), np.asarray(end_cel), celLen)
    print('A distância percorrida foi de', d[0],'[mm] em xx, e', d[1], '[mm] em yy')
    print('A distância total é de ', d_total, '[mm].')
    return d, d_total, last_frame_img, intersectionPoints, p_after, cel_last_frame, sidePx
def nozzleCenterDist1(frame, intersectionPoints, p, cel, cel_side):
    """Vector from the camera reference point to the nozzle, in mm.

    The nozzle position is located via the calibration circle in *frame*;
    the offset is composed of a whole-cell part (cel_nozzle - cel) and an
    in-cell pixel part scaled by cel_side.
    """
    # Locate the centre of the calibration shape in the final frame.
    _, nozzle_cord = vidProc.findCircles(frame)
    # Cell index and in-cell coordinates of the nozzle.
    width = frame.shape[1]
    rect_nozzle, cel_nozzle, sidePx = cellSum.fetchCellPoints(
        nozzle_cord, intersectionPoints, width * 0.005, width * 0.036)
    _, cel_cord_nozzle = vidProc.four_point_transform(frame, rect_nozzle, nozzle_cord)
    # Whole-cell offset and residual offset inside the cell.
    cell_offset = cel_nozzle - cel
    pixel_offset = cel_cord_nozzle - p
    # 180 is the number of pixels per cell in the rectified view.
    return np.flip(cell_offset * cel_side) + pixel_offset * (cel_side / 180)
def nozzleCamDistCalc(vec, d):
    """Total nozzle-to-camera vector: measured displacement *d* plus the
    camera-centre-to-nozzle offset *vec*."""
    return d + vec
def nozzleCamProc(celLen):
    """Run a calibration move and report the nozzle-to-camera vector."""
    d, d_total, last_frame_img, points, p, cel, _ = movePrintCore(
        10, 'teste_nozzleCamProc.mp4', celLen)
    # NOTE(review): `nozzleCenterDist` is not defined in this module chunk —
    # presumably `nozzleCenterDist1` was intended; confirm.
    vec = nozzleCenterDist(last_frame_img, points, np.asarray(p),
                           np.asarray(cel), 2.381)
    vec_nozzle_cam = nozzleCamDistCalc(vec, d)
    print('O vetor entre o nozzle e a camera é de', vec_nozzle_cam)
    return vec_nozzle_cam
def nozzleCenterDist2(frame, midFrame):
    """Pixel offset between the detected circle centre and the frame centre."""
    _, nozzle_cord = vidProc.findCircles(frame)
    return nozzle_cord - midFrame
def nozzleCamProc2():
    """Iteratively drive the nozzle over the camera centre.

    NOTE(review): skeleton only — `frame`, `midFrame`, `d_max`,
    `disp_vector` and `disp_adjust` are never defined in this scope, so
    calling this raises NameError; the inline comments describe the
    intended procedure steps still to be implemented.
    """
    # Give instruction to print calibration shape
    # Give instruction to raise head
    # Give instruction of first guess of displacement (disp_vector)
    # Get the difference between center of camera and nozzle
    vec = nozzleCenterDist2(frame, midFrame)
    # Loop until the offset is below the acceptance threshold d_max.
    while vec > d_max:
        # Estimate the adjustment displacement (disp_adjust)
        # Give new comand to move (disp_adjust)
        # Update disp_vector
        disp_vector += disp_adjust
        # Get new difference between center of camera and nozzle
        vec = nozzleCenterDist2(frame, midFrame)
    return disp_vector
def MmToPx(dx_1mm, dy_1mm, sidePx, celLen):
    """Build the mm -> pixel conversion matrix from two calibration moves.

    dx_1mm / dy_1mm are the measured (x, y) displacements for a nominal
    1 mm move along each axis; sidePx is the cell side in pixels and
    celLen the cell side in mm. Returns a 2x2 int numpy array whose
    columns correspond to the x- and y-axis moves.
    """
    # Pixels per millimetre along each image axis.
    px_per_mm = [sidePx[0] / celLen, sidePx[1] / celLen]
    move_x = [int(dx_1mm[0] * px_per_mm[0]), int(dx_1mm[1] * px_per_mm[1])]
    move_y = [int(dy_1mm[0] * px_per_mm[0]), int(dy_1mm[1] * px_per_mm[1])]
    return np.array([move_x, move_y]).transpose()
def steps_mm_cal_xx(A, time, name, celLen):
    """Calibrate the X-axis steps/mm value.

    *A* is the current steps/mm value (read via M503). The printer is
    commanded a nominal 10 mm move in X; the camera-measured displacement
    `dx` is used to rescale A. Before calling, M503 and M83 must be sent
    to the printer.
    """
    # Command a nominal 10 mm X move and measure the actual displacement.
    _, dx, _, _, _, _, _ = movePrintCore(time, name, celLen)
    dif = dx - 10
    if abs(dif) < 0.05:
        # Within tolerance: keep the current steps/mm.
        D = A
        print("Steps are calibrated")
    elif dif != 0:
        # Rescale steps/mm by commanded/measured distance.
        D = 10 * A / dx
        # BUG FIX: this is the X-axis routine; the old message said "Y_Steps"
        # (copy-paste from steps_mm_cal_yy).
        print("X_Steps require calibration")
    print(D)
    return D
def steps_mm_cal_yy(A, time, name, celLen):
    """Calibrate the Y-axis steps/mm value.

    *A* is the current steps/mm value (read via M503). The printer is
    commanded a nominal 10 mm move in Y and the camera-measured
    displacement is used to correct A. Before calling, M503 and M83 must
    be sent to the printer.
    """
    # Command the move and measure the real Y displacement.
    _, dy, _, _, _, _, _ = movePrintCore(time, name, celLen)
    error = dy - 10
    if abs(error) < 0.05:
        D = A
        print("Steps are calibrated")
    elif error != 0:
        D = 10 * A / dy
        print("Y_Steps require calibration")
    print(D)
    return D
def getSkewCoefxy(dx, dy):
    """Angle (degrees) between the measured X and Y displacement vectors.

    Returns (angle, unit_vector_x, unit_vector_y). A perfectly square
    machine yields 90 degrees; any deviation quantifies XY skew.
    """
    unit_x = dx / np.linalg.norm(dx)
    unit_y = dy / np.linalg.norm(dy)
    cos_angle = np.dot(unit_x, unit_y)
    angle = np.arccos(cos_angle) * (180 / math.pi)
    return angle, unit_x, unit_y
#d, _, _, _, _, _, _ = movePrintCore(20, 'test_RP2.mp4', 1.985)
#nozzleCamProc()
| DuarteCPereira/tese | camNozzle.py | camNozzle.py | py | 9,067 | python | en | code | 0 | github-code | 13 |
1386991125 | import requests
from datetime import datetime
def getData(city):
    """Query OpenWeatherMap for the current weather in *city*.

    Returns the parsed JSON payload (dict) on success, or None when the
    HTTP request fails or the city is unknown.
    """
    api_key = "eb5ef05a1f20ab85b9ff23ff53bd617f"
    URL = f"https://api.openweathermap.org/data/2.5/weather?q={city}&appid={api_key}"
    try:
        r = requests.get(URL)
        data = r.json()
    except Exception:
        # BUG FIX: the old handler referenced `r` even when requests.get
        # itself raised (NameError) and misspelled the 'message' key.
        print("Error")
        return None
    # OpenWeatherMap reports an unknown city via the 'message' field.
    if data.get('message') == "city not found":
        print("Error")
        return None
    return data
def out(res, city):
    """Pretty-print the weather report in *res* for *city* and write the
    same report to 'file.txt'.

    *res* is the parsed OpenWeatherMap payload (temperature in Kelvin).
    Returns True when done.
    """
    temp_city = ((res['main']['temp']) - 273.15)  # Kelvin -> Celsius
    weather_desc = res['weather'][0]['description']
    hmdt = res['main']['humidity']
    wind_spd = res['wind']['speed']
    date_time = datetime.now().strftime("%d %b %Y | %I:%M:%S %p")
    print("-------------------------------------------------------------")
    print("Weather Stats for - {} || {}".format(city.upper(), date_time))
    print("-------------------------------------------------------------")
    print("Current temperature is: {:.2f} deg C".format(temp_city))
    print("Current weather desc :", weather_desc)
    print("Current Humidity :", hmdt, '%')
    print(" speed :", wind_spd, 'kmph')
    # `with` closes the file on exit; the old explicit f.close() inside the
    # with-block was redundant.
    with open('file.txt', 'w') as f:
        f.write(f"""-------------------------------------------------------------
Weather Stats for - {city.upper()} || {date_time}
-------------------------------------------------------------
Current temperature is: {temp_city} deg C
Current weather desc :{weather_desc}
Current Humidity :{hmdt}%
 speed :{wind_spd} kmph
""")
    return True
if __name__ == '__main__':
    city = input("Enter the city name: ")
    res = getData(city)
    # BUG FIX: the old code compared against the *string* 'None' (never
    # true) and left a stray breakpoint() in place.
    if res is None:
        print("City not found! check your keywork")
    else:
        out(res, city)
| ayirolg/ShapeAI-Project | weather.py | weather.py | py | 1,918 | python | en | code | 0 | github-code | 13 |
41837400754 | import argparse
from tsm.lexicon import Lexicon
from tsm.util import read_file_to_lines, write_lines_to_file
from tsm.sentence import Sentence
# Convert a (optionally segmented / POS-tagged) text corpus to phone
# sequences using a Kaldi lexicon.
parser = argparse.ArgumentParser()
parser.add_argument('lexicon_path')
parser.add_argument('src_path')
parser.add_argument('dest_path')
# BUG FIX: with action='store_true' the default is False, so the
# `is None` auto-detection below was dead code; default=None restores it.
parser.add_argument('--with-prob', action='store_true', default=None)
parser.add_argument('--pos-tagged', action='store_true')
parser.add_argument('--remove-punct', action='store_true')
parser.add_argument('--segmented', action='store_true')
args = parser.parse_args()

if args.with_prob is None:
    # Kaldi convention: a lexicon with pronunciation probabilities is
    # named lexiconp.txt.
    args.with_prob = args.lexicon_path.endswith('lexiconp.txt')

lexicon = Lexicon.from_kaldi(args.lexicon_path, args.with_prob)
lines = read_file_to_lines(args.src_path)
sents = [Sentence.from_line(line, remove_punct=args.remove_punct,
                            segmented=args.segmented,
                            pos_tagged=args.pos_tagged)
         for line in lines]
# Map every word to its most probable pronunciation ("NULL" for OOVs).
phn_sents = [[lexicon.get_most_probable(word, "NULL") for word in sent] for sent in sents]
phn_lines = [" ".join(phn_sent) for phn_sent in phn_sents]
write_lines_to_file(args.dest_path, phn_lines)
| Chung-I/ChhoeTaigiDatabase | lexicon_g2p.py | lexicon_g2p.py | py | 1,137 | python | en | code | null | github-code | 13 |
37616809579 | import tweepy
import time
from sportsipy.mlb.roster import Player
import datetime
x = datetime.datetime.now()  # bot start time; reused for the '#starter' date tweet
print('Twitter bot active', flush=True)

# API keys from twitter
# Add your own here
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_KEY = ""
ACCESS_SECRET = ""

# Authenticated Twitter client used by all reply handlers below.
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)

# Most recent tweet stored in text file (persists across restarts so the
# bot never answers the same mention twice).
FILE_NAME = "last_seen_id.txt"
def retrieve_last_seen_id(file_name):
    """Read the id of the most recently processed mention from *file_name*.

    Uses a context manager so the file is closed even if parsing raises.
    """
    with open(file_name, 'r') as f_read:
        last_seen_id = int(f_read.read().strip())
    print(str(last_seen_id))
    return last_seen_id
def store_last_seen_id(last_seen_id, file_name):
    """Persist *last_seen_id* to *file_name* (overwrites previous value).

    Uses a context manager so the file is flushed and closed even on error.
    """
    with open(file_name, 'w') as f_write:
        f_write.write(str(last_seen_id))
    return
last_seen_id = retrieve_last_seen_id(FILE_NAME)  # startup sanity read of the persisted id
mentions = api.mentions_timeline()  # NOTE(review): unused — reply_to_tweets fetches its own timeline
def reply_to_tweets(starting_pitcher, win_loss, era):
    """Scan new @-mentions and reply according to their hashtags.

    *starting_pitcher*, *win_loss* and *era* describe today's probable
    starter and are used for the generic '#starter' / '#pitcher' reply;
    the per-pitcher hashtags pull fresh 2022 season stats from sportsipy.
    Each processed mention id is persisted so it is never answered twice.
    """
    print("Searching for and replying to tweets...", flush=True)
    last_seen_id = retrieve_last_seen_id(FILE_NAME)
    mentions = api.mentions_timeline(last_seen_id, tweet_mode='extended')
    for mention in reversed(mentions):
        print(str(mention.id) + "-" + mention.full_text, flush=True)
        last_seen_id = mention.id
        store_last_seen_id(last_seen_id, FILE_NAME)
        text = mention.full_text.lower()
        # Different replies for different hashtags. Each check is an
        # independent `if`, so one mention can trigger several replies
        # (same behaviour as the original chain).
        if "#starter" in text or "#pitcher" in text:
            print("Hashtag found", flush=True)
            print("Responding back...", flush=True)
            api.update_status("@" + mention.user.screen_name + " " +
                              x.strftime("%A") + ", " + x.strftime("%B") + " " + x.strftime("%d") + ", " + x.strftime("%Y") + "\n" +
                              "Starting today: " + starting_pitcher + "\n" +
                              "W/L: " + win_loss + "\n" +
                              "ERA: " + era, mention.id)
        if "#gausman" in text or "#kevin" in text:
            _reply_pitcher_stats(mention, "gausmke01", "Kevin Gausman")
        if "#manoah" in text or "#alek" in text:
            _reply_pitcher_stats(mention, "manoaal01", "Alek Manoah 🔥🤘")
        if "#berrios" in text or "#jose" in text:
            _reply_pitcher_stats(mention, "berrijo01", "José Berríos 🦾")
        if "#kikuchi" in text or "#yusei" in text:
            _reply_pitcher_stats(mention, "kikucyu01", "Yusei Kikuchi 🇯🇵")
        if "#stripling" in text or "#ross" in text:
            _reply_pitcher_stats(mention, "stripro01", "Ross Stripling 🍗")
        if "#ryu" in text or "#hyunjin" in text:
            _reply_pitcher_stats(mention, "ryuhy01", "Hyun Jin Ryu 🇰🇷👹")
        if "#romano" in text or "#jordan" in text:
            _reply_pitcher_stats(mention, "romanjo03", "Jordan Romano 🍁", use_saves=True)


def _reply_pitcher_stats(mention, player_id, display_name, use_saves=False):
    """Tweet a pitcher's 2022 season line back at *mention*.

    Shared body for all per-pitcher hashtag handlers (the original
    repeated this block seven times). use_saves=True swaps the W/L line
    for Saves and labels games 'GP' (reliever format, used for Romano).
    """
    print("Hashtag found", flush=True)
    print("Responding back...", flush=True)
    season = Player(player_id)("2022")
    era = str(season.era)
    whip = str(season.whip)
    k_9 = str(season.batters_struckout_per_nine_innings)
    games = str(season.games_pitcher)
    innings = str(season.innings_played)
    h_9 = str(season.hits_against_per_nine_innings)
    if use_saves:
        games_label = "GP: "
        record_line = "Saves: " + str(season.saves) + "\n"
    else:
        games_label = "GS: "
        record_line = "W/L: " + str(season.wins) + " - " + str(season.losses) + "\n"
    api.update_status("@" + mention.user.screen_name + " " +
                      display_name + "\n" +
                      games_label + games + "\n" +
                      "IP: " + innings + "\n" +
                      record_line +
                      "ERA: " + era + "\n" +
                      "WHIP: " + whip + "\n" +
                      "H/9: " + h_9 + "\n" +
                      "K/9: " + k_9 + "\n"
                      , mention.id)
def _pitcher_record(player_id, display_name):
    """Return [display name, 'wins - losses', ERA] for the 2022 season.

    All values are strings; shared helper for the six pitcher_* functions
    below (the original duplicated this body six times).
    """
    season = Player(player_id)("2022")
    win_loss = str(season.wins) + " - " + str(season.losses)
    return [display_name, win_loss, str(season.era)]


def pitcher_gausman():
    return _pitcher_record("gausmke01", "Kevin Gausman")


def pitcher_manoah():
    return _pitcher_record("manoaal01", "Alek Manoah 🔥🤘")


def pitcher_berrios():
    return _pitcher_record("berrijo01", "José Berríos 🦾")


def pitcher_kikuchi():
    return _pitcher_record("kikucyu01", "Yusei Kikuchi 🇯🇵")


def pitcher_stripling():
    return _pitcher_record("stripro01", "Ross Stripling 🍗")


def pitcher_ryu():
    return _pitcher_record("ryuhy01", "Hyun Jin Ryu 🇰🇷👹")
def pitcher(probable):
    """Look up today's probable starter by surname.

    Returns the [name, W/L, ERA] list from the matching pitcher_* helper,
    or prints "error" and returns None for an unknown surname.
    """
    dispatch = {
        "gausman": pitcher_gausman,
        "manoah": pitcher_manoah,
        "berrios": pitcher_berrios,
        "kikuchi": pitcher_kikuchi,
        "stripling": pitcher_stripling,
        "ryu": pitcher_ryu,
    }
    fetch = dispatch.get(probable)
    if fetch is None:
        print("error")
        return None
    return fetch()
# Hard-coded issue to be fixed: the probable starter should come from a
# schedule lookup instead of this constant.
probable = "kikuchi"
while True:
    # Fetch the stats ONCE per polling cycle — the old code called
    # pitcher() three times, tripling the stat lookups each iteration.
    name, record, era = pitcher(probable)
    reply_to_tweets(name, record, era)
    time.sleep(10)
| mihan-b/jaysstarter-bot | bot.py | bot.py | py | 12,390 | python | en | code | 0 | github-code | 13 |
21830953372 | import numpy as np
from math import cos, sin, atan2, pi
import math
class RobotModel:
    """Differential-drive robot kinematics with velocity/acceleration limits
    and a simulated planar laser scanner over circular obstacles.

    The state vector is [x, y, yaw, v, w]: position, heading (rad),
    linear velocity and angular velocity.
    """

    def __init__(self, max_v, min_v, max_w, max_acc_v, max_acc_w,
                 init_x, init_y, init_yaw, init_v=0.0, init_w=0.0, robot_radius=0.3,
                 laser_min_angle=-pi/2, laser_max_angle=pi/2, laser_increment_angle=pi/6, laser_max_range=10.0):
        self.state = np.zeros(5)  # state = [x, y, yaw, v, w]
        self.state[0] = init_x
        self.state[1] = init_y
        self.state[2] = init_yaw
        self.state[3] = init_v
        self.state[4] = init_w
        # physical constraints: velocity bounds and acceleration limits
        self.max_v = max_v
        self.min_v = min_v
        self.max_w = max_w
        self.max_acc_v = max_acc_v
        self.max_acc_w = max_acc_w
        self.robot_radius = robot_radius
        # laser scanner geometry (angles in rad, relative to the heading)
        self.laser_min_angle = laser_min_angle
        self.laser_max_angle = laser_max_angle
        self.laser_increment_angle = laser_increment_angle
        # number of beams covering [min_angle, max_angle] inclusive
        self.laser_num = int(round((laser_max_angle-laser_min_angle)/laser_increment_angle)) + 1
        self.laser_max_range = laser_max_range

    def motion(self, input_u, dt):
        """Advance the state by dt under commanded [v, w] = input_u.

        The command is first clipped to what the acceleration and velocity
        limits allow from the current state; integration is explicit Euler
        (heading updated first, then position). Returns the mutated state.
        """
        # constrain input velocity to the reachable window
        constrain = self.constrain_input_velocity(self.state, dt)
        u = np.array(input_u)
        u[0] = max(constrain[0], u[0])
        u[0] = min(constrain[1], u[0])
        u[1] = max(constrain[2], u[1])
        u[1] = min(constrain[3], u[1])
        # motion model, euler
        self.state[2] += u[1] * dt
        self.state[2] = self.normalize_angle(self.state[2])
        self.state[0] += u[0] * cos(self.state[2]) * dt
        self.state[1] += u[0] * sin(self.state[2]) * dt
        self.state[3] = u[0]
        self.state[4] = u[1]
        return self.state

    def constrain_input_velocity(self, state, dt):
        """Velocity window [v_min, v_max, w_min, w_max] reachable within dt
        given the acceleration limits and absolute velocity bounds."""
        v_pre_max = min(state[3] + self.max_acc_v * dt, self.max_v)
        v_pre_min = max(state[3] - self.max_acc_v * dt, self.min_v)
        w_pre_max = min(state[4] + self.max_acc_w * dt, self.max_w)
        w_pre_min = max(state[4] - self.max_acc_w * dt, -self.max_w)
        return [v_pre_min, v_pre_max, w_pre_min, w_pre_max]

    def get_laser_scan(self, ob_list, ob_radius):
        """Simulate a scan: one range per beam over circular obstacles.

        ob_list: obstacle centres (must support ob[0], ob[1] and vector
        subtraction, e.g. numpy arrays); ob_radius: common obstacle radius.
        Returns a list of laser_num ranges capped at laser_max_range.
        """
        # view ob in robot coordinate system as [bearing, distance]
        new_ob_list = []
        for ob in ob_list:
            angle = atan2(ob[1]-self.state[1], ob[0]-self.state[0]) - self.state[2]
            distance = np.linalg.norm(ob-self.state[:2])
            if distance<=self.laser_max_range + ob_radius: # fetch ob in the circle of reach
                new_ob_list.append([angle, distance])
        laser_ranges = []
        for i in range(self.laser_num):
            laser_angle = self.laser_min_angle + i * self.laser_increment_angle
            min_range = self.laser_max_range
            # calculate laser range: keep the nearest hit over all obstacles
            for new_ob in new_ob_list:
                laser_range = self.calc_cross_point(laser_angle, new_ob, ob_radius)
                if min_range>laser_range:
                    min_range = laser_range
            laser_ranges.append(min_range)
        return laser_ranges

    def calc_cross_point(self, laser_angle, new_ob, ob_radius):
        """Range at which the beam at laser_angle first intersects the
        circular obstacle new_ob = [bearing, distance]; laser_max_range
        when the beam misses or the obstacle is behind the beam."""
        diff_angle = new_ob[0] - laser_angle
        # perpendicular distance from the obstacle centre to the beam line
        line_a = abs(new_ob[1] * sin(diff_angle))
        if abs(line_a) > ob_radius:
            return self.laser_max_range
        if cos(diff_angle)<=0:
            # obstacle centre lies behind the beam direction
            return self.laser_max_range
        # distance along the beam to the foot of the perpendicular, minus
        # the chord half-length, gives the first intersection
        line_b = abs(new_ob[1] * cos(diff_angle))
        laser_range = min(line_b - math.sqrt(ob_radius**2 - line_a**2), self.laser_max_range)
        return laser_range

    def normalize_angle(self, angle):
        """Wrap an angle to the interval (-pi, pi]."""
        norm_angle = angle % (2 * math.pi)
        if norm_angle > math.pi:
            norm_angle -= 2 * math.pi
        return norm_angle

    def set_init_state(self, init_x, init_y, init_yaw, init_v=0.0, init_w=0.0):
        """Reset the full state vector in place."""
        self.state[0] = init_x
        self.state[1] = init_y
        self.state[2] = init_yaw
        self.state[3] = init_v
        self.state[4] = init_w

    def rot_to_angle(self, theta):
        """Saturated proportional angular-velocity command towards heading
        theta: full +/-max_w outside a pi/8 dead zone, linear inside."""
        norm_theta = self.normalize_angle((theta)-self.state[2])
        dead_zone = pi/8.0
        factor = self.max_w/dead_zone
        # angular_velocity = norm_theta * 7
        if norm_theta>dead_zone:
            angular_velocity = self.max_w
        elif norm_theta<-dead_zone:
            angular_velocity = -self.max_w
        else:
            angular_velocity = norm_theta*factor
        return angular_velocity
# def PD_controler(self, theta): | sldai/RL_pursuit_evasion | robot_model.py | robot_model.py | py | 4,448 | python | en | code | 10 | github-code | 13 |
20220590257 | #!/usr/bin/env python3
from jinja2 import Template
import sys, json, yaml
# Read template variables from a JSON file (argv[2]). BUG FIX: the old code
# called `json_file.close` without parentheses (a no-op) and never closed
# the handles; `with` guarantees closure.
with open(sys.argv[2]) as json_file:
    variables = json.load(json_file)

# Render the Jinja2 template (argv[1]) with those variables.
with open(sys.argv[1]) as template_file:
    output = Template(template_file.read()).render(variables)

# Write the rendered result to argv[3].
with open(sys.argv[3], 'w') as f:
    f.write(output)
19388818042 | import numpy as np
from scipy.io import loadmat # this is the SciPy module that loads mat-files
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from datetime import datetime, date, time
import pandas as pd
# mat is a dict that contains this_resp
# this_resp is a 3x7x9 cell (3 blocks, 7 muscle emgs, 9 conditions)
# where block 0: A ch25, B ch1
# block 1: A ch25, B ch6
# block 2: A ch6, B: ch1
# conditions are
# 0 : seulement channel A
# 1: channel A, delay 40ms, channel B
# 2: channel A, delay 20ms, channel B
# 3: channel A, delay 10ms, channel B
# 4: channel A et channel B simultaneous
# 5: channel B, delay 10ms, channel A
# 6: channel B, delay 20ms, channel A
# 7: channel B, delay 40ms, channel A
# 8: seulement channel B
# each cell contains a 20x733 matrix (20 stimulations, 733 time series
# emg response)
# Load both the raw and the filtered EMG responses; per the header notes,
# each is a 3x7x9 cell array (block x muscle x condition) of 20x733 trial
# matrices (20 stimulations, 733-sample EMG traces).
rawmat = loadmat('RawMonkeyEmgResponse.mat')
rawdata = rawmat['raw_resp']  # NOTE(review): loaded but unused in this chunk
filtmat = loadmat('FilteredMonkeyEmgResponse.mat')
filtdata = filtmat['filt_resp']

# Reorganise the filtered data into a nested dict:
# dct[chA][chB][delay]['data'] for channel pairs, dct[ch][ch]['data'] for
# single-channel pulses.
EMG = 0  # muscle/EMG index used throughout
dct = {1:{1:{},6:{},25:{}},
       6:{1:{},6:{},25:{}},
       25:{1:{},6:{},25:{}}}
# Single-channel responses (condition 8 is "only channel B", condition 0
# is "only channel A" — see the block/channel mapping in the file header).
dct[1][1] = {'data': filtdata[0,EMG,8]}
dct[6][6] = {'data': filtdata[1,EMG,8]}
dct[25][25] = {'data': filtdata[0,EMG,0]}
# Conditions 1-4: channel A leads channel B by 40/20/10/0 ms.
for cond,delay in enumerate([40,20,10,0],1):
    dct[25][1][delay] = {'data': filtdata[0,EMG,cond]}
    dct[25][6][delay] = {'data': filtdata[1,EMG,cond]}
    dct[6][1][delay] = {'data': filtdata[2,EMG,cond]}
# Conditions 4-7: channel B leads channel A by 0/10/20/40 ms.
for cond,delay in enumerate([0,10,20,40],4):
    dct[1][25][delay] = {'data': filtdata[0,EMG,cond]}
    dct[6][25][delay] = {'data': filtdata[1,EMG,cond]}
    dct[1][6][delay] = {'data': filtdata[2,EMG,cond]}

# Gather per-trial maximum statistics: mean and std of the row-wise maxima
# are stored next to each 'data' entry as 'meanmax' / 'stdmax'.
for ch1 in [1,6,25]:
    for ch2 in [1,6,25]:
        if ch1 == ch2:
            ch = dct[ch1][ch2]['data']
            maxs = ch.max(axis=1)
            dct[ch1][ch2]['meanmax'] = maxs.mean()
            dct[ch1][ch2]['stdmax'] = maxs.std()
        elif ch1 != ch2:
            for dt in [0,10,20,40]:
                ch = dct[ch1][ch2][dt]['data']
                maxs = ch.max(axis=1)
                dct[ch1][ch2][dt]['meanmax'] = maxs.mean()
                dct[ch1][ch2][dt]['stdmax'] = maxs.std()
# Tester resultat en 2d et essayer de trouver un prior qui va bien fit
# Regarder pour chaque delta_t
def build_f_grid(dt=40, f='meanmax'):
    """Assemble the 3x3 grid of statistic *f* over channel pairs (1, 6, 25).

    Diagonal entries come from the single-channel data; off-diagonal
    entries use the inter-pulse delay *dt*.
    """
    channels = [1, 6, 25]
    grid = np.zeros((3, 3))
    for row, ch_a in enumerate(channels):
        for col, ch_b in enumerate(channels):
            entry = dct[ch_a][ch_b]
            grid[row][col] = entry[f] if ch_a == ch_b else entry[dt][f]
    return grid
# Plot the response graph for a given delta_t (0,10,20,40)
def plot_2d(dt=40,f='meanmax', usestd=True, ax=None, title=None):
    """Plot a 3x3 response surface over the channel grid.

    f can be either a str (which will be used to call build_f_grid)
    or a grid already built, for eg. grid of diff between response and
    prior. When usestd is True, +/-1 std markers and vertical bars are
    overlaid (always looked up as 'stdmax', even when f is a grid).
    """
    x = [0,1,2]
    y = [0,1,2]
    x_grid, y_grid = np.meshgrid(x,y)
    if type(f) == str:
        z_grid = build_f_grid(dt,f)
    else:
        z_grid = f
    if ax is None:
        ax = plt.subplot(1,1,1,projection='3d')
    ax.get_xaxis().set_ticks([0,1,2])
    ax.get_yaxis().set_ticks([0,1,2])
    if title is None:
        ax.set_title('delta_t = {}'.format(dt))
    else:
        ax.set_title(title)
    surf = ax.plot_surface(x_grid, y_grid, z_grid,
                           #cmap=cm.coolwarm,
                           linewidth=0, antialiased=False)
    if usestd:
        std_grid = build_f_grid(dt,f='stdmax')
        # We plot 1 std deviation above and below the mean
        std_min = z_grid - std_grid
        std_max = z_grid + std_grid
        ax.scatter(x_grid, y_grid, std_min, c='red')
        ax.scatter(x_grid, y_grid, std_max, c='red')
        # We plotted the scatter for aesthetic reasons,
        # now we plot 2d lines to show the std_dev more clearly
        for x,y,smin,smax in zip(x_grid.flatten(), y_grid.flatten(),
                                 std_min.flatten(), std_max.flatten()):
            xs = np.ones(100) * x
            ys = np.ones(100) * y
            zs = np.linspace(smin,smax,100)
            ax.plot(xs, ys, zs, c='red')
def plot_all_2d(f='meanmax'):
    """Plot a 2x2 figure with one response surface per inter-pulse delay."""
    plt.figure()
    for idx, delay in enumerate([0, 10, 20, 40], start=1):
        axis = plt.subplot(2, 2, idx, projection='3d')
        plot_2d(delay, f, ax=axis)
#plot_2d(0)
#plot_all_2d('meanmax')
# Compare the measured two-channel response surface (dt = 40, default)
# against two simple priors built from the single-channel (diagonal)
# responses, and plot the priors alongside their residuals.
resp2d = build_f_grid()
prior1d = resp2d.diagonal().reshape((3,1))
# additive prior: mean of the two single-channel responses
prior2d_add = (prior1d + prior1d.T) * 1/2
# multiplicative prior: product of the two single-channel responses
prior2d_mult = (prior1d * prior1d.T)
diff_add = resp2d - prior2d_add
diff_mult = resp2d - prior2d_mult
plt.figure()
for i,(z_grid,title) in enumerate([(prior2d_add, "prior2d_add"),
                                   (diff_add, "diff_add"),
                                   (prior2d_mult, "prior2d_mult"),
                                   (diff_mult, "diff_mult")],
                                  1):
    ax = plt.subplot(2,2,i,projection='3d')
    plot_2d(f=z_grid, usestd=False, title=title, ax=ax)
plt.show()
| samlaf/GP-BCI | data_singe/load_matlab.py | load_matlab.py | py | 5,232 | python | en | code | 1 | github-code | 13 |
7897248420 | import re
import pandas as pd
import os
# Parse the free-text 'Awards' column of the movie table into per-award
# counter columns and save the result as Q4_PART_1.csv.
movie_data = pd.read_csv(os.path.join("data", 'movies_awards.csv'))
lenght_movies = movie_data['Title'].count()

columns = ['Awards_won', 'Awards_nominated', 'Primetime_awards_won', 'Primetime_awards_nominated',
           'Oscar_awards_won', 'Oscar_awards_nominated', 'Golden_Globe_awards_won', 'Golden_Globe_awards_nominated',
           'BAFTA_awards_won', 'BAFTA_awards_nominated', 'Another_nominated', 'Another_won']
cols = {'Movies': movie_data['Title'], 'Awards': movie_data['Awards']}
# A fresh zero list per column (the old code shared a single list object
# across all columns, and used the Python-2-only `xrange`).
cols.update({name: [0] * lenght_movies for name in columns})
req_data = pd.DataFrame(cols)

req_data['Awards'].fillna("", inplace=True)
req_data['Awards'] = [text.lower() for text in req_data['Awards'].values]

nominated = 'nominated for [0-9]+'
for i in req_data.index:
    # .loc replaces the removed/deprecated chained `.ix` assignments.
    awards = req_data.loc[i, 'Awards']
    if not awards:
        continue
    if 'nominated for' in awards:
        oscar = re.findall('{0} oscar'.format(nominated), awards)
        # BUG FIX: the text was lowercased above, so the old 'BAFTA'
        # pattern could never match; same for 'goldenglobe' below.
        goldenglobe = re.findall('{0} golden globe'.format(nominated), awards)
        primetime = re.findall('{0} primetime'.format(nominated), awards)
        bafta = re.findall('{0} bafta'.format(nominated), awards)
        another = re.findall('{0} another'.format(nominated), awards)
        if oscar:
            req_data.loc[i, 'Oscar_awards_nominated'] = oscar[0].split()[-2]
        if goldenglobe:
            req_data.loc[i, 'Golden_Globe_awards_nominated'] = goldenglobe[0].split()[-3]
        if primetime:
            req_data.loc[i, 'Primetime_awards_nominated'] = primetime[0].split()[-2]
        if bafta:
            req_data.loc[i, 'BAFTA_awards_nominated'] = bafta[0].split()[-2]
        if another:
            req_data.loc[i, 'Another_nominated'] = another[0].split()[-2]
    if 'won' in awards or 'win' in awards:
        oscar_wins = re.findall('won [0-9]+ oscar', awards)
        primetime_wins = re.findall('won [0-9]+ primetime', awards)
        bafta_wins = re.findall('won [0-9]+ bafta', awards)
        another_wins = re.findall('another [0-9]+ win', awards)
        goldenglobe_wins = re.findall('won [0-9]+ golden globe', awards)
        if oscar_wins:
            req_data.loc[i, 'Oscar_awards_won'] = oscar_wins[0].split()[-2]
        if goldenglobe_wins:
            req_data.loc[i, 'Golden_Globe_awards_won'] = goldenglobe_wins[0].split()[-3]
        if primetime_wins:
            req_data.loc[i, 'Primetime_awards_won'] = primetime_wins[0].split()[-2]
        if bafta_wins:
            req_data.loc[i, 'BAFTA_awards_won'] = bafta_wins[0].split()[-2]
        if another_wins:
            req_data.loc[i, 'Another_won'] = another_wins[0].split()[-2]
    if re.search('another [0-9]+ nominat', awards):
        anot = re.findall('another [0-9]+ nominat', awards)
        if anot:
            req_data.loc[i, 'Another_nominated'] = anot[0].split()[-2]
    # Overall "N wins" / "N nominations" totals.
    if 'win' in awards:
        wins = re.findall('[0-9 ]+win[s]*', awards)
        if wins:
            req_data.loc[i, 'Awards_won'] = wins[0].split()[0]
    if 'nominat' in awards:
        nomi = re.findall('[0-9 ]+nominat*', awards)
        if nomi:
            req_data.loc[i, 'Awards_nominated'] = nomi[0].split()[0]

print(req_data.head(2))
req_data.to_csv('Q4_PART_1.csv')
| Uppalapa/Dataanalysis-using-Python-Projects | Assignment 3/Q4_PART_1.py | Q4_PART_1.py | py | 3,433 | python | en | code | 0 | github-code | 13 |
358111766 | # -*- coding: utf-8 -*-
import json
from PIL import Image,ImageDraw,ImageFont
from datetime import datetime
# Paths to the base UI background image and the per-panel weather icon sets.
baseUi = 'display/ui/resources/UI.png'
currentlyIconPath = 'display/ui/resources/icons/currently/'
dailyIconPath = 'display/ui/resources/icons/daily/'
hourlyIconPath = 'display/ui/resources/icons/hourly/'
class UIBuilder:
def __init__(self):
self.ui = Image.open(baseUi)
def date(self, date):
panel = self._createPanel((400, 30))
weekday = date.strftime("%A")
month = date.strftime("%B")
day = date.strftime("%d")
year = date.strftime("%Y")
date_string = '{}, {} {}, {}'.format(weekday, month, day, year)
ImageDraw.Draw(panel).text((5,6), date_string, fill='black', \
font=ImageFont.truetype('Roboto-Medium.ttf', 15))
self.ui.paste(panel, (0, 0), panel)
return self
def currently(self, currentlyForecast):
# panel = self._createPanel((280, 94))
panel = self._createPanel((400, 94))
conditions = self._currently(currentlyForecast)
# panel.paste(conditions, (0, 0), conditions)
panel.paste(conditions, (60, 0), conditions)
self.ui.paste(panel, (0, 31), panel)
return self
def daily(self, dailyForecast):
for i in range(0, 5):
panel = self._daily(dailyForecast['data'][i])
self.ui.paste(panel, ((80 * i), 126), panel)
return self
def hourly(self, hourlyForecast):
for i in range(0, 5):
hourlyForecast['data'][i]
panel = self._hourly(hourlyForecast['data'][i])
self.ui.paste(panel, ((80 * i), 226), panel)
return self
def build(self):
return self.ui
def _currently(self, conditions):
panel = self._createPanel((280, 95))
draw = ImageDraw.Draw(panel)
icon = self._getIcon('currently', conditions['icon'])
panel.paste(icon, (10, 5), icon)
temperature = str(int(conditions['temperature']))
feelsLike = str(int(conditions['apparentTemperature']))
summary = conditions['summary']
humidity = str(int(conditions['humidity'] * 100))
wind = str(int(conditions['windGust']))
# Draw temperature.
h,w = draw.textsize("{}\xb0".format(temperature), \
font=ImageFont.truetype('Roboto-Medium.ttf', 35))
draw.text((90 + (190 - h) / 2, 2), "{}\xb0".format(temperature), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 35))
# Draw 'feels like' temperature.
h,w = draw.textsize("Feels like {}\xb0".format(feelsLike), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text((90 + (190 - h) / 2, 41), "Feels like {}\xb0".format(feelsLike), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
# Draw summary.
h,w = draw.textsize("{}".format(summary), \
font=ImageFont.truetype('Roboto-Medium.ttf', 15))
draw.text((90 + (190 - h) / 2, 55), "{}".format(summary), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 15))
# Draw humidity.
h,w = draw.textsize("Humidity: {}%".format(humidity), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text(((180 + (190 / 2) - h) / 2, 75), "Humidity: {}%".format(humidity), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
# Draw wind.
h,w = draw.textsize("Wind: {} mph".format(wind), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text((90 + (190 / 2) + ((190 / 2) - h) / 2, 75), "Wind: {} mph".format(wind), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
return panel
def _daily(self, conditions):
panel = self._createPanel((79, 99))
draw = ImageDraw.Draw(panel)
day = datetime.fromtimestamp(\
conditions['time']).strftime("%A")
h,w = draw.textsize("{}".format(day), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text((((79 - h) / 2), 5), day, fill='black', \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
icon = self._getIcon('daily', conditions['icon'])
panel.paste(icon, ((80 - 55) // 2, 15), icon)
highTemp = int(conditions['temperatureHigh'])
lowTemp = int(conditions['temperatureLow'])
h,w = draw.textsize("{}\xb0 / {}\xb0".format(highTemp, lowTemp), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text(((79-h)/2, 65), "{}\xb0 / {}\xb0".format(lowTemp, highTemp), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
h,w = draw.textsize(conditions['summary'], \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
if h > 79:
h,w = draw.textsize(self._getSummaryFromIcon(conditions['icon']), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text(((79 - h) / 2, 80), self._getSummaryFromIcon(conditions['icon']),
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
return panel
def _hourly(self, conditions):
panel = self._createPanel((79, 99))
draw = ImageDraw.Draw(panel)
time = datetime.fromtimestamp(conditions['time'])
time = "{} {}".format(int(time.strftime("%I")), time.strftime('%p'))
h,w = draw.textsize("{}".format(time), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text((((79 - h) / 2), 2), time, fill='black', \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
icon = self._getIcon('daily', conditions['icon'])
panel.paste(icon, ((80-55) // 2, 9), icon)
temp = int(conditions['temperature'])
feelsLike = int(conditions['apparentTemperature'])
h,w = draw.textsize("{}\xb0 / {}\xb0".format(temp, feelsLike), \
font=ImageFont.truetype('Roboto-Medium.ttf', 11))
draw.text(((79-h)/2, 58), "{}\xb0 / {}\xb0".format(temp, feelsLike), \
fill='black', font=ImageFont.truetype('Roboto-Medium.ttf', 11))
return panel
def _getIcon(self, type, icon):
if type == 'currently':
iconPath = currentlyIconPath
elif type == 'daily':
iconPath = dailyIconPath
elif type == 'hourly':
iconPath = hourlyIconPath
if icon == 'clear-day':
return Image.open(iconPath + 'clear-day.png')
if icon == 'clear-night':
return Image.open(iconPath + 'clear-night.png')
if icon == 'rain':
return Image.open(iconPath + 'rain.png')
if icon == 'snow':
return Image.open(iconPath + 'snow.png')
if icon == 'sleet':
return Image.open(iconPath + 'sleet.png')
if icon == 'wind':
return Image.open(iconPath + 'wind.png')
if icon == 'fog':
return Image.open(iconPath + 'fog.png')
if icon == 'cloudy':
return Image.open(iconPath + 'cloudy.png')
if icon == 'partly-cloudy-day':
return Image.open(iconPath + 'partly-cloudy-day.png')
if icon == 'partly-cloudy-night':
return Image.open(iconPath + 'partly-cloudy-night.png')
if icon == 'thunderstorm':
return Image.open(iconPath + 'thunderstorm')
def _getSummaryFromIcon(self, icon):
if icon == 'clear-day':
return 'Clear'
if icon == 'clear-night':
return 'Clear'
if icon == 'rain':
return 'Rain'
if icon == 'snow':
return 'Snow'
if icon == 'sleet':
return 'Sleet'
if icon == 'wind':
return 'Wind'
if icon == 'fog':
return 'Fog'
if icon == 'cloudy':
return 'Cloudy'
if icon == 'partly-cloudy-day':
return 'Partly Cloudy'
if icon == 'partly-cloudy-night':
return 'Partly Cloudy'
if icon == 'thunderstorm':
return 'Thunderstorm'
    def _createPanel(self, size):
        """Return a new white RGBA PIL image of the given (width, height)."""
        return Image.new('RGBA', size, (255, 255, 255))
| namarino41/weatherbox | weatherbox_display/display/ui/ui_builder.py | ui_builder.py | py | 8,294 | python | en | code | 1 | github-code | 13 |
26383856387 | from leetcode.string.longest_repeating_character import Solution
import pytest
@pytest.mark.parametrize(
    "text,max_replacements,expected",
    [
        ("ABAB", 2, 4),
        ("AABABBA", 1, 4),
        ("BAAAABBA", 1, 5),
        ("BAAAABBA", 3, 8),
        ("BAAAABBBBBA", 1, 6),
        ("CBAAAABBBBBA", 2, 7),
        ("CBAAAABBBBBA", 1, 6),
        ("CABAAAABBBBBA", 2, 7),
        ("", 1, 0),
        ("AAAA", 3, 4),
        ("AAAA", 1, 4),
    ],
)
def test_longest_repeating_character(text, max_replacements, expected):
    """character_replacement returns the longest uniform run achievable
    with at most *max_replacements* character substitutions."""
    assert Solution().character_replacement(text, max_replacements) == expected
| martinabeleda/leetcode | leetcode/string/test/test_longest_repeating_character.py | test_longest_repeating_character.py | py | 576 | python | en | code | 1 | github-code | 13 |
1653427365 | import logging
from datetime import datetime
from sqlalchemy import func
from flask import Blueprint, jsonify, request
from ledger import db
from ledger.models import Transaction
from .decorators import auth_required
logger = logging.getLogger(__name__)
index_blueprint = Blueprint('index', __name__)
@index_blueprint.route('/', methods=['GET'])
@auth_required
def balances_view():
    """Return per-recipient balances as JSON, summing all transactions
    dated on or before the requested date.

    Query args:
        date: optional 'YYYY-MM-DD' cutoff; defaults to the current time.
    """
    date_arg = request.args.get('date')
    cutoff = datetime.strptime(date_arg, '%Y-%m-%d') if date_arg else datetime.now()
    rows = (db.session.query(Transaction.recipiant,
                             func.sum(Transaction.amount))
            .filter(Transaction.date <= cutoff)
            .group_by(Transaction.recipiant)
            .all())
    # Round each aggregated amount to cents for the JSON payload.
    return jsonify({recipiant: round(total, 2) for recipiant, total in rows})
| fizzy123/ledger | ledger/views/index.py | index.py | py | 1,049 | python | en | code | 0 | github-code | 13 |
28942761930 | import requests
import json
class initfmp:
    """
    FMP API driver: loads the endpoint and API key from the local config
    file and fetches JSON data from Financial Modeling Prep.
    """
    def __init__(self):
        self.config = self.get_config()
        self.endpoint = self.config['fmp']['endpoint']
        self.api_key = self.config['fmp']['api_key']

    def get_config(self) -> dict:
        """Load and return ./credentials/config.json as a dict."""
        with open('./credentials/config.json', 'r') as config_file:
            return json.load(config_file)

    def get_data(self, api_path: str):
        """Fetch `{endpoint}/{api_path}` with the configured API key and
        return the decoded JSON body.

        api_path(str): the target api path, for example
            "historical-price-full/stock_dividend/AAPL?"

        Raises:
            Exception: when the response status code is not 200.
            (Kept as a bare Exception for caller compatibility.)
        """
        url = "{}/{}".format(self.endpoint, api_path)
        res = requests.get(url, params={"apikey": self.api_key})
        if res.status_code == 200:
            # requests decodes the body itself; same as json.loads(res.text).
            return res.json()
        raise Exception(
            "Getting Data Error With status code: {}".format(res.status_code))
14966961078 | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
import debug_toolbar
# URL routing: each app is mounted under its own prefix; the storefront
# app handles the site root.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('review/', include("review.urls", namespace="review")),
    path('account/', include("account.urls", namespace="account")),
    path('cart/', include("cart.urls", namespace="cart")),
    path('order/', include("order.urls", namespace="order")),
    path('coupon/', include("coupon.urls", namespace="coupon")),
    path('api/', include("drf.urls", namespace="drf")),
    path('', include("store.urls", namespace="store")),
    # django-debug-toolbar panel (development aid).
    path("__debug__", include(debug_toolbar.urls)),
]
# Serve user-uploaded media through Django only in DEBUG mode; in
# production the web server should serve MEDIA_URL directly.
if settings.DEBUG:
    urlpatterns += static(
        settings.MEDIA_URL,
        document_root=settings.MEDIA_ROOT)
21928266713 | # CodeChef - LAPIN - Lapindromes
# https://www.codechef.com/LP1TO201/problems/LAPIN
import sys
input = sys.stdin.readline
from collections import Counter
T = int(input())
for _ in range(T):
    S = input().rstrip()
    # A lapindrome has the same multiset of characters in both halves;
    # for odd lengths the middle character is ignored.
    half = len(S) // 2
    left = S[:half]
    right = S[half + 1:] if len(S) % 2 else S[half:]
    print("YES" if Counter(left) == Counter(right) else "NO")
24349269402 | from configparser import ConfigParser
import os
from freddi import Freddi, FreddiNeutronStar
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
# Seconds in a day: used to convert day-valued settings to seconds.
DAY = 86400
# Should contain all dimensional quantities found in freddi.ini, mapped to
# the factor converting their config unit to CGS (presumably: solar mass in
# grams for Mx/Mopt, kpc in cm for distance, days in seconds — TODO confirm).
_UNITS = dict(
    Mx=1.98892e33,
    Mopt=1.98892e33,
    period=DAY,
    distance=1000 * 3.08567758135e18,
    time=DAY,
    tau=DAY,
)
def _to_float(x):
try:
return float(x)
except ValueError:
return x
def default_freddi_kwargs():
    """Parse data/freddi.ini into a kwargs dict for Freddi, converting
    numeric strings to floats and rescaling dimensional values to CGS."""
    header = 'DEFAULT'
    with open(os.path.join(DATA_DIR, 'freddi.ini')) as f:
        # Prepend a section header so ConfigParser accepts the bare file.
        ini_text = '[{}]\n'.format(header) + f.read()
    parser = ConfigParser(inline_comment_prefixes=['#'])
    parser.optionxform = str  # keep option names case-sensitive
    parser.read_string(ini_text)
    kwargs = {name: _to_float(value) for name, value in parser[header].items()}
    # Rescale dimensional quantities (masses, periods, ...) to CGS units.
    for name, unit in _UNITS.items():
        if name in kwargs:
            kwargs[name] = unit * kwargs[name]
    return kwargs
def defaulted_kwargs(**kwargs):
    """Return the default Freddi kwargs, overridden by any explicit ones."""
    merged = dict(default_freddi_kwargs())
    merged.update(kwargs)
    return merged
def freddi_w_default(**kwargs):
    """Construct a Freddi model using defaults from freddi.ini, with any
    explicit keyword overrides."""
    return Freddi(**defaulted_kwargs(**kwargs))
def freddi_ns_w_default(**kwargs):
    """Construct a FreddiNeutronStar model using defaults from freddi.ini,
    with any explicit keyword overrides."""
    return FreddiNeutronStar(**defaulted_kwargs(**kwargs))
| hombit/freddi | python/test/test_util.py | test_util.py | py | 1,201 | python | en | code | 6 | github-code | 13 |
1935925886 |
class Difficulties:
    """Holds the gameplay parameters for a chosen difficulty.

    Attributes:
        name: the chosen difficulty name ("EASY", "MEDIUM", anything else
            falls back to hard)
        speed: tick interval
        addition: per-step increment
        column: number of columns
    """
    def __init__(self, difficulty):
        """Initialize the parameters from the difficulty name.

        Args:
            difficulty: difficulty name; unrecognized values select hard.
        """
        self.name = difficulty
        self.speed = 0
        self.addition = 0
        self.column = 0
        if difficulty == "EASY":
            self.easy()
        elif difficulty == "MEDIUM":
            self.medium()
        else:
            self.hard()

    def easy(self):
        """Apply the easy-mode parameters."""
        self.speed, self.addition, self.column = 300, 1, 1

    def medium(self):
        """Apply the medium-mode parameters."""
        self.speed, self.addition, self.column = 200, 2, 2

    def hard(self):
        """Apply the hard-mode parameters."""
        self.speed, self.addition, self.column = 150, 3, 3
12218437330 | import socket
import sys
import subprocess as sp
from timestamp import timestamp
from datetime import datetime
extProc = sp.Popen(['python','node2.py']) # runs the companion node2.py sender process
status = sp.Popen.poll(extProc) # status should be 'None' (still running)
# This node's simulated network identity.
IP = '0x2A'
MAC = 'N2'
# Static ARP table: simulated IP -> simulated MAC for directly known hosts.
LOCAL_ARP_TABLE = {
    "0x21": "R2",
    "0x2A": "N2",
    "0x2B": "N3"
}
# NOTE(review): `cable` appears unused in this script — confirm before removing.
cable = ("localhost", 8200)
# UDP socket this node listens on (loopback port 8002).
node2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
node2.bind(("localhost", 8002))
def reply_ping(packet):
    """Send the ping-reply packet string out on both local UDP endpoints
    (ports 8102 and 8033 on loopback)."""
    node2.sendto(bytes(packet, "utf-8"), ("localhost", 8102))
    node2.sendto(bytes(packet, "utf-8"), ("localhost", 8033))
def log_protocol(source_ip, source_mac, message):
    """Append a received LOG-protocol message to node2.log."""
    entry = ("\nSOURCE IP: " + source_ip + '\nSOURCE MAC: ' + source_mac +
             '\n' + 'MESSAGE: ' + message + '\n\n')
    with open('node2.log', 'a') as log_file:
        log_file.write(entry)
def wrap_packet_ip(message, dest_ip, protocol):
    """Assemble a ping-reply packet string:
    <src MAC><dst MAC><src IP><dst IP>'rep'<protocol><3-digit len><data>.

    The destination MAC is resolved through LOCAL_ARP_TABLE, defaulting to
    the router ('R2') for IPs not in the table.
    """
    dest_mac = LOCAL_ARP_TABLE.get(dest_ip, 'R2')
    ethernet_header = MAC + dest_mac
    ip_header = IP + dest_ip
    # The data length field is always encoded as exactly three digits.
    data_length = str(len(message)).zfill(3)
    return ethernet_header + ip_header + 'rep' + protocol + data_length + message
# Main receive loop.  Each UDP datagram is parsed as:
#   <srcMAC:2><dstMAC:2><srcIP:4><dstIP:4>[<'rep':3>]<proto:1><len:3><data>[<start_time>]
# where the optional 'rep' marker distinguishes ping replies.
while True:
    received_message, addr = node2.recvfrom(1024)
    received_message = received_message.decode("utf-8")
    source_mac = received_message[0:2]
    destination_mac = received_message[2:4]
    ip_source = received_message[4:8]
    destination_ip = received_message[8:12]
    ping_type = ''
    protocol = ''
    data_length = ''
    message = ''
    start_time = ''
    if received_message[12] == 'r':
        # Ping reply: 'rep' marker, then protocol, length, data, timestamp.
        ping_type = received_message[12:15]
        protocol = received_message[15:16]
        data_length = int(received_message[16:19])
        end_pos = 19 + data_length
        message = received_message[19:end_pos]
        protocol = int(protocol)
        start_time = received_message[end_pos:]
    else:
        protocol = received_message[12:13]
        data_length = int(received_message[13:16])
        end_pos = 16 + data_length
        message = received_message[16:end_pos]
        protocol = int(protocol)
    if IP == destination_ip and MAC == destination_mac:
        if protocol == 3:
            # Protocol 3: simple messaging — just display the payload.
            print("-----------" + timestamp() + "-----------")
            print("\nThe packet received:\nSource MAC address: {source_mac}, Destination MAC address: {destination_mac}".format(source_mac=source_mac, destination_mac=destination_mac))
            print("\nSource IP address: {ip_source}, Destination IP address: {destination_ip}".format(ip_source=ip_source, destination_ip=destination_ip))
            print("\nProtocol: Simple Messaging")
            # BUG FIX: data_length is an int at this point; the original
            # concatenated it directly to a str, raising TypeError
            # (here and in every branch below).
            print("\nData Length: " + str(data_length))
            print("\nMessage: " + message)
            print("----------------------------------")
        elif protocol == 0:
            # Protocol 0: ping — measure the one-way trip time and reply.
            print("-----------" + timestamp() + "-----------")
            print("\nThe packet received:\nSource MAC address: {source_mac}, Destination MAC address: {destination_mac}".format(source_mac=source_mac, destination_mac=destination_mac))
            print("\nSource IP address: {ip_source}, Destination IP address: {destination_ip}".format(ip_source=ip_source, destination_ip=destination_ip))
            print("\nProtocol: Ping")
            print("\nData Length: " + str(data_length))
            print("\nMessage: " + message)
            print("----------------------------------")
            end = datetime.now()
            print(end)
            total = end - datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
            print('seconds')
            print(total.total_seconds())
            print("Ping successful: ", total.total_seconds())
            msg = "Reply from 0x2A: No lost packet, one way trip time: " + str(total.total_seconds())
            reply_ping(wrap_packet_ip(message, ip_source, str(protocol)))
            print(message)
        elif protocol == 1:
            # Protocol 1: log — append the message to node2.log.
            print("-----------" + timestamp() + "-----------")
            print("\nThe packet received:\nSource MAC address: {source_mac}, Destination MAC address: {destination_mac}".format(source_mac=source_mac, destination_mac=destination_mac))
            print("\nSource IP address: {ip_source}, Destination IP address: {destination_ip}".format(ip_source=ip_source, destination_ip=destination_ip))
            print("\nProtocol: Log")
            print("\nData Length: " + str(data_length))
            print("\nMessage: " + message)
            print("----------------------------------")
            log_protocol(ip_source, source_mac, message)
        elif protocol == 2:
            # Protocol 2: kill — stop the child sender process and exit.
            print("-----------" + timestamp() + "-----------")
            print("\nThe packet received:\nSource MAC address: {source_mac}, Destination MAC address: {destination_mac}".format(source_mac=source_mac, destination_mac=destination_mac))
            print("\nSource IP address: {ip_source}, Destination IP address: {destination_ip}".format(ip_source=ip_source, destination_ip=destination_ip))
            print("\nProtocol: Kill")
            print("\nData Length: " + str(data_length))
            print("\nMessage: " + message)
            print("----------------------------------")
            print("Kill protocol has been given. Will exit now...")
            sp.Popen.terminate(extProc)
            sys.exit()
        elif protocol == 4:
            # POISON ARP HERE
            pass
    elif IP == '0x21' and MAC == destination_mac:
        pass
        # DO MITM HERE
    else:
        # Packet addressed to someone else: display and drop.
        print("-----------" + timestamp() + "-----------")
        print("\nThe packet received:\nSource MAC address: {source_mac}, Destination MAC address: {destination_mac}".format(source_mac=source_mac, destination_mac=destination_mac))
        print("\nSource IP address: {ip_source}, Destination IP address: {destination_ip}".format(ip_source=ip_source, destination_ip=destination_ip))
        print("\nProtocol: " + str(protocol))
        print("\nData Length: " + str(data_length))
        print("\nMessage: " + message)
        print()
        print("PACKET NOT FOR ME. DROPPING NOW...")
        print("----------------------------------")
6002482278 | '''
Example code of simulating a liquid fountain in a simple box.
The simulation is based on PBF (https://mmacklin.com/pbf_sig_preprint.pdf)
Note: the same functions used for this PBF are used for liquid reconstruction
'''
import torch
import open3d as o3d
from differentiableFluidSim import FluidGravityForce, uniformSource,\
XsphViscosity, MullerConstraints
from utils import generateSDF_approx
# Simulation parameters:
dt = 1/60.0  # timestep per frame (60 FPS)
num_particles_emitted_per_frame = 1
gravity_force = [0, 0, 9.8]  # presumably m/s^2, +z pointing down here — TODO confirm
max_particle_velocity = 2    # speed clamp applied by FluidGravityForce
interaction_radius = 0.005   # particle interaction radius for viscosity
sdf_resolution = 0.05        # grid spacing of the signed distance field
dampening_factor = 0.01      # fraction of velocity removed per constraint update
# Callback for open3d visualizer to end simulation
end_simulation = False
def close_visualizer(vis):
    """Key callback registered on 'X': flag the main loop to stop."""
    global end_simulation
    end_simulation = True
if __name__ == "__main__":
    torch.cuda.empty_cache()
    # Generate box mesh
    mesh = o3d.geometry.TriangleMesh.create_box()
    wire_box = o3d.geometry.LineSet.create_from_triangle_mesh(mesh)
    # Get SDF from mesh
    print("Generating SDF mesh. This can take a while...")
    sdf, pos_sdf = generateSDF_approx(mesh, sdf_resolution, 50*sdf_resolution)
    # Prepare simulation modules
    fluid_gravity_force = FluidGravityForce(torch.tensor(gravity_force,
                          dtype=torch.float32).reshape((1, 1, -1)).cuda(),
                          maxSpeed=max_particle_velocity).cuda()
    # Pose of the collision SDF: position followed by a quaternion (x,y,z,w).
    collision_pose = torch.tensor([[[pos_sdf[0], pos_sdf[1], pos_sdf[2],
                                  0, 0, 0, 1]]], dtype=torch.float32).cuda()
    fluid_constraints = MullerConstraints(torch.from_numpy(sdf), sdf_resolution,
                                          collision_pose, radius=0.005,
                                          numStaticIterations=3, numIteration=5,
                                          fluidRestDistance = 0.6).cuda()
    fluid_viscosity = XsphViscosity(radius=interaction_radius).cuda()
    # Source location of liquid:
    source_init_loc_torch = torch.tensor([0.5, 0.05, 0.8], dtype=torch.float32).reshape((1, 1, -1)).cuda()
    source_init_vel_torch = torch.tensor([0.0,0.0,0.0], dtype=torch.float32).reshape((1, 1, -1)).cuda()
    locs, vel = uniformSource(source_init_loc_torch, source_init_vel_torch,
                              num_particles_emitted_per_frame, 1, 0.01)
    # Visualizer
    vis = o3d.visualization.VisualizerWithKeyCallback()
    vis.create_window()
    # 88 is the key code for 'X' (see close_visualizer above).
    vis.register_key_callback(88, close_visualizer)
    # Add all the geometry to visualizer
    wire_box = o3d.geometry.LineSet.create_from_triangle_mesh(mesh)
    vis.add_geometry(wire_box)
    pcd_fluid = o3d.geometry.PointCloud()
    pcd_fluid.points = o3d.utility.Vector3dVector(locs.cpu().numpy()[0])
    vis.add_geometry(pcd_fluid)
    print("Hit X to close simulation")
    while not end_simulation:
        # Predict particles according to their velocity and gravity
        new_locs, vel = fluid_gravity_force(locs, vel, dt)
        # Add new particles from source
        add_locs, add_vel = uniformSource(source_init_loc_torch,
                                          source_init_vel_torch,
                                          num_particles_emitted_per_frame, 1, 0.01)
        new_locs = torch.cat((new_locs, add_locs), 1)
        vel = torch.cat((vel, add_vel), 1)
        # Apply fluid position constraints (PBF solve + SDF collisions)
        new_locs = fluid_constraints(locs = new_locs)
        # Update velocity after applying position constraints to the old particles
        # (newly-emitted particles keep their source velocity)
        vel[:, :locs.shape[1], :] = (1-dampening_factor)*(new_locs[:, :locs.shape[1], :] - locs)/dt
        # Apply fluid viscosity
        vel = fluid_viscosity(new_locs, vel)
        locs = new_locs
        # Update visualizer
        pcd_fluid.points = o3d.utility.Vector3dVector(locs.cpu().numpy()[0])
        pcd_fluid.paint_uniform_color([0.5, 0.5, 1.0])
        vis.update_geometry(pcd_fluid)
        vis.poll_events()
        vis.update_renderer()
| ucsdarclab/liquid_reconstruction | simulateBox.py | simulateBox.py | py | 4,021 | python | en | code | 1 | github-code | 13 |
36636885428 | # properties ...
class Car:
    """Toy car holding a speed category ("high", "mid" or "low")."""
    def __init__(self, speed):
        self.speed=speed
# Three sample cars, one per speed category.
c1 = Car("high")
c2 = Car("mid")
c3 = Car("low")
def get_speed(car):
    """Map a car's speed category to its numeric speed.

    Raises:
        TypeError: when the category is unknown.
    """
    numeric = {"high": 300, "mid": 200, "low": 100}.get(car.speed)
    if not numeric:
        raise TypeError("car not specified!")
    return numeric
# Demo: print each car's numeric speed.
for car in [c1,c2,c3]:
    print (f"this car's going {get_speed(car): 0}")
| Sina-Gharloghi/HeyvaAI-exercise | 102prj16.py | 102prj16.py | py | 462 | python | en | code | 0 | github-code | 13 |
24653586600 | import base64
import xml.etree.ElementTree as element_tree
import string
import os
import boto3
from util import Util
from hls_aes import HLSAesLib
from key_generator import KeyGenerator
s3_client = boto3.client("s3")
# S3 bucket where generated content keys are stored.
KEY_STORE_BUCKET = os.environ["KEY_STORE_BUCKET"]
# SPEKE/CPIX identifiers for HLS AES-128 ("identity" key format).
HLS_AES_128_SYSTEM_ID = '81376844-f976-481e-a84e-cc25d39b0b33'
HLS_AES_128_KEY_FORMAT = 'identity'
HLS_AES_128_KEY_FORMAT_VERSIONS = '1'
# Translation of the note below: "Assemble and return the CPIX XML document."
"""
CPIX XMLドキュメントを組み立てて返す
"""
class ServerResponseBuilder:
    """Builds the SPEKE/CPIX key-response document for an incoming CPIX
    key request, generating HLS AES-128 content keys and storing them
    in the configured S3 key-store bucket."""
    # Most recently generated content key (bytes); set per DRM system
    # in fill_request and read by fixup_document.
    key = ""
    def __init__(self, request_body):
        # Parse the incoming CPIX request and register the namespaces so
        # the serialized response keeps the expected prefixes.
        self.root = element_tree.fromstring(request_body)
        self.hls_aes_lib = HLSAesLib()
        self.key_generator = KeyGenerator()
        element_tree.register_namespace("cpix", "urn:dashif:org:cpix")
        element_tree.register_namespace("pskc", "urn:ietf:params:xml:ns:keyprov:pskc")
        element_tree.register_namespace("speke", "urn:aws:amazon:com:speke")
        element_tree.register_namespace("ds", "http://www.w3.org/2000/09/xmldsig#")
        element_tree.register_namespace("enc", "http://www.w3.org/2001/04/xmlenc#")
    def fixup_document(self, drm_system, system_id, kid, content_id):
        """
        Update the returned XML document based on the specified system ID.
        Only HLS AES-128 is supported; the generated key is uploaded to S3
        (hex-encoded) and the key URI is written into the DRMSystem node.
        """
        if system_id.lower() == HLS_AES_128_SYSTEM_ID.lower():
            uri = self.hls_aes_lib.build_key_uri(content_id, kid)
            ext_x_key = uri["uri"]
            object_key = uri["key"]
            s3_client.put_object(Body=Util.bin_to_hex_str(self.key).encode("utf-8"), Bucket=KEY_STORE_BUCKET, Key=object_key)
            drm_system.find("{urn:dashif:org:cpix}URIExtXKey").text = ext_x_key
            drm_system.find("{urn:aws:amazon:com:speke}KeyFormat").text = base64.b64encode(HLS_AES_128_KEY_FORMAT.encode("utf-8")).decode("utf-8")
            drm_system.find("{urn:aws:amazon:com:speke}KeyFormatVersions").text = base64.b64encode(HLS_AES_128_KEY_FORMAT_VERSIONS.encode("utf-8")).decode("utf-8")
            # These elements do not apply to HLS AES-128 responses.
            self.safe_remove(drm_system, "{urn:dashif:org:cpix}ContentProtectionData")
            self.safe_remove(drm_system, "{urn:aws:amazon:com:speke}ProtectionHeader")
            self.safe_remove(drm_system, "{urn:dashif:org:cpix}PSSH")
        else:
            raise Exception("Invalid system ID {}".format(system_id))
    def fill_request(self):
        """Generate a content key per DRMSystem node and attach keys/IVs to
        the ContentKey nodes of the parsed request, in place."""
        content_id = self.root.get("id")
        system_ids = {}
        explicitIV = None
        content_keys = self.root.findall("./{urn:dashif:org:cpix}ContentKeyList/{urn:dashif:org:cpix}ContentKey")
        for drm_system in self.root.findall("./{urn:dashif:org:cpix}DRMSystemList/{urn:dashif:org:cpix}DRMSystem"):
            kid = drm_system.get("kid")
            self.key = self.key_generator.gen_content_key(kid)
            system_id = drm_system.get("systemId")
            system_ids[system_id] = kid
            iv = base64.b64encode(self.hls_aes_lib.gen_iv(content_id, kid)).decode('utf-8')
            self.fixup_document(drm_system, system_id, kid, content_id)
            explicitIV = iv.encode("utf-8").decode('utf-8')
        # NOTE(review): `kid` and `self.key` below refer to the LAST DRM
        # system iterated above — confirm single-DRM-system requests only.
        for content_key_tag in content_keys:
            init_vector = content_key_tag.get("explicitIV")
            # explicitIV is required for HLS AES or SAMPLE AES (FairPlay).
            if init_vector is None and system_ids.get(HLS_AES_128_SYSTEM_ID, False) == kid:
                content_key_tag.set('explicitIV', explicitIV)
            data = element_tree.SubElement(content_key_tag, "{urn:dashif:org:cpix}Data")
            secret = element_tree.SubElement(data, "{urn:ietf:params:xml:ns:keyprov:pskc}Secret")
            plain_value = element_tree.SubElement(secret, "{urn:ietf:params:xml:ns:keyprov:pskc}PlainValue")
            # Set the content key (base64-encoded plaintext key).
            plain_value.text = base64.b64encode(self.key).decode('utf-8')
    def get_response(self):
        """
        Get the key request response as an HTTP response.
        Returns an API-Gateway-style dict with the base64-encoded CPIX body.
        """
        self.fill_request()
        body = base64.b64encode(element_tree.tostring(self.root)).decode('utf-8')
        print(body)
        return {
            "isBase64Encoded": True,
            "statusCode": 200,
            "headers": {
                "Content-Type": "application/xml",
                "Access-Control-Allow-Origin": "*",
                "Access-Control-Allow-Headers": "*",
                "Access-Control-Allow-Methods": "GET,POST,OPTIONS",
                "Speke-User-Agent": "oda-key-server"
            },
            "body": body
        }
    def safe_remove(self, element, match):
        """Remove the first child matching *match* if present; otherwise
        just log the miss."""
        elm_instance = element.find(match)
        if elm_instance is not None:
            element.remove(elm_instance)
        else:
            print("not match xml", match)
| OdaDaisuke/aws-speke | src/server_response_builder.py | server_response_builder.py | py | 4,716 | python | en | code | 5 | github-code | 13 |
33704174995 | # coding=UTF-8
import random
import requests
from requests import Timeout, RequestException
from bs4 import BeautifulSoup
# Proxy configurations chosen at random per request: direct connection or
# a local Privoxy/Tor-style HTTP proxy.
proxies_pool = [
    {},
    {'http': '172.19.0.11:8118','https': '172.19.0.11:8118'}
]
# Base request headers.  NOTE(review): 'user_agent' is not a real HTTP
# header name; get_once overrides the actual User-Agent header.
headers_ = {'user_agent': "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/113.0",
            'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
            'accept-encoding':'gzip, deflate, br',
            'accept-language':'en,en-US;q=0.9,zh-CN;q=0.8,zh;q=0.7'
            }
# User agents chosen at random per request.
user_agent_pool = ["Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/113.0"]
def get_once(url, timeout=30, retry_count=2):
    """Fetch *url* (up to *retry_count* attempts) through a random proxy
    and user agent; return the parsed BeautifulSoup document, or None on
    failure."""
    proxies = random.choice(proxies_pool)
    user_agent = random.choice(user_agent_pool)
    headers = headers_.copy()
    # BUG FIX: the proper header field is 'User-Agent'; the original set
    # 'User_agent', which servers ignore, so the UA was never applied.
    headers['User-Agent'] = user_agent
    data = None
    for i in range(retry_count):
        try:
            response = requests.get(url, proxies=proxies, headers=headers, timeout=timeout)
            # A Response is falsy for 4xx/5xx status codes.
            if not response:
                print(f"Status code = {response.status_code}, retrying...")
                continue
            data = BeautifulSoup(response.content)
            break
        except Timeout:
            print(f"Request Timeout {i}, retrying...")
        except RequestException as e:
            print(f"Request error: {e}")
            break
    else:
        # for-else: every attempt timed out or returned a bad status.
        print("Failed to fetch the page after multiple retries.", url)
    return data
import pandas as pd
def render_html(xls_fp):
    """Read an Excel sheet of compounds and write index.html with one
    clickable ChemScene link per CAS number."""
    df = pd.read_excel(xls_fp)
    # .copy() avoids pandas' SettingWithCopyWarning (assignment into a
    # column-selection view) when adding the 'link' column below.
    out = df[['compound', 'CAS']].copy()
    out['link'] = [f'https://www.chemscene.com/{cas}.html' for cas in df['CAS']]
    out.to_html('index.html', index=False, render_links=True)
| DarkGoldBar/spiders_lair | drug_solubility/main.py | main.py | py | 1,708 | python | en | code | 0 | github-code | 13 |
30138843612 | from flask_jwt_extended import jwt_required
from flask_restful import Resource
from zou.app.mixin import ArgsMixin
from zou.app.models.project import Project, PROJECT_STYLES
from zou.app.models.project_status import ProjectStatus
from zou.app.services import (
deletion_service,
projects_service,
shots_service,
user_service,
persons_service,
assets_service,
tasks_service,
status_automations_service,
)
from zou.app.utils import events, permissions, fields
from zou.app.blueprints.crud.base import BaseModelResource, BaseModelsResource
from zou.app.services.exception import ArgumentsException
class ProjectsResource(BaseModelsResource):
    """Collection resource for projects: listing with permission
    filtering, and creation with relation resolution."""

    def __init__(self):
        BaseModelsResource.__init__(self, Project)

    def add_project_permission_filter(self, query):
        """Admins see every project; other users only related ones."""
        if permissions.has_admin_permissions():
            return query
        return query.filter(user_service.build_related_projects_filter())

    def check_read_permissions(self):
        """Anyone may list projects (rows are filtered per user)."""
        return True

    def check_creation_integrity(self, data):
        """
        Check if the data descriptor has a valid production_style.
        """
        if "production_style" in data:
            if data["production_style"] is None:
                data["production_style"] = "2d3d"
            types = [type_name for type_name, _ in PROJECT_STYLES]
            if data["production_style"] not in types:
                raise ArgumentsException("Invalid production_style")
        return True

    def update_data(self, data):
        """Default the project status to 'open' and resolve relation id
        lists into model instances before creation."""
        open_status = projects_service.get_or_create_open_status()
        if "project_status_id" not in data:
            data["project_status_id"] = open_status["id"]
        self._resolve_relation_ids(data)
        return data

    @staticmethod
    def _resolve_relation_ids(data):
        """Replace id lists in *data* with model instances, in place.

        Factors out the five duplicated resolution loops of the original.
        """
        resolvers = {
            "team": persons_service.get_person_raw,
            "asset_types": assets_service.get_asset_type_raw,
            "task_statuses": tasks_service.get_task_status_raw,
            "task_types": tasks_service.get_task_type_raw,
            "status_automations":
                status_automations_service.get_status_automation_raw,
        }
        for field, resolver in resolvers.items():
            if field in data:
                data[field] = [resolver(entry_id) for entry_id in data[field]]

    def post_creation(self, project):
        """Serialize the new project; TV shows also get a first episode."""
        project_dict = project.serialize()
        if project.production_type == "tvshow":
            episode = shots_service.create_episode(project.id, "E01")
            project_dict["first_episode_id"] = fields.serialize_value(
                episode["id"]
            )
        user_service.clear_project_cache()
        projects_service.clear_project_cache("")
        return project_dict
class ProjectResource(BaseModelResource, ArgsMixin):
    """Single-project resource: read/update/delete with permission checks
    and relation resolution."""

    def __init__(self):
        BaseModelResource.__init__(self, Project)
        self.protected_fields.append("team")

    def check_read_permissions(self, project):
        return user_service.check_project_access(project["id"])

    def check_update_permissions(self, project, data):
        return user_service.check_manager_project_access(project["id"])

    def pre_update(self, project_dict, data):
        """Resolve relation id lists into model instances before update."""
        self._resolve_relation_ids(data)
        return data

    @staticmethod
    def _resolve_relation_ids(data):
        """Replace id lists in *data* with model instances, in place.

        Factors out the five duplicated resolution loops of the original.
        """
        resolvers = {
            "team": persons_service.get_person_raw,
            "asset_types": assets_service.get_asset_type_raw,
            "task_statuses": tasks_service.get_task_status_raw,
            "task_types": tasks_service.get_task_type_raw,
            "status_automations":
                status_automations_service.get_status_automation_raw,
        }
        for field, resolver in resolvers.items():
            if field in data:
                data[field] = [resolver(entry_id) for entry_id in data[field]]

    def post_update(self, project_dict):
        """TV shows are guaranteed a first episode; project cache cleared."""
        if project_dict["production_type"] == "tvshow":
            episode = shots_service.get_or_create_first_episode(
                project_dict["id"]
            )
            project_dict["first_episode_id"] = fields.serialize_value(
                episode["id"]
            )
        projects_service.clear_project_cache(project_dict["id"])
        return project_dict

    def clean_get_result(self, data):
        """Attach the human-readable status name to the serialized project."""
        project_status = ProjectStatus.get(data["project_status_id"])
        data["project_status_name"] = project_status.name
        return data

    def post_delete(self, project_dict):
        projects_service.clear_project_cache(project_dict["id"])
        return project_dict

    def update_data(self, data, instance_id):
        """
        Check if the data descriptor has a valid production_style.
        """
        if "production_style" in data:
            if data["production_style"] is None:
                data["production_style"] = "2d3d"
            types = [type_name for type_name, _ in PROJECT_STYLES]
            if data["production_style"] not in types:
                raise ArgumentsException("Invalid production_style")
        return data

    @jwt_required()
    def delete(self, instance_id):
        """Delete a closed project; `force` removes it and its data,
        otherwise the row is soft/plain deleted."""
        force = self.get_force()
        project = self.get_model_or_404(instance_id)
        project_dict = project.serialize()
        if projects_service.is_open(project_dict):
            return {
                "error": True,
                "message": "Only closed projects can be deleted",
            }, 400
        else:
            self.check_delete_permissions(project_dict)
            if force:
                deletion_service.remove_project(instance_id)
            else:
                project.delete()
                events.emit("project:delete", {"project_id": project.id})
            self.post_delete(project_dict)
            return "", 204
class ProjectTaskTypeLinksResource(Resource, ArgsMixin):
    """Creates the priority link between a project and a task type."""

    @jwt_required()
    def post(self):
        """Create a project/task-type link (optional integer priority,
        default 1) and return the serialized link with HTTP 201."""
        args = self.get_args(
            [
                ("project_id", "", True),
                ("task_type_id", "", True),
                ("priority", 1, False, int),
            ]
        )
        link = projects_service.create_project_task_type_link(
            args["project_id"], args["task_type_id"], args["priority"]
        )
        projects_service.clear_project_cache(link["project_id"])
        return link, 201
| cgwire/zou | zou/app/blueprints/crud/project.py | project.py | py | 7,319 | python | en | code | 152 | github-code | 13 |
35386369181 | import PyGeom2
def viz(g):
    """Build a demo scene on the PyGeom2 canvas *g*: colored and moveable
    points, lines, a rectangle, a filled polygon and text labels."""
    a=g.point(0,0,color=(1,0,0))
    b=g.point(100,0,moveable=True)
    c=g.point(0,100,moveable=True)
    g.text(base=c, string = "asdf", size=-18, color = (0,1,0))
    g.line(a,b)
    g.rect(c, (5, 6))
    g.polygon(vertices = (a, b, c), fill = (1,2,3))
    g.text(base=b, string = str(b), size=-18, color = (0,1,0))
    # Derived point: c offset by 20% of the a->b vector.
    d = c + 0.2*(b-a)
    g.line(a,d)
# Open the interactive viewer; viz(g) is called to build the scene.
PyGeom2.show(viz)
| victorliu/PyGeom2 | test.py | test.py | py | 374 | python | en | code | 1 | github-code | 13 |
class Parrot:
    """A parrot with a name and an age; all parrots share species 'bird'."""

    species = "bird"

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def __str__(self):
        # __class__ is the defining class (same lookup as the original).
        return f"{__class__.species}(name:{self.name} age :{self.age})"
# Demo: two parrots sharing the class-level species attribute.
blu = Parrot("Blu",10)
woo = Parrot("woo",21)
print(blu)
print(woo)
| dharmeshvyas/BCA-SEM-6 | python/UNIT -2/practical-1.py | practical-1.py | py | 305 | python | en | code | 0 | github-code | 13 |
21411108938 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def verticalOrder(self, root: TreeNode) -> List[List[int]]:
        """Group node values by column, columns left-to-right.

        BFS guarantees the required top-to-bottom (and, on ties,
        left-to-right) order inside every column; DFS would not.
        The root sits in column 0, left children shift the column by -1,
        right children by +1.
        """
        if not root:
            return []
        columns = defaultdict(list)
        leftmost = 0  # smallest column index seen so far
        pending = deque([(root, 0)])
        while pending:
            for _ in range(len(pending)):
                node, col = pending.popleft()
                columns[col].append(node.val)
                leftmost = min(leftmost, col)
                if node.left:
                    pending.append((node.left, col - 1))
                if node.right:
                    pending.append((node.right, col + 1))
        # Columns form a contiguous integer range, so walk up from the
        # leftmost one until a column index is missing.
        ordered = []
        col = leftmost
        while col in columns:
            ordered.append(columns[col])
            col += 1
        return ordered
| stevenjst0121/leetcode | 314_binrary_tree_vertical_order_traversal.py | 314_binrary_tree_vertical_order_traversal.py | py | 1,294 | python | en | code | 0 | github-code | 13 |
31272834440 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""收集由RabbitMQ回传的微博数据."""
import logging
import asyncio
import json
from core.mq_connection import get_channel
from spider import TweetP
from db import RawDataDAO
from db import TweetDAO
class DataCollecter(object):
    """Collect Weibo (microblog) data sent back through RabbitMQ.

    Consumes the ``weibo_data`` queue and persists every tweet (and its
    forwarded tweet, if any) through the raw-data and tweet DAOs.
    """
    def __init__(self):
        self.logger = logging.getLogger('DataCollecter')
        self.rawdata_dao = RawDataDAO()
        self.tweet_dao = TweetDAO()
        # NOTE: __init__ never returns under normal operation — run_forever()
        # blocks the calling thread to keep consuming messages.
        def start_loop():
            loop = asyncio.get_event_loop()
            loop.run_until_complete(self.init())
            loop.run_forever()
        start_loop()
    async def init(self):
        # Open the AMQP channel and subscribe to the data queue.
        self.channel = await get_channel()
        await self.channel.basic_consume(queue_name='weibo_data', callback=self.handle_weibo_data)
    async def handle_weibo_data(self, channel, body, envelope, properties):
        """AMQP consumer callback: parse, persist, then ack the delivery."""
        weibos = json.loads(body.decode('utf-8'))
        # Lazily convert raw dicts to TweetP objects while saving.
        def convert_generator():
            for weibo_dict in weibos:
                weibo = TweetP.from_json_dict(weibo_dict)
                # print(weibo.pretty())
                yield weibo
        self.logger.info("Got {0} weibo.".format(len(weibos)))
        self.save_weibo(convert_generator())
        # Ack only after a successful save so failures are redelivered.
        await self.channel.basic_client_ack(delivery_tag=envelope.delivery_tag)
        self.logger.debug('Save {0} weibo successful.'.format(len(weibos)))
    def save_weibo(self, weibos):
        """Persist one TweetP or an iterable of them (plus forwarded tweets)."""
        if isinstance(weibos, TweetP):
            weibos = [weibos]
        for weibo in weibos:
            if self.rawdata_dao:
                self.rawdata_dao.set_raw_data(weibo.mid, weibo.raw_html)
                if weibo.forward_tweet:
                    self.rawdata_dao.set_raw_data(weibo.forward_tweet.mid, weibo.forward_tweet.raw_html)
            if self.tweet_dao:
                self.tweet_dao.update_or_create_tweetp(weibo)
                if weibo.forward_tweet:
                    self.tweet_dao.update_or_create_tweetp(weibo.forward_tweet)
| njnubobo/WeiboSpider | weibo_spider/scheduler/datacollecter.py | datacollecter.py | py | 2,020 | python | en | code | 0 | github-code | 13 |
24632157870 | import os
import re
from setuptools import setup, find_packages
def long_description():
    """Return the contents of README.rst next to this file, or None if unreadable."""
    readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')
    try:
        # `with` ensures the handle is closed (the original leaked it).
        with open(readme_path) as handler:
            return handler.read()
    except IOError:
        return None
def read_version():
    """Extract ``__version__`` from pyplanet/__init__.py without importing it."""
    init_path = os.path.join(os.path.dirname(__file__), 'pyplanet', '__init__.py')
    with open(init_path) as handler:
        contents = handler.read()
    return re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M).group(1)
def read_requirements(filename):
    """Return requirement lines from *filename* (relative to this file).

    Comment lines (``#``), pip option lines (``-``) and blank lines are
    skipped; trailing newlines are preserved on the kept lines.
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path, 'r') as handler:
        lines = handler.readlines()
    return [
        line for line in lines
        if not line.startswith('#') and not line.startswith('-') and len(line) > 1
    ]
# Packages and directories that must not ship in the distribution
# (templates, docs, virtualenvs, tests, sample apps/settings).
EXCLUDE_FROM_PACKAGES = [
    'pyplanet.bin',
    'pyplanet.conf.app_template',
    'pyplanet.conf.project_template',
    'docs*',
    'env*',
    'tests*',
    'apps*',
    'settings*',
]
PKG = 'pyplanet'
######
setup(
    name=PKG,
    version=read_version(),
    description='Maniaplanet Server Controller',
    long_description=long_description(),
    keywords='maniaplanet, controller, plugins, apps',
    license='GNU General Public License v3 (GPLv3)',
    packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
    # NOTE(review): setuptools expects dotted package names as package_data
    # keys ('pyplanet.tests'), not slash-separated paths — confirm this key
    # actually matches anything.
    package_data={
        'pyplanet/tests': ['pyplanet/tests/**.txt', 'pyplanet/tests/**.json', 'pyplanet/tests/**.xml', 'pyplanet/tests/**.j2']
    },
    install_requires=read_requirements('requirements.txt'),
    # tests_require is deprecated in modern setuptools but kept for compatibility.
    tests_require=read_requirements('requirements-dev.txt'),
    extras_require={},
    test_suite='tests',
    include_package_data=True,
    scripts=['pyplanet/bin/pyplanet'],
    entry_points={'console_scripts': [
        'pyplanet = pyplanet.core.management:execute_from_command_line',
    ]},
    author='Tom Valk',
    author_email='tomvalk@lt-box.info',
    url='http://pypla.net/',
    classifiers=[
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Development Status :: 4 - Beta',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3 :: Only',
        'Operating System :: OS Independent',
        'Topic :: Internet',
        'Topic :: Software Development',
        'Topic :: Games/Entertainment',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Intended Audience :: System Administrators',
        'Intended Audience :: Developers',
    ],
    zip_safe=False,
)
| 15009199/PyPlanet-F8-F9-rebind | setup.py | setup.py | py | 2,404 | python | en | code | null | github-code | 13 |
17297281267 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth import authenticate, login,logout
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from rest_framework import viewsets
from .models import *
from .serializers import *
from django.shortcuts import render
from .forms import *
# Create your views here.
class EmployeeViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoints for Employee records."""
    queryset = Employee.objects.all()
    serializer_class = EmployeeSerializer
class CompanyViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoints for Company records."""
    queryset = Company.objects.all()
    serializer_class = CompanySerializer
class LocationViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoints for Location records."""
    queryset = Location.objects.all()
    serializer_class = LocationSerializer
def home(request):
    """Render the landing/search page."""
    return render(request, "search.html" )
def logout1(request):
    """Log the current user out and redirect to the landing page."""
    logout(request)
    return HttpResponseRedirect('/')
def elogin(request):
    """Log a user in by username, falling back to their e-mail address.

    A ``next`` query parameter (if present) is remembered in the session and
    used as the post-login redirect target.  On failure, the login form is
    re-rendered with an error message.
    """
    errmsg = ""
    next = ""
    if request.GET:
        # .get() instead of ['next']: other query params must not raise KeyError.
        next = request.GET.get('next', '')
        request.session['next'] = next
    if request.method == 'POST':
        next = request.session.get('next', {})
        username = request.POST['username']
        password = request.POST['password']
        user2 = None
        try:
            user2 = User.objects.get(username=username)
        except User.DoesNotExist:
            # Unknown username: the e-mail fallback below is simply skipped.
            pass
        user = authenticate(username=username, password=password)
        if user is None and user2 is not None:
            # The credential may have been the account's e-mail address.
            user = authenticate(username=user2.email, password=password)
        if user is not None:
            login(request, user)
            if len(next) > 0:
                return HttpResponseRedirect(next)
            return HttpResponseRedirect("/")
        else:
            errmsg = "Error in username and password combination"
    form1 = LoginForm()
    return render(request, "login.html", {'form': form1, 'errmsg': errmsg})
def register(request):
    """Handle self-registration: validate the form, create the user, log in.

    Authenticated users are redirected away; on validation errors the bound
    form (with its error messages) is rendered again.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect(home)
    registration_form = RegistrationForm()
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            fields = ('username', 'email', 'password1',
                      'first_name', 'last_name', 'company')
            datas = {name: form.cleaned_data[name] for name in fields}
            u = form.save(datas)  # Save the user and his profile
            login(request, u)
            return HttpResponseRedirect('/')
        else:
            registration_form = form  # Display form with error messages (incorrect fields, etc)
    return render(request, 'register.html', {'form': registration_form})
@login_required
def addemployee(request):
    """Create an Employee tied to the requesting user's company."""
    if request.method == 'POST':
        form = EmployeeForm(request.POST)
        # Guard: calling save() on an unvalidated form raises ValueError.
        if form.is_valid():
            emp = form.save(commit=False)
            # NOTE(review): .first() returns None when the user has no
            # CompanyUser row, which would raise AttributeError here —
            # confirm every logged-in user is linked to a company.
            emp.company = CompanyUser.objects.filter(user=request.user).first().company
            emp.save()
            return HttpResponseRedirect('/addemployee')
    else:
        form = EmployeeForm()
    # GET, or POST with validation errors: show the (possibly bound) form.
    return render(request, "add_employee.html", {'form': form})
@login_required
def addjob(request):
    """Create a Job associated with the requesting user's company."""
    if request.method == 'POST':
        form = JobForm(request.POST)
        # Guard: calling save() on an unvalidated form raises ValueError.
        if form.is_valid():
            job = form.save(commit=False)
            # NOTE(review): .first() returns None when the user has no
            # CompanyUser row — confirm every logged-in user is linked
            # to a company before relying on .company here.
            job.associated_company = CompanyUser.objects.filter(user=request.user).first().company
            job.save()
            return HttpResponseRedirect('/addjob')
    else:
        form = JobForm()
    # GET, or POST with validation errors: show the (possibly bound) form.
    return render(request, "add_job.html", {'form': form})
| sayok88/magical_task | apps/app1/views.py | views.py | py | 3,918 | python | en | code | 0 | github-code | 13 |
10945448738 | import pytest
from pynamodb.pagination import RateLimiter
class MockTime():
    """Deterministic stand-in for the `time` module used by RateLimiter tests."""

    def __init__(self):
        self.current_time = 0.0

    def increment_time(self, amount):
        """Advance the fake clock by *amount* seconds."""
        self.current_time = self.current_time + amount

    def sleep(self, amount):
        # RateLimiter calls time.sleep(); sleeping just advances the clock.
        self.increment_time(amount)

    def time(self):
        """Return the current fake timestamp."""
        return self.current_time
def test_rate_limiter_exceptions():
    """Zero or negative rate limits are rejected, at construction and assignment."""
    with pytest.raises(ValueError):
        r = RateLimiter(0)
    with pytest.raises(ValueError):
        r = RateLimiter(-1)
    with pytest.raises(ValueError):
        r = RateLimiter(10)
        r.rate_limit = 0
    with pytest.raises(ValueError):
        r = RateLimiter(10)
        r.rate_limit = -1
def test_basic_rate_limiting():
    """At 0.1 ops/s, each consumed unit forces a ~10 s wait before the next acquire."""
    mock_time = MockTime()
    r = RateLimiter(0.1, mock_time)
    # 100 operations
    for i in range(0, 100):
        r.acquire()
        # Simulates an operation that takes 1 second
        mock_time.increment_time(1)
        r.consume(1)
    # Since the first acquire doesn't take time, thus we should be expecting (100-1) * 10 seconds = 990 delay
    # plus 1 for the last increment_time(1) operation
    assert mock_time.time() == 991.0
def test_basic_rate_limiting_small_increment():
    """Operations shorter than the rate window still pay the full 10 s spacing."""
    mock_time = MockTime()
    r = RateLimiter(0.1, mock_time)
    # 100 operations
    for i in range(0, 100):
        r.acquire()
        # Simulates an operation that takes 2 second
        mock_time.increment_time(2)
        r.consume(1)
    # Since the first acquire doesn't take time, thus we should be expecting (100-1) * 10 seconds = 990 delay
    # plus 2 for the last increment_time(2) operation
    assert mock_time.time() == 992.0
def test_basic_rate_limiting_large_increment():
    """Operations longer than the rate window incur no extra sleep at all."""
    mock_time = MockTime()
    r = RateLimiter(0.1, mock_time)
    # 100 operations
    for i in range(0, 100):
        r.acquire()
        # Simulates an operation that takes 2 second
        mock_time.increment_time(11)
        r.consume(1)
    # The operation takes longer than the minimum wait, so rate limiting should have no effect
    assert mock_time.time() == 1100.0
| pynamodb/PynamoDB | tests/test_pagination.py | test_pagination.py | py | 2,078 | python | en | code | 2,311 | github-code | 13 |
16788061574 |
import weakref
import math
import numpy as np
import py_trees
import shapely
import carla
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.timer import GameTime
from srunner.scenariomanager.traffic_events import TrafficEvent, TrafficEventType
from srunner.scenariomanager.scenarioatomics.atomic_criteria import Criterion
class RouteCompletionTest(Criterion):
    """
    Check at which stage of the route is the actor at each tick
    Important parameters:
    - actor: CARLA actor to be used for this test
    - route: Route to be checked
    - terminate_on_failure [optional]: If True, the complete scenario will terminate upon failure of this test
    """
    DISTANCE_THRESHOLD = 10.0  # meters
    WINDOWS_SIZE = 2
    def __init__(self, actor, route, name="RouteCompletionTest", terminate_on_failure=False):
        """
        Precompute the cumulative distance along the route and register
        the ROUTE_COMPLETION traffic event that update() keeps refreshed.
        """
        super(RouteCompletionTest, self).__init__(name, actor, 100, terminate_on_failure=terminate_on_failure)
        self.logger.debug("%s.__init__()" % (self.__class__.__name__))
        self._actor = actor
        self._route = route
        self._map = CarlaDataProvider.get_map()
        self._wsize = self.WINDOWS_SIZE
        self._current_index = 0
        self._route_length = len(self._route)
        self._waypoints, _ = zip(*self._route)
        # NOTE(review): the "destination" is the 5th-from-last waypoint,
        # presumably to tolerate route-end trimming — confirm intent.
        self.target = self._waypoints[-5]
        # _accum_meters[i] = distance along the route up to waypoint i.
        self._accum_meters = []
        prev_wp = self._waypoints[0]
        for i, wp in enumerate(self._waypoints):
            d = wp.distance(prev_wp)
            if i > 0:
                accum = self._accum_meters[i - 1]
            else:
                accum = 0
            self._accum_meters.append(d + accum)
            prev_wp = wp
        self._traffic_event = TrafficEvent(event_type=TrafficEventType.ROUTE_COMPLETION)
        self.list_traffic_events.append(self._traffic_event)
        self._percentage_route_completed = 0.0
    def update(self):
        """
        Check if the actor location is within trigger region
        """
        new_status = py_trees.common.Status.RUNNING
        location = CarlaDataProvider.get_location(self._actor)
        if location is None:
            return new_status
        if self._terminate_on_failure and (self.test_status == "FAILURE"):
            new_status = py_trees.common.Status.FAILURE
        elif self.test_status == "RUNNING" or self.test_status == "INIT":
            # Only look at a small window of upcoming waypoints each tick.
            for index in range(self._current_index, min(self._current_index + self._wsize + 1, self._route_length)):
                # Get the dot product to know if it has passed this location
                ref_waypoint = self._waypoints[index]
                wp = self._map.get_waypoint(ref_waypoint)
                wp_dir = wp.transform.get_forward_vector()  # Waypoint's forward vector
                wp_veh = location - ref_waypoint  # vector waypoint - vehicle
                dot_ve_wp = wp_veh.x * wp_dir.x + wp_veh.y * wp_dir.y + wp_veh.z * wp_dir.z
                if dot_ve_wp > 0:
                    # good! segment completed!
                    self._current_index = index
                    self._percentage_route_completed = 100.0 * float(self._accum_meters[self._current_index]) \
                        / float(self._accum_meters[-1])
                    self._traffic_event.set_dict({
                        'route_completed': self._percentage_route_completed})
                    self._traffic_event.set_message(
                        "Agent has completed > {:.2f}% of the route".format(
                            self._percentage_route_completed))
            # Success requires both near-complete progress and physical
            # proximity to the target waypoint.
            if self._percentage_route_completed > 95.0 and location.distance(self.target) < self.DISTANCE_THRESHOLD:
                route_completion_event = TrafficEvent(event_type=TrafficEventType.ROUTE_COMPLETED)
                route_completion_event.set_message("Destination was successfully reached")
                self.list_traffic_events.append(route_completion_event)
                self.test_status = "SUCCESS"
                self._percentage_route_completed = 100
        elif self.test_status == "SUCCESS":
            new_status = py_trees.common.Status.SUCCESS
        self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
        return new_status
    def terminate(self, new_status):
        """
        Set test status to failure if not successful and terminate
        """
        self.actual_value = round(self._percentage_route_completed, 2)
        if self.test_status == "INIT":
            self.test_status = "FAILURE"
        super(RouteCompletionTest, self).terminate(new_status)
class CollisionTest(Criterion):
    """
    This class contains an atomic test for collisions.
    Args:
    - actor (carla.Actor): CARLA actor to be used for this test
    - other_actor (carla.Actor): only collisions with this actor will be registered
    - other_actor_type (str): only collisions with actors including this type_id will count.
        Additionally, the "miscellaneous" tag can also be used to include all static objects in the scene
    - terminate_on_failure [optional]: If True, the complete scenario will terminate upon failure of this test
    - optional [optional]: If True, the result is not considered for an overall pass/fail result
    """
    MIN_AREA_OF_COLLISION = 3  # If closer than this distance, the collision is ignored
    MAX_AREA_OF_COLLISION = 5  # If further than this distance, the area is forgotten
    MAX_ID_TIME = 5  # Amount of time the last collision if is remembered
    def __init__(self, actor, other_actor=None, other_actor_type=None,
                 optional=False, name="CollisionTest", terminate_on_failure=False):
        """
        Construction with sensor setup: a collision sensor is spawned and
        attached to the actor; its callback holds only a weakref to self so
        the sensor does not keep this criterion alive.
        """
        super(CollisionTest, self).__init__(name, actor, 0, None, optional, terminate_on_failure)
        self.logger.debug("%s.__init__()" % (self.__class__.__name__))
        world = self.actor.get_world()
        blueprint = world.get_blueprint_library().find('sensor.other.collision')
        self._collision_sensor = world.spawn_actor(blueprint, carla.Transform(), attach_to=self.actor)
        self._collision_sensor.listen(lambda event: self._count_collisions(weakref.ref(self), event))
        self.other_actor = other_actor
        self.other_actor_type = other_actor_type
        self.registered_collisions = []
        self.last_id = None
        self.collision_time = None
    def update(self):
        """
        Check collision count
        """
        new_status = py_trees.common.Status.RUNNING
        if self._terminate_on_failure and (self.test_status == "FAILURE"):
            new_status = py_trees.common.Status.FAILURE
        actor_location = CarlaDataProvider.get_location(self.actor)
        new_registered_collisions = []
        # Loops through all the previous registered collisions
        for collision_location in self.registered_collisions:
            # Get the distance to the collision point
            distance_vector = actor_location - collision_location
            distance = math.sqrt(math.pow(distance_vector.x, 2) + math.pow(distance_vector.y, 2))
            # If far away from a previous collision, forget it
            if distance <= self.MAX_AREA_OF_COLLISION:
                new_registered_collisions.append(collision_location)
        self.registered_collisions = new_registered_collisions
        # Forget the last collided actor id after MAX_ID_TIME seconds.
        if self.last_id and GameTime.get_time() - self.collision_time > self.MAX_ID_TIME:
            self.last_id = None
        self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
        return new_status
    def terminate(self, new_status):
        """
        Cleanup sensor
        """
        if self._collision_sensor is not None:
            self._collision_sensor.destroy()
            self._collision_sensor = None
        super(CollisionTest, self).terminate(new_status)
    @staticmethod
    def _count_collisions(weak_self, event):     # pylint: disable=too-many-return-statements
        """
        Callback to update collision count
        """
        self = weak_self()
        if not self:
            # Criterion already garbage-collected; nothing to record.
            return
        actor_location = CarlaDataProvider.get_location(self.actor)
        # Ignore the current one if it is the same id as before
        if self.last_id == event.other_actor.id:
            return
        # Filter to only a specific actor
        if self.other_actor and self.other_actor.id != event.other_actor.id:
            return
        # Filter to only a specific type
        if self.other_actor_type:
            if self.other_actor_type == "miscellaneous":
                if "traffic" not in event.other_actor.type_id \
                        and "static" not in event.other_actor.type_id:
                    return
            else:
                if self.other_actor_type not in event.other_actor.type_id:
                    return
        # Ignore it if its too close to a previous collision (avoid micro collisions)
        for collision_location in self.registered_collisions:
            distance_vector = actor_location - collision_location
            distance = math.sqrt(math.pow(distance_vector.x, 2) + math.pow(distance_vector.y, 2))
            if distance <= self.MIN_AREA_OF_COLLISION:
                return
        # Classify the collision by the other actor's type_id.
        if ('static' in event.other_actor.type_id or 'traffic' in event.other_actor.type_id) \
                and 'sidewalk' not in event.other_actor.type_id:
            actor_type = TrafficEventType.COLLISION_STATIC
        elif 'vehicle' in event.other_actor.type_id:
            actor_type = TrafficEventType.COLLISION_VEHICLE
        elif 'walker' in event.other_actor.type_id:
            actor_type = TrafficEventType.COLLISION_PEDESTRIAN
        else:
            return
        collision_event = TrafficEvent(event_type=actor_type)
        collision_event.set_dict({
            'type': event.other_actor.type_id,
            'id': event.other_actor.id,
            'x': actor_location.x,
            'y': actor_location.y,
            'z': actor_location.z})
        collision_event.set_message(
            "Agent collided against object with type={} and id={} at (x={}, y={}, z={})".format(
                event.other_actor.type_id,
                event.other_actor.id,
                round(actor_location.x, 3),
                round(actor_location.y, 3),
                round(actor_location.z, 3)))
        self.test_status = "FAILURE"
        self.actual_value += 1
        self.collision_time = GameTime.get_time()
        self.registered_collisions.append(actor_location)
        self.list_traffic_events.append(collision_event)
        # Number 0: static objects -> ignore it
        if event.other_actor.id != 0:
            self.last_id = event.other_actor.id
| liuyuqi123/ComplexUrbanScenarios | test/scenario_runner/srunner/drl_code/scenario_utils/atomic_criteria_fixed.py | atomic_criteria_fixed.py | py | 10,927 | python | en | code | 37 | github-code | 13 |
17587494512 | from tkinter import *
# Root window setup
root = Tk()
# NOTE: the triple-quoted block below is commented-out demo code kept by the
# original author (dynamic StringVar, frame and label experiments). It is a
# bare string expression, evaluated and discarded at import time.
"""
# variables dinamicas
texto = StringVar()
texto.set("Un nuevo texto")
#Configuracion de un marco
#frame = Frame(root, width=480, height=320)
#frame.pack()
#label = Label(root, text="Hola Mundo")
#label.place(x=500, y=500) # x=0 y = 0
#label.pack()
Label(root, text="Hola Mundo").pack(anchor="nw")
label = Label(root, text="Otra etiqueta")
label.pack(anchor="center")
label.config(bg="green", fg="blue", font=("Verdana",24))
label.config(textvariable=texto)
Label(root, text="Ultima etiqueta").pack(anchor="se")
"""
# Show the image in a borderless label docked on the left
imagen = PhotoImage(file="./imagen.gif")
Label(root, image=imagen, bd=0).pack(side="left")
# Finally, the application's main loop
root.mainloop() | irwinet/app_python3 | Fase 4 - Temas avanzados/Tema 13 - Interfaces graficas con tkinter/label.py | label.py | py | 729 | python | es | code | 0 | github-code | 13 |
37984908992 | import dataclasses
import time
import rospy
from droneresponse_mathtools import Lla
from mavros_msgs.msg import RCIn
from .Drone import FlightMode
from .sensor import SensorData
def is_data_available(data: SensorData) -> bool:
    """Return True once every sensor field (except geofence) has a message."""
    readings = dataclasses.asdict(data)
    # Geofence not needed to run tests
    del readings["geofence"]
    # `is not None` rather than `!= None`: messages may override __eq__.
    return all(msg is not None for msg in readings.values())
def is_armed(data: SensorData) -> bool:
    """True when the flight controller reports the motors armed."""
    return data.state.armed
def is_disarmed(data: SensorData) -> bool:
    """True when the motors are not armed."""
    return not data.state.armed
def is_loiter_mode(data: SensorData) -> bool:
    """True when the flight controller is in LOITER mode."""
    return data.state.mode == FlightMode.LOITER.value
def is_posctl_mode(data: SensorData) -> bool:
    """True when the flight controller is in POSCTL mode."""
    return data.state.mode == FlightMode.POSCTL.value
def is_takeoff_mode(data: SensorData) -> bool:
    """True when the flight controller is in TAKEOFF mode."""
    return data.state.mode == FlightMode.TAKEOFF.value
def is_offboard_mode(data: SensorData) -> bool:
    """True when the flight controller is in OFFBOARD mode."""
    return data.state.mode == FlightMode.OFFBOARD.value
def is_takeoff_or_offboard_mode(data: SensorData) -> bool:
    """True during takeoff or offboard control (autonomous flight phases)."""
    return is_takeoff_mode(data) or is_offboard_mode(data)
def make_func_is_alt_reached(alt: float, threshold: float = 1.00):
    """Build a predicate that is True once relative altitude is within
    *threshold* meters of *alt*."""
    def is_takeoff_alt_reached(data: SensorData):
        return abs(alt - data.relative_altitude.data) < threshold
    return is_takeoff_alt_reached
def make_func_is_drone_at_target(target_wgs84: Lla,
                                 threshold_distance_meters: float = 1.25):
    """Build a predicate that is True once the drone is within
    *threshold_distance_meters* of *target_wgs84* (WGS-84)."""
    def is_arrived(data: SensorData):
        pos = data.position
        here = Lla(pos.latitude, pos.longitude, pos.altitude)
        return here.distance(target_wgs84) < threshold_distance_meters
    return is_arrived
def is_on_ground(data: SensorData) -> bool:
    """True when the flight controller reports the vehicle landed."""
    # this constant comes from
    # http://docs.ros.org/en/api/mavros_msgs/html/msg/ExtendedState.html
    _LANDED_STATE_ON_GROUND = 1
    return data.extended_state.landed_state == _LANDED_STATE_ON_GROUND
def is_off_ground(data: SensorData) -> bool:
    """True when the vehicle is airborne (in-air, takeoff or landing states)."""
    # _LANDED_STATE_ON_GROUND comes from
    # http://docs.ros.org/en/api/mavros_msgs/html/msg/ExtendedState.html
    _LANDED_STATE_ON_GROUND = 1
    return data.extended_state.landed_state > _LANDED_STATE_ON_GROUND
def is_inside_geofence(data: SensorData) -> bool:
    """True when the current (lat, lon) position lies inside the geofence polygon."""
    geofence = data.geofence
    # Geofence waypoints carry latitude in x_lat and longitude in y_long.
    coordinates = [(w.x_lat, w.y_long) for w in geofence.waypoints]
    pos = data.position
    return inside_polygon(len(coordinates), coordinates, (pos.latitude, pos.longitude))
def inside_polygon(num_vertices: int, polygon, location) -> bool:
    """Ray-casting point-in-polygon test.

    Args:
        num_vertices: number of polygon vertices to consider.
        polygon: sequence of (lat, lon) vertex pairs.
        location: (lat, lon) point to test.

    Returns:
        True when the point is inside the polygon. Points exactly on an
        edge may be classified either way (standard ray-casting caveat).
    """
    test_lat, test_lon = location
    lat = [p[0] for p in polygon]
    lon = [p[1] for p in polygon]
    inside = False
    j = num_vertices - 1  # index of the previous vertex (wraps around)
    for i in range(num_vertices):
        # Edge (j, i) straddles the horizontal line through test_lat, and the
        # crossing point lies to the east of the test point -> toggle parity.
        if ((lat[i] > test_lat) != (lat[j] > test_lat)) and (
            test_lon < (lon[j] - lon[i]) * (test_lat - lat[i]) / (lat[j] - lat[i]) + lon[i]
        ):
            inside = not inside
        j = i
    return inside
def is_user_ready_to_start(data: SensorData) -> bool:
    """If this function returns True then the user has indicated (with the RC transmitter) that
    they are ready to start the test.
    """
    if data.rcin is None:
        # No RC input received yet — cannot be ready.
        user_ready_logger.log("is_user_ready_to_start: data.rcin is None")
        return False
    chan5_raw = get_rc_channel_value(data.rcin, 5)
    chan8_raw = get_rc_channel_value(data.rcin, 8)
    # PWM windows: channel 5 selects flight mode, channel 8 the return switch.
    is_chan5_ok = 1320 <= chan5_raw and chan5_raw <= 1480  # offboard mode
    is_chan8_ok = chan8_raw < 1500  # not in return mode
    user_ready_logger.log(f"is_user_ready_to_start: channel 5 = {chan5_raw} is_chan5_ok = {is_chan5_ok} channel 8 = {chan8_raw} is_chan8_ok = {is_chan8_ok}")
    return is_chan5_ok and is_chan8_ok
def is_user_taking_control(data: SensorData) -> bool:
    """If this function returns True, then the user is trying to take control with the RC
    transmitter
    """
    if data.state is not None:
        # if the flight controller enters these modes, it means the user is trying to take control
        user_control_modes = [
            FlightMode.STABILIZED.value,
            FlightMode.ALTCTL.value,
            FlightMode.POSCTL.value,
            FlightMode.RTL.value,
        ]
        if data.state.mode in user_control_modes:
            rospy.logfatal(f"The mode is {data.state.mode}, did someone take control?")
            return True
    if data.rcin is not None:
        # Channel 5 PWM in this window corresponds to the Land switch position.
        chan5_raw = get_rc_channel_value(data.rcin, 5)
        return 1160 <= chan5_raw and chan5_raw <= 1320  # Land mode
    return False
def get_rc_channel_value(rcin: RCIn, channel_number):
    """Return the PWM value of a 1-based RC channel from the RCIn message.

    Args:
        rcin: the RC inputs from mavros.
        channel_number: the 1-based channel to read.
    """
    # Channels are numbered from 1, but the message array is 0-based.
    return rcin.channels[channel_number - 1]
class DebounceLogger:
    """Rate-limits repeated log messages: each distinct message is emitted
    at most once per *wait* seconds."""

    def __init__(self, wait=0.75):
        self.last_time = None
        self.wait = wait
        self.log_messages = {}  # message -> monotonic time it was last logged

    def log(self, msg: str):
        now = time.monotonic()
        # Default to "long enough ago" so a never-seen message always logs.
        previous = self.log_messages.get(msg, now - 2 * self.wait)
        if previous + self.wait < now:
            self.log_messages[msg] = now
            rospy.loginfo(msg)
user_ready_logger = DebounceLogger() | DroneResponse/hardware-tests | src/dr_hardware_tests/flight_predicate.py | flight_predicate.py | py | 5,341 | python | en | code | 0 | github-code | 13 |
12811267793 | # File : word_frequency.py
# Author : 임현 (hyunzion@gmail.com)
# Since : 2018 - 06 - 06
import re  # regular expressions

from collections import Counter

# Word-frequency table for the SMS spam data.
frequency = {}

# Read the spam data and lower-case it so the regex below stays simple.
# `with` guarantees the file handle is closed (the original leaked it).
with open('spam_sms.txt', 'r') as spam_data:
    data_string = spam_data.read().lower()

# Words of 3 to 20 ASCII letters.
match_pattern = re.findall(r'\b[a-z]{3,20}\b', data_string)

# Count occurrences of each word. Counter preserves first-seen insertion
# order, exactly like the original dict-based counting loop.
frequency = Counter(match_pattern)

# Running total of occurrences among the frequent words.
cnt = 0

# Print only words that occur more than 70 times.
for words in frequency.keys():
    if frequency.get(words) > 70:
        print(words, frequency[words])  # each word and its count
        cnt = cnt + frequency[words]

# Print the total number of occurrences.
print(cnt)
| HyunIm/Sangmyung_University | 2018년도 1학기/통계, 유훈 교수님/기말고사_대체 과제/스팸 필터링/임현/1_Materials/3_Python/word_frequency.py | word_frequency.py | py | 975 | python | ko | code | 4 | github-code | 13 |
71006328977 | import numpy as np
import random
import math
import agents.evaluator.subsquares as subsquares
def showVector(v, dec):
    """Print the elements of *v* on one line, each with *dec* decimals.

    Non-negative values get a leading space so their digits align with
    negative values, whose '-' sign occupies that column.
    """
    fmt = "%%.%df" % dec  # e.g. dec=4 -> "%.4f"
    for x in v:
        if x >= 0.0:
            print(' ', end='')
        print(fmt % x + ' ', end='')
class NeuralNetwork:
    """Fully-connected feed-forward network used as a checkers board evaluator.

    Weights and biases can be flattened to / rebuilt from a single 1-D
    coefficient vector (getAllCoefficents / loadCoefficents), which is how an
    evolutionary trainer mutates the network.
    """
    # NOTE(review): the mutable default list is shared across calls; callers
    # always pass their own layer list in practice, but confirm before relying.
    def __init__(self, layer_list=[32,40,10,1]):
        self.layer_size = layer_list
        self.NumberOfLayers = len(self.layer_size)
        self.NumberOfHiddenLayers = self.NumberOfLayers - 2
        self.layers = []
        self.weights = []
        self.biases = []
        self.lenCoefficents = 0
        self.rebuildCoefficents = None
        # NOTE(review): np.random.seed() returns None, so self.rnd is always
        # None; this only reseeds the global RNG.
        self.rnd = np.random.seed()
        # initiate layers
        self.initiateLayers()
        self.initiateWeights()
        self.initiateBiases()
    def initiateLayers(self):
        """Allocate one zeroed activation vector per layer."""
        for i in self.layer_size:
            nodes = np.zeros(shape=[i], dtype=np.float32)
            self.layers.append(nodes)
    def initiateWeights(self):
        """Create a random (in x out) weight matrix for each layer pair."""
        for i in range(self.NumberOfLayers-1):
            inputNodes = self.layer_size[i]
            outputNodes = self.layer_size[i+1]
            # increment the number of coefficents
            self.lenCoefficents += inputNodes * outputNodes
            weights = np.random.random_sample([inputNodes,outputNodes])
            weights = self.normaliseVectors(weights)
            self.weights.append(weights)
    def initiateBiases(self):
        """Create a random bias vector for every non-input layer."""
        for i in range(self.NumberOfLayers-1):
            biasNodes = self.layer_size[i+1]
            self.lenCoefficents += biasNodes
            biases = np.random.random_sample(biasNodes)
            biases = self.normaliseVectors(biases)
            self.biases.append(biases)
    def getAllCoefficents(self):
        """Flatten all weights then all biases into one 1-D vector."""
        self.ravel = np.array([])
        # ravel weights
        for i in self.weights:
            ting = np.ravel(i)
            self.ravel = np.hstack((self.ravel,ting))
        # ravel biases
        for i in self.biases:
            ting = np.ravel(i)
            self.ravel = np.hstack((self.ravel,ting))
        return self.ravel
    def loadCoefficents(self, ravelled):
        """Rebuild weights and biases from a vector made by getAllCoefficents.

        Raises:
            ValueError: if *ravelled* has the wrong length for this topology.
        """
        if len(ravelled) != self.lenCoefficents:
            raise ValueError('The number of coefficents do not match.')
        # calculate number of weights to split array from
        totalNumWeights = 0
        for i in self.weights:
            totalNumWeights += i.shape[0] * i.shape[1]
        # rebuild weights
        weights = ravelled[:totalNumWeights]
        weight_inc = 0
        for i in range(len(self.weights)):
            # get the dimensions of i
            resolution = self.weights[i].shape[0] * self.weights[i].shape[1]
            sub_weight = weights[weight_inc:weight_inc+resolution]
            splitter = np.split(sub_weight, self.weights[i].shape[0])
            splitter = np.matrix(splitter)
            self.weights[i] = splitter
            weight_inc += resolution
        # rebuild biases
        biases = ravelled[totalNumWeights:]
        biases_inc = 0
        for i in range(len(self.biases)):
            resolution = self.biases[i].shape[0]
            sub_biases = biases[biases_inc:biases_inc+resolution]
            biases_inc += resolution
            self.biases[i] = sub_biases
        return True
    def compute(self, x):
        """Forward-pass *x* through the network; returns a scalar when the
        output layer has a single node, otherwise the output activations."""
        sums = []
        # initate placeholders to compute results.
        for i in range(self.NumberOfLayers-1):
            holder = np.zeros(shape=[self.layer_size[n+1]], dtype=np.float32) if False else np.zeros(shape=[self.layer_size[i+1]], dtype=np.float32)
            sums.append(holder)
        # assign input values to input layer
        self.layers[0] = x
        # compute neural network propagation for hidden layers
        for n in range(len(sums)):
            # compute weight addition
            # NOTE(review): each j iteration overwrites sums[n] before adding
            # biases[n][j], so only the final bias value survives, broadcast
            # across the whole layer — likely intended sums[n] += biases[n].
            # Left untouched because saved coefficient sets depend on it.
            for j in range(self.layer_size[n+1]):
                if n == 0:
                    sums[n] = np.array([self.layers[n]]).dot(self.weights[n])
                else:
                    sums[n] = sums[n-1].dot(self.weights[n])
                # check if output layer so we can feed the sum of the input layer directly
                if n == len(sums)-1:
                    # on output layer
                    if self.layers[0].size == 91:
                        sums[n] = sums[n] + self.layers[0][-1]*32
                    else:
                        sums[n] = sums[n] + np.sum(self.layers[0])
                # add biases
                sums[n] += self.biases[n][j]
            # perform nonlinear_function if we're not computing the final layer
            self.layers[n+1] = self.nonlinear_function(sums[n])
        flatten = self.layers[self.NumberOfLayers-1].flatten()
        if flatten.size == 1:
            return flatten[0]
        else:
            return self.layers[self.NumberOfLayers-1]
    @staticmethod
    def subsquares(x):
        """
        Calculates 3x3 to 8x8 set of subsquares on the checkerboard.
        """
        return subsquares.subsquares(x)
    @staticmethod
    def normaliseVectors(vector):
        # normalise to a range from -0.2 to 0.2
        return (vector-0.5) * 0.4
        # normalise to a range from -1 to 1
        # return (vector-0.5) * 2
    def nonlinear_function(self,val):
        # tanh/sigmoid
        return self.tanh(val)
        # return self.crelu(val)
        # return self.relu(val)
    @staticmethod
    def tanh(val):
        return np.tanh(val)
    @staticmethod
    def relu(x):
        # rectifier method; it turns out that this is not very effective at all.
        # NOTE: mutates its argument in place.
        x[x<0] =0
        return x
    @staticmethod
    def crelu(x):
        # linear cap from -1
        # NOTE: mutates its argument in place.
        x[x<-1] =-1
        return x
    @staticmethod
    def softmax(oSums):
        """
        Function to softmax output values.
        """
        # Subtracting the max is the standard numerical-stability trick.
        result = np.zeros(shape=[len(oSums)], dtype=np.float32)
        m = max(oSums)
        divisor = 0.0
        for k in range(len(oSums)):
            divisor += math.exp(oSums[k] - m)
        for k in range(len(result)):
            result[k] = math.exp(oSums[k] - m) / divisor
        return result
return result
if __name__ == "__main__":
    # Smoke test / micro-benchmark comparing the plain 32-input network with
    # the 91-input subsquare-preprocessed network on one checkerboard.
    # Insert checkerboard.
    x = np.array([1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1], dtype=np.float32)
    # standard neural network
    inputs = [32,40,10,1]
    nn = NeuralNetwork(inputs)
    # subsquare neural network
    subsq = [91,40,10,1]
    nn2 = NeuralNetwork(subsq)
    import datetime
    # print("Regular Neural Network")
    start = datetime.datetime.now().timestamp()
    yValues = nn.compute(x)
    print("RNN:",yValues)
    end = datetime.datetime.now().timestamp() - start
    # print("RNN Time:",end)
    x = nn.subsquares(x)
    # print("Subsquare Processed Neural Network")
    # NOTE(review): mu is captured but never used; end2 below measures from
    # `start`, so it includes the RNN pass — confirm whether `mu` was intended.
    mu = datetime.datetime.now().timestamp()
    # print(x.size)
    yValues = nn2.compute(x)
    print("SNN:",yValues)
    end2 = datetime.datetime.now().timestamp() - start
    # print("SNN Time:",end2)
    # print("\nOutput values are: ")
    # showVector(yValues, 4)
print("Time Multiplier:",end2/end) | thien/slowpoke | library/agents/evaluator/neural.py | neural.py | py | 6,288 | python | en | code | 1 | github-code | 13 |
43770684836 | import copy
# AoC 2020 day 19: match messages against a grammar of numbered rules.
# NOTE(review): `input` shadows the builtin input(); kept for compatibility.
input = 'input'
with open(input) as f:
    # The file has the rule block and the message block separated by a blank line.
    rules, messages_received = f.read().split('\n\n')
# Map rule number -> right-hand side, with quotes stripped from terminal rules.
rulemap = {}
for rule in rules.split("\n"):
    nr, value =rule.split(':')
    rulemap[nr] = value.strip().strip('"')
# Accumulates fully expanded terminal strings.
# NOTE(review): `all` shadows the builtin all(); gen/doit rely on this global.
all = []
def gen(message, messages):
    """Expand the first non-terminal symbol of `message`.

    For each alternative of that rule (split on '|'), a copy of `message`
    with the symbol replaced by the alternative's symbols is appended to
    `messages`.  Returns after expanding one symbol; terminal symbols are
    the literal characters 'a' and 'b'.
    """
    for i, item in enumerate(message):
        if item not in ['a', 'b']:
            parts = []
            p = rulemap[item]
            if '|' in p:
                parts=p.split('|')
            else:
                parts.append(p)
            for part in parts:
                # NOTE(review): part.strip() discards its result (strings are
                # immutable) — harmless here because split() below ignores
                # surrounding whitespace anyway.
                part.strip()
                mm = copy.copy(message)
                rules = part.split()
                rules.reverse()
                mm.pop(i)
                # Insert reversed so the symbols end up in original order.
                for rule in rules:
                    mm.insert(i, rule)
                messages.append(mm)
            return
def doit(messages):
    """Exhaustively expand every message on the work list.

    Fully terminal expansions (no rule numbers left) are collected into the
    module-level `all` list; partial ones are expanded further via gen().
    """
    while len(messages):
        message = messages.pop()
        digits=[s.isdigit() for s in message]
        if not any(digits):
            all.append(''.join(message))
        else:
            gen(message, messages)
# Enumerate every string matched by rule 31 and rule 42.  For this input each
# such string is exactly x = 8 characters long, so messages can be checked
# chunk-by-chunk instead of by full grammar matching.
m31 = [['31']]
doit(m31)
a31 = copy.copy(all)
s31 = set(a31)
all = []
m42 = [['42']]
doit(m42)
a42 = copy.copy(all)
s42 = set(a42)
x = 8
count=0
# Part 1: rule 0 = 8 11 = 42 42 31, i.e. exactly 3 chunks with more 42s than
# 31s and at least one 31, and all 31-chunks after the 42-chunks.
for message in messages_received.split('\n'):
    count42 = 0
    count31 = 0
    b=3
    for i in range(b):
        byte = message[i*x:(i+1) * x]
        if byte in s42 and count31 == 0:
            count42+= 1
        elif byte in s31:
            count31 += 1
    if count42 + count31 == b and count42>count31 and count31:
        count+=1
print("part1", count)
# Part 2: with the looping rules 8 and 11, a valid message is any number of
# 42-chunks followed by fewer (but at least one) 31-chunks.
count=0
for message in messages_received.split('\n'):
    # Bug fix: '/' yields a float on Python 3 and range(b) raised TypeError;
    # every chunk is exactly x characters, so integer division is correct.
    b = len(message) // x
    count42 = 0
    count31 = 0
    for i in range(b):
        byte = message[i*x:(i+1) * x]
        if byte in s42 and count31 == 0:
            count42+= 1
        elif byte in s31:
            count31 += 1
    if count42 + count31 == b and count42>count31 and count31:
        count+=1
print("part2", count)
| stehal/aoc2020 | day19/solution.py | solution.py | py | 2,016 | python | en | code | 0 | github-code | 13 |
5099903891 | from concurrent.futures import ThreadPoolExecutor
from pipeline.pipeline_processes.pipeline_process_interface import PipelineProcessInterface
from pipeline.pipeline_processes.load_file import LoadFile
from pipeline.global_params import GlobalParams
class EncodeData(PipelineProcessInterface):
    """Label-encodes the pipeline's data rows in parallel.

    Every distinct value from column FIRST_COLUMN_TO_ENCODE onwards is
    replaced by its index in a shared feature list, so equal values map to
    the same integer code across the whole dataset.
    """

    # Columns before this index are copied through unencoded (e.g. the label).
    FIRST_COLUMN_TO_ENCODE = 1

    @classmethod
    def execute(cls, global_params: GlobalParams):
        """Split the data across threads, encode each slice, and merge the results in order."""
        number_of_threads = global_params.number_of_threads
        input_array = global_params.data_to_process
        # Shared across all worker threads so codes stay globally consistent.
        # NOTE(review): list membership test + append + index is not atomic,
        # so two threads may race and produce duplicate codes — confirm the
        # pipeline tolerates this or add a lock.
        features = []
        results = []
        # The context manager shuts the executor down (the original leaked it),
        # and future.result() already blocks until completion, so the previous
        # busy-wait polling loop was redundant.
        with ThreadPoolExecutor(number_of_threads) as executor:
            futures = []
            for number_of_thread in range(number_of_threads):
                from_index = int(number_of_thread * len(input_array) / number_of_threads)
                to_index = int((number_of_thread + 1) * len(input_array) / number_of_threads)
                futures.append(executor.submit(cls.sub_encode_array, input_array[from_index:to_index], features))
            for future in futures:
                results += future.result()[0]
        global_params.data_to_process = results

    @classmethod
    def sub_encode_array(cls, input_array, features=None):
        """Recursively encode `input_array`; returns (encoded_array, features).

        Bug fix: `features` previously defaulted to a mutable list literal,
        which Python shares between every call that omits the argument; the
        None sentinel gives each such call a fresh list.
        """
        if features is None:
            features = []
        output_array = []
        for index, element in enumerate(input_array):
            if isinstance(element, list):
                # Nested rows are encoded with the same shared feature list.
                output_array.append(cls.sub_encode_array(element, features)[0])
            else:
                if index < EncodeData.FIRST_COLUMN_TO_ENCODE:
                    output_array.append(element)
                else:
                    if element not in features:
                        features.append(element)
                    output_array.append(features.index(element))
        return (output_array, features)
3769567163 | import pyttsx3
import datetime
import speech_recognition as sr
import wikipedia
import webbrowser
from selenium import webdriver
import os
import smtplib
# Text-to-speech engine shared by all helpers below.
engine = pyttsx3.init()
voices = engine.getProperty('voices')
# NOTE(review): pyttsx3's documented property name for selecting a voice is
# 'voice' (singular); setting 'voices' may silently have no effect — verify.
engine.setProperty('voices',voices[1].id)
#print(voices[1].id)
def speak(audio):
    """Speak `audio` aloud through the module-level pyttsx3 engine (blocks until done)."""
    engine.say(audio)
    engine.runAndWait()
def wishMe():
    """Greet the user according to the current hour, then introduce the assistant."""
    hour = datetime.datetime.now().hour
    if hour < 12:
        speak(" Good Morning Sir!")
    elif hour < 18:
        speak(" Good Afternoon Sir!")
    else:
        speak(" Good Evening Sir!")
    speak("I am Thaaanooos ! How may I help you?")
def takeCommand():
    """Listen on the microphone and return the recognized text.

    Returns the literal string "None" when recognition fails, so callers can
    safely call .lower() on the result either way.
    """
    recognizer = sr.Recognizer()
    with sr.Microphone() as source:
        print("Listening... ")
        audio = recognizer.listen(source)
    try:
        print("Recognizing...")
        query = recognizer.recognize_google(audio, language='en-in')
        print(f"User Said: {query}\n")
    except Exception as e:
        print(e)
        print("Say That Again Please...")
        return "None"
    return query
def sendEmail(to,content):
    """Send `content` to address `to` via Gmail SMTP.

    The credentials below are placeholders and must be replaced; Gmail also
    requires an app password for SMTP logins.
    """
    server = smtplib.SMTP('smtp.gmail.com',587)
    server.ehlo()
    # Upgrade the connection to TLS before sending credentials.
    server.starttls()
    server.login('YOURMAILID', 'YOURPASSWORD')
    server.sendmail('YOURMAILID',to,content)
    server.close()
if __name__ == '__main__':
    #speak("Akash is a good boy")
    wishMe()
    # Main command loop: listen, lower-case the transcript, dispatch on keywords.
    while True:
        query = takeCommand().lower()
        # logic for executing tasks
        if 'wikipedia' in query:
            speak('Searching wikipedia...')
            query = query.replace("wikipedia", "")
            results = wikipedia.summary(query, sentences = 2)
            speak("According to Wikipedia")
            print(results)
            speak(results)
        elif 'open youtube' in query:
            webbrowser.open("youtube.com")
        elif 'google' in query:
            webbrowser.open("google.com")
        elif 'stackoverflow' in query:
            webbrowser.open("stackoverflow.com")
        elif 'play some smooth music' in query:
            webbrowser.open("gaana.com/song/smooth-music")
        elif 'weather' in query:
            # NOTE(review): this branch switches from voice to typed input()
            # and uses the legacy selenium find_elements_by_class_name API
            # (removed in Selenium 4) — confirm the pinned selenium version.
            driver = webdriver.Firefox()
            city = str(input("Enter the name of the city you want the forecast for: "))
            driver.get("https://www.weather-forecast.com/locations/"+city+"/forecasts/latest")
            we = driver.find_elements_by_class_name("b-forecast__table-description-content")[0].text
            speak(we)
        elif 'the time' in query:
            strTime = datetime.datetime.now().strftime("%H:%M:%S")
            speak(f"Sir, the time is {strTime}")
        elif 'open text editor' in query:
            editor_path = "C:\\Program Files\\Sublime Text 3\\sublime_text.exe"
            os.startfile(editor_path)
        elif "email to harry" in query:
            try:
                speak("What should I say ?")
                content = takeCommand()
                # Placeholder recipient; replace before use.
                to = "HARRY'SMAILID"
                sendEmail(to,content)
                speak("Email has been sent")
            except Exception as e:
                print(e)
                speak(" I am afraid I could not perform the task successfully ")
        elif "thank you" in query:
            speak("It's my pleasure to help you sir!")
20758396578 | import psycopg2
from telegram import Update, Bot
from telegram.ext import CallbackContext
import logging
from aiogram import Bot, Dispatcher, types
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.state import State, StatesGroup
from states.base_state import CalculatorState, StatusForm
# Dispatcher that routes all handlers below.
# NOTE(review): the decorator-style handlers (dp.callback_query_handler /
# dp.message_handler) belong to aiogram v2, while Dispatcher() with no
# arguments matches aiogram v3 — confirm which aiogram version is pinned.
dp = Dispatcher()
# Calculator: order cost estimation
@dp.callback_query_handler(lambda query: query.data == 'calculator')
async def handle_calculator(query: types.CallbackQuery):
    """Start the cost calculator: prompt for a price and await the user's input."""
    prompt = "Расчет стоимости товара\n\nПожалуйста, введите цену:"
    await query.message.reply(prompt)
    # Move the FSM into the price-input state.
    await CalculatorState.price.set()
@dp.message_handler(state=CalculatorState.price)
async def process_price(message: types.Message, state: FSMContext):
    """Parse the entered price, reply with the total cost, and leave the state."""
    try:
        price = float(message.text)
    except ValueError:
        await message.reply("Неверный формат цены. Пожалуйста, введите действительную цену.")
        return
    # apply the currency rate and the commission
    exchange_rate = 11.5 # example value
    commission = 8 # example value
    total_cost = price * exchange_rate + commission
    await message.reply(f"Цена: {price}\nКурс юаня: {exchange_rate}\nКоммиссия: {commission}\nОбщая стоимость: {total_cost} рублей \n")
    # NOTE(review): this keyboard is built but never attached to a reply —
    # either dead code or it was meant to be passed as reply_markup above.
    keyboard = types.InlineKeyboardMarkup(row_width=2)
    buttons = [
        types.InlineKeyboardButton("🛍️ Розничная торговля", callback_data='checkout_retail'),
        types.InlineKeyboardButton("📦 Оптовая торговля", callback_data='checkout_wholesale'),
        types.InlineKeyboardButton("⬅️ Назад", callback_data='back'),
    ]
    keyboard.add(*buttons)
    await state.finish()
# Калькулятор расчет стоимости заказа
# Контакты
@dp.callback_query_handler(lambda query: query.data == 'contact')
async def handle_contact_manager(query: types.CallbackQuery):
    """Show the manager-contact instructions with a contact link and navigation buttons."""
    message_text = "Связь менеджером\n\n"
    message_text += "Убедительная просьба, не писать менеджеру по вопросам, которые есть в разделе «ответы на популярные вопросы». \n"
    message_text += " Пожалуйста, повторно прочтите информацию и может быть вы найдете ответ на свой вопрос.\n"
    message_text += "Менеджеру стоит писать в том случае, если у вас возникли вопросы после оформления заказа. \n\n"
    message_text += "Вы можете связаться с нами в Telegram.\n"
    message_text += "Аккаунт нашего менеджера : @admin021\n\n"
    keyboard = types.InlineKeyboardMarkup(row_width=2)
    # Bug fix: the "Open Telegram" button was previously added to a keyboard
    # object that was immediately overwritten, so it never reached the user.
    # Keep it on the keyboard that is actually sent.
    keyboard.add(types.InlineKeyboardButton("Open Telegram", url="https://t.me/chyngyz0411"))
    buttons = [
        types.InlineKeyboardButton("🛍️ Розничная торговля", callback_data='checkout_retail'),
        types.InlineKeyboardButton("📦 Оптовая торговля", callback_data='checkout_wholesale'),
        types.InlineKeyboardButton("⬅️ Назад", callback_data='back'),
    ]
    keyboard.add(*buttons)
    await query.message.reply(message_text, reply_markup=keyboard)
# Контакты
# доставка
@dp.callback_query_handler(lambda query: query.data == 'delivery')
async def handle_delivery(query: types.CallbackQuery):
    """Send the delivery price list together with the navigation keyboard."""
    delivery_lines = [
        "Доставка оплачивается по факту в Москве, перед получением. Когда заказ приходит к нам, мы взвешиваем его и рассчитываем стоимость",
        " 🚗 Авто доставка 890 рублей за кг (15-25 дня)",
        "✈️ Авиа доставка 1190 рублей за кг (12-15 дней)",
        "(Сроки с учетом того, что площадка отправила нам заказ без задержек)",
        "Расчет доставки производится без округления веса.Если посылка, например, 1,3кг значит, стоимость доставки*1.3 "
    ]
    text = "\n".join(delivery_lines)
    nav_keyboard = types.InlineKeyboardMarkup(row_width=2)
    nav_keyboard.add(
        types.InlineKeyboardButton("🛍️ Розничная торговля", callback_data='checkout_retail'),
        types.InlineKeyboardButton("📦 Оптовая торговля", callback_data='checkout_wholesale'),
        types.InlineKeyboardButton("⬅️ Назад", callback_data='back'),
    )
    await query.message.reply(text, parse_mode='HTML', reply_markup=nav_keyboard)
# доставка
# Статус заказа
@dp.callback_query_handler(lambda query: query.data == 'status', state=None)
async def ask_for_product_id(query: types.CallbackQuery):
    """Enter the status-lookup state and ask the user for a product ID."""
    await StatusForm.ID_PRODUCT.set()
    await query.message.reply("Введите ID товара:")
@dp.message_handler(state=StatusForm.ID_PRODUCT)
async def process_product_id(message: types.Message, state: FSMContext):
    """Look up a wholesale order by its unique ID and report its status.

    NOTE(review): the FSM state is never finished here, and psycopg2 calls
    block the event loop — confirm both are acceptable for this bot.
    """
    product_id = message.text
    # Bug fix: the cursor was created *before* the connection existed, which
    # raised NameError on every call.  Connect first, then open the cursor.
    conn = psycopg2.connect(
        host='localhost',
        port='5432',
        database='telebot',
        user='telebot',
        password='xsyusp'
    )
    cursor = conn.cursor()
    try:
        cursor.execute("SELECT * FROM telegram_wholesaleordertelegtam WHERE unique_id = %s", (product_id,))
        wholesale_order = cursor.fetchone()
        if wholesale_order:
            expected_status = wholesale_order[5]
            status_mapping = {
                1: 'Доставлено',
                2: 'Статус актуально',
            }
            expected_status_text = status_mapping.get(expected_status, 'Неизвестный статус')
            message_text = f"Статус товара ID: {product_id}\nОжидаемый статус заказа: {expected_status_text}"
        else:
            message_text = f"Заказ с ID {product_id} не найден"
    except Exception as e:
        message_text = f"Произошла ошибка: {str(e)}"
    finally:
        cursor.close()
        # Previously the connection was never closed, leaking one connection
        # per status query.
        conn.close()
    await message.reply(message_text)
# Статус заказа
# Курс валют
@dp.callback_query_handler(lambda query: query.data == 'course')
async def handle_course(query: types.CallbackQuery):
    """Show the current exchange rate and the navigation keyboard."""
    exchange_rate = 11.5  # Replace with the actual exchange rate
    # Bug fix: message_text was appended to ('+=') before ever being assigned,
    # which raised NameError on every call.
    message_text = "Текущий курс на сегодняшний день 12.4\n"
    # NOTE(review): the hard-coded 12.4 in the text disagrees with
    # exchange_rate (11.5) above — confirm which value is current.
    keyboard = types.InlineKeyboardMarkup(row_width=1)
    buttons = [
        types.InlineKeyboardButton("🛍️ Розничная торговля", callback_data='checkout_retail'),
        types.InlineKeyboardButton("📦 Оптовая торговля", callback_data='checkout_wholesale'),
        types.InlineKeyboardButton("❓ Ответы на популярные вопросы", callback_data='answers'),
        types.InlineKeyboardButton("⬅️ Назад", callback_data='back'),
    ]
    keyboard.add(*buttons)
    await query.message.reply(message_text, reply_markup=keyboard)
# Курс валют
# Ответы на популярные вопросы
@dp.callback_query_handler(lambda query: query.data == 'answers')
async def handle_answers(query: types.CallbackQuery):
    """Point the user to the FAQ page via an inline link button."""
    text = (
        "Ответы на часто задаваемые вопросы вы можете найти в нашем FAQ\n\n"
        "📚 Если у вас появятся вопросы, вы можете найти ответы в нашем разделе справки.\n\n"
        "🔗 Перейдите по ссылке для получения дополнительной информации.\n\n"
    )
    faq_keyboard = types.InlineKeyboardMarkup(row_width=2)
    faq_keyboard.add(
        types.InlineKeyboardButton("Open Telegram", url="https://telegra.ph/FAQ-06-16-9#%D0%9A%D0%B0%D0%BA-%D0%BF%D1%80%D0%B0%D0%B2%D0%B8%D0%BB%D1%8C%D0%BD%D0%BE-%D0%BE%D1%84%D0%BE%D1%80%D0%BC%D0%BB%D1%8F%D1%82%D1%8C-%D0%B7%D0%B0%D0%BA%D0%B0%D0%B7 "),
        types.InlineKeyboardButton("Назад", callback_data='back'),
    )
    await query.message.reply(text, parse_mode='HTML', reply_markup=faq_keyboard)
# Ответы на популярные вопросы
# ...
| chyngyz475/telebot_041 | bot/handlers/checkout_handler.py | checkout_handler.py | py | 9,088 | python | ru | code | 0 | github-code | 13 |
42702789891 | # Programa de configuração para o cx_Freeze poder "Buildar"
import sys
from cx_Freeze import setup, Executable
build_exe_options = {"packages": ["os"], "includes": ["tkinter"], "include_files": ["fundo.png", "lapis.ico"]}
base = None
if sys.platform == "win32":
base = "Win32GUI"
setup(
name="Editor de Texto",
version="1.0",
description="Um editor de texto personalizado",
options={"build_exe": build_exe_options},
executables=[Executable(script="# Hangman Game (Jogo da Forca).py", base=base, icon="lapis.ico")]
)
# Fim Config | luizsouza1993/Data_Science_Python | # Programa de configuração para o cx_Fre.py | # Programa de configuração para o cx_Fre.py | py | 576 | python | en | code | 0 | github-code | 13 |
28566799351 | import ccxt
import ta
import pandas as pd
import time
import talib
#This bot fetches the last 1000 1-day candles for the BTC/USDT symbol on the Bybit exchange, converts them to Heikin-Ashi candles, and calculates the 20-day and 50-day exponential moving averages (EMAs) and the 14-day relative strength index (RSI). If the EMA20 is above the EMA50 and the RSI is above 50, the bot executes a buy order for 10% of the
# Define exchange and symbol.  API credentials are intentionally blank here;
# fill them in (or load from the environment) before live use.
exchange = ccxt.bybit({
    'apiKey': '',
    'secret': '',
    'test': True, # required by Bybit
})
exchange.set_sandbox_mode(True) # activates testnet mode
symbol = 'LTC/USDT'
# Define Heikin-Ashi function
def heikin_ashi(data):
    """Convert a regular OHLC DataFrame into Heikin-Ashi candles.

    Standard Heikin-Ashi definition:
      HA_close[i] = (open[i] + high[i] + low[i] + close[i]) / 4
      HA_open[i]  = (HA_open[i-1] + HA_close[i-1]) / 2, seeded with
                    (open[0] + close[0]) / 2 for the first candle
      HA_high[i]  = max(high[i], HA_open[i], HA_close[i])
      HA_low[i]   = min(low[i],  HA_open[i], HA_close[i])

    Bug fix: the previous version computed HA_open as
    (HA_close.shift(1) + HA_close.shift(1)) / 2 — which is just
    HA_close.shift(1), with NaN in row 0 — and compared high/low against the
    *raw* open/close instead of the Heikin-Ashi ones.

    :param data: DataFrame with 'open', 'high', 'low', 'close' columns.
    :return: new DataFrame with those columns replaced by Heikin-Ashi values.
    """
    if data.empty:
        return data.copy()
    ha_close = (data['open'] + data['high'] + data['low'] + data['close']) / 4
    ha_open = ha_close.copy()
    ha_open.iloc[0] = (data['open'].iloc[0] + data['close'].iloc[0]) / 2
    # HA_open is defined recursively on the previous candle, so this part
    # cannot be fully vectorized.
    for i in range(1, len(data)):
        ha_open.iloc[i] = (ha_open.iloc[i - 1] + ha_close.iloc[i - 1]) / 2
    ha_high = pd.concat([data['high'], ha_open, ha_close], axis=1).max(axis=1)
    ha_low = pd.concat([data['low'], ha_open, ha_close], axis=1).min(axis=1)
    return data.assign(open=ha_open, high=ha_high, low=ha_low, close=ha_close)
# Import talib
# Define bot function
def trading_bot(exchange, symbol):
    """Run an endless daily EMA20/EMA50 + RSI crossover strategy on `symbol`.

    Buys (sells) 10% of the tracked USDT balance when EMA20 is above (below)
    EMA50 and RSI is above (below) 50, then sleeps 24 hours.

    NOTE(review): the indicators are called as ta.EMA / ta.RSI, which is the
    TA-Lib API — the `ta` package imported at the top does not expose these;
    confirm whether `import talib as ta` was intended.
    """
    # Set up the bot: track the free USDT balance locally.
    balance = exchange.fetch_balance()['USDT']['free']
    while True:
        try:
            # Fetch the last 1000 candles
            ohlcv = exchange.fetch_ohlcv(symbol, timeframe='1d', limit=1000)
            data = pd.DataFrame(ohlcv, columns=['timestamp', 'open', 'high', 'low', 'close', 'volume'])
            data['timestamp'] = pd.to_datetime(data['timestamp'], unit='ms')
            data.set_index('timestamp', inplace=True)
            ha_data = heikin_ashi(data)
            # Calculate the indicators
            ha_data['ema20'] = ta.EMA(ha_data['close'].values, timeperiod=20)
            ha_data['ema50'] = ta.EMA(ha_data['close'].values, timeperiod=50)
            ha_data['rsi'] = ta.RSI(ha_data['close'].values, timeperiod=14)
            # Determine the trade signal.
            # NOTE(review): series[-1] on a DatetimeIndex relies on deprecated
            # positional fallback — .iloc[-1] is the supported spelling.
            if ha_data['ema20'][-1] > ha_data['ema50'][-1] and ha_data['rsi'][-1] > 50:
                # Buy signal: spend 10% of the tracked balance at market.
                amount = balance * 0.1 / ha_data['close'][-1]
                order = exchange.create_order(symbol, type='market', side='buy', amount=amount)
                print('Buy order executed:', order)
                # NOTE(review): balance bookkeeping ignores fees and assumes
                # order['cost'] is populated immediately — verify with ccxt.
                balance -= order['cost']
            elif ha_data['ema20'][-1] < ha_data['ema50'][-1] and ha_data['rsi'][-1] < 50:
                # Sell signal: sells 10% of the *USDT balance* worth, not the
                # held position — confirm this sizing is intended.
                amount = balance * 0.1 / ha_data['close'][-1]
                order = exchange.create_order(symbol, type='market', side='sell', amount=amount)
                print('Sell order executed:', order)
                balance += order['cost']
            # Wait for 24 hours before checking again
            time.sleep(86400)
        except Exception as e:
            # Best-effort retry loop: log and back off for a minute.
            print('Error:', e)
            time.sleep(60)
| osasere1m/tradingbotccxt | testbot/Heikin-Ashi.py | Heikin-Ashi.py | py | 2,922 | python | en | code | 0 | github-code | 13 |
32761733402 | import networkx as nx
import numpy as np
import random
import util
import sys
class graphSolver:
    """Genetic-algorithm solver for the drive-and-dropoff routing problem.

    A candidate solution ("chromosome") is a cycle of node names starting and
    ending at `self.start`.  Its fitness is the total energy: 2/3 of the sum
    of driven edge weights plus, for every TA house, the shortest walking
    distance from the house to the closest node on the cycle.  Lower is
    better.
    """

    def __init__(self, node_names, house_names, start, adj_mat):
        # Genetic Algo Hyperparameters
        self.default_iterations = 100
        self.population_size = 100
        self.elite_size = int(self.population_size * 0.01)
        self.mutation_rate = 0.05

        self.node_names = node_names
        self.house_names = house_names
        self.start = start
        self.adj_mat = adj_mat
        self.G = nx.Graph()

        # Dictionary used to go from name to index number.
        # The reverse can be done by indexing self.node_names.
        self.node_indices = {}
        for i in range(len(node_names)):
            self.node_indices[self.node_names[i]] = i

        # Convert the adjacency matrix into an undirected weighted nx graph.
        # 'x' entries mean "no edge"; everything else is parsed to float.
        # Only i > j is added to avoid inserting each undirected edge twice.
        for i in range(len(adj_mat)):
            for j in range(len(adj_mat)):
                if adj_mat[i][j] != 'x':
                    adj_mat[i][j] = float(adj_mat[i][j])
                    if i > j:
                        self.G.add_edge(self.node_names[i], self.node_names[j], weight=adj_mat[i][j])

    def run_evolution(self, iterations=100, population=None):
        """Evolve for `iterations` generations and return the best individual.

        The returned path is population[0], which next_generation() places
        first via elitism.
        """
        if population is None:
            population = self.get_initial_population()
        for _ in range(iterations):
            population = self.next_generation(population)
        return population[0]

    def next_generation(self, population):
        """Produce the next generation via elitism plus roulette-wheel selection."""
        curr_population = []
        next_population = []
        total = 0
        for c in population:
            f = self.fitness(c)
            # Selection weight is 1/fitness so lower-energy paths are likelier.
            # NOTE(review): fitness() returns -1 for invalid paths, which would
            # give a negative weight — assumed not to occur because candidates
            # are always built from connected shortest paths.
            total += 1/f
            curr_population.append((c, f))
        curr_population = sorted(curr_population, key=lambda x: x[1])
        # Progress trace: best / 10th / 25th fitness of this generation.
        # (Assumes the population holds at least 26 individuals.)
        print(str(int(curr_population[0][1])) + ' ' + str(int(curr_population[10][1])) + ' ' + str(int(curr_population[25][1])))

        def select_parents():
            # Spin the roulette wheel twice over the cumulative 1/fitness mass.
            s1 = random.uniform(0, total)
            s2 = random.uniform(0, total)
            parent1, parent2 = None, None
            curr = 0
            for p in curr_population:
                if parent1 and parent2:
                    break
                curr += 1/p[1]
                if not parent1 and curr > s1:
                    parent1 = p
                if not parent2 and curr > s2:
                    parent2 = p
            return parent1, parent2

        # Elitism: carry the best individuals over unchanged.
        for i in range(self.elite_size):
            next_population.append(curr_population[i][0])
        while len(next_population) < self.population_size:
            p1, p2 = None, None
            while p1 == p2:
                p1, p2 = select_parents()
            c1, c2 = self.breed(p1[0], p2[0])
            if c1 is None:
                # Parents shared no crossover node; try another pair.
                continue
            next_population.extend([c1, c2])
        return next_population

    def breed(self, path1, path2):
        """Single-point crossover at a node (other than start) common to both paths.

        Returns [child1, child2], possibly mutated, or (None, None) when the
        parents share no usable crossover node.
        """
        s = set(path1)
        intersection = [v for v in path2 if v in s and v != self.start]
        if len(intersection) == 0:
            return None, None
        node = random.choice(intersection)
        i1 = path1.index(node)
        i2 = path2.index(node)
        child1 = path1[:i1] + path2[i2:]
        child2 = path2[:i2] + path1[i1:]
        if random.random() < self.mutation_rate * 1:
            child1 = self.mutate(child1)
        if random.random() < self.mutation_rate * 1:
            child2 = self.mutate(child2)
        return [child1, child2]

    def fitness(self, path):
        """
        Total energy expended for `path` (lower is better).

        Energy = (2/3) * sum of driven edge weights, plus for each TA house
        the shortest walking distance from the house to the nearest node on
        the path.

        Parameters
        ----------
        path: list of nodes in the order of traversal

        Return
        ------
        -1 if path is invalid (uses a non-existent edge)
        total energy expended for the path otherwise
        """
        energy = 0
        for i in range(len(path) - 1):
            if self.G.has_edge(path[i], path[i + 1]):
                energy += self.G[path[i]][path[i + 1]]['weight']
            else:
                return -1
        # Driving costs 2/3 of the edge weight.  Bug fix: the original line
        # read `energy *- 2.0/3.0`, a no-op expression statement (typo for *=),
        # so the drive discount was silently never applied.
        energy *= 2.0/3.0
        for h in self.house_names:
            # Walking cost from house h to the nearest dropoff node on the path.
            e, _ = self.shortest_path_to_cycle(path, h)
            energy += e
        return energy

    def get_pedestrian_walks(self, path):
        """Map each dropoff node on `path` to the list of houses served from it."""
        dropoff_locations = {}
        for h in self.house_names:
            _, n = self.shortest_path_to_cycle(path, h)
            dropoff_locations[n] = dropoff_locations.get(n, []) + [h]
        return dropoff_locations

    def generate_random_cycle(self):
        """
        Generate a valid random cycle within G.

        A normally-distributed number of random nodes is visited; consecutive
        picks are stitched together with shortest paths so the cycle is
        guaranteed to use real edges.

        Return
        ------
        rand_path: random cycle (starting and ending at self.start)
        """
        # Normal-distribution parameters for the number of waypoints.
        # NOTE(review): a sample above len(node_names) would exhaust the pop()
        # below — extremely unlikely with these parameters, but unguarded.
        mu = len(self.node_names) * 3 / 4
        std_dev = len(self.node_names) / 10

        # Generate a list of random nodes to visit.
        node_names_copy = self.node_names[:]
        random.shuffle(node_names_copy)
        psuedo_path = [self.start]
        for _ in range(int(random.gauss(mu, std_dev))):
            psuedo_path.append(node_names_copy.pop())
        psuedo_path.append(self.start)

        # Connect consecutive waypoints with shortest paths.
        rand_path = [self.start]
        for i in range(len(psuedo_path) - 1):
            node1 = psuedo_path[i]
            node2 = psuedo_path[i + 1]
            connection = nx.shortest_path(self.G, source=node1, target=node2)
            rand_path.extend(connection[1:])
        return rand_path

    def mutate(self, path):
        """Splice one random extra node into `path` via shortest paths."""
        new_visit = int(random.uniform(0, len(self.node_names)))
        new_visit = self.node_names[new_visit]
        cut = int(random.uniform(0, len(path) - 1))
        if new_visit == path[cut] or new_visit == path[cut + 1]:
            return path
        path1 = nx.shortest_path(self.G, source=path[cut], target=new_visit)
        path2 = nx.shortest_path(self.G, source=new_visit, target=path[cut + 1])
        # path1 ends at new_visit and path2 starts there, so trim both
        # endpoints of path2 to avoid duplicating nodes.
        new_path = path[:cut] + path1 + path2[1:-1] + path[cut + 1:]
        return new_path

    def shortest_path_to_cycle(self, path, node):
        """
        Dijkstra outward from a TA's house until any node on `path` is reached.

        Parameters
        ----------
        path: list of nodes in the path taken by the car
        node: the house of a TA

        Return
        ------
        (final_cost, goal): walking cost and the dropoff node reached.
        NOTE(review): if the fringe empties (house disconnected from the
        path) this returns a bare None, which callers unpacking two values
        would crash on — assumed unreachable for connected inputs.
        """
        visited = set()
        fringe = util.PriorityQueue()
        goal = None
        fringe.push(node, 0)
        foundPath = False
        final_cost = float('inf')
        while not foundPath:
            if fringe.isEmpty():
                return None
            curr_node, final_cost = fringe.pop()
            if curr_node in path:
                goal = curr_node
                foundPath = True
            elif curr_node not in visited:
                visited.add(curr_node)
                for child in list(self.G.neighbors(curr_node)):
                    cost = final_cost + self.G[curr_node][child]['weight']
                    fringe.update(child, cost)
        return final_cost, goal

    def get_initial_population(self):
        """Build the starting population of random valid cycles."""
        initial_population = []
        for _ in range(self.population_size):
            initial_population.append(self.generate_random_cycle())
        return initial_population

    def solve(self, filename):
        """Run several independent evolutions and write the best result to ../outputs/."""
        output_file = '../outputs/' + filename + '.out'
        best_path = None
        # Bug fix: sys.maxint does not exist in Python 3; use infinity.
        best_score = float('inf')
        for _ in range(3):
            result_path = self.run_evolution(30)
            fit = self.fitness(result_path)
            print(result_path)
            print(fit)
            if fit < best_score:
                best_score = fit
                best_path = result_path
        best_score = self.fitness(best_path)
        print(best_score)
        result_dropoff = self.get_pedestrian_walks(best_path)
        writeOutput(output_file, best_path, result_dropoff)
def main():
    """Solve one hard-coded input file and write its output.

    Earlier experiments on other inputs were commented out; the loop below
    runs exactly once with a fixed filename.
    """
    for i in range(0, 1):
        filename = '126_50'
        print(filename)
        input_file = '../inputs/' + filename + '.in'
        node_names, house_names, start, adj_mat = readInput(input_file)
        solver = graphSolver(node_names, house_names, start, adj_mat)
        solver.solve(filename)
def writeOutput(filename, path, dropoff):
    """Write a solution file.

    Format: the cycle (space-separated) on line 1, the number of dropoff
    locations on line 2, then one "location house house..." line per dropoff
    entry.  Bug fix: the file handle was previously never closed; a context
    manager now guarantees flushing and closing.
    """
    with open(filename, 'w') as f:
        f.write(' '.join(path))
        f.write('\n')
        f.write(str(len(dropoff)) + '\n')
        for k in dropoff:
            # A space always follows the location name, matching the original
            # output format exactly.
            f.write(k + ' ' + ' '.join(dropoff[k]) + '\n')
def readInput(filename):
    """
    Read a problem input file.

    File layout: number of nodes, number of houses, node names, house names,
    start node, then an adjacency matrix with one row per node ('x' = no
    edge).  The house count line is read only to advance the cursor.

    Parameters
    ----------
    filename: relative path to the input file

    Return
    ------
    node_names: list of the names of all nodes
    house_names: list of the names of houses
    start_node: starting node
    adj_mat: adjacency matrix (entries are strings; 'x' means no edge)
    """
    # Context manager closes the file even if parsing raises (the original
    # only closed on the success path).
    with open(filename, 'r') as f:
        num_nodes = int(f.readline().strip())
        num_houses = int(f.readline().strip())
        node_names = f.readline().strip().split(' ')
        house_names = f.readline().strip().split(' ')
        start_node = f.readline().strip()
        adj_mat = []
        for _ in range(num_nodes):
            adj_mat.append(f.readline().strip().split(' '))
    return node_names, house_names, start_node, adj_mat
# Run the solver only when executed as a script.
if __name__ == "__main__":
    main()
| NickL77/CS170-TSP | geneticAlg/geneticAlg.py | geneticAlg.py | py | 11,889 | python | en | code | 0 | github-code | 13 |
9640497351 | class QueryContext:
def __init__(self, stream_id, model, input_list,scale=None):
# input 是tensor list
self.has_deadline=False
self.stream_id=stream_id
self.model=model
self.input_list=input_list
# shape是属性 size是方法
# print(len(input_list),"输入长度")
self.input_res=input_list[0].shape
print(self.input_res)
# 如果是sr model,query中需要包含放大倍数
self.scale=scale
# 这个不是query中需要指定的,而是调度器需要决策的
self.gpu_idx=0 | sunnie-star/DRL_Bilevel | scheduler/query.py | query.py | py | 594 | python | zh | code | 0 | github-code | 13 |
73159848339 | import json
import requests
def tag_data_using_clarinAPI(tagger, input_file_path, output_file_path):
    """Tag a text file with the CLARIN-PL NLP REST service and save the CCL output.

    :param tagger: lpmn pipeline name, e.g. "wcrft2" or "morphoDita"
    :param input_file_path: UTF-8 text file to send for tagging
    :param output_file_path: destination path for the returned CCL (XML) document
    """
    clarinpl_url = "http://ws.clarin-pl.eu/nlprest2/base"
    user_mail = ""
    url = clarinpl_url + "/process"
    lpmn = tagger
    # Bug fix: the input file handle was previously opened inline and never
    # closed; context managers now close both files deterministically.
    with open(input_file_path, "r", encoding="utf8") as input_file:
        text = input_file.read()
    payload = {'text': text, 'lpmn': lpmn, 'user': user_mail}
    headers = {'content-type': 'application/json'}
    r = requests.post(url, data=json.dumps(payload), headers=headers)
    ccl = r.content.decode('utf-8')
    with open(output_file_path, "w", encoding="utf-8") as text_file:
        text_file.write(ccl)
if __name__ == '__main__':
    # Tag the PolEval test set with each configured tagger.
    # taggers = ["wcrft2", "morphoDita"]
    taggers = ["wcrft2"]
    for tagger in taggers:
        tag_data_using_clarinAPI(tagger, "pol_eval_data/test-raw.txt", f"pol_eval_data/test-tagged-{tagger}.ccl")
29863013617 | import json
import unittest
from os.path import dirname, join
from pytest import raises
import intelmq.tests.bots.experts.domain_suffix.test_expert as domain_suffix_expert_test
from intelmq.bots.experts.domain_suffix.expert import DomainSuffixExpertBot
from intelmq.bots.experts.taxonomy.expert import TaxonomyExpertBot
from intelmq.bots.experts.url.expert import URLExpertBot
from intelmq.lib.bot import BotLibSettings, Dict39, ExpertBot
from intelmq.lib.message import Message, MessageFactory
from intelmq.tests.lib import test_parser_bot
EXAMPLE_DATA_URL = Dict39({'source.url': 'http://example.com/'})
EXAMPLE_DATA_URL_OUT = EXAMPLE_DATA_URL | {'source.fqdn': 'example.com',
'source.port': 80,
'source.urlpath': '/',
'protocol.application': 'http',
'protocol.transport': 'tcp'}
EXAMPLE_IP_INPUT = {"source.ip": "192.0.43.7", # icann.org.
"destination.ip": "192.0.43.8", # iana.org.
"time.observation": "2015-01-01T00:00:00+00:00",
}
class BrokenInitExpertBot(ExpertBot):
    # Test helper: a bot whose init() always fails, used to exercise error
    # handling during Bot initialization.  (The typo in the message is part
    # of the asserted behavior and must not be "fixed".)
    def init(self):
        raise ValueError('This initialization intionally raises an error!')
class RaisesOnFirstRunExpertBot(ExpertBot):
    """Test helper: raises on the first processed message, passes later ones through."""
    counter = 0

    def init(self):
        self.counter = 0

    def process(self):
        message = self.receive_message()
        self.counter += 1
        if self.counter == 1:
            raise ValueError('This initialization intionally raises an error!')
        self.send_message(message)
        self.acknowledge_message()
def assertMessageEqual(actual, expected):
    """
    Compare two messages as dicts, ignoring time.observation and parsing the
    JSON-encoded 'output' field before comparison.
    """
    def normalize(msg):
        data = msg.to_dict(with_type=True) if isinstance(msg, Message) else msg.copy()
        data.pop('time.observation', None)
        if 'output' in data:
            data['output'] = json.loads(data['output'])
        return data

    assert normalize(actual) == normalize(expected)
def test_dummy_parser_bot():
    """The dummy parser emits one parsed event and dumps the two broken reports."""
    bot = test_parser_bot.DummyParserBot('dummy-bot', settings=BotLibSettings)
    sent = bot.process_message(test_parser_bot.EXAMPLE_REPORT.copy())
    assertMessageEqual(sent['output'][0], test_parser_bot.EXAMPLE_EVENT)
    for position in (0, 1):
        expected = MessageFactory.from_dict(test_parser_bot.EXPECTED_DUMP[position].copy(), default_type='Report')
        assertMessageEqual(sent['error'][position], expected)
def test_domain_suffix():
    """The domain-suffix expert extracts 'example.com' from a www subdomain."""
    suffix_file = join(dirname(domain_suffix_expert_test.__file__), 'public_suffix_list.dat')
    bot_settings = BotLibSettings | {'field': 'fqdn', 'suffix_file': suffix_file}
    bot = DomainSuffixExpertBot('domain-suffix', settings=bot_settings)
    result = bot.process_message({'source.fqdn': 'www.example.com'})
    assert result['output'][0]['source.domain_suffix'] == 'example.com'
def test_url_expert():
    """The URL expert decomposes source.url into fqdn/port/path/protocol fields."""
    bot = URLExpertBot('url', settings=BotLibSettings)
    result = bot.process_message(EXAMPLE_DATA_URL.copy())
    del bot
    assert result['output'] == [EXAMPLE_DATA_URL_OUT]
def test_url_and_taxonomy():
    """Chaining the URL expert into the taxonomy expert adds classification fields."""
    url_bot = URLExpertBot('url', settings=BotLibSettings)
    enriched = url_bot.process_message(EXAMPLE_DATA_URL.copy())['output'][0]
    del url_bot
    taxonomy_bot = TaxonomyExpertBot('taxonomy', settings=BotLibSettings)
    result = taxonomy_bot.process_message(enriched)
    expected = Dict39(EXAMPLE_DATA_URL_OUT) | {'classification.taxonomy': 'other', 'classification.type': 'undetermined'}
    assert result['output'] == [expected]
def test_bot_exception_init():
    """
    When a bot raises an exception during Bot initialization, the constructor
    must propagate it instead of swallowing it.
    """
    with raises(ValueError):
        BrokenInitExpertBot('broken', settings=BotLibSettings)
def test_bot_multi_message():
    """Two messages passed in one call are both processed and returned in order."""
    bot = URLExpertBot('url', settings=BotLibSettings)
    result = bot.process_message(EXAMPLE_DATA_URL.copy(), EXAMPLE_DATA_URL.copy())
    del bot
    assert result['output'] == [EXAMPLE_DATA_URL_OUT] * 2
def test_bot_raises_and_second_message():
    """
    A message that raises must not linger in the internal message store.

    Regression test for an issue where the exception-raising message was not
    cleared from the Bot/Pipeline instance and was re-used on the second run.
    """
    bot = RaisesOnFirstRunExpertBot('raises', settings=BotLibSettings)
    with raises(ValueError):
        bot.process_message(EXAMPLE_DATA_URL)
    result = bot.process_message(EXAMPLE_IP_INPUT)
    assert len(result['output']) == 1
    assertMessageEqual(result['output'][0], EXAMPLE_IP_INPUT)
# Allow running this test module directly as a unittest suite.
if __name__ == '__main__':  # pragma: no cover
    unittest.main()
| certtools/intelmq | intelmq/tests/lib/test_bot_library_mode.py | test_bot_library_mode.py | py | 5,333 | python | en | code | 856 | github-code | 13 |
13228013161 | import os
import copy
import logging
from pathlib import Path
from functools import reduce, partial
from operator import getitem
from datetime import datetime
from logger import setup_logging
from utils import read_json, write_json
class ConfigParser:
def __init__(self, config, testing=False, resume=None, modification=None, run_id=None):
"""
class to parse configuration json file. Handles hyperparameters for training, initializations of modules, checkpoint saving
and logging module.
:param config: Dict containing configurations, hyperparameters for training. contents of `config.json` file for example.
:param resume: String, path to the checkpoint being loaded.
:param modification: Dict keychain:value, specifying position values to be replaced from config dict.
:param run_id: Unique Identifier for training processes. Used to save checkpoints and training log. Timestamp is being used as default
"""
# load config file and apply modification
self._config = _update_config(config, modification)
self.resume = resume
# set save_dir where trained model and log will be saved.
save_dir = Path(self.config['trainer']['save_dir'])
if self.config['arch']['type'] != 'Classifier':
self.config['name'] = str(Path(self.config['name']) / self._naming())
exper_name = self.config['name']
# if run_id is None: # use timestamp as default run-id
# run_id = datetime.now().strftime(r'%m%d_%H%M%S')
# self._save_dir = save_dir / 'models' / exper_name / run_id
# self._log_dir = save_dir / 'log' / exper_name / run_id
self._save_dir = save_dir / 'models' / exper_name
self._log_dir = save_dir / 'log' / exper_name
# make directory for saving checkpoints and log.
if not testing:
exist_ok = run_id == ''
self.save_dir.mkdir(parents=True, exist_ok=exist_ok)
self.log_dir.mkdir(parents=True, exist_ok=exist_ok)
# save updated config file to the checkpoint dir
print('**********************************************************************************************************')
write_json(self.config, self.save_dir / 'config.json')
# configure logging module
setup_logging(self.log_dir)
self.log_levels = {
0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG
}
def _naming(self):
if self.config['arch']['type'] == 'HarmonicVAE':
arch_args = copy.deepcopy(self.config['arch']['args'])
for k, v in arch_args.items():
if isinstance(v, bool):
arch_args[k] = 't' if v else 'f'
seed = 'ms=' + str(self.config['trainer']['seed'])
split = 'ds=' + str(self.config['data_loader']['args']['split'])
pitch_shift = 'ps=' + str(self.config['trainer']['pitch_shift'])
n_latent = 'l=' + str(arch_args['latent_dim'])
decoding = arch_args['decoding']
mfcc = 'mfcc=' + arch_args['encode_mfcc']
pitch_emb = arch_args['pitch_embedding'] + '_' + arch_args['learn_pitch_emb']
pretrain_step = 'pre=' + str(self.config['loss']['args']['pretrain_step'])
# anneal_step = 'an=' + str(self.config['loss']['args']['anneal_step'])
gumbel = 'gb=' + arch_args['gumbel']
# hard = 'gh=' + arch_args['hard_gumbel']
# use_hp = 'hp=' + arch_args['use_hp']
# hp_share = 'hp_s=' + arch_args['hp_share']
# hp = '-'.join([use_hp, hp_share])
# bn_act = arch_args['bn'] + '_' + arch_args['act']
# decoder_arch = arch_args['decoder_arch']
bs = str(self.config['data_loader']['args']['batch_size'])
w_recon = str(self.config['loss']['args']['w_recon'])
w_kl = str(self.config['loss']['args']['w_kl'])
w_lmse = str(self.config['loss']['args']['w_lmse'])
w_contrast = str(self.config['loss']['args']['w_contrast'])
w_cycle = str(self.config['loss']['args']['w_cycle'])
w_pseudo = str(self.config['loss']['args']['w_pseudo'])
labeled = 'su=' + str(self.config['trainer']['labeled'])
pseudo_train = 'pse=' + str(self.config['trainer']['pseudo_train'])
back_freeze = 'bf=' + str(self.config['trainer']['freeze_encoder'])
if labeled.split('=')[-1] != '0.0':
assert w_pseudo == '1'
if labeled.split('=')[-1] == '1.0':
print('Fully supervised training')
w_pseudo = '0'
weights = '-'.join([w_recon, w_kl, w_lmse, w_contrast, w_cycle, w_pseudo])
# jfname = '-'.join([split, seed, bs, decoding, pitch_emb, hp, n_latent, mfcc, labeled, pitch_shift, pretrain_step, \
# j gumbel, pseudo_train, back_freeze, weights])
# fname = '-'.join([split, seed, bs, decoding, pitch_emb, n_latent, mfcc, labeled, pitch_shift, pretrain_step, \
# gumbel, pseudo_train, back_freeze, weights])
fname = '-'.join([seed, pitch_shift, labeled, n_latent, weights, back_freeze])
return fname
elif self.config['arch']['type'] == 'ConvNet':
arch_args = copy.deepcopy(self.config['arch']['args'])
target = 't=' + str(arch_args['target'])
fname = target
return fname
@classmethod
def from_args(cls, args, options='', testing=False):
"""
Initialize this class from some cli arguments. Used in train, test.
"""
for opt in options:
args.add_argument(*opt.flags, default=None, type=opt.type)
if not isinstance(args, tuple):
args = args.parse_args()
if args.device is not None:
os.environ["CUDA_VISIBLE_DEVICES"] = args.device
if args.resume is not None:
resume = Path(args.resume)
cfg_fname = resume.parent / 'config.json'
else:
msg_no_cfg = "Configuration file need to be specified. Add '-c config.json', for example."
assert args.config is not None, msg_no_cfg
resume = None
cfg_fname = Path(args.config)
config = read_json(cfg_fname)
if args.config and resume:
# update new config for fine-tuning
config.update(read_json(args.config))
# parse custom cli options into dictionary
modification = {opt.target : getattr(args, _get_opt_name(opt.flags)) for opt in options}
return cls(config, resume=resume, modification=modification, testing=testing)
def init_obj(self, name, module, *args, **kwargs):
"""
Finds a function handle with the name given as 'type' in config, and returns the
instance initialized with corresponding arguments given.
`object = config.init_obj('name', module, a, b=1)`
is equivalent to
`object = module.name(a, b=1)`
"""
module_name = self[name]['type']
module_args = dict(self[name]['args'])
assert all([k not in module_args for k in kwargs]), 'Overwriting kwargs given in config file is not allowed'
module_args.update(kwargs)
return getattr(module, module_name)(*args, **module_args)
def init_ftn(self, name, module, *args, **kwargs):
"""
Finds a function handle with the name given as 'type' in config, and returns the
function with given arguments fixed with functools.partial.
`function = config.init_ftn('name', module, a, b=1)`
is equivalent to
`function = lambda *args, **kwargs: module.name(a, *args, b=1, **kwargs)`.
"""
module_name = self[name]['type']
module_args = dict(self[name]['args'])
assert all([k not in module_args for k in kwargs]), 'Overwriting kwargs given in config file is not allowed'
module_args.update(kwargs)
return partial(getattr(module, module_name), *args, **module_args)
def __getitem__(self, name):
"""Access items like ordinary dict."""
return self.config[name]
def get_logger(self, name, verbosity=2):
msg_verbosity = 'verbosity option {} is invalid. Valid options are {}.'.format(verbosity, self.log_levels.keys())
assert verbosity in self.log_levels, msg_verbosity
logger = logging.getLogger(name)
logger.setLevel(self.log_levels[verbosity])
return logger
# setting read-only attributes
@property
def config(self):
return self._config
@property
def save_dir(self):
return self._save_dir
@property
def log_dir(self):
return self._log_dir
# helper functions to update config dict with custom cli options
def _update_config(config, modification):
if modification is None:
return config
for k, v in modification.items():
if v is not None:
_set_by_path(config, k, v)
return config
def _get_opt_name(flags):
for flg in flags:
if flg.startswith('--'):
return flg.replace('--', '')
return flags[0].replace('--', '')
def _set_by_path(tree, keys, value):
"""Set a value in a nested object in tree by sequence of keys."""
keys = keys.split(';')
_get_by_path(tree, keys[:-1])[keys[-1]] = value
def _get_by_path(tree, keys):
"""Access a nested object in tree by sequence of keys."""
return reduce(getitem, keys, tree)
| yjlolo/ismir20-unsupervised-disentanglement | parse_config.py | parse_config.py | py | 9,698 | python | en | code | 1 | github-code | 13 |
3481070043 | import requests
# Define the URL you want to request
class carCapas:
def __init__(self, url):
url = "http://192.168.1.147:8080/api/sparkle/"
self.url = url
def consumeGet(self,endPoint):
try:
response = requests.get(self.url+endPoint)
if response.status_code == 200:
data = response.json()
return (data)
else:
print(f"Request failed with status code {response.status_code}")
except requests.exceptions.RequestException as e:
print(f"Request error: {e}")
def consumePost(self, endPoint, data):
try:
response = requests.post(self.url+endPoint, json=data)
if response.status_code == 200:
response_data = response.json()
return response_data
else:
print(f"Request failed with status code {response.status_code}")
except requests.exceptions.RequestException as e:
print(f"Request error: {e}")
| gnro/PyPinino | ApiRequests.py | ApiRequests.py | py | 1,055 | python | en | code | 0 | github-code | 13 |
12635411644 | import os.path
import rasterio
import numpy as np
import matplotlib.pyplot as plt
import torch
from torchvision.models import ViT_L_16_Weights, vit_l_16
from torchvision.models import swin_v2_b, Swin_V2_B_Weights
from h3.utils.directories import get_xbd_hurricane_dir
def load_image() -> np.ndarray:
hurricane_dir = get_xbd_hurricane_dir()
imagepath = os.path.join(hurricane_dir, "hold", "images", "hurricane-florence_00000236_post_disaster.tif")
src = rasterio.open(imagepath)
image = src.read()
return image
def load_model():
# model = vit_l_16(weights=ViT_L_16_Weights.DEFAULT)
model = swin_v2_b(weights=Swin_V2_B_Weights.DEFAULT)
print(type(model))
model.eval()
return model
def main():
# device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("loading model")
model = load_model()
print("loading image")
image = load_image()
print(image.shape)
image = torch.as_tensor(image)
# preprocess = ViT_L_16_Weights.IMAGENET1K_V1.transforms()
preprocess = Swin_V2_B_Weights.IMAGENET1K_V1.transforms()
batch = preprocess(image).unsqueeze(0)
out = model(batch).squeeze(0)
print(out)
print(out.shape)
if __name__ == "__main__":
main()
| ai4er-cdt/hurricane-harm-herald | h3/models/pre_train.py | pre_train.py | py | 1,191 | python | en | code | 1 | github-code | 13 |
21585948261 | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, '../README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, './requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='ehome-server',
version='0.0.1',
description='Python bindings of EHome server for HikVision IP camera.',
long_description=long_description,
url='https://github.com/corenel/ip-camera-ehome-server',
author='Yusu Pan',
author_email='xxdsox@gmail.com',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Information Technology',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='utility camera server',
py_modules=["ehome_server"],
package_data={'ehome_server': '_ehome_server.so'},
)
| corenel/ip-camera-ehome-server | python/setup.py | setup.py | py | 1,259 | python | en | code | 2 | github-code | 13 |
7041365744 | #!/usr/bin/env python3
#
# CovidDataMain.py
#
# Prepare the data from Github and produce the csv files for other apps
#
from datetime import datetime
from dateutil.relativedelta import relativedelta
from dateutil import parser
from geopy.geocoders import Nominatim
import os
import logging
import numpy as np
import pandas as pd
import C19CollectDataGlobalTimeSeries as gts
import C19CollectDataGlobalRollup as gpr
import C19CollectDataUSStates as uss
import C19CollectDataWriteIndexCsv as wcs
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
CSV_DIRECTORY = '/Users/paulhart/Development/CovidProjects/Data/CSV_Files'
CONFIRMED_GLOBAL = '/Users/paulhart/Development/CovidProjects/Data/COVID-19/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_global.csv'
CONFIRMED_US = '/Users/paulhart/Development/CovidProjects/Data/COVID-19/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_US.csv'
DEATHS_GLOBAL = '/Users/paulhart/Development/CovidProjects/Data/COVID-19/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_global.csv'
DEATHS_US = '/Users/paulhart/Development/CovidProjects/Data/COVID-19/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_US.csv'
WORLD_POP = '/Users/paulhart/Development/CovidProjects/Data/WorldPop.csv'
# ----------------------------------------------------------------------------
# Global Variables
# ----------------------------------------------------------------------------
global global_keys
global global_new_keys
#global file_index_entry
#global file_index
class file_index_entry():
combined_key = ''
file_name = ''
country = ''
province = ''
def __init__(self, combined_key, file_name, country, province):
self.combined_key = combined_key
self.file_name = file_name
self.country = country
self.province = province
file_index = []
dfPopulations = pd.read_csv(WORLD_POP)
# ----------------------------------------------------------------------------
# Prepare dataframe
# ----------------------------------------------------------------------------
def process_dataframe():
''' Processing global and US state data'''
dfGlobal = gts.processGlobalDataframe()
gpr.processProvinceRollup(dfGlobal)
uss.processUSDataframe()
wcs.writeWriteIndexCsv()
# ----------------------------------------------------------------------------
# Entry point
# ----------------------------------------------------------------------------
def main():
df = process_dataframe()
if __name__ == '__main__':
main()
| SavedRepos/CovidProjects | Apps/C19CollectData/C19CollectDataMain.py | C19CollectDataMain.py | py | 2,750 | python | en | code | 0 | github-code | 13 |
36882800039 | from data import question_data
from question_model import Question
from quiz_brain import QuizBrain
question_bank=[]
for i in question_data:
new_que=Question(i["text"],i["answer"])
question_bank.append(new_que)
quiz=QuizBrain(question_bank)
while(quiz.still_have_question()):
quiz.next_question()
print(f"Your final score is {quiz.score}/{len(question_bank)}") | 23navi/Python-Codes | 100DaysOfCode/Day17/quiz-game/main.py | main.py | py | 373 | python | en | code | 4 | github-code | 13 |
15457391207 | # USBHub.py
#
# Contains class definitions to implement a USB hub.
from USB import *
from USBDevice import *
from USBConfiguration import *
from USBInterface import *
from USBEndpoint import *
class USBHubClass(USBClass):
name = "USB hub class"
def __init__(self, maxusb_app):
self.maxusb_app = maxusb_app
self.setup_request_handlers()
def setup_request_handlers(self):
self.request_handlers = {
0x00 : self.handle_get_hub_status_request,
0x03 : self.handle_set_port_feature_request
}
def handle_get_hub_status_request(self, req):
if self.maxusb_app.mode == 1:
print (" **SUPPORTED**",end="")
if self.maxusb_app.fplog:
self.maxusb_app.fplog.write (" **SUPPORTED**\n")
self.maxusb_app.stop = True
else:
response = b'\x61\x61\x61\x61'
self.maxusb_app.send_on_endpoint(0, response)
self.maxusb_app.stop = True
def handle_set_port_feature_request(self, req):
# print ("DEBUG: Set port feature request")
response = b''
self.maxusb_app.send_on_endpoint(0, response)
class USBHubInterface(USBInterface):
name = "USB hub interface"
def __init__(self, maxusb_app, verbose=0):
self.maxusb_app = maxusb_app
if self.maxusb_app.testcase[1] == "hub_bLength":
bLength = self.maxusb_app.testcase[2]
else:
bLength = 9
if self.maxusb_app.testcase[1] == "hub_bDescriptorType":
bDescriptorType = self.maxusb_app.testcase[2]
else:
bDescriptorType = 0x29
if self.maxusb_app.testcase[1] == "hub_bNbrPorts":
bNbrPorts = self.maxusb_app.testcase[2]
else:
bNbrPorts = 4
if self.maxusb_app.testcase[1] == "hub_wHubCharacteristics":
wHubCharacteristics = self.maxusb_app.testcase[2]
else:
wHubCharacteristics = 0xe000
if self.maxusb_app.testcase[1] == "hub_bPwrOn2PwrGood":
bPwrOn2PwrGood = self.maxusb_app.testcase[2]
else:
bPwrOn2PwrGood = 0x32
if self.maxusb_app.testcase[1] == "hub_bHubContrCurrent":
bHubContrCurrent = self.maxusb_app.testcase[2]
else:
bHubContrCurrent = 0x64
if self.maxusb_app.testcase[1] == "hub_DeviceRemovable":
DeviceRemovable = self.maxusb_app.testcase[2]
else:
DeviceRemovable = 0
if self.maxusb_app.testcase[1] == "hub_PortPwrCtrlMask":
PortPwrCtrlMask = self.maxusb_app.testcase[2]
else:
PortPwrCtrlMask = 0xff
hub_descriptor = bytes([
bLength, # length of descriptor in bytes
bDescriptorType, # descriptor type 0x29 == hub
bNbrPorts, # number of physical ports
wHubCharacteristics & 0xff , # hub characteristics
(wHubCharacteristics >> 8) & 0xff,
bPwrOn2PwrGood, # time from power on til power good
bHubContrCurrent, # max current required by hub controller
DeviceRemovable,
PortPwrCtrlMask
])
descriptors = {
USB.desc_type_hub : hub_descriptor
}
endpoint = USBEndpoint(
maxusb_app,
0x81, # endpoint number
USBEndpoint.direction_in,
USBEndpoint.transfer_type_interrupt,
USBEndpoint.sync_type_none,
USBEndpoint.usage_type_data,
16384, # max packet size
0x0c, # polling interval, see USB 2.0 spec Table 9-13
self.handle_buffer_available # handler function
)
# TODO: un-hardcode string index (last arg before "verbose")
USBInterface.__init__(
self,
maxusb_app,
0, # interface number
0, # alternate setting
9, # 3 interface class
0, # 0 subclass
0, # 0 protocol
0, # string index
verbose,
[ endpoint ],
descriptors
)
self.device_class = USBHubClass(maxusb_app)
self.device_class.set_interface(self)
def handle_buffer_available(self):
# print ("DEBUG: handle_buffer_available")
return
class USBHubDevice(USBDevice):
name = "USB hub device"
def __init__(self, maxusb_app, vid, pid, rev, verbose=0):
interface = USBHubInterface(maxusb_app, verbose=verbose)
if vid == 0x1111:
vid = 0x05e3
if pid == 0x2222:
pid = 0x0608
if rev == 0x3333:
rev = 0x7764
config = USBConfiguration(
maxusb_app,
1, # index
"Emulated Hub", # string desc
[ interface ] # interfaces
)
USBDevice.__init__(
self,
maxusb_app,
9, # 0 device class
0, # device subclass
1, # protocol release number
64, # max packet size for endpoint 0
vid, # vendor id
pid, # product id
rev, # device revision
"Genesys Logic, Inc", # manufacturer string
"USB2.0 Hub", # product string
"1234", # serial number string
[ config ],
verbose=verbose
)
| nccgroup/umap | devices/USBHub.py | USBHub.py | py | 5,949 | python | en | code | 265 | github-code | 13 |
19107656456 | #!/usr/bin/env python3
import sys
import time
isRemaining = 0
K = int(sys.argv[2])
currentWord = ""
byteStringLeftover = ""
frequencyTable = {}
treeInLine = []
fileInLine = []
trailingZeros = 0
codes = []
dictionary = {}
reqBits = 0
class Node():
def __init__(self, character, freq, left, right):
self.character = character
self.freq = freq
self.left = left
self.right = right
def processByteStringLeftover(temp_currentWord, temp_byteStringLeftover, temp_isRemaining, temp_fileInLine, file=None, root=None):
while temp_byteStringLeftover:
if len(temp_byteStringLeftover) >= K:
temp_currentWord = temp_byteStringLeftover[0:K]
temp_byteStringLeftover = temp_byteStringLeftover[K:] # arba len(byteStringLeftover) ?
if file is not None:
temp_fileInLine.append(buildCode(temp_currentWord))
else:
createFreqTable(temp_currentWord)
temp_currentWord = ""
else:
temp_currentWord = temp_byteStringLeftover
temp_isRemaining = K - len(temp_byteStringLeftover)
temp_byteStringLeftover = ""
return temp_currentWord, temp_byteStringLeftover, temp_isRemaining
def processByte(temp_currentWord, temp_byteStringLeftover, temp_isRemaining, temp_fileInLine, byte, file=None, root=None):
temp_currentWord, temp_byteStringLeftover, temp_isRemaining = processByteStringLeftover(temp_currentWord, temp_byteStringLeftover, temp_isRemaining, temp_fileInLine, file, root)
binary_str = format(ord(byte), 'b').zfill(8)
compareTo = K if temp_isRemaining == 0 else temp_isRemaining
if compareTo <= 8:
temp_currentWord += binary_str[:compareTo]
temp_byteStringLeftover = binary_str[compareTo:8]
if file is not None:
temp_fileInLine.append(buildCode(temp_currentWord))
else:
createFreqTable(temp_currentWord)
temp_currentWord = ""
if temp_isRemaining != 0:
temp_isRemaining = 0
else:
temp_currentWord += binary_str
temp_isRemaining = compareTo - 8
return temp_currentWord, temp_byteStringLeftover, temp_isRemaining
def createFreqTable(byteString):
if(len(byteString) < K):
global reqBits
reqBits = K - len(byteString)
for i in range(0,reqBits):
byteString = byteString + '0'
frequencyTable[byteString] = frequencyTable.get(byteString, 0) + 1
def CreateTree():
treeArray = []
shortestFlag = 0
for key, value in frequencyTable.items():
node = Node(key, value, None, None)
treeArray.append(node)
while len(treeArray) > 1:
left = removeMinFreq(treeArray)
right = removeMinFreq(treeArray)
parent = Node(None,left.freq+right.freq,left,right)
treeArray.append(parent)
return treeArray[0]
def removeMinFreq(list):
list.sort(key=lambda x: x.freq)
returnNode = list[0]
list.remove(returnNode)
return returnNode
def buildCode(word):
return dictionary[word]
def buildDict(node, s, dictionary):
if node.character:
if not s:
dictionary[node.character] = "0"
else:
dictionary[node.character] = s
else:
buildDict(node.left, s+"0", dictionary)
buildDict(node.right, s+"1", dictionary)
def printTree(treeInLine, tree):
if(tree != None):
if (tree.left == None and tree.right == None):
treeInLine.append('0')
treeInLine.append(tree.character)
else:
treeInLine.append('1')
printTree(treeInLine, tree.left)
printTree(treeInLine, tree.right)
def writeBytesToFile(fileToWrite, K, trailingZeros, treeInLine, fileInLine):
K_binary = "{0:05b}".format(K)
if trailingZeros != 0:
trailingZeros_binary = "{0:03b}".format(trailingZeros)
else:
trailingZeros_binary = "000"
firstByte = K_binary + trailingZeros_binary
treeAndFileString = treeInLine + fileInLine
treeAndFileString = [treeAndFileString[i:i+8] for i in range(0, len(treeAndFileString), 8)]
finalByteArray = [firstByte] + treeAndFileString
finalByteArray = [int(x, 2) for x in finalByteArray]
finalByteArray = bytes(finalByteArray)
with open(fileToWrite+'.huf', 'wb') as w:
w.write(finalByteArray)
with open(sys.argv[1], "rb") as f:
start_time = time.time()
print ("---------- CREATES FREQ TABLE ----------")
b = f.read(1)
count = 1
while b != b"":
currentWord, byteStringLeftover, isRemaining = processByte(currentWord, byteStringLeftover, isRemaining, fileInLine, b)
b = f.read(1)
currentWord, byteStringLeftover, isRemaining = processByteStringLeftover(currentWord, byteStringLeftover, isRemaining, fileInLine)
if currentWord:
createFreqTable(currentWord)
isRemaining = 0
currentWord = ""
print("-- TIME ELAPSED: %s seconds --" % (time.time() - start_time))
print ("---------- BUILD TREE ----------")
root = CreateTree()
print("-- TIME ELAPSED: %s seconds --" % (time.time() - start_time))
print ("---------- BUILD DICIONARY -----")
buildDict(root, "", dictionary)
print("-- TIME ELAPSED: %s seconds --" % (time.time() - start_time))
print ("---------- PRINT TREE ----------")
printTree(treeInLine, root)
treeInLine = "".join(treeInLine)
print("-- TIME ELAPSED: %s seconds --" % (time.time() - start_time))
print ("---------- WRITE FILE ----------")
f.seek(0)
b = f.read(1)
count = 1
while b != b"":
currentWord, byteStringLeftover, isRemaining = processByte(currentWord, byteStringLeftover, isRemaining, fileInLine, b,"output", root)
b = f.read(1)
#Prisegam nulius prie paskutinio
currentWord, byteStringLeftover, isRemaining = processByteStringLeftover(currentWord, byteStringLeftover, isRemaining, fileInLine, "output",root)
if(len(currentWord) < K and len(currentWord) > 0):
for i in range(0,K - len(currentWord)):
currentWord = currentWord + '0'
if currentWord:
fileInLine.append(buildCode(currentWord))
fileInLine = "".join(fileInLine)
reqBits_binary = "00000"
if reqBits != 0:
reqBits_binary = "{0:05b}".format(reqBits)
treeInLine = reqBits_binary + treeInLine
if len(treeInLine + fileInLine) % 8 != 0:
trailingZeros = 8 - (len(treeInLine + fileInLine) % 8)
for i in range(0,trailingZeros):
fileInLine = fileInLine + '0'
writeBytesToFile(sys.argv[1], K, trailingZeros, treeInLine, fileInLine)
print ("---------- END ----------")
print("-- TIME ELAPSED: %s seconds --" % (time.time() - start_time))
| Rugshtyne/HuffmanCoder | python/encoder.py | encoder.py | py | 6,070 | python | en | code | 0 | github-code | 13 |
24580181691 | from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QPushButton, QTextEdit, QLabel, QLineEdit, QFileDialog
from linkTree import build_link_tree_and_scrape
import json
import sys
class WebScraperApp(QWidget):
def __init__(self, parent=None):
super(WebScraperApp, self).__init__(parent)
self.setWindowTitle("Web Scraper")
self.layout = QVBoxLayout()
self.url_label = QLabel("Enter Website URL:")
self.url_input = QLineEdit()
self.start_button = QPushButton("Start Scraping")
self.start_button.clicked.connect(self.start_scraping)
self.text_area = QTextEdit()
self.log = QTextEdit()
self.save_button = QPushButton("Save as JSON")
self.save_button.clicked.connect(self.save_as_json)
self.save_button.setEnabled(False) # Disable until scraping is done
self.layout.addWidget(self.url_label)
self.layout.addWidget(self.url_input)
self.layout.addWidget(self.start_button)
self.layout.addWidget(self.text_area)
self.layout.addWidget(self.log)
self.layout.addWidget(self.save_button)
self.setLayout(self.layout)
self.data = {} # Initialize empty data
def start_scraping(self):
url = self.url_input.text()
logger = []
self.data = build_link_tree_and_scrape(url, logger)
self.text_area.setText(json.dumps(self.data, indent=4, ensure_ascii=False))
self.log.setText('\n'.join(logger))
self.save_button.setEnabled(True) # Enable the save button
def save_as_json(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getSaveFileName(self,"QFileDialog.getSaveFileName()", "","JSON Files (*.json);;All Files (*)", options=options)
if fileName:
with open(fileName, 'w', encoding='utf-8') as f:
json.dump(self.data, f, ensure_ascii=False, indent=4)
def main():
app = QApplication(sys.argv)
scraper_app = WebScraperApp()
scraper_app.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
| trv893/py-web-scrape | ui.py | ui.py | py | 2,106 | python | en | code | 0 | github-code | 13 |
39443440443 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Main file of Altai. Execute it to use the application. """
# System imports
import sys
from os.path import expanduser
import PySide2.QtGui as QtGui
import PySide2.QtCore as QtCore
import PySide2.QtWidgets as QtWidgets
# Altai imports
from . import config
from .vented_box_frame import VentedBoxFrame
from .vent_dimensions_frame import VentDimensionsFrame
from .driver_db_frame import DriverDatabaseFrame
class Gui(QtWidgets.QMainWindow):
""" Gui class for the main window. """
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
self.setWindowTitle("Altai")
self.create_menu()
self.tab_bar = QtWidgets.QTabBar()
self.tab_bar.addTab("Vented Box Response")
self.tab_bar.addTab("Vent Dimensions")
self.tab_bar.addTab("Driver Database")
self.tab_bar.currentChanged.connect(self.change_main_tab)
vented_box_frame = VentedBoxFrame()
vent_dimensions_frame = VentDimensionsFrame()
driver_database_frame = DriverDatabaseFrame()
driver_database_frame.new_manufacturer_added.connect(
vented_box_frame.driver_selection.update_drivers)
driver_database_frame.new_manufacturer_added.connect(
vent_dimensions_frame.driver_selection.update_drivers)
self.main_frames = [vented_box_frame, vent_dimensions_frame,
driver_database_frame]
vbox = QtWidgets.QVBoxLayout()
vbox.addWidget(self.tab_bar)
for i, frame in enumerate(self.main_frames):
vbox.addWidget(frame)
if i > 0:
frame.hide()
self.main_frame = QtWidgets.QWidget()
self.main_frame.setLayout(vbox)
self.setCentralWidget(self.main_frame)
def change_main_tab(self, tab_index):
""" Switch between main tab views. """
for i, frame in enumerate(self.main_frames):
if tab_index == i:
frame.show()
else:
frame.hide()
def create_menu(self):
""" Create main menu """
menu_file = self.menuBar().addMenu("&File")
menu_help = self.menuBar().addMenu("&Help")
# Save Figure
act_save = QtWidgets.QAction(self)
act_save.setText("Save Response as...")
act_save.setIcon(QtGui.QIcon.fromTheme('document-save-as'))
menu_file.addAction(act_save)
act_save.triggered.connect(self.save_figure)
# Exit button
act_exit = QtWidgets.QAction(self)
act_exit.setText("Exit")
act_exit.setIcon(QtGui.QIcon.fromTheme('application-exit'))
menu_file.addAction(act_exit)
act_exit.triggered.connect(self.close)
# About window
act_about = QtWidgets.QAction(self)
act_about.setText("About")
act_about.setIcon(QtGui.QIcon.fromTheme('help-about'))
menu_help.addAction(act_about)
act_about.triggered.connect(self.create_about_window)
def save_figure(self):
""" Save figure as file; all filetypes that are supported by matplotlib"""
home = expanduser("~")
fname, _ = QtWidgets.QFileDialog.getSaveFileName(
self, "Save Response as", home,
"PDF, PNG and SVG (*.pdf *.png *.svg)")
self.main_frames[0].fig.savefig(fname)
def create_about_window(self):
""" Creates the about window for Altai. """
about = ("Altai is a cross-platform application for simulating audio "
"systems. With it, you can design speakers, find the optimum "
"driver, predict the frequency response, etc. It is still in "
"a very early stage of development. You can follow its "
"progress on github: <a href='http://github.com/Psirus/altai'>"
"Altai on GitHub</a>. Please report any issues and feature "
"ideas you may have.")
reply = QtWidgets.QMessageBox(self)
reply.setWindowTitle("About Altai")
reply.setTextFormat(QtCore.Qt.TextFormat.RichText)
reply.setText(about)
reply.exec_()
def main():
""" Main function; acts as entry point for Altai. """
app = QtWidgets.QApplication(sys.argv)
gui = Gui()
gui.resize(800, 600)
gui.show()
sys.exit(app.exec_())
| Psirus/altai | altai/gui/main.py | main.py | py | 4,344 | python | en | code | 0 | github-code | 13 |
9382542497 | import sys
import re
# copies two conllu files, but it removes sentid's if one of the files
# had a conversion error for that sentid
def add_keys(f):
keys = set()
reg_id = re.compile('# sent_id = (.*)$')
reg_error = re.compile('# error')
for line in f:
line = line.rstrip()
m = reg_id.match(line)
if m:
id = m.group(1)
keys.add(id)
m = reg_error.match(line)
if m:
keys.remove(id)
return keys
def copy_conllu(inf,keys):
reg_id = re.compile('# sent_id = (.*)$')
reg_src = re.compile('# source = ')
key_ok = True
for line in inf:
line = line.rstrip()
m = reg_src.match(line)
if m:
print(line) # always print source since it precedes sent_id
continue
m = reg_id.match(line)
if m:
id = m.group(1)
if id in keys:
key_ok = True
else:
print(line)
print("# skipped")
key_ok = False
if key_ok:
print(line)
def main():
[_,f1,f2] = sys.argv
with open(f1,'r') as in1:
keys = add_keys(in1)
with open(f2,'r') as in2:
copy_conllu(in2,keys)
if __name__ == "__main__":
main()
| rug-compling/Alpino | EvalUD/goodkeys.py | goodkeys.py | py | 1,337 | python | en | code | 19 | github-code | 13 |
18081905648 | # 导入os模块
import os
# 定义全局变量 BASE_DIR 通过VASE_DIR定位到项目根目录
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
# 定义请求头
HEADERS = None
# 定义员工ID
EMP_ID = None | varrtgust0621/testIHRMProject | app.py | app.py | py | 216 | python | zh | code | 0 | github-code | 13 |
30993854546 | #! /usr/bin/env python
import os, sys
import pygame, random
try:
import android
except ImportError:
android = None
FPS = 30
TIMEREVENT = pygame.USEREVENT
skier_images = ["skier_left2.png",
"skier_left1.png",
"skier_down.png",
"skier_right1.png",
"skier_right2.png",
"skier_crash.png"]
class SkierClass(pygame.sprite.Sprite):
    """The player-controlled skier sprite.

    The steering state is an integer ``angle`` in [-2, 2]: negative values
    lean left, 0 points straight downhill, positive values lean right.
    The sprite image is chosen from ``skier_images`` via index
    ``2 + angle`` (index 5 is the crash image, selected externally with
    ``set_image(5)``).
    """

    # Horizontal margin (px) that keeps the sprite away from the edges.
    EDGE_MARGIN = 20

    def __init__(self, screen, img_path):
        """Create the skier centred horizontally near the top of *screen*.

        screen   -- the pygame display surface (used for sizing/placement)
        img_path -- directory containing the skier image files
        """
        pygame.sprite.Sprite.__init__(self)
        self.img_path = img_path
        # Derive the horizontal clamp bounds from the actual screen width
        # instead of hard-coding 20/620, so other screen sizes work too
        # (20..620 is reproduced exactly for the original 640-px screen).
        self.min_x = self.EDGE_MARGIN
        self.max_x = screen.get_width() - self.EDGE_MARGIN
        self._image_cache = {}  # image index -> loaded pygame Surface
        self.angle = 0
        self.rect = None
        self.set_image(2 + self.angle)
        self.rect.center = [screen.get_width() / 2, 100]

    def set_image(self, angle):
        """Switch to the sprite image at index *angle*.

        Keeps the current on-screen position if the sprite already has one.
        """
        center = self.rect.center if self.rect else None
        if angle not in self._image_cache:
            # Load lazily and cache so repeated turns don't hit the disk.
            self._image_cache[angle] = pygame.image.load(
                os.path.join(self.img_path, skier_images[angle]))
        self.image = self._image_cache[angle]
        self.rect = self.image.get_rect()
        if center:
            self.rect.center = center

    def turn(self, direction):
        """Steer by *direction* (-1 left / +1 right); return the new speed."""
        return self.set_angle(self.angle + direction)

    def set_angle(self, angle):
        """Set the steering angle, clamped to [-2, 2]; return the speed."""
        self.angle = max(-2, min(2, angle))
        return self._calc_speed()

    def _calc_speed(self):
        """Refresh the sprite image and return ``[dx, dy]`` for the angle.

        The harder the skier leans, the slower the downhill component.
        """
        self.set_image(2 + self.angle)
        return [self.angle, 6 - abs(self.angle) * 2]

    def move(self, speed):
        """Shift horizontally by ``speed[0]``, clamped to the screen edges."""
        self.rect.centerx = max(self.min_x,
                                min(self.max_x, self.rect.centerx + speed[0]))
class ObstacleClass(pygame.sprite.Sprite):
def __init__(self, image_file, location, type):
pygame.sprite.Sprite.__init__(self)
self.image_file = image_file
self.image = pygame.image.load(image_file)
self.location = location
self.rect = self.image.get_rect()
self.rect.center = location
self.type = type
self.passed = False
def scroll(self, t_ptr):
self.rect.centery = self.location[1] - t_ptr
class Game:
def __init__(self, screen, img_path):
self.img_path = img_path
self.screen = screen
self.screen_width = screen.get_width()
self.screen_height = screen.get_height()
self.clock = pygame.time.Clock()
self.brick_width = 45
self.brick_height = 20
self.border = 5
self.brick_border = 2
self.font = pygame.font.Font("freesansbold.ttf", 30)
self.debug_text = ""
self.score_text = None
self.level = 0
self.clearObstacleGroup()
self.init()
def init(self):
self.setup()
#self.skier = SkierClass(self.screen, self.img_path)
self.speed = [0, 6]
self.map_position = 0
self.mp = 0
self.points = 0
self.activeMap = 0
self.density = 1
self.clearObstacleGroup()
def clearObstacleGroup(self):
self.obstacles = pygame.sprite.Group()
def addObstacleGroup(self, oblist):
for ob in oblist: self.obstacles.add(ob)
def setup(self):
self.addAllBricks(self.level)
#self.createPaddle()
self.count = 0
def addAllBricks(self, level):
y = 72
if level >= 2:
oblist = self.addBrickRow("blue.png", y)
self.addObstacleGroup(oblist)
y = y + (self.brick_height+self.border) * 2
if level >= 1:
oblist = self.addBrickRow("green.png", y)
self.addObstacleGroup(oblist)
y = y + (self.brick_height+self.border) * 2
oblist = self.addBrickRow("red.png", y)
self.addObstacleGroup(oblist)
y = y + (self.brick_height+self.border) * 2
def addBrickRow(self, brickColor, y):
obstacles = pygame.sprite.Group()
img = os.path.join(self.img_path, brickColor)
type = "brick"
dx = (self.brick_width + self.brick_border)
for i in range(2):
for j in range(10):
location = [self.border + j*dx + (self.brick_width/2), y]
r = ObstacleClass(img, location, type)
obstacles.add(r)
y = y + self.brick_height + self.brick_border
return obstacles
def animate(self, flip=True):
self.screen.fill([0,0,0])
pygame.display.update(self.obstacles.draw(self.screen))
#self.screen.blit(self.skier.image, self.skier.rect)
if self.score_text:
self.screen.blit(self.score_text, [10, 10])
if self.debug_text:
text = self.font.render(self.debug_text, 1, (255,255,255))
self.screen.blit(text, [10, self.screen_height-60])
if flip:
pygame.display.flip()
def startScreen(self):
self.addAllBricks(10)
self.screen.fill([0,0,0])
pygame.display.update(self.obstacles.draw(self.screen))
title_text = self.font.render("Break the Ceiling", 1, (255,255,255))
again_text = self.font.render("Touch here to start", 1, (255,255,255))
pygame.display.flip()
while 1:
self.clock.tick(1000/FPS)
if android:
if android.check_pause():
android.wait_for_resume()
for event in pygame.event.get():
if event.type == pygame.QUIT:
return False
if event.type == pygame.MOUSEBUTTONUP:
return True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
return False
else:
return True
self.animate(False)
rect = title_text.get_rect()
self.screen.blit(title_text, [(self.screen_width - rect[2])/2, (self.screen_height - rect[3])/2])
rect = again_text.get_rect()
self.screen.blit(again_text, [(self.screen_width - rect[2])/2, (self.screen_height - 100)])
pygame.display.flip()
pygame.time.delay(500)
self.animate(False)
rect = title_text.get_rect()
self.screen.blit(title_text, [(self.screen_width - rect[2])/2, (self.screen_height - rect[3])/2])
pygame.display.flip()
pygame.time.delay(500)
def gameOver(self):
again_text = self.font.render("Press any key to play again", 1, (0,0,0))
while 1:
if android:
if android.check_pause():
android.wait_for_resume()
for event in pygame.event.get():
if event.type == pygame.QUIT:
return False
if event.type == pygame.MOUSEBUTTONUP:
return True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
return False
else:
return True
text = self.font.render("Game Over", 1, (0,0,0))
rect = text.get_rect()
self.animate(False)
self.screen.blit(text, [(self.screen_width - rect[2])/2, (self.screen_height - rect[3])/2])
rect = again_text.get_rect()
self.screen.blit(again_text, [(self.screen_width - rect[2])/2, (self.screen_height - 100)])
pygame.display.flip()
pygame.time.delay(500)
self.animate(False)
rect = again_text.get_rect()
self.screen.blit(again_text, [(self.screen_width - rect[2])/2, (self.screen_height - 100)])
pygame.display.flip()
pygame.time.delay(500)
pygame.quit()
sys.exit(0)
def play(self):
while True:
self.clock.tick(1000/FPS)
# Android-specific:
if android:
if android.check_pause():
android.wait_for_resume()
for event in pygame.event.get():
if event.type == pygame.QUIT:
return False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
pass
elif event.key == pygame.K_RIGHT:
pass
elif event.key == pygame.K_ESCAPE:
return True
if android:
a = android.accelerometer_reading()
#self.debug_text = "%f" % a[0]
x = a[0]
if abs(x) < 1:
self.speed = self.skier.set_angle(0)
elif x > 3:
self.speed = self.skier.set_angle(-2)
elif x > 1:
self.speed = self.skier.set_angle(-1)
elif x < -3:
self.speed = self.skier.set_angle(2)
elif x < -1:
self.speed = self.skier.set_angle(1)
self.density = 1 + self.points/100
self.score_text = self.font.render("Score: " + str(self.points), 1, (255,255,255))
self.animate()
def main():
pygame.init()
screen = pygame.display.set_mode([480, 800])
if android:
android.init()
android.map_key(android.KEYCODE_BACK, pygame.K_ESCAPE)
android.accelerometer_enable(True)
game = Game(screen, "images")
ret = game.startScreen()
if ret == False:
pygame.quit()
sys.exit(1)
while 1:
game.init()
ret = game.play()
if ret == False:
pygame.quit()
sys.exit(1)
ret = game.gameOver()
if ret == False:
pygame.quit()
sys.exit(1)
# This isn't run on Android.
if __name__ == "__main__":
main()
| noahdhwest/Breakout | .backup-break.py | .backup-break.py | py | 9,637 | python | en | code | 1 | github-code | 13 |
16840314637 | from selenium import webdriver
from time import sleep
import sys
import json
#####
url = 'https://www.youtube.com'
file = 'cookies.dat'
driver_path = r"D:\ProgramData\Python add-ons\geckodriver.exe"
how_to_use = '''***** How to Use *****
signin2cookies.py
--> this uses default driver_path "D:\ProgramData\Python add-ons\geckodriver.exe", goes to default url 'https://www.youtube.com', prompts to signin and saves cookies to default file 'cookies.dat' after you click ENTER
signin2cookies.py url file
--> this uses default driver_path "D:\ProgramData\Python add-ons\geckodriver.exe", goes to given url, prompts to signin and saves cookies to given file after you click ENTER
signin2cookies.py driver_path url file
--> this uses given driver_path, goes to given url, prompts to signin and saves cookies to given file after you click ENTER
extra options:
-firefox --> to use firefox
-chrome --> to use chrome
'''
control_driver = webdriver.Firefox
#####
print(how_to_use)
if '-firefox' in sys.argv:
control_driver = webdriver.Firefox
driver_path = r"D:\ProgramData\Python add-ons\geckodriver.exe"
sys.argv.remove('-firefox')
elif '-chrome' in sys.argv:
control_driver = webdriver.Chrome
driver_path = r"D:\ProgramData\Python add-ons\chromedriver.exe"
sys.argv.remove('-chrome')
while '-firefox' in sys.argv:
sys.argv.remove('-firefox')
while '-chrome' in sys.argv:
sys.argv.remove('-chrome')
if len(sys.argv)==3:
url = sys.argv[1]
file = sys.argv[2]
elif len(sys.argv)==4:
driver_path = sys.argv[1]
url = sys.argv[2]
file = sys.argv[3]
elif len(sys.argv)==1:
pass
else:
sys.exit()
print('On loading the site, signin and visit site again to get cookies')
driver = control_driver(executable_path=driver_path)
#driver.execute_script("alert('Signin to website to get cookies');")
input('After signin, click ENTER')
driver.get(url)
cookies = driver.get_cookies()
with open(file,'w') as f:
json.dump(cookies,f)
print(f'Successfully saved cookies of the site "{url}" in the file "{file}"')
#driver.execute_script("url=arguments[0];file=arguments[1];alert('Successfully saved cookies of the site \"' + url + '\" in the file \"' + file+'\"');",url,file)
| akshaysmin/youtube_reply_bot | signin2cookies.py | signin2cookies.py | py | 2,249 | python | en | code | 1 | github-code | 13 |
27878463324 | '''
Testing the Measurement class
================================================================
Unit tests for running test sequences and related functions.
This is the first sanity check to run when testing any changes.
'''
#================================================================
#%% Imports
#================================================================
# Standard library
import os, time, sys
import unittest
# Third party libraries
import numpy as np
import pandas as pd
import xarray as xr
basepath = os.path.dirname(os.path.dirname(__file__))
sys.path.append(basepath)
print(basepath)
# Local libraries
# from test_measure_process import AbstractMeasurement
from example_test_setup import ExampleTestSequence,ExampleTestboard,ExampleStation
#================================================================
#%% Constants
#================================================================
DATAFILE_PATH = os.path.join(basepath,'unit_test','data_files')
#================================================================
#%% Functions
#================================================================
#================================================================
#%% Tests
#================================================================
class TestExampleSequence(unittest.TestCase):
def setUp(self):
resources = {
'station':ExampleStation(),
'tb':ExampleTestboard()
}
self.testseq = ExampleTestSequence(resources)
def tearDown(self):
pass
def test_dummy(self):
self.assertTrue(True)
def test_got_conditions_and_meas(self):
"""
Check conditions and measurements are in place
"""
self.assertTrue(hasattr(self.testseq,'conditions'),msg='No conditions property')
self.assertTrue(hasattr(self.testseq,'meas'),msg='No meas property')
cond_names = ['temperature_degC','humidity_pc']
for cond_name in cond_names:
self.assertTrue(cond_name in self.testseq.conditions,
msg=f'Setup conditions [{cond_name}] failed to be loaded')
meas_names = ['PressureSweep','AxisSweep']
for meas_name in meas_names:
self.assertTrue(meas_name in self.testseq.meas,
msg=f'Meas [{meas_name}] failed to be loaded')
def test_conditions_table(self):
"""
Check the iteration works
"""
cond_table = self.testseq.conditions_table
for cond in cond_table:
print(cond)
# just see if it works for now
def test_running_order(self):
"""
Check the table works
"""
self.testseq.make_running_order()
print(self.testseq._running_order)
# just see if it works for now
def test_df_running_order(self):
"""
Check the table works
"""
df = self.testseq.df_running_order
print(df)
# just see if it works for now
def test_running_default_conditions(self):
self.testseq.conditions.Iteration.enable = False
self.testseq.run()
# just see if it works for now
print(self.testseq.ds_results)
# Check iteration condition does not appear in results
self.assertFalse('Iteration' in self.testseq.ds_results.coords,
msg='Iteration condition is in results when it should not be enabled')
self.assertTrue(self.testseq.last_error=='',msg='Test run failed')
def test_running_default_conditions_with_iteration(self):
self.testseq.conditions.Iteration.enable = True
self.testseq.run()
# Disable iteration for other tests
self.testseq.conditions.Iteration.enable = False
# just see if it works for now
print(self.testseq.ds_results)
# Check iteration condition does not appear in results
self.assertTrue('Iteration' in self.testseq.ds_results.coords,
msg='Iteration condition is not in results when it should be enabled')
self.assertTrue(self.testseq.last_error=='',msg='Test run failed')
def test_stacking_multiple_runs(self):
"""
Run test sequence multiple times with different information
data and try to stack the resulting datasets
"""
nRuns = 4
results = []
for run in range(nRuns):
self.testseq.information.serial_number = f'SN_{run}'
self.testseq.information.part_number = 'PN_1'
self.testseq.run()
results.append(self.testseq.ds_results)
# concat only works on one dimension
ds_all = xr.concat(results,dim='serial_number')
self.assertTrue('serial_number' in ds_all.coords,
msg='serial_number is not a coordinate')
self.assertEqual(nRuns,ds_all.coords['serial_number'].size,
msg='Serial number coordinate is wrong length')
def test_save_results(self):
"""
Save data after a test run in various formats and check the
files are stored.
"""
data_filename_json = os.path.join(DATAFILE_PATH,'test_data.json')
data_filename_excel = os.path.join(DATAFILE_PATH,'test_data.xlsx')
self.testseq.run()
self.testseq.save(data_filename_json)
self.testseq.save(data_filename_excel,format='excel')
self.assertTrue(os.path.exists(data_filename_json),
msg='Failed to save json file')
self.assertTrue(os.path.exists(data_filename_excel),
msg='Failed to save excel file')
# Clean up
if os.path.exists(data_filename_json):
os.remove(data_filename_json)
if os.path.exists(data_filename_excel):
os.remove(data_filename_excel)
def test_save_and_load_results(self):
"""
Save data after a test run and load it back
"""
data_filename_json = os.path.join(DATAFILE_PATH,'test_data.json')
self.testseq.run()
self.testseq.save(data_filename_json)
self.assertTrue(os.path.exists(data_filename_json),
msg='Failed to save json file')
# Create new test sequence to load back the file
new_seq = ExampleTestSequence({},offline_mode=True)
new_seq.load(data_filename_json)
# Compare datasets
self.assertTrue(self.testseq.ds_results.equals(new_seq.ds_results),
msg='Reloaded Results data is not equal')
# Clean up
if os.path.exists(data_filename_json):
os.remove(data_filename_json)
def test_config_replace(self):
"""
Test replacing a config item
"""
label = 'change_me'
value = 'changed'
self.testseq.config_replace(label,value)
# Check conditions
# ==============================
for cond in self.testseq.conditions:
if label in self.testseq.conditions[cond].config:
self.assertTrue(label in self.testseq.conditions[cond].config,
msg=f'Config not replaced for condition [{cond}]')
self.assertTrue(self.testseq.conditions[cond].config[label]==value,
msg=f'Config value not replaced for condition [{cond}]')
# Check measurements
# ==============================
for meas in self.testseq.meas:
if label in self.testseq.meas[meas].config:
self.assertTrue(label in self.testseq.meas[meas].config,
msg=f'Config not replaced for meas [{meas}]')
self.assertTrue(self.testseq.meas[meas].config[label]==value,
msg=f'Config value not replaced for meas[{meas}]')
#================================================================
#%% Runner
#================================================================
if __name__ == '__main__':
# all_tests = True
all_tests = False
if all_tests:
unittest.main()
print('Run')
else:
suite = unittest.TestSuite()
suite.addTest(TestExampleSequence('test_dummy'))
suite.addTest(TestExampleSequence('test_got_conditions_and_meas'))
suite.addTest(TestExampleSequence('test_conditions_table'))
suite.addTest(TestExampleSequence('test_running_default_conditions'))
suite.addTest(TestExampleSequence('test_running_default_conditions_with_iteration'))
suite.addTest(TestExampleSequence('test_running_order'))
suite.addTest(TestExampleSequence('test_df_running_order'))
suite.addTest(TestExampleSequence('test_stacking_multiple_runs'))
suite.addTest(TestExampleSequence('test_save_results'))
suite.addTest(TestExampleSequence('test_save_and_load_results'))
suite.addTest(TestExampleSequence('test_config_replace'))
runner = unittest.TextTestRunner()
runner.run(suite) | redlegjed/test_measure_process_lib | unit_test/test_example_sequence.py | test_example_sequence.py | py | 9,031 | python | en | code | 0 | github-code | 13 |
23343505298 | from editor import *
import typer
from typing import Optional
import allpyCon
app = typer.Typer(add_completion=False)
app.add_typer(allpyCon.app, name="all")
__version__ = "0.1.0"
def version_callback(value: bool):
if value:
typer.echo(f"pyCon CLI Version: {__version__}")
raise typer.Exit()
@app.command()
def con(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
extension_convert_to: str = typer.Argument(
..., help="Extension name [Example 'png']", show_default=False),
rename: str = typer.Argument(
None, help="New image name without extension", show_default=False)
):
"""Converts image to other formats."""
convert(os.getcwd(), image_name, extension_convert_to, rename)
# raise typer.Exit()
@app.command()
def rsp(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
scale_percent: int = typer.Argument(...,
help="Input size percent", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Resizes image by percentage."""
resize_percent(os.getcwd(), image_name, scale_percent, rename)
@app.command()
def rs(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
width: int = typer.Argument(...,
help="Image width", show_default=False),
height: int = typer.Argument(...,
help="Image height", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Resizes image by width and height"""
resize(os.getcwd(), image_name, width, height, rename)
@app.command()
def bw(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Converts image to black and white"""
black_and_white(os.getcwd(), image_name, rename)
@app.command()
def inv(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Converts image to invert image"""
invert(os.getcwd(), image_name, rename)
@app.command()
def r(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
angle: str = typer.Argument(...,
help="rotate angle ['left', 'right','flip-down','flip-x','flip-down-x']", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Rotates image"""
rotate(os.getcwd(), image_name, angle, rename)
@app.command()
def blur(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
blur_value: str = typer.Argument(...,
help="blur value (positive integer number)", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Blur image"""
blur(os.getcwd(), image_name, blur_value, rename)
@app.command()
def pencil(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
rename: str = typer.Argument(
None, help="rename image and image extension", show_default=False)
):
"""Converts image to pencil sketch art image"""
pencilSketch(os.getcwd(), image_name, rename)
@app.command()
def ascii(
image_name: str = typer.Argument(...,
help="Image name", show_default=False),
):
"""Converts image to ascii art"""
print(Ascii(image_name))
@app.callback()
def main(
version: Optional[bool] = typer.Option(
None, "-v", "--version", help="print version number and exit", callback=version_callback
),
):
"""pyCon is a cli too for converting image."""
if __name__ == "__main__":
app()
| Bonnary/pyCon | pyCon.py | pyCon.py | py | 4,422 | python | en | code | 0 | github-code | 13 |
34835026694 | """
Find a valid itinerary from the tickets, in lexographical order if multiple poss
Uses DFS: follows possible path until reaches end point (only one possible)
These will all be accessed in lexographical order, and added to the itinerary
in reverse order (postvisit)
Essentially there is a main line from the start to end points.
Depending on lexography, some auxilliary cycles get dealt with on the way
The remaining cycles are dealt with on the way back to the beginning
Itinerary is then reversed.
Destinations is an adjacency dictionary, with adjacent nodes stored in lex order
"""
from collections import deque
def findItinerary(tickets):
tickets.sort(key=lambda x:x[1])
destinations = {}
for ticket in tickets:
if ticket[0] in destinations:
destinations[ticket[0]].append(ticket[1])
else:
destinations[ticket[0]] = deque([ticket[1]])
itinerary = []
def DFS(v):
if v in destinations:
while destinations[v]:
DFS(destinations[v].popleft())
itinerary.append(v)
DFS('JFK')
return itinerary[::-1]
tickets = [["MUC", "LHR"], ["JFK", "MUC"], ["SFO", "SJC"], ["LHR", "SFO"]]
print(findItinerary(tickets))
| BenLeong0/leetcode_etc | leetcode_problems/leetcode332.py | leetcode332.py | py | 1,234 | python | en | code | 0 | github-code | 13 |
3345840963 | '''
Multipoint communication service protocol (T.125)
'''
import logging, ptypes, protocol.gcc as gcc, protocol.ber as ber, protocol.per as per
from ptypes import *
ptypes.setbyteorder(ptypes.config.byteorder.bigendian)
### MCS protocol data (BER Encoding)
class Protocol(ber.Protocol.copy(recurse=True)):
    '''Independent copy of the BER protocol namespace so that the MCS
    application-tagged types below can be registered without mutating
    the shared ber.Protocol registry.'''
    pass
class Element(ber.Element):
    # Bind element decoding to this module's private Protocol namespace.
    Protocol = Protocol
# Any tag not explicitly registered in Protocol decodes as a plain Element.
Protocol.default = Element
class Packet(Element):
    '''Top-level BER-encoded MCS element; multi-octet fields are big-endian.'''
    byteorder = ptypes.config.byteorder.bigendian
# FIXME: this number is actually encoded with PER-ALIGNED
class Result(pbinary.enum):
    '''4-bit result code returned in MCS confirm PDU headers.'''
    length, _values_ = 4, [
        ('rt-successful', 0),
        ('rt-domain-merging', 1),
        ('rt-domain-not-hierarchical', 2),
        ('rt-no-such-channel', 3),
        ('rt-no-such-domain', 4),
        ('rt-no-such-user', 5),
        ('rt-not-admitted', 6),
        ('rt-other-user-id', 7),
        ('rt-parameters-unacceptable', 8),
        ('rt-token-not-available', 9),
        ('rt-token-not-possessed', 10),
        ('rt-too-many-channels', 11),
        ('rt-too-many-tokens', 12),
        ('rt-too-many-users', 13),
        ('rt-unspecified-failure', 14),
    ]
    def summary(self):
        # Render as "symbolic-name(number)".
        return "{:s}({:d})".format(self.str(), self.int())
class Reason(pbinary.enum):
    '''3-bit reason code carried in the header bits of several MCS PDUs
    (e.g. DisconnectProviderUltimatum, DetachUserRequest).'''
    length, _values_ = 3, [
        ('rn-domain-disconnect', 0),
        ('rn-provider-initiated', 1),
        ('rn-token-purged', 2),
        ('rn-user-requested', 3),
        ('rn-channel-purged', 4),
    ]
class TokenStatus(ber.ENUMERATED):
    '''BER ENUMERATED describing the state of an MCS token.'''
    _values_ = [
        ('notInUse', 0),
        ('selfGrabbed', 1),
        ('otherGrabbed', 2),
        ('selfInhibited', 3),
        ('otherInhibited', 4),
        ('selfRecipient', 5),
        ('selfGiving', 6),
        ('otherGiving', 7),
    ]
class DataPriority(ber.ENUMERATED):
    '''BER ENUMERATED data-transfer priority (used by ConnectAdditional).'''
    _values_ = [
        ('top', 0),
        ('high', 1),
        ('medium', 2),
        ('low', 3),
    ]
class DomainParameters(ber.SEQUENCE):
    '''DomainParameters SEQUENCE negotiated during MCS connection setup
    (target/minimum/maximum triple inside Connect-Initial).'''
    _fields_ = [
        (ber.INTEGER, 'maxChannelIds'),
        (ber.INTEGER, 'maxUserIds'),
        (ber.INTEGER, 'maxTokenIds'),
        (ber.INTEGER, 'numPriorities'),
        (ber.INTEGER, 'minThroughput'),
        (ber.INTEGER, 'maxHeight'),
        (ber.INTEGER, 'maxMCSPDUsize'),
        (ber.INTEGER, 'protocolVersion'),
    ]
@Protocol.Application.define
class ConnectInitial(ber.SEQUENCE):
    '''Connect-Initial MCSPDU ([APPLICATION 101]).'''
    tag = 101
    _fields_ = [
        (ber.OCTET_STRING, 'callingDomainSelector'),
        (ber.OCTET_STRING, 'calledDomainSelector'),
        (ber.BOOLEAN, 'upwardFlag'),
        (DomainParameters, 'targetParameters'),
        (DomainParameters, 'minimumParameters'),
        (DomainParameters, 'maximumParameters'),
        (ber.OCTET_STRING, 'userData'),     # opaque payload, left undecoded here
    ]
@Protocol.Application.define
class ConnectResponse(ber.SEQUENCE):
    '''Connect-Response MCSPDU ([APPLICATION 102]).'''
    tag = 102
    class Result(ber.ENUMERATED):
        # Reinterprets the encoded octets through the module-level 4-bit
        # Result enumeration so that str()/int() yield the symbolic name.
        # NOTE(review): width=8*size() assumes the BER content fits the
        # pbinary enum's layout -- see the FIXME below.
        def str(self):
            res = self.cast(Result, width=8 * self.size())
            return res.str()
        def int(self):
            res = self.cast(Result, width=8 * self.size())
            return res.int()
        def summary(self):
            return "{:s}({:d})".format(self.str(), self.int())
    # FIXME: is this right?
    _fields_ = [
        (Result, 'result'),
        (ber.INTEGER, 'calledConnectId'),
        (DomainParameters, 'domainParameters'),
        (ber.OCTET_STRING, 'userData'),
    ]
@Protocol.Application.define
class ConnectAdditional(ber.SEQUENCE):
    '''Connect-Additional MCSPDU ([APPLICATION 103]).'''
    tag = 103
    _fields_ = [
        (ber.INTEGER, 'calledConnectId'),
        (DataPriority, 'dataPriority'),
    ]
@Protocol.Application.define
class ConnectResult(ber.SEQUENCE):
    '''Connect-Result MCSPDU ([APPLICATION 104]).'''
    tag = 104
    _fields_ = [
        (ber.OCTET_STRING, 'result'),
    ]
### DomainMCSPDU
class DomainMCSPDU(ptype.definition):
    '''Registry of DomainMCSPDU variants, keyed by the 6-bit choice value
    (each variant class below registers itself via @DomainMCSPDU.define).'''
    cache = {}

    class Choice(pbinary.enum):
        # 6-bit enumeration naming every DomainMCSPDU alternative.
        length, _values_ = 6, [
            ('plumbDomainIndication', 0),
            ('erectDomainRequest', 1),
            ('mergeChannelsRequest', 2),
            ('mergeChannelsConfirm', 3),
            ('purgeChannelsIndication', 4),
            ('mergeTokensRequest', 5),
            ('mergeTokensConfirm', 6),
            ('purgeTokensIndication', 7),
            ('disconnectProviderUltimatum', 8),
            ('rejectMCSPDUUltimatum', 9),
            ('attachUserRequest', 10),
            ('attachUserConfirm', 11),
            ('detachUserRequest', 12),
            ('detachUserIndication', 13),
            ('channelJoinRequest', 14),
            ('channelJoinConfirm', 15),
            ('channelLeaveRequest', 16),
            ('channelConveneRequest', 17),
            ('channelConveneConfirm', 18),
            ('channelDisbandRequest', 19),
            ('channelDisbandIndication', 20),
            ('channelAdmitRequest', 21),
            ('channelAdmitIndication', 22),
            ('channelExpelRequest', 23),
            ('channelExpelIndication', 24),
            ('sendDataRequest', 25), # Each of these cases are handled by HandleAllSendDataPDUs
            ('sendDataIndication', 26), #
            ('uniformSendDataRequest', 27), #
            ('uniformSendDataIndication', 28), #
            ('tokenGrabRequest', 29),
            ('tokenGrabConfirm', 30),
            ('tokenInhibitRequest', 31),
            ('tokenInhibitConfirm', 32),
            ('tokenGiveRequest', 33),
            ('tokenGiveIndication', 34),
            ('tokenGiveResponse', 35),
            ('tokenGiveConfirm', 36),
            ('tokenPleaseRequest', 37),
            ('tokenPleaseIndication', 38),
            ('tokenReleaseRequest', 39),
            ('tokenReleaseConfirm', 40),
            ('tokenTestRequest', 41),
            ('tokenTestConfirm', 42),
        ]
    # Default per-PDU header type; individual variants override Header
    # (e.g. Reason, Diagnostic, flag structs) when the choice octet
    # carries extra bits for them.
    Header = Choice
### Main PDU
class PDU(pstruct.type):
    '''
    MCS packet: a bit-packed header (6-bit DomainMCSPDU choice plus any
    variant-specific header bits) followed by the variant body.
    '''
    @pbinary.bigendian
    class _header(pbinary.struct):
        # The per-variant header bits live in the same bit-struct as the
        # choice; look up the variant's Header type (0 bits if none).
        def __value(self):
            res = self['choice']
            res = DomainMCSPDU.lookup(res, 0)
            return getattr(res, 'Header', 0)
        _fields_ = [
            (DomainMCSPDU.Header, 'choice'),
            (__value, 'value'),
        ]
    def __value(self):
        # Dispatch on the decoded choice; hand the variant a reference to
        # its header bits through the __header__ attribute.
        res = self['header'].li
        return DomainMCSPDU.get(res['choice'], ptype.undefined, __header__=res.item('value'))
    _fields_ = [
        (_header, 'header'),
        (__value, 'value'),
    ]
    def alloc(self, **fields):
        # Check if the caller is allocating the 'value' field
        if 'value' in fields and not isinstance(fields['value'], dict):
            res = fields['value']
            # If so, then copy its Header type into the 'header' field
            hdr = fields.setdefault('header', {})
            if isinstance(hdr, dict) and hasattr(res, 'Header'):
                hdr.setdefault('value', res.Header)
            elif isinstance(hdr, ptype.base) and hasattr(res, 'Header'):
                hdr['value'] = res.Header().a
                res.__header__ = hdr['value']
            elif ptypes.istype(res) and not hasattr(res, 'Header'):
                logging.warning("Unable to map .__header__ attribute for {:s} due to missing .Header attribute for value {:s}".format(self.classname(), res.typename()))
        # Now we can finally allocate our instance
        res = super(PDU, self).alloc(**fields)
        # If there is currently no '__header__' attribute, then explicitly assign one
        if not hasattr(res['value'], '__header__'):
            res['value'].__header__ = res['header'].item('value')
        return res
### DomainMCSPDU definitions
@DomainMCSPDU.define
class PlumbDomainIndication(pstruct.type):
    '''MCS PlumbDomainIndication (choice 0): carries the domain height limit.'''
    type = 0
    _fields_ = [
        (dyn.clone(ber.INTEGER, length=2), 'heightLimit'),
    ]

    def summary(self):
        limit = self['heightLimit'].int()
        return "heightLimit=%d" % limit
@DomainMCSPDU.define
class ErectDomainRequest(pstruct.type):
    '''MCS ErectDomainRequest (choice 1): PER-encoded sub-height/interval.'''
    type = 1
    _fields_ = [
        (per.INTEGER, 'subHeight'),
        (per.INTEGER, 'subInterval'),
    ]

    def summary(self):
        # Render each field as "name=summary", space-separated.
        rendered = ["{:s}={:s}".format(name, self[name].summary()) for name in ['subHeight', 'subInterval']]
        return ' '.join(rendered)
@DomainMCSPDU.define
class DisconnectProviderUltimatum(ptype.undefined):
    '''MCS DisconnectProviderUltimatum (choice 8): has no body of its own;
    its only content is the Reason carried in the PDU header bits.'''
    type = 8
    Header = Reason
    def __getitem__(self, name):
        # "reason" is not a real field; forward access to the header bits
        # referenced through __header__ (assigned by PDU).
        if name.lower() == 'reason':
            return self.__header__
        raise KeyError(name)
    def summary(self):
        return "reference(reason)={:s}".format(self['reason'].summary())
    def set(self, **fields):
        # Writing "reason" updates the header bits in place.
        if 'reason' in fields:
            self['reason'].set(fields.pop('reason'))
        return super(DisconnectProviderUltimatum, self).set(**fields)
    def details(self):
        return "[{:x}] <reference {:s} 'reason'> {:s}".format(self.getoffset(), self['reason'].classname(), self['reason'].summary()) + '\n'
    def repr(self):
        return self.details()
class Diagnostic(pbinary.enum):
    '''4-bit diagnostic code used as the RejectMCSPDUUltimatum header.'''
    length, _values_ = 4, [
        ('dc-inconsistent-merge', 0),
        ('dc-forbidden-PDU-downward', 1),
        ('dc-forbidden-PDU-upward', 2),
        ('dc-invalid-BER-encoding', 3),
        ('dc-invalid-PER-encoding', 4),
        ('dc-misrouted-user', 5),
        ('dc-unrequested-confirm', 6),
        ('dc-wrong-transport-priority', 7),
        ('dc-channel-id-conflict', 8),
        ('dc-token-id-conflict', 9),
        ('dc-not-user-id-channel', 10),
        ('dc-too-many-channels', 11),
        ('dc-too-many-tokens', 12),
        ('dc-too-many-users', 13),
    ]
@DomainMCSPDU.define
class RejectMCSPDUUltimatum(pstruct.type):
    '''MCS RejectMCSPDUUltimatum (choice 9): a Diagnostic in the header
    bits plus the initial octets of the rejected PDU.'''
    type = 9
    Header = Diagnostic
    _fields_ = [
        (gcc.LengthDeterminant, 'length'),
        (lambda self: dyn.clone(ber.OCTET_STRING, length=self['length'].li.int()), 'initialOctets'),
    ]
    def __field__(self, name):
        # "diagnostic" is not a real field; it resolves to the header bits.
        if name.lower() == 'diagnostic':
            return self.__header__
        return super(RejectMCSPDUUltimatum, self).__field__(name)
    def summary(self):
        return "reference(diagnostic)={:s} initialOctets={:s}".format(self['diagnostic'].summary(), self['initialOctets'].summary())
@DomainMCSPDU.define
class AttachUserRequest(ptype.undefined):
    '''MCS AttachUserRequest (choice 10): carries no payload at all.'''
    type = 10
@DomainMCSPDU.define
class AttachUserConfirm(pstruct.type):
    '''MCS AttachUserConfirm (choice 11): result flags in the header bits,
    followed by an optional initiator user id.'''
    type = 11
    class Header(pbinary.flags):
        # initiatorQ marks whether the optional initiator field is present.
        _fields_ = [
            (1, 'initiatorQ'),
            (Result, 'result'),
        ]
    def __initiator(self):
        # Zero-length UserId when the header says the field is absent.
        res = self.__header__
        return gcc.UserId if res['initiatorQ'] else dyn.clone(gcc.UserId, length=0)
    _fields_ = [
        (__initiator, 'initiator'),
    ]
    def __field__(self, name):
        # "result" is not a real field; it resolves to the header bits.
        if name.lower() == 'result':
            return self.__header__.item('result')
        return super(AttachUserConfirm, self).__field__(name)
    def summary(self):
        if self.__header__['initiatorQ']:
            return "reference(result)={:s} initiator={:s}".format(self['result'].summary(), self['initiator'].summary())
        return "reference(result)={:s}".format(self['result'].summary())
@DomainMCSPDU.define
class DetachUserRequest(pstruct.type):
    '''MCS DetachUserRequest (choice 12): a counted list of user ids; the
    detach reason lives in the PDU header bits and is exposed here as the
    virtual "reason" field.'''
    type = 12
    Header = Reason
    _fields_ = [
        (gcc.LengthDeterminant, 'count'),
        (lambda self: dyn.array(gcc.UserId, self['count'].li.int()), 'userIds'),
    ]

    def __field__(self, name):
        # Expose the header-resident reason as if it were a regular member.
        if name.lower() != 'reason':
            return super(DetachUserRequest, self).__field__(name)
        return self.__header__

    def summary(self):
        reason = self['reason'].summary()
        users = ', '.join(item.summary() for item in self['userIds'])
        return "reference(reason)={:s} userIds=[{:s}]".format(reason, users)

    def alloc(self, **fields):
        res = super(DetachUserRequest, self).alloc(**fields)
        if 'count' in fields:
            return res
        # Keep the length determinant consistent with the allocated array.
        return res.set(count=len(res['userIds']))

    def set(self, **fields):
        # Writing "reason" updates the header bits in place.
        if 'reason' in fields:
            self['reason'].set(fields.pop('reason'))
        return super(DetachUserRequest, self).set(**fields)
@DomainMCSPDU.define
class DetachUserIndication(DetachUserRequest):
    '''MCS DetachUserIndication (choice 13): same layout as DetachUserRequest.'''
    type = 13
@DomainMCSPDU.define
class ChannelJoinRequest(pstruct.type):
    '''MCS ChannelJoinRequest (choice 14): initiator user id and the
    channel id being joined.'''
    type = 14
    _fields_ = [
        (gcc.UserId, 'initiator'),
        (gcc.ChannelId, 'channelId'),
    ]

    def summary(self):
        # Render each field as "name=summary", space-separated.
        rendered = ("{:s}={:s}".format(name, self[name].summary()) for name in ['initiator', 'channelId'])
        return ' '.join(rendered)
@DomainMCSPDU.define
class ChannelJoinConfirm(pstruct.type):
    '''MCS ChannelJoinConfirm (choice 15): result flags in the header bits,
    the initiator, the requested channel, and an optional joined channel.'''
    type = 15
    class Header(pbinary.flags):
        # channelIdQ marks whether the optional channelId field is present.
        _fields_ = [
            (1, 'channelIdQ'),
            (Result, 'result'),
        ]
    def __channelId(self):
        # Zero-length ChannelId when the header says the field is absent.
        res = self.__header__
        return gcc.ChannelId if res['channelIdQ'] else dyn.clone(gcc.ChannelId, length=0)
    _fields_ = [
        (gcc.UserId, 'initiator'),
        (gcc.ChannelId, 'requested'),
        (__channelId, 'channelId'),
    ]
    def __field__(self, name):
        # "result" is not a real field; it resolves to the header bits.
        if name.lower() == 'result':
            return self.__header__.item('result')
        return super(ChannelJoinConfirm, self).__field__(name)
    def summary(self):
        if self.__header__['channelIdQ']:
            return "reference(result)={:s} initiator={:s} requested={:s} channelId={:s}".format(self['result'].summary(), self['initiator'].summary(), self['requested'].summary(), self['channelId'].summary())
        return "reference(result)={:s} initiator={:s} requested={:s}".format(self['result'].summary(), self['initiator'].summary(), self['requested'].summary())
@DomainMCSPDU.define
class ChannelLeaveRequest(pstruct.type):
    """T.125 Channel-Leave Request PDU: a counted list of channels being left."""
    type = 16
    _fields_ = [
        (gcc.LengthDeterminant, 'count'),
        (lambda self: dyn.array(gcc.ChannelId, self['count'].li.int()), 'channelIds'),
    ]
    def alloc(self, **fields):
        # Keep 'count' in sync with the allocated channelIds unless given explicitly.
        res = super(ChannelLeaveRequest, self).alloc(**fields)
        return res if 'count' in fields else res.set(count=len(res['channelIds']))
    def summary(self):
        return "({:d}) [{:s}]".format(self['count'].int(), ', '.join(ch.summary() for ch in self['channelIds']))
@DomainMCSPDU.define
class ChannelConveneRequest(pstruct.type):
    """T.125 Channel-Convene Request PDU: a user asking for a private channel."""
    type = 17
    _fields_ = [
        (gcc.UserId, 'initiator'),
    ]
    def summary(self):
        return "initiator={:s}".format(self['initiator'].summary())
@DomainMCSPDU.define
class ChannelDisbandRequest(pstruct.type):
    """T.125 Channel-Disband Request PDU: a user asking to disband a channel."""
    type = 19
    _fields_ = [
        (gcc.UserId, 'initiator'),
        (gcc.ChannelId, 'channelId'),
    ]
    def summary(self):
        return "initiator={:s} channelId={:s}".format(self['initiator'].summary(), self['channelId'].summary())
@DomainMCSPDU.define
class ChannelDisbandIndication(pstruct.type):
    """T.125 Channel-Disband Indication PDU: notification that a channel is gone."""
    type = 20
    _fields_ = [
        (gcc.ChannelId, 'channelId'),
    ]
    def summary(self):
        return "channelId={:s}".format(self['channelId'].summary())
class DataPriority(pbinary.enum):
    """Two-bit transport priority carried by the Send-Data PDUs."""
    length, _values_ = 2, [
        ('top', 0),
        ('high', 1),
        ('medium', 2),
        ('low', 3),
    ]
class Segmentation(pbinary.integer):
    """Two-bit segmentation marker for the Send-Data PDUs."""
    def blockbits(self):
        return 2
class SendDataPDU(pstruct.type):
    '''
    Microsoft's RDP implementation handles each of the available Send-
    Data types (SendDataRequest, SendDataIndication, UniformSendDataRequest, and
    UniformSendDataIndication) with the same handler since they have the exact
    same structure. Due to this, we implement all of them via this definition
    and use it as a base-class when assigning each one individually so that we
    can test against all of them via a single individual type.
    '''
    @pbinary.bigendian
    class _priority_segmentation(pbinary.struct):
        # Packed priority (2 bits) + segmentation marker (2 bits).
        _fields_ = [
            (DataPriority, 'dataPriority'),
            (Segmentation, 'segmentation'),
        ]
    class _length_userData(pstruct.type):
        # Length-prefixed opaque payload.
        _fields_ = [
            (gcc.LengthDeterminant, 'length'),
            (lambda self: dyn.block(self['length'].li.int()), 'data'),
        ]
    _fields_ = [
        (gcc.UserId, 'initiator'),
        (gcc.ChannelId, 'channelId'),
        (_priority_segmentation, 'dataAttributes'),
        (_length_userData, 'userData'),
    ]
# The four concrete Send-Data PDUs share SendDataPDU's layout and differ only
# in their DomainMCSPDU selector value.
@DomainMCSPDU.define
class SendDataRequest(SendDataPDU):
    type = 25
@DomainMCSPDU.define
class SendDataIndication(SendDataPDU):
    type = 26
@DomainMCSPDU.define
class UniformSendDataRequest(SendDataRequest):
    type = 27
@DomainMCSPDU.define
class UniformSendDataIndication(SendDataIndication):
    type = 28
| arizvisa/syringe | template/protocol/mcs.py | mcs.py | py | 16,313 | python | en | code | 35 | github-code | 13 |
9923084643 | import plotly
import plotly
from plotly.graph_objs import Scatter, Layout


def _append_dump(path, x, y):
    """Append (x, y) samples from a dump file, skipping '#' comment lines.

    Each data line is split on single spaces; column 1 is plotted on x and
    column 0 on y (the dumps store 'value timestamp ...').
    """
    with open(path, "r") as dump:
        for line in dump:
            fields = line.split(" ")
            if fields[0] == '#':
                continue
            x.append(fields[1])
            y.append(fields[0])


x = []
y = []
# NOTE(review): memory and CPU samples end up in the same series even though
# the chart is titled "RAM usage" -- confirm this is intended.
_append_dump('/tmp/mem_dump.txt', x, y)
_append_dump('/tmp/cpu_dump.txt', x, y)

data = [
    Scatter(
        x=x,
        y=y,
    )
]

plotly.offline.plot({
    "data": data,
    "layout": Layout(title="RAM usage")
})
# import plotly.plotly as py
# import plotly.graph_objs as go
#
#
# py.offline.iplot(data, filename='basic-error-bar')
| fd-rey/TFG | python/plot.py | plot.py | py | 896 | python | en | code | 0 | github-code | 13 |
44407838131 | # -*- coding: utf-8 -*-
"""
Created on Thu Dec 28 17:29:11 2017
@author: subhy
Functions to help define ABCs (abstract base classes) from a template.
"""
__all__ = [
'typename', 'ABCauto',
'get_abstracts', 'subclass_hook', 'subclass_hook_nosub',
'check_methods', 'check_attributes', 'check_properties',
]
import abc
import types
from typing import Any, Callable, List, Tuple, Union
# A membership check yields True/False, or NotImplemented to defer the answer.
CheckResult = Union[bool, type(NotImplemented)]
# Signature shared by the _check_* helpers below.
Checker = Callable[[type, str], CheckResult]
# Attribute kinds accepted as "properties" (includes __slots__ descriptors).
PROP_TYPES = (property, types.MemberDescriptorType)
def typename(inst: Any) -> str:
    """Return the name of the concrete type of *inst*."""
    cls = type(inst)
    return cls.__name__
def supername(cls: type, base: type = object) -> str:
    """Return the name of the first proper superclass of *cls* deriving from *base*.

    Scans ``cls.__mro__`` in order, skipping *cls* itself, and raises
    ValueError when no matching superclass exists.
    """
    match = next(
        (c for c in cls.__mro__ if c is not cls and issubclass(c, base)),
        None)
    if match is None:
        raise ValueError(f"{base.__name__} is not a superclass of {cls.__name__}")
    return match.__name__
# =============================================================================
# Type check utilities
# =============================================================================
def _check_dict(the_class: type, method: str) -> CheckResult:
"""Check if method is in class dictionary.
"""
if method in the_class.__dict__:
if the_class.__dict__[method] is None:
return NotImplemented
return True
return False
def _check_annotations(the_class: type, prop: str) -> CheckResult:
"""Check if attribute is in class annotations.
"""
return prop in getattr(the_class, '__annotations__', {})
def _check_property(the_class: type, prop: str) -> CheckResult:
    """Report whether *prop* is a property/slot in the class dict.

    Falls back to the annotation check when the name is absent from the
    class dictionary; propagates NotImplemented from the dict check.
    """
    present = _check_dict(the_class, prop)
    if present is NotImplemented:
        return NotImplemented
    if present:
        return isinstance(the_class.__dict__[prop], PROP_TYPES)
    return _check_annotations(the_class, prop)
def _check_generic(the_cls: type, check: Checker, *methods: str) -> CheckResult:
"""Check class for methods
"""
mro = the_cls.__mro__
for method in methods:
for super_class in mro:
is_ok = check(super_class, method)
if is_ok is NotImplemented:
return NotImplemented
if is_ok:
break
else:
return NotImplemented
return True
def check_methods(the_class: type, *methods: str) -> CheckResult:
    """Check if methods are in class dictionary.

    Returns True when every method is found somewhere in the MRO,
    otherwise NotImplemented.
    """
    return _check_generic(the_class, _check_dict, *methods)
def check_attributes(the_class: type, *properties: str) -> CheckResult:
    """Check if attributes are in class annotations.

    Returns True when every attribute is annotated somewhere in the MRO,
    otherwise NotImplemented.
    """
    return _check_generic(the_class, _check_annotations, *properties)
def check_properties(the_class: type, *properties: str) -> CheckResult:
    """Check if properties are in class dictionary (as property) or annotations.

    Returns True when every name is found somewhere in the MRO,
    otherwise NotImplemented.
    """
    return _check_generic(the_class, _check_property, *properties)
def get_abstracts(the_class: type) -> Tuple[List[str], ...]:
    """Split the abstract names of *the_class* into (methods, properties)."""
    methods, properties = [], []
    for name in getattr(the_class, '__abstractmethods__', set()):
        # Properties are recognised by the attribute's type on the class.
        bucket = properties if isinstance(getattr(the_class, name, None), property) else methods
        bucket.append(name)
    return methods, properties
def subclass_hook(cls: type, subcls: type) -> CheckResult:
    """Inheritable implementation of __subclasshook__.

    Use in ``__subclasshook__(cls, subcls)`` as
    ``return subclass_hook(cls, subcls)``.
    """
    methods, properties = get_abstracts(cls)
    verdict = check_methods(subcls, *methods)
    # Only proceed to the property check once every method is accounted for.
    return verdict if verdict is not True else check_properties(subcls, *properties)
def subclass_hook_nosub(mycls: type, cls: type, subcls: type) -> CheckResult:
    """Non-inheritable implementation of __subclasshook__.

    Use in ``__subclasshook__(cls, subcls)`` as
    ``return subclass_hook_nosub(MyClass, cls, subcls)``; subclasses of
    *mycls* defer to the normal mechanism.
    """
    return subclass_hook(cls, subcls) if cls is mycls else NotImplemented
# =============================================================================
# ABC mixin with __subclasshook__
# =============================================================================
class ABCauto(abc.ABC):
    """Base class for ABCs with automatic subclass check for abstract methods.

    Direct use as a superclass is blocked (unless typecheckonly=True):
    subclasses exist only to drive isinstance/issubclass checks.
    """
    @classmethod
    def __subclasshook__(cls, subcls):
        # Structural check: does subcls provide all of cls's abstract names?
        return subclass_hook(cls, subcls)
    def __init_subclass__(cls, typecheckonly: bool = False):
        if not typecheckonly:
            supname = supername(cls, ABCauto)
            raise TypeError(f'{supname} should not be used as a superclass.'
                            ' It is meant for instance/subclass checks only.')
| subhylahiri/sl_py_tools | abc_tricks.py | abc_tricks.py | py | 4,994 | python | en | code | 1 | github-code | 13 |
664484024 | # Manny Pagan
# Sept 24th Python Course
# Assignment 5
# Due: Oct 10th
# Ask once for the operation and the two operands.
user_input = input("What calculation would you like to do? (add, sub, mult, div)")
prompt_one = int(input("What is number 1?"))  # raises ValueError on non-integer input
prompt_two = int(input("What is number 2?"))
def problem_4_calculator():
    """Print the result of applying the chosen operation to the two numbers.

    Reads the module-level user_input/prompt_one/prompt_two globals.
    NOTE(review): matching is by substring, so e.g. 'madden' hits 'add';
    'div' with prompt_two == 0 raises ZeroDivisionError.
    """
    if "add" in user_input:
        print(prompt_one + prompt_two)
    elif "sub" in user_input:
        print(prompt_one - prompt_two)
    elif "mult" in user_input:
        print(prompt_one * prompt_two)
    elif "div" in user_input:
        print(prompt_one / prompt_two)
    else:
        print("Try typing add, sub, mult or div in the first prompt. Spelling Matters.")
problem_4_calculator()
| manuelpagan/assignment4 | problem4.py | problem4.py | py | 673 | python | en | code | 0 | github-code | 13 |
26384313770 | from __future__ import division, print_function, absolute_import
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Load MNIST with one-hot labels; downloads into ./data/ on first run.
mnist = input_data.read_data_sets("./data/", one_hot=True)
# Training parameters.
learning_rate = 0.001
num_steps = 200
batch_size = 128
display_step = 10
# Network parameters.
# (notes on dropout live in cnn.py)
num_input = 784
num_classes = 10
dropout = 0.75
# Graph inputs (placeholders fed at run time).
X = tf.placeholder(tf.float32, [None, num_input])
Y = tf.placeholder(tf.float32, [None, num_classes])
keep_prob = tf.placeholder(tf.float32)  # dropout keep-probability, stored as float32
# Wrapper around tf.nn.conv2d() that also adds the bias and applies ReLU.
# `strides` is the step size the filter moves at each application.
def conv2d(x, W, b, strides=1):
    """2-D convolution wrapper: convolve, add bias, apply ReLU.

    `strides` moves the filter by the same step along both spatial axes;
    the batch and channel strides stay at 1.
    """
    conv = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
    return tf.nn.relu(tf.nn.bias_add(conv, b))
# Wrapper for max pooling; ksize is the k x k extent of the pooling window.
# The leading 1 keeps pooling from mixing samples within a batch (e.g. a
# batch of 200 examples is pooled example by example), and the trailing 1
# keeps channels separate (e.g. R, G, B pooled independently).
# Hint: prefer dimensions that 2 divides evenly.
def maxpool2d(x, k=2):
    """k x k max-pooling with matching stride, preserving batch and channels."""
    window = [1, k, k, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def conv_net(x, weights, biases, dropout):
    """Two conv/pool stages followed by a dropout-regularised dense layer.

    x: flat batch of 784-pixel grayscale images (one channel).
    Returns the class-score logits (softmax is applied by the caller).
    """
    # Reshape to [batch, 28, 28, 1]; the batch dimension is inferred via -1.
    images = tf.reshape(x, shape=[-1, 28, 28, 1])

    # Stage 1: 28x28x1 -> 14x14x32 (conv keeps size, 2x2 max-pool halves it).
    pooled1 = maxpool2d(conv2d(images, weights['wc1'], biases['bc1']), k=2)
    # Stage 2: 14x14x32 -> 7x7x64.
    pooled2 = maxpool2d(conv2d(pooled1, weights['wc2'], biases['bc2']), k=2)

    # Flatten to match the dense weight matrix wd1 (7*7*64 inputs).
    flat = tf.reshape(pooled2, [-1, weights['wd1'].get_shape().as_list()[0]])
    dense = tf.nn.relu(tf.add(tf.matmul(flat, weights['wd1']), biases['bd1']))
    # Dropout with the supplied keep-probability.
    dense = tf.nn.dropout(dense, dropout)

    return tf.add(tf.matmul(dense, weights['out']), biases['out'])
# Store layers weight & bias
weights = {
    # 5x5 conv filters, 1 input channel (grayscale), 32 output channels (chosen freely)
    'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])),
    # 64 filters, each spanning the 32 channels of the previous layer:
    # 5x5 conv, 32 inputs, 64 outputs
    'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])),
    # fully connected, 7*7*64 inputs, 1024 outputs
    'wd1': tf.Variable(tf.random_normal([7*7*64, 1024])),
    # 1024 inputs, 10 outputs (class prediction)
    'out': tf.Variable(tf.random_normal([1024, num_classes]))
}
# One bias per output channel / unit of the matching weight tensor.
biases = {
    'bc1': tf.Variable(tf.random_normal([32])),
    'bc2': tf.Variable(tf.random_normal([64])),
    'bd1': tf.Variable(tf.random_normal([1024])),
    'out': tf.Variable(tf.random_normal([num_classes]))
}
# Build the graph: logits, probabilities, cross-entropy loss and Adam step.
logits = conv_net(X, weights, biases, keep_prob)
prediction = tf.nn.softmax(logits)
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
    logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
# Accuracy: fraction of samples whose arg-max prediction matches the label.
correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
init = tf.global_variables_initializer()
# Start training
with tf.Session() as sess:
    sess.run(init)
    for step in range(1, num_steps+1):
        batch_x, batch_y = mnist.train.next_batch(batch_size)
        # Train with dropout enabled: keep_prob = 0.8.
        sess.run(train_op, feed_dict={X: batch_x, Y: batch_y, keep_prob: 0.8})
        if step % display_step == 0 or step == 1:
            # Evaluate on the current batch with dropout disabled (keep_prob = 1.0).
            loss, acc = sess.run([loss_op, accuracy], feed_dict={X: batch_x,
                                                                 Y: batch_y,
                                                                 keep_prob: 1.0})
            print("Step " + str(step) + ", Minibatch Loss= " + \
                  "{:.4f}".format(loss) + ", Training Accuracy= " + \
                  "{:.3f}".format(acc))
    print("Optimization Finished!")
    # Calculate accuracy for 256 MNIST test images (no dropout at test time).
    print("Testing Accuracy:", \
        sess.run(accuracy, feed_dict={X: mnist.test.images[:256],
                                      Y: mnist.test.labels[:256],
                                      keep_prob: 1.0}))
| AdrianHsu/tensorflow-basic-models | convolutional_network_raw.py | convolutional_network_raw.py | py | 4,917 | python | en | code | 0 | github-code | 13 |
13522692096 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys, time, json
import gym
import numpy as np
import tensorflow as tf
from easy_rl.agents import agents
from easy_rl.models import DQNModel
from easy_rl.utils.window_stat import WindowStat
FLAGS = tf.flags.FLAGS
# Cluster membership and runtime role, supplied on the command line.
tf.flags.DEFINE_string("ps_hosts", "", "ps_hosts")
tf.flags.DEFINE_string("memory_hosts", "", "memory_hosts")
tf.flags.DEFINE_string("actor_hosts", "", "actor_hosts")
tf.flags.DEFINE_string("learner_hosts", "", "learn_hosts")
tf.flags.DEFINE_string("job_name", "", "job_name")
tf.flags.DEFINE_integer("task_index", -1, "task_index")
tf.flags.DEFINE_string("checkpoint_dir", "", "checkpoint_dir")
tf.flags.DEFINE_string("config", "", "path of the configuration")
# Set num_atoms > 1 to use a distributional DQN head.
num_atoms = 11
class MyDQN(DQNModel):
    """DQN model whose head emits 2 * num_atoms logits.

    Presumably 2 actions (CartPole) times num_atoms support points for the
    distributional head — confirm against DQNModel's expectations.
    """
    def _encode_obs(self, input_obs, scope="encode_obs"):
        # Two 64-unit ReLU layers; fixed-seed initializers keep runs repeatable.
        with tf.variable_scope(name_or_scope=scope):
            h1 = tf.layers.dense(
                input_obs,
                units=64,
                activation=tf.nn.relu,
                kernel_initializer=tf.random_normal_initializer(
                    mean=0.0, stddev=0.01, seed=0))
            h2 = tf.layers.dense(
                h1,
                units=64,
                activation=tf.nn.relu,
                kernel_initializer=tf.random_normal_initializer(
                    mean=0.0, stddev=0.01, seed=0))
            logits = tf.layers.dense(
                h2,
                units=2 * num_atoms,
                activation=None,
                kernel_initializer=tf.random_normal_initializer(
                    mean=0.0, stddev=0.01, seed=0))
            return logits
np.random.seed(0)
def main(_):
    """Build the distributed agent and run the loop for the configured role."""
    with open(FLAGS.config, 'r') as ips:
        config = json.load(ips)
        print(config)
    env = gym.make("CartPole-v0")
    env.seed(0)
    # Resolve the agent class from the config and wire up the cluster spec.
    agent_class = agents[config["agent"]["type"]]
    agent = agent_class(
        env.observation_space,
        env.action_space,
        config["agent"],
        config["model"],
        distributed_spec={
            "ps_hosts": FLAGS.ps_hosts,
            "memory_hosts": FLAGS.memory_hosts,
            "actor_hosts": FLAGS.actor_hosts,
            "learner_hosts": FLAGS.learner_hosts,
            "job_name": FLAGS.job_name,
            "task_index": FLAGS.task_index
        },
        custom_model=MyDQN)
    if FLAGS.job_name == "ps":
        # Parameter server: block forever serving variables.
        print("ps starts===>")
        agent.join()
    elif FLAGS.job_name == "memory":
        # Replay-memory process: shuttle experience between actors and learners.
        print("memory starts===>")
        while not agent.should_stop():
            agent.communicate()
            sys.stdout.flush()
    elif FLAGS.job_name == "actor":
        # Actor: interact with the environment and ship experience out.
        print("actor starts===>")
        reward_window = WindowStat("reward", 50)
        length_window = WindowStat("length", 50)
        obs, actions, rewards, new_obs, dones = list(), list(), list(), list(
        ), list()
        agent.sync_vars()
        while not agent.should_stop():
            ob = env.reset()
            done = False
            episode_reward = .0
            episode_len = 0
            while not done and not agent.should_stop():
                action, results = agent.act([ob], False)
                new_ob, reward, done, info = env.step(action[0])
                obs.append(ob)
                actions.append(action[0])
                rewards.append(reward)
                new_obs.append(new_ob)
                dones.append(done)
                if agent.ready_to_send:
                    # Flush the batch to remote memory, then refresh weights.
                    agent.send_experience(
                        obs=obs,
                        actions=actions,
                        rewards=rewards,
                        next_obs=new_obs,
                        dones=dones)
                    agent.sync_vars()
                ob = new_ob
                episode_reward += reward
                episode_len += 1
            reward_window.push(episode_reward)
            length_window.push(episode_len)
            print(reward_window)
            print(length_window)
            sys.stdout.flush()
    elif FLAGS.job_name == "learner":
        # Learner: pull batches from memory and run gradient updates.
        print("learner starts===>")
        while not agent.should_stop():
            batch_data = agent.receive_experience()
            if batch_data:
                extra_data = agent.learn(batch_data)
                print("learning {}".format(extra_data))
                sys.stdout.flush()
    else:
        raise ValueError("Invalid job_name.")
    print("done.")
if __name__ == "__main__":
tf.app.run()
| alibaba/EasyReinforcementLearning | demo/run_apex_agent_on_cartpole.py | run_apex_agent_on_cartpole.py | py | 4,571 | python | en | code | 188 | github-code | 13 |
72093792659 | import calculate_scores
import objects
import get_grand_prix_names
import load_config
def calculate_running_totals(current_year, display_breakdown=True):
    """Sum every race's predictor scores for the season and print the standings.

    current_year: season whose grands prix are totalled.
    display_breakdown: when True, also print each race's individual totals.
    """
    show = display_breakdown == True
    season_totals = objects.predictor_totals.PredictorTotals()
    for gp_name in get_grand_prix_names.get_grand_prix_names(active_year=current_year):
        if show:
            print("\n" + gp_name + " GP")
        scores = calculate_scores.calculate_race_score(gp_name, current_year)
        race_totals = calculate_scores.calculate_totals(
            objects.predictor_totals.PredictorTotals(), scores, False)
        if show:
            calculate_scores.display_totals(race_totals)
        for entry in race_totals:
            season_totals.add_or_update_predictor_total_points(entry)
    if show:
        print("\nGrand Total")
    ranked = sorted(season_totals, key=lambda total: total.points, reverse=True)
    for total in ranked:
        print(str(total.predictor) + "\t" + str(total.points) + " points")
if __name__== "__main__":
config = load_config.read_config()
calculate_running_totals(config.current_year)
| JamesScanlan/f1ftw | f1ftw/calculate_running_totals.py | calculate_running_totals.py | py | 1,323 | python | en | code | 0 | github-code | 13 |
10816186254 | # type hints
# specify the data type to be fixed (avoid dynamic typing)
age: int
name: str
height: float
is_human: bool
# : fixed the input data type
# -> fixed the output data type
def police_check(age: int) -> bool:
    """Return whether a driver of *age* passes the check (strictly over 18)."""
    return age > 18
# the : helps to find out, before execute pgm, if input has type error with parm, the input will blink to notify
# police_check("twelve")
print(police_check(12))
| WOOAK/udemy-100-days-of-code-Python | Day 34/main.py | main.py | py | 484 | python | en | code | 0 | github-code | 13 |
21501081369 | import inspect
import os
import vcr
def path_generator(function):
    """Map *function* to its cassette path: <function's dir>/mocks/<name>.yml."""
    base_dir = os.path.dirname(inspect.getfile(function))
    return os.path.join(base_dir, 'mocks', '{}.yml'.format(function.__name__))
# VCR recorder whose cassette locations come from path_generator above.
replay = vcr.VCR(func_path_generator=path_generator)
# Certificate fixtures for the TLS test server, resolved next to this file.
dir_path = os.path.dirname(os.path.abspath(__file__))
CA_CERT_PATH = f"{dir_path}/certs/serverCA.crt"
CLIENT_CERT_PATH = f"{dir_path}/certs/client.crt"
CLIENT_KEY_PATH = f"{dir_path}/certs/client.key"
# Test server endpoint (nip.io resolves to 127.0.0.1).
SERVER_HOST = "server.127.0.0.1.nip.io"
SERVER_PORT = 4433
9087016503 | # standard python modules
import numpy as np
# plotting utilities
import matplotlib.pyplot as plt;import matplotlib as mpl;import matplotlib.cm as cm;import matplotlib.colors as colors;from matplotlib import rc
majortickwidth,minortickwidth,fontsize = 1.5,0.75,10
majortickwidth,minortickwidth,fontsize = 1.0,0.5,10
cmap = mpl.cm.inferno # set a default perceptually-uniform colourmap
rc('font',**{'family':'serif','serif':['Times']})
rc('text', usetex=True)
mpl.rcParams['font.weight'] = 'medium'
mpl.rcParams['font.size'] = fontsize
mpl.rcParams['axes.linewidth'] = majortickwidth
for axname in ['xtick','ytick']:
mpl.rcParams['{}.labelsize'.format(axname)] = fontsize
mpl.rcParams['{}.major.width'.format(axname)] = majortickwidth
mpl.rcParams['{}.minor.width'.format(axname)] = minortickwidth
mpl.rcParams['{}.minor.visible'.format(axname)] = True
from scipy.optimize import curve_fit
# reading import
import h5py
# exptool imports
from exptool.io import outcoef
from exptool.utils import halo_methods
from exptool.basis.compatibility import r_to_xi
# Solution of the wave kinetic equation: exponential growth saturating at AS.
def wave_function(t, A0, AS, gamma):
    """Return E(t) = A0*exp(2*gamma*t) + AS*(1 - exp(2*gamma*t))."""
    growth = np.exp(2 * t * gamma)
    return A0 * growth + AS * (1.0 - growth)
def make_norm(Coefs, power=True):
    """Power (default) or amplitude of the l=0 harmonic at each time step.

    Coefs: array of shape (ntimes, nharmonics, nradial).
    """
    l0 = np.asarray(Coefs)[:, 0, :].astype(float)
    res = (l0 ** 2).sum(axis=1)
    return res if power else np.sqrt(res)
def make_total_l1(Coefs, power=True):
    """Power (default) or amplitude summed over the three l=1 harmonics.

    Returns an array of shape (ntimes, nradial).
    """
    arr = np.asarray(Coefs).astype(float)
    res = (arr[:, 1:4, :] ** 2).sum(axis=1)
    return res if power else np.sqrt(res)
def mode_shape1(rarr,p0,d0,eftable,evtable,O1,tval=0,lindx=8):
    """Reconstruct the l=1 potential and density mode shapes on grid *rarr*.

    p0/d0: background potential/density tables on the same grid;
    eftable/evtable: basis eigenfunctions and eigenvalues indexed [l][n];
    O1: coefficient series; tval: time index; lindx: flattened harmonic index.
    Returns (-pot, dens).
    """
    pot = np.zeros_like(rarr)
    dens = np.zeros_like(rarr)
    lmax,nmax = evtable.shape
    lmax-=1  # NOTE(review): lmax is never used after this adjustment
    l = 1
    nmin=0
    nmax=12  # NOTE(review): overrides the nmax read from evtable.shape above
    for rval in range(0,rarr.size):
        for n in range(nmin,nmax):
            # Potential scales by 1/sqrt(eigenvalue), density by sqrt(eigenvalue).
            pot[rval] += (p0*eftable[l][n])[rval]/np.sqrt(evtable[l][n])*O1.coefs[tval,lindx,n]
            dens[rval] += (d0*eftable[l][n])[rval]*np.sqrt(evtable[l][n])*O1.coefs[tval,lindx,n]
    # overarching sign is negative
    return -pot,dens
def make_single_l1(Coefs, nmin=0, nmax=1000, power=True):
    """Total l=1 power (default) or amplitude per time step.

    Sums the three l=1 harmonics over radial orders [nmin, min(nradial, nmax)).
    """
    arr = np.asarray(Coefs).astype(float)
    nstop = min(arr.shape[2], nmax)
    res = (arr[:, 1:4, nmin:nstop] ** 2).sum(axis=(1, 2))
    return res if power else np.sqrt(res)
# load the EXP cache
datadir = 'data/figure3/'
sph_file = datadir+'SLGridSph.cache.run1a.6.24'
model_file = datadir+'SLGridSph.cluttonbrock'
lmax,nmax,numr,cmap,rmin,rmax,scale,ltable,evtable,eftable = halo_methods.read_cached_table(sph_file,verbose=0,retall=True)
xi,rarr,p0,d0 = halo_methods.init_table(model_file,numr,rmin,rmax,cmap=cmap,scale=scale)
datadir = 'data/figure4/'
compname = 'plummer'
# make the l=1 figure
fig = plt.figure(figsize=(7.5,2.6),facecolor='white')
fig = plt.gcf()
xmin = 0.08
ymin = 0.165
dx = 0.34
dy = 0.78
xbuf = 0.13
ax1 = fig.add_axes([xmin+0*dx ,ymin+0*dy,dx,dy],facecolor='None')
ax2 = fig.add_axes([xmin+1*dx+xbuf ,ymin+0*dy,dx,dy])
axlist = [ax1]
ztime = 200
cmin,cmax = 0.75, 1.05
for r,nrun in enumerate([2]):
Cavg = np.zeros(401)
Davg = np.zeros([3,rarr.size])
tags = ['a','b','c','d','e','f']
for n,tag in enumerate(tags):
runtag = 'run'+str(nrun)+tag
O1 = outcoef.OutCoef(datadir+'outcoef.'+compname+'.'+runtag)#+'.6.24')
O1.coefs[:,1:,11:] = 0.
N = make_norm(O1.coefs)
#C = make_single_l2(O1.coefs)
C = make_single_l1(O1.coefs,nmin=0,nmax=1)
Cavg += C/N/6.
ax1.plot((O1.T),np.log10(((C/N))),color='black',lw=0.5)
# go through and try a couple different times
for ittest,ttest in enumerate([100,200,300]):
maxmode = np.zeros(3)
for lindx in [1,2,3]:
maxmode[lindx-1] = np.nanmax(np.abs(mode_shape1(rarr,p0,d0,eftable,evtable,O1,\
tval=ttest,lindx=lindx)[1]))
pshape,dshape = mode_shape1(rarr,p0,d0,eftable,evtable,O1,tval=ttest,lindx=np.nanargmax(maxmode)+1)
dshapemax = np.nanargmax(np.abs(dshape))
dshape /= dshape[dshapemax]
Davg[ittest] += dshape/6.
# vertical line for when the measurement takes place
if ittest==0:
ax1.plot([ttest,ttest],[-6.,-2.5],color=cm.viridis(ttest*0.003,1.),lw=1.0,linestyle='dashed')
else:
ax1.plot([ttest,ttest],[-4.5,-1.],color=cm.viridis(ttest*0.003,1.),lw=1.0,linestyle='dashed')
try:
meanS += np.abs(pshape)/len(tags)
except:
meanS = np.abs(pshape)/len(tags)
ax1.plot(O1.T,np.log10(Cavg),color='black',lw=2.)
# fit the proper linear space
initial_guesses = [1.e-6,1.e-3,(-0.005)]
# force a lower bound of A0 as 1.e-6: fit converges to this value
params, covariance = curve_fit(wave_function, O1.T,Cavg,p0=initial_guesses,bounds=[(1.e-6,1.e-4,-0.1),(0.1,0.1,-1.e-4)])
A0,AS,gamma = params
print('Fitted parameters,',A0,AS,gamma)
wvval = np.log10(wave_function(O1.T,A0,AS,gamma))
ax1.plot(O1.T,wvval,color='grey',lw=2.,linestyle='dashed')
for ittest,ttest in enumerate([100,200,300]):
ax2.plot(rarr,Davg[ittest]/np.nanmax(Davg[ittest]),color=cm.viridis(ttest*0.003,1.),lw=2.)
def plummer_density(radius, scale_radius=1.0, mass=1.0, astronomicalG=1.0):
    """Plummer-sphere density: rho(r) = (3M / 4 pi) b^2 (b^2 + r^2)^(-5/2).

    astronomicalG is accepted for call-site symmetry but does not enter rho.
    """
    return ((3.0*mass)/(4*np.pi))*(scale_radius**2.)*((scale_radius**2 + radius**2)**(-2.5))

def drhoPlummer(r, bc, G, M, da=1.e-5):
    """Central finite-difference estimate of drho/dr for a Plummer sphere.

    r: radius; bc: scale radius; G: unused (the density is G-free), kept for
    signature compatibility; M: total mass; da: half-step of the difference.

    Bug fix: bc and M are now forwarded to plummer_density — previously the
    derivative was always evaluated for the default unit-scale, unit-mass
    model regardless of the arguments.
    """
    hi = plummer_density(r + da, scale_radius=bc, mass=M)
    lo = plummer_density(r - da, scale_radius=bc, mass=M)
    return (hi - lo) / (2 * da)
drho = drhoPlummer(rarr,1.,1.,1.)
ax2.plot(rarr,np.abs(drho/np.nanmax(np.abs(drho))),color='grey',linestyle='dashed',lw=1.)
ax2.plot([0.4,0.5],[0.82,0.82],color='grey',linestyle='dashed',lw=1.,transform=ax2.transAxes)
ax2.text(0.51,0.82,'predicted mode shape,\n${\\rm d}\\rho/{\\rm d}r$',color='grey',ha='left',va='center',transform=ax2.transAxes,size=8)
ax1.plot([0.4,0.5],[0.18,0.18],color='grey',linestyle='dashed',lw=1.,transform=ax1.transAxes)
ax1.text(0.51,0.18,'wave kinetic equation fit,\n$\\tilde{E}_{\\rm init}=1.0\\times10^{-6}$\n$\\tilde{E}_{\\rm th}=8.0\\times10^{-3}$\n$\\gamma_{\\rm M}/\\Omega_{0}=-0.001$',color='grey',ha='left',va='center',transform=ax1.transAxes,size=8)
ax1.tick_params(axis="both",direction="in",which="both")
ax1.set_xlabel('time (virial units)')
ax1.axis([0,400,-6.,-2.])
ax1.set_ylabel('$E_{\ell=1}/E_{\ell=0}$')
ax1.set_ylabel('$\\tilde{E}_{1}$')
template = np.array([1])
majorvals = [0.000001,0.00001,0.0001,0.001,0.01]
ax1.set_yticks(np.log10(np.concatenate([x*template for x in majorvals])))
ax1.set_yticklabels(['$10^{-6}$','$10^{-5}$','$10^{-4}$','$10^{-3}$','$10^{-2}$'])
ax1.tick_params(which='minor',axis='y',length=0,width=0)
# add a duplicate axis for minor ticks only
ax1ghost = fig.add_axes([xmin+0*dx ,ymin+0*dy,dx,dy],facecolor='None')
ax1ghost.axis([0,400,-6.,-2.])
template = np.array([1,2,3,4,5,6,7,8,9])
majorvals = [1.e-6,1.e-5,1.e-4,1.e-3]
ax1ghost.set_yticks(np.log10(np.concatenate([x*template for x in majorvals])))
ax1ghost.set_yticklabels(())
ax1ghost.tick_params(which='major',axis='y',width=minortickwidth,length=mpl.rcParams['ytick.minor.size'])
ax1ghost.set_xticklabels(())
ax1ghost.tick_params(axis="both",direction="in",which="both")
ax1ghost.tick_params(which='minor',axis='y',length=0,width=0)
ax2.axis([0.,3.,-0.1,1.05])
ax2.tick_params(axis="both",direction="in",which="both")
ax2.set_xlabel('radius $r/b_{\\rm c}$')
ax2.set_ylabel('$\ell=1$ density shape\n(normalised)')
ax1.text(0.02,0.98,'(a) Plummer $\ell=1$',color='black',ha='left',va='top',transform=ax1.transAxes)
ax2.text(0.98,0.98,'(b)',color='black',ha='right',va='top',transform=ax2.transAxes)
plt.savefig('../figures/Figure4.png',dpi=300)
"""
convert Figure4.png Figure4.pdf
pdftops -eps -level3 Figure4.pdf Figure4.eps
rm Figure4.pdf
"""
| michael-petersen/LinearResponse-paper | scripts/P23Figure4.py | P23Figure4.py | py | 8,147 | python | en | code | 0 | github-code | 13 |
35041419639 | ## Primary Author: Mayank Mohindra <github.com/mayankmtg>
##
## Description: Main file. Contains logic to handle all commands and start the bot
##
import discord
from discord.ext import commands
from search import perform_search
from config import Config
from cache import save_search_query, find_search_history
from db import Connection
from error import ErrorMessage
# Load configuration and build the shared bot state at import time.
Config.load()
client = discord.Client()
conn = Connection(Config.DATABASE)
# bot starts
@client.event
async def on_ready():
    # Fired once the gateway connection is established.
    print ("bot is running")
# bot receives any messages
@client.event
async def on_message(command):
    """Dispatch an incoming Discord message.

    Supported interactions:
      * 'hi'              -> reply 'hey'
      * '!google <query>' -> save the query and reply with the top results
      * '!recent <query>' -> reply with previous searches containing <query>
    Anything else is ignored.
    """
    # Never react to our own messages (would loop forever).
    if command.author == client.user:
        return
    # 1. Handle Greeting: 'hi' -> 'hey'
    if command.content == 'hi':
        await command.channel.send('hey')
        return
    cmd_tokens = command.content.split()
    # Fix: a whitespace-only message used to crash on cmd_tokens[0].
    if not cmd_tokens:
        return
    cmd_userid = command.author.id
    cmd_type = cmd_tokens[0]
    cmd_args = cmd_tokens[1:]
    # Fix: only known commands get a reply; previously any one-word message
    # drew an "incorrect command" error.
    if cmd_type not in ('!google', '!recent'):
        return
    if not cmd_args:
        await command.channel.send(ErrorMessage.INCORRECT_COMMAND)
        return
    query = ' '.join(cmd_args)
    # 2. Google search: respond with the top results, newline separated.
    if cmd_type == '!google':
        if not save_search_query(conn, cmd_userid, query):
            print ("ERROR")
        result = perform_search(query)
        await command.channel.send('\n'.join(result))
    # 3. Recent-search history containing the given substring.
    elif cmd_type == '!recent':
        result = find_search_history(conn, cmd_userid, query)
        await command.channel.send('\n'.join(result))
# Starting the bot. Security fix: the bot token is a secret — the previous
# print(Config.DISCORD_TOKEN) leaked it to stdout/logs and was removed.
client.run(Config.DISCORD_TOKEN)
# close connection to database (runs once the client loop exits)
conn.shutdown()
| mayankmtg/sample-discord-bot | bot.py | bot.py | py | 1,798 | python | en | code | 0 | github-code | 13 |
29575167563 | # pylint: disable=R0901, W0613, R0201
import logging.config
from django.conf import settings
from django.db import transaction
from drf_yasg2 import openapi
from drf_yasg2.utils import swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from post.models import Post
from post.serializers import (PostManySerializer, PostPostSerializer,
PostSerializer)
from rest_api.constants import ERROR_RESPONSE_EXAMPLE, SUCCESS_RESPONSE_EXAMPLE
from rest_api.permissions import PermissionsPostsMixin
from user.choices import CLIENT
logging.config.dictConfig(settings.LOGGING)
logger = logging.getLogger('main_logger')
class PostsViewSet(PermissionsPostsMixin, ModelViewSet):
    """CRUD endpoints for Post plus a bulk topic rename.

    Object lookups read the 'post_id' URL kwarg rather than DRF's default
    'pk' — presumably matched by the router configuration (confirm).
    """
    queryset = Post.objects.all()
    serializer_class = PostSerializer
    post_serializer_class = PostPostSerializer
    http_method_names = ['get', 'post', 'patch', 'delete', 'put']
    def get_serializer_class(self):
        # Write operations use the dedicated PostPostSerializer.
        if self.action in ['create', 'partial_update']:
            if hasattr(self, 'post_serializer_class'):
                return self.post_serializer_class
        return super().get_serializer_class()
    @action(detail=False, methods=['get'])
    def list(self, request, *args, **kwargs):
        return super().list(request)
    def retrieve(self, request, *args, **kwargs):
        # Raises Post.DoesNotExist (HTTP 500) when the id is unknown.
        post = Post.objects.get(id=self.kwargs['post_id'])
        serializer = PostSerializer(post, context={'request': request})
        return Response(serializer.data)
    @action(detail=False, methods=['delete'])
    def destroy(self, request, *args, **kwargs):
        post = Post.objects.get(id=self.kwargs['post_id'])
        if self.request.user.id != post.user.id:
            # NOTE(review): permission denial is returned with HTTP 200;
            # should probably be status 403.
            return Response('You don`t have permission to delete this post')
        post.delete()
        logger.info(f'Post {post} deleted')
        # NOTE(review): the status constant is passed as the response *body*,
        # not as the status argument.
        return Response(status.HTTP_200_OK)
    def perform_create(self, serializer):
        # Attach the authenticated user as the post's author.
        serializer.save(user=self.request.user)
        title = serializer.data['title']
        user = self.request.user
        logger.info(f'Post {title} from user {user} created')
    def partial_update(self, request, *args, **kwargs):
        post = Post.objects.get(id=self.kwargs['post_id'])
        # Staff-type users may edit other people's posts; clients may not.
        if self.request.user.id != post.user.id and self.request.user.user_type == CLIENT:
            # NOTE(review): denial returned with HTTP 200; likely should be 403.
            return Response('You don`t have permission to change this post')
        serializer = PostPostSerializer(post, data=request.data, context={'request': request}, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        logger.info(f'Post {post} updated')
        return Response(serializer.data)
    @swagger_auto_schema(
        operation_description='Bulk update posts topic',
        request_body=openapi.Schema(
            description="Data to update posts",
            type=openapi.TYPE_OBJECT,
            properties={
                "old_topic": openapi.Schema(type=openapi.TYPE_STRING),
                "new_topic": openapi.Schema(type=openapi.TYPE_STRING),
            }
        ),
        responses={
            status.HTTP_200_OK: SUCCESS_RESPONSE_EXAMPLE,
            status.HTTP_400_BAD_REQUEST: ERROR_RESPONSE_EXAMPLE,
        }
    )
    @transaction.atomic
    @action(detail=False, methods=['put'])
    def bulk_update(self, request, *args, **kwargs):
        # Rename every post whose topic equals old_topic, atomically.
        serializer = PostManySerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.data
        posts = Post.objects.filter(topic=data['old_topic'])
        for post in posts:
            post.topic = data['new_topic']
        Post.objects.bulk_update(posts, ['topic'])
        return Response({"message": 'ok'})
| MikelTopKek/topnews | backend/rest_api/views/posts.py | posts.py | py | 3,839 | python | en | code | 0 | github-code | 13 |
18091116314 | import glob
import os
from time import time
from preprocessing import file_preprocessing
from utils import folders, initializing
# Ejecutar en Terminal la siguiente línea ANTES de ejecutar por primera vez:
# sudo chmod 777 /etc/ImageMagick-6/policy.xml
# Actualizar opciones de seguridad
initializing.initialize()
# Contenedor de los archivos PDF
MAIN_FOLDER = '/home/david/Documents/pdf_process'
folders_list = folders.initialize_folders(MAIN_FOLDER)
list_files = glob.glob(os.path.join(MAIN_FOLDER, "*.pdf"))
time_list = []
npage_list = []
# counter = 1
# time0 = time()
for fil in list_files[0:3]:
time0 = time()
print(f"\nArchivo {fil}, {len(npage_list) + 1}/{len(list_files)}\n")
npags = file_preprocessing.process_file(fil, folders_list)
npage_list.append(npags)
# counter = counter + 1
time1 = time()
time_list.append(round(time1 - time0, 2))
# time1 = time()
print("Tiempo ejecución\n")
print(f"{len(npage_list)}\t archivos.")
print(f"Tiempo: {round(sum(time_list), 2)}\t segundos.")
print(f"Tiempo/archivo: {round(sum(time_list) / len(npage_list), 2)}\t segundos.")
| cdcaballeroa2/pdf_info | LAST_VERSION/main_first.py | main_first.py | py | 1,110 | python | es | code | 0 | github-code | 13 |
33566202009 | import json # yahoo.
import re # unicode replace.
import math # for millify.
try: # for google stockquote.
import xml.etree.cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ElementTree
import datetime # futures math.
import pytz # tzconv for relativetime.
# extra supybot libs
import supybot.ircmsgs as ircmsgs
# supybot libs.
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('Odds')
@internationalizeDocstring
class Stock(callbacks.Plugin):
"""Display stock and financial information."""
threaded = True
def __init__(self, irc):
self.__parent = super(Stock, self)
self.__parent.__init__(irc)
def die(self):
self.__parent.die()
######################
# INTERNAL FUNCTIONS #
######################
def _colorify(self, s, perc=False):
"""Change symbol/color depending on if gain is positive or negative."""
s = s.replace('%','')
if float(s) > 0:
if perc:
s = self._green(u'▴' + s.replace('+','') + "%")
else:
s = self._green(u'▴' + s.replace('+',''))
elif float(s) < 0:
if perc:
s = self._red(u'↓' + s.replace('-','') + "%")
else:
s = self._red(u'↓' + s.replace('-',''))
return s
def _millify(self, n):
"""Display human readable numbers."""
millnames=['','k','M','B','T']
millidx=max(0,min(len(millnames)-1, int(math.floor(math.log10(abs(n))/3.0))))
return '%.1f%s'%(n/10**(3*millidx),millnames[millidx])
def _splitinput(self, txt, seps):
"""Split input depending on separators."""
default_sep = seps[0]
for sep in seps[1:]:
txt = txt.replace(sep, default_sep)
return [i.strip() for i in txt.split(default_sep)]
def _httpget(self, url, h=None, d=None):
"""General HTTP resource fetcher. Supports b64encoded urls."""
try:
if h and d:
page = utils.web.getUrl(url, headers=h, data=d)
else:
h = {"User-Agent":"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:17.0) Gecko/20100101 Firefox/17.0"}
page = utils.web.getUrl(url, headers=h)
return page
except utils.web.Error as e:
self.log.error("I could not open {0} error: {1}".format(url, e))
return None
def _out(self, irc, msg, txt):
"""Handle output."""
# check if we should msg users for this channel
if ircmsgs.isChannel(msg.args[0]):
if self.registryValue('msgUsersOutput', msg.args[0]):# we're in a channel and True.
irc.sendMsg(ircmsgs.privmsg(msg.nick, txt)) # send text to user.
else: # setting is off or not on a channel with it on.
irc.sendMsg(ircmsgs.privmsg(msg.args[0], txt))
else: # in a privmsg.
irc.sendMsg(ircmsgs.privmsg(msg.nick, txt))
########################
# COLOR AND FORMATTING #
########################
def _red(self, string):
"""Returns a red string."""
return ircutils.mircColor(string, 'red')
def _yellow(self, string):
"""Returns a yellow string."""
return ircutils.mircColor(string, 'yellow')
def _green(self, string):
"""Returns a green string."""
return ircutils.mircColor(string, 'green')
def _teal(self, string):
"""Returns a teal string."""
return ircutils.mircColor(string, 'teal')
def _blue(self, string):
"""Returns a blue string."""
return ircutils.mircColor(string, 'blue')
def _orange(self, string):
"""Returns an orange string."""
return ircutils.mircColor(string, 'orange')
def _bold(self, string):
"""Returns a bold string."""
return ircutils.bold(string)
def _ul(self, string):
"""Returns an underline string."""
return ircutils.underline(string)
def _bu(self, string):
"""Returns a bold/underline string."""
return ircutils.bold(ircutils.underline(string))
#####################################
# GOOGLE FUNCTIONS - used ig before #
# (later if initial breaks?) Code #
# from QSB - http://bit.ly/110PRAf #
#####################################
def _XEncodeReplace(self, match_object):
"""Convert \\xnn encoded characters.
Converts \\xnn encoded characters into their Unicode equivalent.
Args:
match: A string matched by an re pattern of '\\xnn'.
Returns:
A single character string containing the Unicode equivalent character
(always within the ASCII range) if match is of the '\\xnn' pattern,
otherwise the match string unchanged.
"""
try:
char_num_string = match_object.group(1)
char_num = int(char_num_string, 16)
replacement = chr(char_num)
return replacement
except:
return match_object
def _googlequote(self, symbol):
"""This uses a JSONP-like url from Google to return a stock Quote. (From QSB)"""
# build url.
url = 'https://www.google.com/finance/info?infotype=infoquoteall&q='
url += utils.web.urlquote(symbol)
# process "JSON".
html = self._httpget(url)
if not html:
self.log.error("_googlequote: Failed on Google Quote for {0}".format(symbol))
return None
# \xnn problems.
pattern = re.compile('\\\\x(\d{2})')
# dict container for output.
e = {}
# iterate over each line. have to split on \n because html = str object.
for line in html.split('\n'): # splitlines() ?
line = line.rstrip('\n')
line_parts = line.split(': ')
if len(line_parts) == 2:
key, value = line_parts # list back to string.
key = key.replace(',"', '').replace('"','').strip() # clean up key.
value = value.replace('"', '') # clean up value.
value = pattern.sub(self._XEncodeReplace, value) # Perform the \xnn replacements here.
if (key and value and value != ""): # if we still have, inject.
e[key] = value
# with dict above, we construct a string conditionally.
# {'c': '-6.77', 'ccol': 'chr', 'e': 'INDEXSP', 'name': 'S&P 500', 'lo': '1,755.72', 'lo52': '1,343.35',
# 'l': '1,756.54', 'vo': '624.29M', 's': '0', 'lt': 'Oct 31, 4:30PM EDT', 't': '.INX', 'hi52': '1,775.22',
# 'hi': '1,768.53', 'cp': '-0.38', 'type': 'Company', 'id': '626307', 'l_cur': '1,756.54', 'op': '1,763.24'}
# {'el': '22.56', 'eccol': 'chr', 'ec': '-0.01', 'vo': '33.24M', 'eps': '1.86', 'inst_own': '75%', 'cp': '0.02',
# CSCO
# 'id': '99624', 'hi52': '26.49', 'lo': '22.40', 'yld': '3.01', 'shares': '5.38B', 'avvo': '39.16M',
# 'lt': 'Nov 1, 4:00PM EDT', 'pe': '12.13', 'type': 'Company', 'elt': 'Nov 1, 4:43PM EDT',
# 'beta': '1.26', 'hi': '22.68', 'ecp': '-0.02', 'l_cur': '22.56', 'c': '+0.01', 'e': 'NASDAQ',
# 'name': 'Cisco Systems, Inc.', 'mc': '121.50B', 'lo52': '16.68', 'l': '22.57', 's': '2', 't': 'CSCO',
# 'el_cur': '22.56', 'div': '0.17', 'ccol': 'chg', 'op': '22.66'}
#The feed can return some or all of the following
#keys:
#
# avvo * Average volume (float with multiplier, like '3.54M')
# beta * Beta (float)
# c * Amount of change while open (float)
# ccol * (unknown) (chars)
# cl Last perc. change
# cp * Change perc. while open (float)
# e * Exchange (text, like 'NASDAQ')
# ec * After hours last change from close (float)
# eccol * (unknown) (chars)
# ecp * After hours last chage perc. from close (float)
# el * After. hours last quote (float)
# el_cur * (unknown) (float)
# elt After hours last quote time (unknown)
# eo * Exchange Open (0 or 1)
# eps * Earnings per share (float)
# fwpe Forward PE ratio (float)
# hi * Price high (float)
# hi52 * 52 weeks high (float)
# id * Company id (identifying number)
# l * Last value while open (float)
# l_cur * Last value at close (like 'l')
# lo * Price low (float)
# lo52 * 52 weeks low (float)
# lt Last value date/time
# ltt Last trade time (Same as "lt" without the data)
# mc * Market cap. (float with multiplier, like '123.45B')
# name * Company name (text)
# op * Open price (float)
# pe * PE ratio (float)
# t * Ticker (text)
# type * Type (i.e. 'Company')
# vo * Volume (float with multiplier, like '3.54M')
output = "{0} ({1})".format(self._bu(e['t']), self._bold(e['name'].replace('\u0027', "'").replace('\u0026', "&")))
if 'l' in e: # bold last.
output += " last: {0}".format(self._bold(e['l']))
if ('c' in e and 'cp' in e): # color percent changes.
output += u" {0} ({1})".format(self._colorify(e['c']), self._colorify(e['cp'], perc=True))
if ('lo' in e and 'hi' in e): # bold low and high daily ranges.
output += " Daily range:({0}-{1})".format(self._bold(e['lo']),self._bold(e['hi']))
if ('lo52' in e and e['lo52'] != "0" and 'hi52' in e and e['hi52'] != "0"):
output += " Yearly range:({0}-{1})".format(self._bold(e['lo52']),self._bold(e['hi52']))
if ('vo' in e and e['vo'] != "0"): # if we have volume, millify+orange.
output += " Volume: {0}".format(self._orange(e['vo']))
if 'lt' in e: # last trade.
output += " Last trade: {0}".format(self._blue(e['lt']))
# now return the string.
return output
###########################
# GOOGLE PUBLIC FUNCTIONS #
###########################
def googlequote(self, irc, msg, args, optsymbols):
"""<symbols>
Display's a quote from Google for a stock.
Can specify multiple stocks. Separate by a space.
Ex: GOOG AAPL (max 5)
"""
# make symbols upper, split on space or ,.
symbols = self._splitinput(optsymbols.upper(), [' ', ','])
for symbol in symbols[0:5]: # max 5.
output = self._googlequote(symbol)
if output: # if we get a quote back.
self._out(irc, msg, output)
else: # something went wrong looking up quote.
self._out(irc, msg, "ERROR fetching Google quote for: {0}. Unknown symbol?".format(symbol))
googlequote = wrap(googlequote, ['text'])
def intlindices(self, irc, msg, args):
"""
Displays international market indicies from various countries outside the US.
"""
indices = ['HSI', 'SX5E', 'PX1', 'OSPTX', 'SENSEX', 'XJO', 'UKX', 'NI225', '000001']
for index in indices: # iterate through quotes above.
output = self._googlequote(index)
if output: # if we get a quote back.
self._out(irc, msg, output)
else: # if something breaks.
self._out(irc, msg, "ERROR fetching Google quote for: {0}".format(index))
intlindices = wrap(intlindices)
def indices(self, irc, msg, args):
"""
Displays the three major indices for the US Stock Market:
Dow Jones Industrial Average, NASDAQ, and S&P500
"""
indices = ['.DJI', '.IXIC', '.INX']
for index in indices: # iterate through quotes above.
output = self._googlequote(index)
if output: # if we get a quote back.
self._out(irc, msg, output)
else: # if something breaks.
self._out(irc, msg, "ERROR fetching Google quote for: {0}".format(index))
indices = wrap(indices)
###################
# YAHOO FUNCTIONS #
###################
def _yqlquery(self, query):
"""Perform a YQL query for stock quote and return."""
# base params.
YQL_URL = "http://query.yahooapis.com/v1/public/yql?"
YQL_PARAMS = {"q":query,
"format":"json",
"env":"store://datatables.org/alltableswithkeys"}
# build and fetch url.
url = YQL_URL + utils.web.urlencode(YQL_PARAMS)
html = self._httpget(url)
if not html: # something broke.
self.log.error("_yqlquery: Failed on YQLQuery for {0}".format(query))
return None
else: # return YQL query.
return html.decode('utf-8')
def _yahoocurrency(self, symbol):
"""Internal Yahoo Currency function that wraps YQL."""
# execute YQL and return.
result = self._yqlquery("SELECT * from yahoo.finance.xchange where pair = '%s'" % symbol)
if not result: # returns None from yqlquery.
self.log.error("_yahoocurrency: Failed on YQLQuery for {0}".format(symbol))
return None
# Try and load json. Do some checking. first check count.
data = json.loads(result)
if data['query']['count'] == 0:
self.log.error("_yahoocurrency: ERROR: Yahoo Quote count 0 executing on {0}".format(symbol))
self.log.error("_yahoocurrency: data :: {0}".format(data))
return None
result = data['query']['results']['rate'] # simplify dict
# make sure symbol is valid
if result['Rate'] == "0.00":
self.log.error("_yahoocurrency: ERROR looking up currency {0}".format(symbol))
return None
# now that all is good, process results into dict for output.
e = {}
for each in result:
e[each] = result.get(each)
# now that we have a working currency translation, create output string.
dt = "{0} {1}".format(e['Date'], e['Time']) # create dtstring
dt = self._timedelta(dt) # convert to relative time.
output = "{0} :: Rate: {1} | Bid: {2} | Ask: {3} | {4}".format(self._red(e['Name']), self._bold(e['Rate']), self._bold(e['Bid']), self._bold(e['Ask']), dt)
# return.
return output.encode('utf-8')
def _timedelta(self, date):
"""Take a last trade date from Yahoo and return a relative date."""
dt = datetime.datetime.strptime(date, '%m/%d/%Y %H:%M%p') # 7/31/2013 5:55pm
dt = pytz.timezone("US/Eastern").localize(dt) # all times in "Eastern"
utc_dt = pytz.utc.normalize(dt.astimezone(pytz.utc)) # go to UTC.
# do the math.
d = datetime.datetime.utcnow().replace(tzinfo = pytz.utc) - utc_dt
# now parse and return.
if d.days:
rel_time = "%sd ago" % d.days
elif d.seconds > 3600:
rel_time = "%sh ago" % (d.seconds / 3600)
elif 60 <= d.seconds < 3600:
rel_time = "%sm ago" % (d.seconds / 60)
else:
rel_time = "%ss ago" % (d.seconds)
return rel_time
def _yahooquote(self, symbol):
"""Internal Yahoo Quote function that wraps YQL."""
# execute YQL and return.
result = self._yqlquery("SELECT * FROM yahoo.finance.quotes where symbol ='%s'" % symbol)
if not result: # returns None from yqlquery.
self.log.error("_yahooquote: Failed on YQLQuery for {0}".format(symbol))
return None
# Try and load json. Do some checking. first check count.
data = json.loads(result)
if data['query']['count'] == 0:
self.log.error("_yahooquote: ERROR: Yahoo Quote count 0 executing on {0}".format(symbol))
self.log.error("_yahooquote: data :: {0}".format(data))
return None
result = data['query']['results']['quote'] # simplify dict
# make sure symbol is valid
if result['ErrorIndicationreturnedforsymbolchangedinvalid']:
self.log.error("_yahooquote: ERROR looking up Yahoo symbol {0}".format(symbol))
return None
# now that all is good, process results into dict for output.
e = {}
for each in result:
e[each] = result.get(each)
# now that we have a working symbol, we'll need conditionals per.
output = "{0} ({1})".format(self._bu(e['symbol']), self._bold(e['Name']))
if e['LastTradePriceOnly']:
output += " last: {0}".format(self._bold(e['LastTradePriceOnly']))
if e['Change'] and e['ChangeinPercent']:
output += u" {0} ({1})".format(self._colorify(e['Change']), self._colorify(e['ChangeinPercent'], perc=True))
if e['DaysLow'] and e['DaysHigh'] and e['DaysLow'] != "0.00" and e['DaysHigh'] != "0.00":
output += " Daily range:({0}-{1})".format(self._bold(e['DaysLow']), self._bold(e['DaysHigh']))
if e['YearLow'] and e['YearHigh'] and e['YearLow'] != "0.00" and e['YearHigh'] != "0.00":
output += " Yearly range:({0}-{1})".format(self._bold(e['YearLow']),self._bold(e['YearHigh']))
if e['Volume'] and e['Volume'] != "0":
output += " Volume: {0}".format(self._orange(self._millify(float(e['Volume']))))
if e['MarketCapitalization']:
output += " MarketCap: {0}".format(self._blue(e['MarketCapitalization']))
if e['PERatio']:
output += " P/E: {0}".format(self._teal(e['PERatio']))
if e['LastTradeDate'] and e['LastTradeTime']:
timestamp = "{0} {1}".format(e['LastTradeDate'], e['LastTradeTime'])
timestamp = self._timedelta(timestamp) # convert to relative time.
output += " Last trade: {0}".format(self._blue(timestamp))
# now return the string.
return output.encode('utf-8')
##########################
# YAHOO PUBLIC FUNCTIONS #
##########################
def yahooquote(self, irc, msg, args, optsymbols):
"""<symbols>
Display's a quote from Yahoo for a stock.
Can specify multiple stocks. Separate by a space.
Ex: GOOG AAPL (max 5)
"""
# make symbols upper, split on space or ,.
symbols = self._splitinput(optsymbols.upper(), [' ', ','])
for symbol in symbols[0:5]: # limit on 5.
output = self._yahooquote(symbol)
if output: # if we have output.
self._out(irc, msg, output)
else: # if we don't have output.
self._out(irc, msg, "ERROR fetching Yahoo quote for: {0}".format(symbol))
yahooquote = wrap(yahooquote, ['text'])
def bonds(self, irc, msg, args):
"""
Displays the US Treasury Bonds Rates.
"""
indices = ['^FVX', '^TNX', '^TYX']
for index in indices: # iterate through quotes above.
output = self._yahooquote(index)
if output: # if we get a quote back.
self._out(irc, msg, output)
else: # if something breaks.
self._out(irc, msg, "ERROR fetching Google quote for: {0}".format(index))
bonds = wrap(bonds)
def currency(self, irc, msg, args, optsymbols):
"""<symbols>
Display's a quote from Yahoo for a currency.
Can specify multiple currencies. Separate by a space.
Ex: USDCAD GBPUSD (max 5)
"""
# http://openexchangerates.org/api/currencies.json
# make symbols upper, split on space or ,.
symbols = self._splitinput(optsymbols.upper(), [' ', ','])
for symbol in symbols[0:5]: # limit on 5.
output = self._yahoocurrency(symbol)
if output: # if we have output.
self._out(irc, msg, output)
else: # if we don't have output.
self._out(irc, msg, "ERROR fetching Yahoo currency for: {0}".format(symbol))
currency = wrap(currency, ['text'])
#########################
# PUBLIC STOCK FRONTEND #
#########################
def stock(self, irc, msg, args, optsymbols):
"""<ticker symbol(s)>
Returns stock information about <ticker>.
Separate multiple SYMBOLs by spaces (Max 5).
Ex: GOOG AAPL (max 5)
"""
# make a list of symbols after splitting on space or ,.
symbols = self._splitinput(optsymbols.upper(), [' ', ','])
# process each symbol.
for symbol in symbols[0:5]: # enforce max 5.
output = self._googlequote(symbol) # try google fetch first.
if not output: # if we don't, try yahoo.
output = self._yahooquote(symbol)
if not output: # if not yahoo, report error.
self._out(irc, msg, "ERROR: I could not fetch a quote for: {0}. Check that the symbol is correct.".format(symbol))
continue
# we'll be here if one of the quotes works. output.
self._out(irc, msg, output)
stock = wrap(stock, [('text')])
##########################################
# FUTURES CONTRACTS INTERNAL/PUBLIC FUNC #
# USES YAHOOQUOTE AFTER FIGURING OUT SYM #
##########################################
def _futuresymbol(self, symbol):
"""This is a horribly inaccurate calculation method to determine the precise
ticker symbol for a futures contract."""
# k,v - symbol [prefix + exchange.]
table = {'oil':['CL', 'NYM'],
'gold':['GC', 'CMX'],
'palladium':['PA', 'NYM'],
'platinum':['PL','NYM'],
'silver':['SI','CMX'],
'copper':['HG','CMX'],
'corn':['C', 'CBT'],
'oats':['O', 'CBT'],
'rice':['RR', 'CBT'],
'sbmeal':['SM', 'CBT'],
'sboil':['BO', 'CBT'],
'soybeans':['S', 'CBT']}
# letter codes for months in oil/metals
months = {'1':'F', '2':'G', '3':'H', '4':'J',
'5':'K', '6':'M', '7':'N', '8':'Q',
'9':'U', '10':'V', '11':'X', '12':'Z'}
# now.
now = datetime.datetime.now()
# different calc, depending on the symbol.
if symbol == "oil":
if now.day > 20: # 21st and on.
mon = now + datetime.timedelta(days=40)
else: # 20th and before.
mon = now + datetime.timedelta(days=30)
# CONSTRUCT SYMBOL: table prefix + month code (letter) + YR + exchange suffix.
contract = "{0}{1}{2}.{3}".format(table[symbol][0], months[str(mon.month)], mon.strftime("%y"), table[symbol][1])
# palladium, copper, platinum, silver, palladium
elif symbol in ['gold', 'silver', 'palladium', 'platinum', 'copper']:
if now.day > 25: # past 26th of the month.
mon = now + datetime.timedelta(days=30)
else:
mon = now
# CONSTRUCT SYMBOL: table prefix + month code (letter) + YR + exchange suffix.
contract = "{0}{1}{2}.{3}".format(table[symbol][0], months[str(mon.month)], mon.strftime("%y"), table[symbol][1])
# grains but only corn, oats, rice.
elif symbol in ['corn', 'oats', 'rice']:
# we have a specific table for these grains
months = {'1':'H', '2':'H', '3':'H', '4':'K',
'5':'K', '6':'N', '7':'N', '8':'U',
'9':'U', '10':'Z', '11':'Z', '12':'Z'}
if now.day > 13: # past 26th of the month.
mon = now + datetime.timedelta(days=30)
else:
mon = now
# CONSTRUCT SYMBOL: table prefix + month code (letter) + YR + exchange suffix.
contract = "{0}{1}{2}.{3}".format(table[symbol][0], months[str(mon.month)], mon.strftime("%y"), table[symbol][1])
# soybeans + associated.
elif symbol in ['sbmeal', 'soybeans', 'sboil']:
months = {'1':'F', '2':'H', '3':'H', '4':'K',
'5':'K', '6':'N', '7':'N', '8':'Q',
'9':'U', '10':'X', '11':'X', '12':'Z'}
if now.day > 13: # past 26th of the month.
mon = now + datetime.timedelta(days=30)
else:
mon = now
contract = "{0}{1}{2}.{3}".format(table[symbol][0], months[str(mon.month)], mon.strftime("%y"), table[symbol][1])
# finally, return contract.
return contract
def grains(self, irc, msg, args):
"""
Display the latest quote for grains (corn, oats, rice).
"""
for symbol in ['corn', 'oats', 'rice', 'sbmeal', 'sboil', 'soybeans']:
symbol = self._futuresymbol(symbol) # grab the proper symbol.
output = self._yahooquote(symbol)
if not output: # if not yahoo, report error.
self._out(irc, msg, "ERROR: I could not fetch a quote for: {0}. Check that the symbol is correct.".format(symbol))
else:
self._out(irc, msg, output)
grains = wrap(grains)
def oil(self, irc, msg, args):
"""
Display the latest quote for Light Sweet Crude Oil (WTI).
"""
symbol = self._futuresymbol('oil') # get oil symbol.
output = self._yahooquote(symbol)
if not output: # if not yahoo, report error.
self._out(irc, msg, "ERROR: I could not fetch a quote for: {0}. Check that the symbol is correct.".format(symbol))
else:
self._out(irc, msg, output)
oil = wrap(oil)
def gold(self, irc, msg, args):
"""
Display the latest quote for Gold Futures.
"""
symbol = self._futuresymbol('gold') # get gold symbol.
output = self._yahooquote(symbol)
if not output: # if not yahoo, report error.
self._out(irc, msg, "ERROR: I could not fetch a quote for: {0}. Check that the symbol is correct.".format(symbol))
else:
self._out(irc, msg, output)
gold = wrap(gold)
def metals(self, irc, msg, args):
"""
Display the latest quote for metals (gold, silver, palladium, platinum, copper).
"""
# do all metals @ once.
for symbol in ['gold', 'silver', 'palladium', 'platinum', 'copper']:
symbol = self._futuresymbol(symbol)
output = self._yahooquote(symbol)
if not output: # if not yahoo, report error.
self._out(irc, msg, "ERROR: I could not fetch a quote for: {0}. Check that the symbol is correct.".format(symbol))
else:
self._out(irc, msg, output)
metals = wrap(metals)
###########################################################
# MISC FINANCIAL FUNCTIONS FOR SYMBOL SEARCH/COMPANY NEWS #
###########################################################
def _companylookup(self, optinput):
"""Internal function to lookup company ticker symbols."""
# construct url
url = "http://d.yimg.com/autoc.finance.yahoo.com/autoc?query=%s" % utils.web.urlquote(optinput)
url += "&callback=YAHOO.Finance.SymbolSuggest.ssCallback"
# try and fetch json.
html = self._httpget(url)
if not html: # something broke.
self.log.error("_companylookup: failed to get URL.")
return None
# decode
html = html.decode('utf-8')
# we need to mangle the JSONP into JSON here.
html = html.replace("YAHOO.Finance.SymbolSuggest.ssCallback(", "").replace(")", "")
# make sure the JSON is proper, otherwise return None.
data = json.loads(html)
results = data["ResultSet"]["Result"]
if len (results) == 0: # if we have no results, err.
return None
else: # otherwise, return results. It's a list of dicts.
# {u'typeDisp': u'Equity', u'name': u'Alcoa Inc.', u'symbol': u'AA', u'exchDisp': u'NYSE', u'type': u'S', u'exch': u'NYQ'}
return results
def symbolsearch(self, irc, msg, args, optinput):
"""<company name>
Search for a stock symbol given company name.
Ex: Google
"""
results = self._companylookup(optinput)
if not results: # if we don't have any results.
self._out(irc, msg, "ERROR: I did not find any symbols for: {0}".format(optinput))
return
# container for out.
output = []
# now iterate over and output each symbol/result.
for r in results[0:5]:
symbol = r.get('symbol').encode('utf-8')
typeDisp = r.get('typeDisp').encode('utf-8')
exch = r.get('exch').encode('utf-8')
name = r.get('name').encode('utf-8')
if symbol and typeDisp and exch and name: # have to have all. display in a table.
#output.append("{0:15} {1:12} {2:5} {3:40}".format(symbol, typeDisp, exch, name))
output.append("{0} ({1}) {2} {3}".format(symbol, typeDisp, exch, name))
# output as single line
self._out(irc, msg, " | ".join(output))
symbolsearch = wrap(symbolsearch, ['text'])
Class = Stock
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=250:
| andrewtryder/Stock | plugin.py | plugin.py | py | 29,652 | python | en | code | 1 | github-code | 13 |
43551390739 | from application import app
import database
from flask import request, jsonify
from urllib.parse import urlencode
def fetch_products_query(search_q, page, count, store=None):
if page.isnumeric() and page != "0":
page = int(page)
else:
raise ValueError("Invalid value for param page!")
if count.isnumeric() and count != "0":
count = int(count)
else:
raise ValueError("Invalid value for param count!")
offset = (page - 1) * count
db = database.Pgsql()
if search_q is not None:
like_pattern = "%{}%".format(search_q)
db.query(
"select count(id) from product where product_name ILIKE %s", (like_pattern,)
)
total = db.cur.fetchone()[0]
if offset > total:
raise ValueError("Out of range value for param page")
db.query(
"select id,product_name,price from product where product_name ILIKE %s order by id asc limit %s offset %s",
(like_pattern, count, offset),
)
else:
store_join = ''
args_total = []
args = [count, offset]
if store is not None:
store_join = " JOIN store_product on store_product.product_id=product.id WHERE store_product.store_id=%s"
args_total = [store]
args = [store, count, offset]
db.query("select count(id) from product" + store_join, args_total)
total = db.cur.fetchone()[0]
if offset > total:
raise ValueError("Out of range value for param page")
db.query(
"select id,product_name,price from product" + store_join + " order by id asc limit %s offset %s",
args,
)
row_count = db.cur.rowcount
res = [
{"id": col1, "product_name": col2, "price": col3}
for (col1, col2, col3) in db.cur.fetchall()
]
db.close
response = {"total": total, "page": page, "products": res}
if total > row_count + offset:
args = request.args.to_dict()
args["page"] = page + 1
response["next"] = request.path + "?" + urlencode(args)
return response
# Routes
@app.get("/api/products/")
def get_products():
page = request.args.get("page", "1")
count = request.args.get("count", "10")
search_q = request.args.get("search")
if search_q is not None and len(search_q) < 3:
raise InvalidAPIUsage("Please enter at least 3 characters!")
try:
return fetch_products_query(search_q, page, count)
except ValueError as e:
raise InvalidAPIUsage(str(e))
@app.get("/api/store/<int:store_id>/products/")
def get_Store_products(store_id):
page = request.args.get("page", "1")
count = request.args.get("count", "10")
try:
return fetch_products_query(None, page, count, store_id)
except ValueError as e:
raise InvalidAPIUsage(str(e))
@app.route("/api/product/<int:id>/", methods=["PATCH"])
def update_product(id):
user_id = request.headers.get("X-User-Id")
if user_id is not None:
body = request.get_json()
if "price" in body:
price = body["price"]
if type(price) == int and price > 0:
price = int(price)
db = database.Pgsql()
db.query(
"UPDATE product SET price = %s WHERE id = %s RETURNING id, product_name, price",
(price, id),
)
res = db.cur.fetchone()
if res is not None:
return {
"product": {
"id": res[0],
"product_name": res[1],
"price": res[2],
}
}
else:
raise InvalidAPIUsage("Product not found!", 404)
else:
raise InvalidAPIUsage("Incorrect format for param price!")
else:
raise InvalidAPIUsage("Missing required param price!")
else:
raise InvalidAPIUsage("Authorization missing for user!", 401)
class InvalidAPIUsage(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
super().__init__()
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv["message"] = self.message
return rv
@app.errorhandler(InvalidAPIUsage)
def invalid_api_usage(e):
return jsonify(e.to_dict()), e.status_code
| gianani/ecom_api | routes.py | routes.py | py | 4,621 | python | en | code | 0 | github-code | 13 |
73645013137 |
import string
from string import punctuation
import tweepy
from tweepy import OAuthHandler
from tweepy import Stream
import json
import matplotlib.pyplot as plt
consumer_key = 'i2Eego1GgNga1ND3Oxpq2wwxm'
consumer_secret = '2bNZvOqlg7MvqQeCsqP7Ma64Gh77xCvdmlNI5Th6SmfxLELQQu'
access_token = '1373172530-UqKl5faRYzTWuYnC6TcocHMOgUhbzS2cjVZH59M'
access_secret = 'P9JV8cJl36RpA2MA7GEGz73kMO8kTApPyLXaFYcCMPVJi'
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
class Text_Analyzer(object):
    """Analyze a file of line-delimited JSON tweets.

    Counts word frequencies across the tweet texts and hashtag-topic
    frequencies across the tweets, prints both rankings, and renders a
    bar chart for each. Relies on module-level imports (defined above
    this chunk): ``json``, ``punctuation`` (from ``string``) and ``plt``
    (matplotlib.pyplot).
    """

    def __init__(self, input_file):
        # Path to a file containing one JSON-encoded tweet per line.
        self.input_file = input_file

    def analyze(self):
        """Run the full word/topic analysis.

        Returns a dict with keys ``words``, ``word_counts``, ``topics``
        and ``topic_counts`` (tuples, sorted by descending count).
        The original returned ``None`` even though the caller assigned
        the result; returning the computed data fixes that.
        """
        tweets = []
        tweettext_content = ""
        with open(self.input_file, 'r') as f:
            for line in f:
                tweet = json.loads(line)
                tweettext_content += tweet.get("text").rstrip("\n")
                tweets.append(tweet)
        tweettext_content_list = tweettext_content.split()

        # Round-trip all tweets through a single JSON file. Use context
        # managers so the handles are closed (the original leaked both).
        with open("all_tweets.json", 'w') as out:
            json.dump(tweets, out)
        with open("all_tweets.json", 'r') as inp:
            tweets = json.load(inp)

        # Count normalized (lower-cased, punctuation-stripped) words.
        count_tweettext = {}
        for word in tweettext_content_list:
            word = word.lower().strip(punctuation)
            if word == "":
                continue
            count_tweettext[word] = count_tweettext.get(word, 0) + 1
        tweettext_result = sorted(count_tweettext.items(), key=lambda item: -item[1])
        top_50_words = tweettext_result[0:50]
        words, counts = zip(*top_50_words)
        print(words, counts)

        # Count how many tweets mention each hashtag topic; the set()
        # ensures a topic is counted at most once per tweet.
        count_per_topic = {}
        for t in tweets:
            if "entities" in t and "hashtags" in t["entities"]:
                topics = set(h["text"].lower() for h in t["entities"]["hashtags"])
                for topic in topics:
                    count_per_topic[topic] = count_per_topic.get(topic, 0) + 1
        sorted_topics = sorted(count_per_topic.items(), key=lambda item: -item[1])

        # Bar chart of the top-50 word counts.
        x_pos = range(len(words))
        plt.bar(x_pos, counts)
        plt.xticks(x_pos, words)
        plt.ylabel('Count of Text of Tweet')
        plt.title('Count of Text per Word')
        plt.xticks(rotation=90)
        plt.show()

        topics, topic_counts = zip(*sorted_topics)
        print(topics, topic_counts)
        # Bar chart of tweet counts per topic.
        x_pos = range(len(topics))
        plt.bar(x_pos, topic_counts)
        plt.xticks(x_pos, topics)
        plt.ylabel('Count of Tweets')
        plt.title('Count of Tweets per Topic')
        plt.xticks(rotation=90)
        plt.show()

        return {
            'words': words,
            'word_counts': counts,
            'topics': topics,
            'topic_counts': topic_counts,
        }
if __name__ == "__main__":
    # Drive the whole pipeline (counting, printing, plotting) over the
    # locally collected tweet dump.
    analyzer=Text_Analyzer("python.txt")
    vocabulary=analyzer.analyze()
| haoweichen/Web-Scraping-2 | Assignment3.py | Assignment3.py | py | 3,940 | python | en | code | 0 | github-code | 13 |
28171702911 | # Create your views here.
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.views import generic
from django.utils import timezone
from django.http import Http404
from polls.models import Choice, Poll
class IndexView(generic.ListView):
    """Landing page listing the five most recently published polls."""

    template_name = 'polls/index.html'
    context_object_name = 'latest_poll_list'

    def get_queryset(self):
        """Return the last five published polls, excluding any poll whose
        publication date lies in the future."""
        published = Poll.objects.filter(pub_date__lte=timezone.now())
        return published.order_by('-pub_date')[:5]
def index(request):
    """Function-based index page: newest five polls, most recent first."""
    newest_polls = Poll.objects.all().order_by('-pub_date')[:5]
    return render(request, 'polls/index.html',
                  {'latest_poll_list': newest_polls})
class DetailView(generic.DetailView):
    """Class-based detail page for a single poll."""

    model = Poll
    template_name = 'polls/detail.html'

    def get_queryset(self):
        """Hide polls that are not published yet."""
        now = timezone.now()
        return Poll.objects.filter(pub_date__lte=now)
class ResultsView(generic.DetailView):
    """Class-based results page for a single poll."""
    model = Poll
    template_name = 'polls/results.html'
class PollCreationView(generic.TemplateView):
    """Static page that hosts the poll-creation form."""
    template_name = 'polls/poll_creation.html'
def results(request, poll_id):
    """Show the vote tallies for one poll; 404 when it does not exist."""
    selected = get_object_or_404(Poll, pk=poll_id)
    return render(request, 'polls/results.html', {'poll': selected})
def detail(request, poll_id):
    """Function-based detail view for a single poll.

    Raises Http404 when the poll does not exist. The original also ran
    ``today = datetime.datetime.now()`` — but ``datetime`` is never
    imported in this module and the variable was unused, so that line was
    a guaranteed NameError; it is removed.
    """
    try:
        poll = Poll.objects.get(pk=poll_id)
    except Poll.DoesNotExist:
        raise Http404
    return render(request, 'polls/detail.html', {'poll': poll})
def vote(request, poll_id):
    """Record a vote for the choice selected in the POST data."""
    poll = get_object_or_404(Poll, pk=poll_id)
    try:
        choice = poll.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        # No valid choice was submitted: show the form again with an error.
        return render(request, 'polls/detail.html', {
            'poll': poll,
            'error_message': "You didn't select a choice.",
        })
    else:
        choice.votes += 1
        choice.save()
        # Redirect after a successful POST so a browser reload or Back
        # button cannot double-submit the vote.
        return HttpResponseRedirect(reverse('polls:results', args=(poll.id,)))
def submit_poll(request):
    """Validate the poll-creation form and create the Poll.

    Expects POST fields ``question``, ``num_choices`` and ``end_date``.
    On success, redirects to the option-creation view for the new poll;
    on validation failure, re-renders the creation form with a list of
    error messages.

    Bug fixes vs. the original:
    - the ``end_date`` check was nested inside the question/number branch,
      so a missing end date was silently accepted whenever question and
      number were both present;
    - ``int(request.POST['num_choices'])`` ran before any validation and
      could raise KeyError/ValueError instead of showing an error.
    """
    question = request.POST.get('question', '')
    num_choices_raw = request.POST.get('num_choices', '')
    end_date = request.POST.get('end_date', '')

    errors = []
    if not question:
        errors.append("You didn't put a question")
    number = None
    if not num_choices_raw:
        errors.append("You didn't specify a number of options")
    else:
        try:
            number = int(num_choices_raw)
        except ValueError:
            errors.append("You didn't specify a number of options")
    if not end_date:
        errors.append("You didn't specify an end date for the poll")

    if errors:
        return render(request, 'polls/poll_creation.html', {
            'error_message': errors
        })

    poll = Poll()
    poll.question = question
    poll.pub_date = timezone.now()
    poll.start_date = timezone.now()
    poll.end_date = end_date
    poll.save()
    return HttpResponseRedirect(
        reverse('polls:option_creation', args=(number, poll.id)))
def create_options(request, number, poll_id):
    """Render the form that collects ``number`` choice texts for a poll."""
    poll = get_object_or_404(Poll, pk=poll_id)
    context = {
        'poll': poll,
        'number': range(int(number)),
    }
    return render(request, "polls/option_creation.html", context)
def submit_options(request):
    """Create the Choice objects posted from the option-creation form.

    ``option`` may arrive as a single string (exactly one option) or as a
    list of strings. Both cases are normalized to a list so the creation
    logic exists only once (the original duplicated it in two branches).
    """
    options = dict(request.POST)['option']
    poll_id = request.POST['poll_id']
    poll = get_object_or_404(Poll, pk=int(poll_id))
    # ``basestring``: this module targets Python 2 (it also imports from
    # django.core.urlresolvers, removed in later Django/Python 3 stacks).
    if isinstance(options, basestring):
        options = [options]
    for text in options:
        choice = Choice()
        choice.poll = poll
        choice.choice_text = text
        choice.votes = 0
        choice.save()
    return HttpResponseRedirect(reverse('polls:index'))
| lydiaxmm/Module5 | mysite/polls/views.py | views.py | py | 4,082 | python | en | code | 0 | github-code | 13 |
20294713204 | from coincurve import PublicKey, PrivateKey
from raiden_libs import utils
import binascii
import time
import json
# Sample telemetry points with timestamps counting up toward "now";
# lat/lon and temperature are split into two integer fields (val1/val2).
# NOTE(review): the name and coordinates suggest a trace along Berlin's
# Ringbahn — confirm with the consuming API before relying on that.
trip_along_ringbahn_raw = [
    {'timestamp': int(time.time()) - 10000, 'lat': {'val1': 52, 'val2': 536131},
     'lon': {'val1': 13, 'val2': 447444}, 'temperature': {"val1": 19, "val2": 0}},
    {'timestamp': int(time.time()) - 9000, 'lat': {'val1': 52, 'val2': 538221},
     'lon': {'val1': 13, 'val2': 44376}, 'temperature': {"val1": 20, "val2": 0}},
    {'timestamp': int(time.time()) - 8000, 'lat': {'val1': 52, 'val2': 540247},
     'lon': {'val1': 13, 'val2': 43899}, 'temperature': {"val1": 21, "val2": 0}},
    {'timestamp': int(time.time()) - 7000, 'lat': {'val1': 52, 'val2': 541751},
     'lon': {'val1': 13, 'val2': 43513}, 'temperature': {"val1": 22, "val2": 0}},
    {'timestamp': int(time.time()) - 6000, 'lat': {'val1': 52, 'val2': 54264},
     'lon': {'val1': 13, 'val2': 433692}, 'temperature': {"val1": 23, "val2": 0}},
    {'timestamp': int(time.time()) - 5000, 'lat': {'val1': 52, 'val2': 543007},
     'lon': {'val1': 13, 'val2': 431339}, 'temperature': {"val1": 24, "val2": 0}},
    {'timestamp': int(time.time()) - 4000, 'lat': {'val1': 52, 'val2': 543755},
     'lon': {'val1': 13, 'val2': 428731}, 'temperature': {"val1": 25, "val2": 0}},
    {'timestamp': int(time.time()) - 3000, 'lat': {'val1': 52, 'val2': 544295},
     'lon': {'val1': 13, 'val2': 427207}, 'temperature': {"val1": 27, "val2": 0}},
]
private_key = PrivateKey()
trip_along_ringbahn = []
# Sign each point's JSON payload, collect (payload, signature) pairs, and
# print a curl command that can replay the signed point against the API.
for point in trip_along_ringbahn_raw:
    payload = json.dumps(point)
    sig_hex = utils.sign(private_key, payload.encode()).hex().upper()
    trip_along_ringbahn.append((payload, sig_hex))
    print('curl http://localhost:5000/data -X POST --header "X-Anyledger-Sig: {}" --header "Content-Type: application/json" -d \'{}\''.format(sig_hex, payload))
| Anylsite/anyledger-backend | signtest.py | signtest.py | py | 1,846 | python | en | code | 1 | github-code | 13 |
37314918864 | """Config module."""
import logging
from pathlib import Path
# Log to a file in the working directory: INFO level, timestamped lines.
logging.basicConfig(
    filename="execution_logs.log",
    level=logging.INFO,
    format="%(asctime)s - %(message)s",
)
# Snapshot date (YYYYMMDD) used to name the per-run data directory.
DATE = "20231203"
# Repository root (this file lives one level below it).
ROOT_DIR = Path(__file__).parents[1]
DATA_DIR = ROOT_DIR / "data"
# Per-snapshot layout: data/<DATE>/{html/{google,boxes}, csv}
DATE_DIR = ROOT_DIR / "data" / DATE
HTML_DIR = DATE_DIR / "html"
GOOGLE_HTML_DIR = HTML_DIR / "google"
BOX_HTML_DIR = HTML_DIR / "boxes"
RAW_CSV_DIR = DATE_DIR / "csv"
# CSS selectors for scraping result boxes; the ``::text`` suffix suggests a
# Scrapy/parsel-style selector consumer — TODO confirm against the spiders.
BOX_INFO = "div[class='VkpGBb']"
BOX_NAME = "span[class='OSrXXb']::text"
BOX_URL = "a[class='yYlJEf Q7PwXb L48Cpd brKmxb']"
4547620077 | import os
import copy
import glob
import pickle
import sys
import tensorflow as tf
import numpy as np
from ray import tune
from softlearning.environments.utils import get_goal_example_environment_from_variant
from softlearning.algorithms.utils import get_algorithm_from_variant
from softlearning.policies.utils import get_policy_from_variant, get_policy
from softlearning.replay_pools.utils import get_replay_pool_from_variant
from softlearning.samplers.utils import get_sampler_from_variant
from softlearning.value_functions.utils import get_Q_function_from_variant
from softlearning.models.utils import get_reward_classifier_from_variant
from softlearning.misc.generate_goal_examples import get_goal_example_from_variant
from softlearning.misc.utils import set_seed, initialize_tf_variables
from examples.instrument import run_example_local
from examples.development.main import ExperimentRunner
class ExperimentRunnerClassifierRL(ExperimentRunner):
    """ExperimentRunner that supports classifier-based reward learning.

    Extends the base runner's build/restore logic so that algorithms which
    learn a reward classifier also receive the classifier network and the
    goal example sets.
    """

    # Algorithm types that need a reward classifier plus goal examples.
    # (The original repeated this list inline in _build and _restore.)
    CLASSIFIER_ALGORITHMS = (
        'SACClassifier', 'RAQ', 'VICE', 'VICEGAN', 'VICERAQ')

    def _build(self):
        """Construct environments, pool, sampler, networks and algorithm."""
        variant = copy.deepcopy(self._variant)

        training_environment = self.training_environment = (
            get_goal_example_environment_from_variant(variant))
        evaluation_environment = self.evaluation_environment = (
            get_goal_example_environment_from_variant(variant))

        replay_pool = self.replay_pool = (
            get_replay_pool_from_variant(variant, training_environment))
        sampler = self.sampler = get_sampler_from_variant(variant)
        Qs = self.Qs = get_Q_function_from_variant(variant, training_environment)
        policy = self.policy = get_policy_from_variant(variant, training_environment, Qs)
        initial_exploration_policy = self.initial_exploration_policy = (
            get_policy('UniformPolicy', training_environment))

        algorithm_kwargs = {
            'variant': self._variant,
            'training_environment': self.training_environment,
            'evaluation_environment': self.evaluation_environment,
            'policy': policy,
            'initial_exploration_policy': initial_exploration_policy,
            'Qs': Qs,
            'pool': replay_pool,
            'sampler': sampler,
            'session': self._session,
        }

        if self._variant['algorithm_params']['type'] in self.CLASSIFIER_ALGORITHMS:
            reward_classifier = self.reward_classifier \
                = get_reward_classifier_from_variant(self._variant, training_environment)
            algorithm_kwargs['classifier'] = reward_classifier

            goal_examples_train, goal_examples_validation = \
                get_goal_example_from_variant(variant)
            algorithm_kwargs['goal_examples'] = goal_examples_train
            algorithm_kwargs['goal_examples_validation'] = \
                goal_examples_validation

        self.algorithm = get_algorithm_from_variant(**algorithm_kwargs)

        initialize_tf_variables(self._session, only_uninitialized=True)

        self._built = True

    def _restore(self, checkpoint_dir):
        """Rebuild the runner state from a checkpoint directory."""
        assert isinstance(checkpoint_dir, str), checkpoint_dir

        checkpoint_dir = checkpoint_dir.rstrip('/')

        with self._session.as_default():
            pickle_path = self._pickle_path(checkpoint_dir)
            with open(pickle_path, 'rb') as f:
                picklable = pickle.load(f)

        training_environment = self.training_environment = picklable[
            'training_environment']
        evaluation_environment = self.evaluation_environment = picklable[
            'evaluation_environment']

        replay_pool = self.replay_pool = (
            get_replay_pool_from_variant(self._variant, training_environment))

        if self._variant['run_params'].get('checkpoint_replay_pool', False):
            self._restore_replay_pool(checkpoint_dir)

        sampler = self.sampler = picklable['sampler']
        Qs = self.Qs = picklable['Qs']
        # The policy object is rebuilt from the variant; only its weights
        # come from the checkpoint.
        policy = self.policy = (
            get_policy_from_variant(self._variant, training_environment, Qs))
        self.policy.set_weights(picklable['policy_weights'])
        initial_exploration_policy = self.initial_exploration_policy = (
            get_policy('UniformPolicy', training_environment))

        algorithm_kwargs = {
            'variant': self._variant,
            'training_environment': self.training_environment,
            'evaluation_environment': self.evaluation_environment,
            'policy': policy,
            'initial_exploration_policy': initial_exploration_policy,
            'Qs': Qs,
            'pool': replay_pool,
            'sampler': sampler,
            'session': self._session,
        }

        if self._variant['algorithm_params']['type'] in self.CLASSIFIER_ALGORITHMS:
            reward_classifier = self.reward_classifier = picklable['reward_classifier']
            algorithm_kwargs['classifier'] = reward_classifier

            # Bug fix: the original referenced the undefined name ``variant``
            # here (it is only defined in _build), so restoring any
            # classifier-based run raised NameError.
            goal_examples_train, goal_examples_validation = \
                get_goal_example_from_variant(self._variant)
            algorithm_kwargs['goal_examples'] = goal_examples_train
            algorithm_kwargs['goal_examples_validation'] = \
                goal_examples_validation

        self.algorithm = get_algorithm_from_variant(**algorithm_kwargs)
        self.algorithm.__setstate__(picklable['algorithm'].__getstate__())

        tf_checkpoint = self._get_tf_checkpoint()
        status = tf_checkpoint.restore(tf.train.latest_checkpoint(
            os.path.split(self._tf_checkpoint_prefix(checkpoint_dir))[0]))

        status.assert_consumed().run_restore_ops(self._session)
        initialize_tf_variables(self._session, only_uninitialized=True)

        # TODO(hartikainen): target Qs should either be checkpointed or pickled.
        for Q, Q_target in zip(self.algorithm._Qs, self.algorithm._Q_targets):
            Q_target.set_weights(Q.get_weights())

        self._built = True

    @property
    def picklables(self):
        """State serialized at checkpoint time (classifier only if present)."""
        picklables = {
            'variant': self._variant,
            'training_environment': self.training_environment,
            'evaluation_environment': self.evaluation_environment,
            'sampler': self.sampler,
            'algorithm': self.algorithm,
            'Qs': self.Qs,
            'policy_weights': self.policy.get_weights(),
        }

        if hasattr(self, 'reward_classifier'):
            picklables['reward_classifier'] = self.reward_classifier

        return picklables
def main(argv=None):
    """Run ExperimentRunner locally on ray.

    To run this example on cloud (e.g. gce/ec2), use the setup scripts:
    'softlearning launch_example_{gce,ec2} examples.development <options>'.

    Run 'softlearning launch_example_{gce,ec2} --help' for further
    instructions.
    """
    # __package__ should be `development.main`
    run_example_local('classifier_rl.main', argv)
if __name__ == '__main__':
    # Script entry point: forward the CLI args (minus the program name).
    main(argv=sys.argv[1:])
| avisingh599/reward-learning-rl | examples/classifier_rl/main.py | main.py | py | 6,982 | python | en | code | 361 | github-code | 13 |
8971211903 | import os
# Abort early with a hint when the required config file is missing.
if not os.path.isfile("config.json"):
    print("config.json not found, please create one")
    exit()
import json
# Root directory under which all indexed images live (read from config).
with open("config.json", "r") as f:
    IMAGE_ROOT_PATH = json.load(f)["image-path"]
import sqlite3
# Module-level connection/cursor shared by all query helpers below;
# Row factory gives dict-style access to columns (row['path'], ...).
connection = sqlite3.connect("database.db")
cursor = connection.cursor()
cursor.row_factory = sqlite3.Row
def get_1024x512_images():
    """Return the paths of all 512x1024 (portrait) images in the index.

    Uses a parameterized query instead of inlining the literals, matching
    sqlite3 best practice even for constant values.
    """
    query = "SELECT path FROM image WHERE width = ? AND height = ?"
    return cursor.execute(query, ("512", "1024")).fetchall()
def search(query: str, page: int = 0, page_size: int = 30):
    """Run ``query`` as a raw SQL WHERE clause against the image table.

    SECURITY NOTE(review): ``query`` (and page/page_size) are interpolated
    directly into the SQL string, so this is injectable by design — only
    acceptable while the caller is trusted and local. A malformed clause
    is swallowed and reported as an empty result list.
    """
    try:
        return cursor.execute(f"SELECT * FROM image WHERE {query} LIMIT {page_size} OFFSET {page * page_size}").fetchall()
    except sqlite3.OperationalError:
        return []
def read_site() -> str:
    """Return the contents of the search-page template as one string.

    ``f.read()`` is equivalent to the original ``"".join(f.readlines())``
    and avoids building an intermediate list of lines.
    """
    with open("searchsite.html", "r") as f:
        return f.read()
def create_image_string(query: str, local=False, page: int = 0, page_size: int = 30) -> str:
    """Render one <img> tag per search hit; the tooltip carries metadata."""
    tags = []
    for row in search(query, page, page_size):
        options = f"Path: {row['path'].replace(IMAGE_ROOT_PATH, '').strip('/')}\nCreation Time: {row['creation_time']}\nPrompt: {row['prompt']}\nNegative Prompt: {row['negative_prompt']}\nSeed: {row['seed']}\nModel: {row['model']}\nSize: {row['width']}x{row['height']}\nSampler: {row['sampler']}\nSteps: {row['steps']}\nGuidance Scale: {row['guidance_scale']}\nLoRA: {row['lora']}\nUpscaling: {row['upscaling']}\nFace Correction: {row['face_correction']}\n"
        src = row[0].replace(IMAGE_ROOT_PATH, '').strip('/') if local else row[0]
        tags.append(f"<img src='{src}' title='{options}'>\n")
    return "".join(tags)
def search_to_html(query: str):
    """Fill the template's <br> placeholder with results and write the page."""
    html = read_site().replace("<br>", create_image_string(query))
    with open("resultsite.html", "w") as out:
        out.write(html)
import sys
# CLI entry point: ``python search.py "<where-clause>"`` writes resultsite.html.
if __name__ == "__main__" and len(sys.argv) > 1:
    search_to_html(sys.argv[1])
| ManInDark/EasyDiffusionSearch | search.py | search.py | py | 1,920 | python | en | code | 0 | github-code | 13 |
18101526615 | import sys
import warnings
import torch
import torch.nn as nn
from torch.optim.lr_scheduler import ReduceLROnPlateau
from data.data import prepare_folds
from loops import train, evaluate
from utils.checkpoint import save
from utils.setup import setup_network, setup_hparams
warnings.filterwarnings("ignore")
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
def run(net):
    """Train ``net`` on one cross-validation fold, checkpointing the best model.

    Relies on module-level state: ``hps`` (hyperparameters, populated in
    ``__main__``) and ``device``. The network is saved via ``save`` whenever
    the validation accuracy reaches a new maximum.
    """
    folds = prepare_folds()
    trainloader, valloader = folds[int(hps['fold_id'])]
    net = net.to(device)
    # SGD with Nesterov momentum and weight decay; LR is halved when the
    # validation accuracy plateaus for 10 epochs.
    optimizer = torch.optim.SGD(net.parameters(), lr=hps['lr'], momentum=0.9, nesterov=True, weight_decay=0.0001)
    scheduler = ReduceLROnPlateau(optimizer, mode='max', factor=0.5, patience=10, verbose=True)
    criterion = nn.CrossEntropyLoss()
    best_acc_v = 0
    print("Training", hps['name'], hps['fold_id'], "on", device)
    for epoch in range(hps['n_epochs']):
        acc_tr, loss_tr = train(net, trainloader, criterion, optimizer)
        acc_v, loss_v = evaluate(net, valloader, criterion)
        # Update learning rate if plateau
        scheduler.step(acc_v)
        # Save the best network and print results
        if acc_v > best_acc_v:
            save(net, hps, desc=hps['fold_id'])
            best_acc_v = acc_v
            print('Epoch %2d' % (epoch + 1),
                  'Train Accuracy: %2.2f %%' % acc_tr,
                  'Val Accuracy: %2.2f %%' % acc_v,
                  'Network Saved',
                  sep='\t\t')
        else:
            print('Epoch %2d' % (epoch + 1),
                  'Train Accuracy: %2.2f %%' % acc_tr,
                  'Val Accuracy: %2.2f %%' % acc_v,
                  sep='\t\t')
if __name__ == "__main__":
    # Important parameters
    hps = setup_hparams(sys.argv[1:])
    net = setup_network(hps)
    # A fold must be chosen explicitly and must be one of the five fold ids.
    if hps['fold_id'] is None:
        raise RuntimeError("Please select which fold to train")
    elif hps['fold_id'] not in {'1', '2', '3', '4', '0'}:
        raise RuntimeError("Please select a valid fold_id")
    # Convert to fp16 for faster training
    net.half()
    # Keep BatchNorm layers in fp32 (common mixed-precision practice).
    for layer in net.modules():
        if isinstance(layer, nn.BatchNorm2d):
            layer.float()
    run(net)
| usef-kh/Cassava-Leaf-Disease-Classification | cv_train.py | cv_train.py | py | 2,182 | python | en | code | 0 | github-code | 13 |
22845696785 | '''
Created on Aug 9, 2012
@author: PENNETTI
'''
'''
A perfect number is a number for which
the sum of its proper divisors is
exactly equal to the number. For
example, the sum of the proper divisors
of 28 would be 1 + 2 + 4 + 7 + 14 = 28,
which means that 28 is a perfect number.
A number n is called deficient if the
sum of its proper divisors is less than
n and it is called abundant if this sum
exceeds n.
As 12 is the smallest abundant number,
1 + 2 + 3 + 4 + 6 = 16, the smallest
number that can be written as the sum of
two abundant numbers is 24. By
mathematical analysis, it can be shown
that all integers greater than 28123 can
be written as the sum of two abundant
numbers. However, this upper limit cannot
be reduced any further by analysis even
though it is known that the greatest number
that cannot be expressed as the sum of two
abundant numbers is less than this limit.
Find the sum of all the positive integers
which cannot be written as the sum of two
abundant numbers.
'''
from math import isqrt, sqrt
def sum_divisors(n):
    """Return the sum of the proper divisors of ``n`` (its aliquot sum).

    Uses pure integer arithmetic. The original used float division
    (``n / m``) and float ``sqrt``, so it returned floats and was exposed
    to rounding for large ``n``; it also shadowed the builtin ``sum``.
    """
    # 1 divides every n > 1; for n == 1 it is removed again below
    # (isqrt(1) == 1 and 1 * 1 == 1), matching the original's result of 0.
    total = 1
    root = isqrt(n)
    for m in range(2, root + 1):
        if n % m == 0:
            # Add the divisor and its complement.
            total += m + n // m
    if root * root == n:
        # A perfect square counted its root twice; remove the duplicate.
        total -= root
    return total
# Range of numbers from 24-20162
def abundant_sums():
    """Sum every positive integer that is NOT expressible as the sum of
    two abundant numbers (Project Euler problem 23).

    20161 is the largest non-expressible integer, so 20162 is a safe
    upper bound. Because a + b = i is symmetric in (a, b), it suffices to
    probe abundant values a with 2*a <= i; keeping the abundant numbers
    in an increasing list lets us break out early instead of scanning the
    entire abundant set for every i, as the original generator expression
    did.
    """
    limit = 20162
    total = 0
    abundant_list = []   # in increasing order, enabling the early break
    abundant_set = set()
    for i in range(1, limit):
        if sum_divisors(i) > i:
            abundant_list.append(i)
            abundant_set.add(i)
        expressible = False
        for a in abundant_list:
            if 2 * a > i:
                break
            if (i - a) in abundant_set:
                expressible = True
                break
        if not expressible:
            total += i
    return total
# Print the final answer for Project Euler problem 23.
print(abundant_sums())
| pennetti/project-euler | python/Problem_023.py | Problem_023.py | py | 1,981 | python | en | code | 0 | github-code | 13 |
73254230416 | import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
    """Register the JIRA plugin when supybot's configuration wizard runs."""
    # This will be called by supybot to configure this module. advanced is
    # a bool that specifies whether the user identified himself as an advanced
    # user or not. You should effect your configuration by manipulating the
    # registry as appropriate.
    conf.registerPlugin("JIRA", True)
# Issue fields shown by default when summarizing a JIRA issue.
default_fields = ("status", "resolution", "created", "updated", "reporter",
                  "assignee")
class FieldOfIssue(registry.OnlySomeStrings):
    """That is not a valid field of issue."""
    # NOTE(review): the docstring above appears to double as the user-facing
    # validation error in supybot's registry — confirm before rewording it.
    validStrings = default_fields
class AcceptedFieldsOfIssue(registry.SpaceSeparatedListOfStrings):
    # A space-separated list whose elements are each validated as FieldOfIssue.
    Value = FieldOfIssue
JIRA = conf.registerPlugin("JIRA")
conf.registerGlobalValue(JIRA, "installs",
registry.SpaceSeparatedListOfStrings([], """A list of JIRA installs that
have been added with the 'add' command."""))
conf.registerChannelValue(JIRA, "issue_format",
AcceptedFieldsOfIssue(default_fields,
"""The fields to list when describing an issue. Possible values
include: %s.""" % " ".join(default_fields)))
conf.registerChannelValue(JIRA, "show_link",
registry.Boolean(True,
"""If true the bot will show the URL of the issue at the end of the
summary."""))
conf.registerChannelValue(JIRA, 'snarfer_timeout',
registry.PositiveInteger(300,
"""If an issue has been mentioned in the last few seconds, don't fetch its
data again. If you change the value of this variable, you must reload this
plugin for the change to take effect."""))
| amirdt22/supybot-jira | JIRA/config.py | config.py | py | 1,595 | python | en | code | 1 | github-code | 13 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.