blob_id stringlengths 40 40 | language stringclasses 1
value | repo_name stringlengths 5 133 | path stringlengths 2 333 | src_encoding stringclasses 30
values | length_bytes int64 18 5.47M | score float64 2.52 5.81 | int_score int64 3 5 | detected_licenses listlengths 0 67 | license_type stringclasses 2
values | text stringlengths 12 5.47M | download_success bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
8ffa464d7b0f6a9f58513693415f73f81a5c04e7 | Python | Bhooterbaccha/google-ngram-analytics | /scrapengram.py | UTF-8 | 1,615 | 2.796875 | 3 | [] | no_license | import urllib.request, json
import sys,nltk,time
fil=sys.argv[1] #CLI argument: path to the input text file (one sentence per line)
#https://books.google.com/ngrams/json is the base url, followed by the params
def getURL(query,start,end,corpus,smoothing,sensitivity):
    """Build a Google Books Ngram Viewer JSON query URL.

    Parameters
    ----------
    query : str or iterable of str
        Either a whitespace-separated phrase, or an iterable of words
        (e.g. an nltk bigram tuple -- which is what the caller below
        actually passes; the original ``query.split(' ')`` raised
        AttributeError for tuples).
    start, end : int
        year_start / year_end of the query window.
    corpus : int
        Numeric Google corpus id.
    smoothing : int
        Smoothing window.
    sensitivity : bool
        Case-insensitive flag; serialized lowercase ('true'/'false').

    Returns
    -------
    str
        The full request URL.
    """
    if isinstance(query, str):
        words = query.split(' ')
    else:
        words = list(query)  # bigram tuple (or any word iterable)
    content = '+'.join(words)
    sensitivity = str(sensitivity).lower()
    url = ('https://books.google.com/ngrams/json?content=%s&year_start=%d'
           '&year_end=%d&corpus=%d&smoothing=%d&case_insensitive=%s'
           % (content, start, end, corpus, smoothing, sensitivity))
    return url
import pickle  # FIX: used by the dump at the bottom but was never imported (NameError at runtime)

with open(fil,'r') as r: #read file
    l={}  # sentence -> pseudo-probability score
    cont=r.read()
    cont=cont.split('\n')
    cont=cont[:10000] #take first 10k sentences
    for line in cont:
        p=1  # running product of bigram frequencies for this sentence
        mylist = list(nltk.bigrams(line.split(' '))) #check by bigram,can be modified here.
        for j in mylist:
            time.sleep(3)#not to send too many requests at a time, increase as much as possible to avoid error 429
            GoToURL=getURL(j,2015,2019,26,3,True)
            with urllib.request.urlopen(GoToURL) as url:
                data = json.loads(url.read().decode()) #you can also keep a check with status code or HTTPErrors if need arises
            if len(data) == 0: #bigram doesn't exist
                p*=1e-6 #can be changed as you wish for
                continue
            for i in data:
                if i['type']=='CASE_INSENSITIVE' or i['type']=='NGRAM': #we considered all variations as well as those with no variation
                    if sum(i['timeseries'])==0:
                        p*=1e-6
                    else:
                        p*=float(sum(i['timeseries'])/5) # we take an average over the last five years
        l[line]=p
        time.sleep(2)

pickle.dump(l,open('dump.pkl','wb')) #get dictionary stored as a pickle
| true |
e58652665f897ae2f1bed3bbf00cbacd6cb887fe | Python | SeregaPro1/hello-world | /Lessons/delete files.py | UTF-8 | 449 | 3.34375 | 3 | [] | no_license | import os
import shutil
path = 'D:\\test2.txt'  # hard-coded Windows path used by every call below

try:
    os.remove(path) # deletes a file
    os.rmdir(path) # deletes an *empty* folder
    shutil.rmtree(path) # deletes a folder and everything inside it
    # NOTE(review): the three calls run sequentially on the same path, so
    # after os.remove() succeeds, os.rmdir() raises FileNotFoundError and
    # the "was deleted" message below never prints. Presumably each line
    # is meant to be tried one at a time in this lesson -- confirm.
except FileNotFoundError:
    print('That file was not found')
except PermissionError:
    print('You do not have permission to delete that function')
except OSError:
    # e.g. os.rmdir() on a non-empty directory
    print('That folder contains files')
else:
    print(path+" was deleted")
6cd378a7de5fa32dfd0f179d75d533ba1e1bb5c4 | Python | sinitsa2001/geekshop | /basketapp/models.py | UTF-8 | 1,878 | 2.546875 | 3 | [] | no_license | from django.db import models
from authapp.models import User
from mainapp.models import Product
class BasketQuerySet(models.QuerySet):
    """Queryset that restocks products when basket rows are bulk-deleted."""

    def delete(self):
        # Return each basket line's quantity to the product's stock
        # before removing the rows themselves.
        for object in self:
            object.product.quantity += object.quantity
            object.product.save()
        super().delete()
class Basket(models.Model):
    """One line of a user's shopping basket (user x product x quantity)."""

    # Custom manager so bulk deletes go through BasketQuerySet.delete()
    # and restock the products.
    objects = BasketQuerySet.as_manager()
    user = models.ForeignKey(User,on_delete=models.CASCADE)
    product =models.ForeignKey(Product, on_delete=models.CASCADE)
    # How many units of `product` are in the basket.
    quantity = models.PositiveSmallIntegerField(default=0)
    created_timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f'Корзина для {self.user.username} | Продукт {self.product.name}'

    def sum(self):
        # Price of this single basket line.
        return self.quantity * self.product.price

    def total_quantity(self):
        # Total item count across ALL of this user's basket lines.
        baskets = Basket.objects.filter(user=self.user)
        return sum(basket.quantity for basket in baskets)

    def total_sum(self):
        # Total price across ALL of this user's basket lines.
        baskets = Basket.objects.filter(user=self.user)
        return sum(basket.sum() for basket in baskets)

    ############### new variant ###############
    @staticmethod
    def get_items(user):
        """All basket lines of `user`, grouped by product category."""
        return Basket.objects.filter(user=user).order_by('product__category')

    @staticmethod
    def get_product(user, product):
        """Queryset of `user`'s basket lines for one specific product."""
        return Basket.objects.filter(user=user, product=product)

    @classmethod
    def get_product_quantity(cls, user):
        """Return {product: quantity} for every line in `user`'s basket."""
        basket_items = cls.get_items(user)
        basket_items_dic ={}
        # NOTE(review): list comprehension used purely for its side effect;
        # a plain for-loop (or a dict comprehension) would be clearer.
        [basket_items_dic.update({item.product: item.quantity}) for item in basket_items]
        return basket_items_dic

    @staticmethod
    def get_item(pk):
        """Single basket line by primary key, or None."""
        return Basket.objects.filter(pk=pk).first()

    def delete(self):
        # Single-row counterpart of BasketQuerySet.delete(): put the
        # quantity back into product stock before deleting this line.
        self.product.quantity += self.quantity
        self.product.save()
        super().delete()
| true |
1c55c9d77d79c144a4098acc1038217c5b7f3667 | Python | lcneuro/pub_t2dm_age_meta | /cognition/cognition_fig.py | UTF-8 | 11,100 | 2.546875 | 3 | [] | no_license | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 22 23:04:45 2020
@author: botond
Notes:
This script generates a multi-panel figure from cognitive data.
"""
import os
import numpy as np
import pandas as pd
import itertools
import matplotlib.pyplot as plt
import matplotlib.ticker as mtc
import seaborn as sns
from IPython import get_ipython
get_ipython().run_line_magic('cd', '..')
from helpers import plotting_style
from helpers.plotting_style import plot_pars, plot_funcs
get_ipython().run_line_magic('cd', 'cognition')
get_ipython().run_line_magic('matplotlib', 'inline')
# =============================================================================
# Setup
# =============================================================================

# Filepaths (derived from this script's location in the repo)
HOMEDIR = os.path.abspath(os.path.join(__file__, "../../../")) + "/"
SRCDIR = HOMEDIR + "data/"
OUTDIR = HOMEDIR + "results/cognition/"

# Inputs

# Case specific values: the three panels of the figure share these
# per-case settings, indexed positionally (c = 0, 1, 2).
cases = ["age", "diab", "meta"]

titles = [
    "Age: UK Biobank Dataset (HC only)",
    "T2DM: UK Biobank Dataset (T2DM+ vs. HC)",
    "T2DM: Meta-Analysis of Published Literature (T2DM+ vs. HC)",
]

ylabeltexts = [
    "Percentage change in cognitive\nperformance across age (% per year)",
    "Percentage difference in cognitive\nperformance T2DM+ vs. HC (%)",
    "Standardized mean difference\nT2DM+ vs. HC (Cohen's d)"
]

colors = ["PiYG", "YlGnBu", "Purples"]  # colormap per panel
ylims = [[-2.5, 0.3], [-23.0, 4.5], [-0.95, 0.2]]
sfs = [3e0, 1e0, 0.02] # Marker size factors
sfscf = [5000, 2000, 25] # Marker size scale factors
sdxo = [0.76, 0.79, 0.94] # x axis offset of scale info
sfscf2 = [10000, 1000, 10] # Marker size scale factors
sdxo2 = [0.76, 0.75, 0.9] # x axis offset of scale info
cms = [20, 4, 1] # Colormap scaling to distort - gradient
cmo = [4.5, 2, 1] # Colormap offset - intensity
textpads = [-0.05, 1.5, 0.04] # Padding for text along y axis
xtickrots = [0, 0, 55] # Rotation of xticks
xtickvas = ["top", "top", "top"] # Vertical alignment for xticks
xtickpads = [0, 0, 0] # Padding fo xticks

# raise
# %%
# Load data
# ------

# Dict to store data: case name -> tidy dataframe with label/beta/conf_int
data = {}

# Iterate over all cases (age, diab, meta...)
for case in cases:

    # Load per-case stats produced upstream
    df = pd.read_csv(
        OUTDIR + f"stats/pub_meta_cognition_stats_{case}.csv",
        index_col=0
    )

    # Transformations specific to case
    if case in ["age", "diab"]:
        # Cast strings to float arrays (csv stored python-list reprs)
        df["sample_sizes"] = \
            df["sample_sizes"].apply(lambda item: np.array(item[1:-1].split(", "), dtype=float))
        df["conf_int"] = \
            df["conf_int"].apply(lambda item:
                np.array([float(val) for val in item[1:-1].split(" ") if len(val) > 0])
                )
    elif case in ["meta"]:
        # Transform df: harmonize meta-analysis columns with the UKB ones
        df = df \
            .reset_index() \
            .pipe(lambda df:
                df.assign(**{
                    "label": df["Cognitive Domain"],
                    "conf_int": [[row[1]["LB"], row[1]["UB"]] \
                                 for row in df.iterrows()]
                    })
                )
    else:
        raise(ValueError("Unknown case!"))

    # Order: fix the x-axis order from the first case (sorted by beta)
    # and reuse it for every panel so domains line up vertically.
    if case == cases[0]:
        order = df.sort_values(by="beta").reset_index(drop=True)["label"]
        order_dict = dict((v,k) for k,v in order.to_dict().items())
    df = df.sort_values(by="label", key=lambda x: x.map(order_dict), ignore_index=True)

    # Assign transformed df to data dict
    data[case] = df
# %%
# =============================================================================
# Figure
# =============================================================================

# Unpack plotting utils (shared across the project's figure scripts)
fs, lw = plot_pars
p2star, colors_from_values, float_to_sig_digit_str, pformat = plot_funcs

# Figure
f = plt.figure(figsize=(7.25, 9))
plt.suptitle("Domain Specific Cognitive Deficits Associated with Age and T2DM\n")

# Panels A & B: one subplot per case (age / diab / meta)
# ------
for c, case in enumerate(cases):

    # Current case's dataframe
    df = data[case]

    # Add new line character into x labels so long domain names wrap
    df["label"] = df["label"].str.replace("_", "\n")
    df["label"] = df["label"].str.replace(" ", "\n")

    # Pick subplot
    plt.subplot(len(cases), 1, c+1)

    # Populate plot
    if case in ["age", "diab"]:

        # Colors: extra sentinel values appended so the colormap range
        # is stretched/offset per panel (see cms/cmo above)
        colors_all = colors_from_values(
            np.array(list(-df["beta"])*cms[c] + [df["beta"].min() + cmo[c], df["beta"].max()*cms[c]]),
            colors[c])

        for i, item in enumerate(df.iterrows()):

            # Extract data
            x = item[0]
            ss, p, y, t, conf_int = \
                item[1][["sample_sizes", "pval", "beta", "tval", "conf_int"]]
            conf_dist = abs(y - np.array(conf_int))[:, None]

            # Blob for representing value and sample size
            plt.scatter(x=x, y=y, s=sum(ss)/sfs[c], color=colors_all[i])

            # Plot center of estimate
            plt.scatter(x=x, y=y, s=15*lw, color="k")

            # Errorbars
            plt.errorbar(x, y, yerr=conf_dist, capsize=4*lw, capthick=0.75*lw,
                         elinewidth=0.75*lw, color="black")

            # (Removed dead commented-out code: textual annotation of
            # T-value / p-value / sample sizes next to each marker.)

            # Add statistical asterisks above/below the marker+CI extent
            text = p2star(p)
            text_x = x + 0.00
            if y < 0:
                text_y = min(min(conf_int), y-np.sqrt(sum(ss)/sfs[c])/6e1) - textpads[c]
            else:
                text_y = max(max(conf_int), y+np.sqrt(sum(ss)/sfs[c])/6e1) + textpads[c]
            va = "top" if y < 0 else "bottom"
            plt.annotate(text, xy=[text_x, text_y], fontsize=8*fs,
                         ha="center", va=va, fontweight="bold",
                         rotation=0)

    elif case in ["meta"]:

        # Colors (same stretching trick; [:-2] drops the two sentinels)
        colors_all = colors_from_values(
            np.array(list(-df["EFFS"])*cms[c] + [df["EFFS"].min() + cmo[c],
                                                 df["EFFS"].max()*cms[c]]),
            colors[c])[:-2]

        for i, item in enumerate(df.iterrows()):

            # Extract data
            x = item[0]
            y, conf_int, K, Q, I2, p = \
                item[1][["EFFS", "conf_int", "K", "Q", "I2", "p"]]
            conf_dist = abs(y - np.array(conf_int))[:, None]

            # Blob for representing value and number of studies (K)
            plt.scatter(x=x, y=y, s=K/sfs[c], color=colors_all[i])

            # Plot center of estimate
            plt.scatter(x=x, y=y, s=15*lw, color="k")

            # Errorbars
            plt.errorbar(x, y, yerr=conf_dist, capsize=4*lw, capthick=0.75*lw,
                         elinewidth=0.75*lw, color="black")

            # (Removed dead commented-out code: textual annotation of
            # K / Q / I^2 / p next to each marker.)

            # Add statistical asterisks
            text = p2star(p)
            text_x = x + 0.00
            if y < 0:
                text_y = min(min(conf_int), y-np.sqrt(K/sfs[c])/2e2) - textpads[c]
            else:
                text_y = max(max(conf_int), y-K/sfs[c]/3.4e-1) + textpads[c]
            va = "top" if y < 0 else "bottom"
            plt.annotate(text, xy=[text_x, text_y], fontsize=8*fs,
                         ha="center", va=va, fontweight="bold",
                         rotation=0)

    # Format
    # Add title
    plt.title(titles[c])

    # Limits
    plt.xlim([-0.5, len(df)-0.5])
    plt.ylim(ylims[c])

    # Labels
    plt.ylabel(ylabeltexts[c])
    if c == len(cases)-1:
        plt.xlabel("\nCognitive domains")
    plt.gca().get_yaxis().set_major_formatter(
        mtc.FuncFormatter(lambda x, p: format(f"{x:.1f}")))

    # Ticks/lines
    plt.axhline(0, linewidth=0.75*lw, color="black", dashes=[4, 4])
    plt.xticks(ticks=np.arange(len(df)), labels=df["label"],
               rotation=xtickrots[c], va=xtickvas[c])
    plt.gca().tick_params(axis="x", pad=xtickpads[c])
    plt.gca().xaxis.tick_bottom()
    plt.gca().yaxis.tick_left()
    for sp in ['bottom', 'top', 'left', 'right']:
        plt.gca().spines[sp].set_linewidth(.75*lw)
        plt.gca().spines[sp].set_color("black")
    plt.gca().xaxis.grid(False)
    plt.gca().yaxis.grid(True)
    plt.gca().set_axisbelow(True)

    # Add scale: reference blob + legend text in the lower right corner
    plt.scatter(x=len(df)-sdxo[c], y=ylims[c][0] * 0.88, s=sfscf[c]/sfs[c],
                color="gray")
    plt.annotate(text=f"Scale:\n{'N' if c<2 else 'K'}={sfscf[c]}",
                 xy=[len(df)-sdxo[c], ylims[c][0] * 0.88], va="center", ha="center")

# (Removed: large commented-out scratch block that computed caption
# sample sizes, per-task age descriptives and beta coefficients.)

# Save
# ------
plt.tight_layout(h_pad=2)
plt.savefig(OUTDIR + "figures/JAMA_meta_figure_cognition.pdf",
            transparent=True)
plt.close()
| true |
f279067aca3a51795837f290e4732aa72d088105 | Python | ronistone/toilter-APP | /toilter-FRONT/app/models/decorators.py | UTF-8 | 482 | 2.671875 | 3 | [] | no_license | from flask import g, abort
# Python decorators are functions that wrap other functions and can run
# extra logic around them.
# This decorator guards a view: the wrapped function only runs when a
# user is logged in (flask.g.user is set); otherwise the request is
# rejected with 401 Unauthorized.
def is_user(func):
    """Decorator: run `func` only when a user is logged in.

    Aborts the request with 401 Unauthorized when `g.user` is None.

    FIX: the original check was inverted (`if g.user is not None:
    abort(401)`), i.e. it rejected logged-in users and let anonymous
    requests through. The wrapper now also forwards *args/**kwargs and
    preserves the wrapped function's metadata.
    """
    from functools import wraps

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        if g.user is None:
            abort(401)
        return func(*args, **kwargs)

    return func_wrapper
| true |
847f97157a78a34200ff3dfee31f327c123bde16 | Python | mohamadsahebi/python-exercises | /t1for.py | UTF-8 | 203 | 3.421875 | 3 | [] | no_license | number = int(input('Yek adad vared konid : ' ))
while number != 0:
if number % 2 != 0:
print('prime')
else:
print('not prime')
number = int(input('Yek adad vared konid : '))
| true |
fb55ae6b063bed769506d3c575f46b7f8d6ca30a | Python | Neoqck/ATM_gwc | /core/src.py | UTF-8 | 7,498 | 2.828125 | 3 | [] | no_license | from interface import user_interface
from interface import bank_interface
from interface import shoping_interface
from lib import common
from interface import admin_interface
import datetime
# Module-level session state: username of the currently logged-in user
# (None while nobody is logged in). Mutated by login().
user_info = {
    'user': None
}
def register():
    """Interactive sign-up loop: prompt until a unique username with
    matching passwords is registered via the user interface layer."""
    while True:
        print('---注册---')
        user_name = input('请输入用户名:').strip()
        passwd = input('请输入密码:').strip()
        passwd_d = input('确认密码:').strip()
        # interface call: does this username already exist?
        flat = user_interface.check_user_interface(user_name)
        if flat:
            print('用户已存在,重新输入!')
            continue
        elif passwd == passwd_d:
            # interface call: persist the new account
            user_interface.register_interface(user_name, passwd)
            print('注册成功!')
            break
        # NOTE(review): when the two passwords differ, neither branch
        # runs and the loop silently restarts without any message.
def login():
    """Interactive login: on success, record the username in the
    module-level user_info session state."""
    while True:
        print('---登录---')
        user = input('输入用户名:').strip()
        passwd = input('输入密码:').strip()
        flag, msg = user_interface.login_interface(user, passwd)
        if flag:
            print(msg)
            user_info['user'] = user
            break
        else:
            # NOTE(review): both branches break, so the `while True`
            # never retries -- on a failed login the function just
            # returns. Presumably a retry (continue) was intended here.
            print(msg)
            break
@common.outter
def transfer():
    """Transfer money from the logged-in user to another account.

    Re-prompts on unknown target user or non-numeric amount; exits the
    loop once the bank interface reports the transfer outcome.
    """
    while True:
        print('---转账---')
        to_name = input('输入转账目标用户:').strip()
        to_user = user_interface.check_user_interface(to_name)
        if to_user:
            money = input('请输入转账金额:').strip()
            if money.isdigit():  # isdigit also rejects negatives / decimals
                money = int(money)
                flaw, msg = bank_interface.transfer_interface(to_name, money, user_info['user'])
                if flaw:
                    print(msg)
                    break
                else:
                    print(msg)
                    break
            else:
                print('输入不正确!!')
                continue
        else:
            print('用户不存在,重新输入!')
            continue
@common.outter
def check_balance():
    """Print the logged-in user's current balance (delegates to the
    bank interface layer)."""
    print('---查询余额---')
    bank_interface.select_money(user_info['user'])
@common.outter
def repayment():
    """Prompt for an amount and repay it against the user's credit."""
    print('---还款---')
    money = input('请输入还款金额:').strip()
    if money.isdigit():
        money = int(money)
        bank_interface.repayment_interface(user_info['user'], money)
    else:
        print('输入不正确!')
@common.outter
def withdraw_money():
    """Prompt for an amount and withdraw it from the user's account."""
    print('---取款---')
    money = input('请输入取款金额:').strip()
    if money.isdigit():
        money = int(money)
        # NOTE(review): isdigit() already guarantees money >= 0, so this
        # extra check can never be False.
        if money >= 0:
            bank_interface.withdraw_interface(user_info['user'], money)
        else:
            print('必须大于0')
    else:
        print('输入不正确!')
@common.outter
def view_pipelining():
    """Print the logged-in user's transaction history (bank interface)."""
    print('---查看流水---')
    bank_interface.see_worter_interface(user_info['user'])
@common.outter
def shopping():
    """Interactive shop: pick items from a fixed list, then check out.

    On checkout the total is charged through the bank interface and the
    chosen items (with timestamps) are persisted via the shop interface.
    """
    # items chosen so far in this session
    pay_list = []
    num_money = 0  # running total price
    while True:
        print('---购物---')
        shopping_list = [
            ['QBZ95自动步枪', 999],
            ['M4A1', 999],
            ['手雷', 99],
            ['防弹衣', 299],
            ['尼泊尔军刀', 199],
            ['坦克', 5000000],
            ['神秘武器VIP', 1000000],
        ]
        # print the goods list with their menu indices
        for index, i in enumerate(shopping_list):
            print(index, i)
        print('q.退出 w.结账 e.查看已选商品')
        choice = input('请快速配置你的装备:').strip()
        if choice == 'q':
            break
        elif choice == 'w':
            yes = input('是否结账?y/n:')
            if yes == 'y':
                # call the checkout interface to charge the total
                bank_interface.payment(num_money, user_info['user'])
                # persist the purchased items
                shoping_interface.save_car(pay_list, user_info['user'])
                break
            elif yes == 'n':
                continue
        elif choice == 'e':
            print('---已选商品---')
            for index, i in enumerate(pay_list):
                print(index, i)
            continue
        # 1. validate that the input is numeric
        if not choice.isdigit():
            print('输入不合法!!!你还有两次机会')
            continue
        # 2. convert the string to an int index
        choice = int(choice)
        # 3. check that the choice is in range
        # FIX: the original used `<= len(shopping_list)`, so entering 7
        # (one past the last index) raised IndexError below.
        if 0 <= choice < len(shopping_list):
            name, money = shopping_list[choice]
            # 4. add to the chosen items, stamped with the current time
            now_time = datetime.datetime.today()
            now_time = str(now_time)
            # trim '2019-11-21 18:45:18.803910' down to '2019-11-21 18:45:18'
            now_time = now_time[0:19]
            shopping_list[choice].append(now_time)
            pay_list.append(shopping_list[choice])
            # running total
            num_money += money
            print('添加成功')
            continue
        else:
            print('请选择正确的范围!!!')
            continue
@common.outter
def shopping_cat():
    """Print the items the logged-in user has already purchased."""
    # NOTE(review): the while loop always breaks after one pass, so it
    # is effectively a no-op wrapper.
    while True:
        print('---查看购买商品---')
        shoping_interface.select_car(user_info['user'])
        break
def admin():
    """Admin submenu: dispatch to freeze/unfreeze actions until 'q'."""
    while True:
        print('''
        1: 冻结用户
        2:解冻用户
        q: 退出
        ''')
        # menu-number -> handler dispatch table
        dict = {
            '1': lock,
            '2': unlock
        }
        choice = input('请输入你的功能:').strip()
        if choice == 'q':
            break
        elif not choice.isdigit():
            print('请输入数字!!')
            continue
        elif choice in dict:
            dict[choice]()
        else:
            print('你的输入有误,重新输入!!!')
            continue
def lock():
    """Freeze a user account after a y/n confirmation prompt."""
    print('---冻结用户---')
    user_name = input('请输入你要冻结的用户名:').strip()
    yes = input('确认冻结该用户? y/n:')
    if yes == 'y':
        res = admin_interface.lock_interface(user_name)
        print(res)
    elif yes == 'n':
        print('已取消冻结!')
    else:
        print('输入有误,重新输入!')
def unlock():
    """Unfreeze a user account after a y/n confirmation prompt."""
    print('---解冻用户---')
    username = input('输入你要解冻的用户名:').strip()
    yes = input('确认解冻该用户? y/n:')
    if yes == 'y':
        res = admin_interface.unlock_interface(username)
        print(res)
    elif yes == 'n':
        print('已取消解冻!')
    else:
        print('输入有误,重新输入!')
def run():
    """Top-level menu loop: map the user's numeric choice to a feature
    function via a dispatch table; 'q' logs out / exits."""
    while True:
        print('''
        1.注册
        2.登录
        3.转账
        4.查询余额
        5.还款
        6.取款
        7.查看流水
        8.购物
        9.查看购买商品
        10.管理员
        q.注销
        ''')
        # menu-number -> feature function
        list_dic = {
            '1': register,
            '2': login,
            '3': transfer,
            '4': check_balance,
            '5': repayment,
            '6': withdraw_money,
            '7': view_pipelining,
            '8': shopping,
            '9': shopping_cat,
            '10': admin
        }
        choice = input('请选择功能编号:').strip()
        if choice == 'q':
            break
        elif choice in list_dic:
            list_dic.get(choice)()
        else:
            print('选择功能有误,请重新输入!')
            continue
| true |
b3866a54598fda1b77c428ae3fe51d0a32832b52 | Python | jananiarunachalam/dsba6155project | /dsba6155project/web/dashapps/example/example.py | UTF-8 | 2,910 | 2.515625 | 3 | [] | no_license | from dash import Dash
from dash_core_components import Slider, Dropdown, Graph
from dash_html_components import Div
from dash.dependencies import Input, Output
from json import loads
import plotly.express as px
from dashapps.example.data_loader import Model
def getBubbleData(df, labelFilter, filterValue=50):
    """Filter the chart dataframe down to one label above a count floor.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain at least the columns "label" and "count".
    labelFilter :
        Value to match against df["label"].
    filterValue : int, optional
        Rows with "count" <= this threshold are dropped (default 50).

    Returns
    -------
    pandas.DataFrame
        The matching rows (original columns preserved).
    """
    ndf = df[df["label"] == labelFilter]
    return ndf[ndf["count"] > filterValue]
def get_app(server, path):
    """Build and return a Dash app (mounted on `server` at `path`) with a
    label dropdown, a count-threshold slider and a grouped bar chart."""
    df = Model().df
    ldesc = Model().ldesc
    dash_example = Dash(
        __name__,
        server=server,
        url_base_pathname =path
    )
    # label value -> human-readable description, keyed by index
    label_map = loads(ldesc.to_json(orient="index"))
    dash_example.layout = Div(
        className="dash-div",
        children=[
            Dropdown(
                id='label-dropdown',
                options=[{'label': label_map[i]["label_description"], 'value':i } for i in df['label'].unique()],
                value=df['label'].unique()[0]
            ),
            Slider(
                id='filter-slider',
                min=0,
                max=20,
                step=1,
                value=10
            ),
            Graph(id="bubble-chart")
        ]
    )

    @dash_example.callback(
        Output('bubble-chart', 'figure'),
        [Input('label-dropdown', 'value'),Input('filter-slider', 'value')])
    def update_figure(label,value):
        # Redraw the bar chart whenever the label or threshold changes.
        df = Model().df
        ndf = getBubbleData(df,label,value)
        bar = px.bar(ndf, y="text", x="count", color="category", orientation='h',barmode='group')
        bar.update_layout(autosize=False,
                          width=960,
                          height=550 ,
                          paper_bgcolor='rgba(0,0,0,0)',
                          plot_bgcolor='rgba(0,0,0,0)',
                          hovermode = 'closest',
                          font=dict(
                              family="Courier New, monospace",
                              size=18,
                              color="white"
                          ))
        return bar

    # NOTE(review): the next two callbacks share the name
    # `update_slider_min` although the first one resets the slider
    # *value*. Dash registers each callback at decoration time, so both
    # still fire, but the first was presumably meant to be called
    # `update_slider_value` -- confirm and rename.
    @dash_example.callback(
        Output('filter-slider', 'value'),
        [Input('label-dropdown', 'value')])
    def update_slider_min(label):
        df = Model().df
        ndf = getBubbleData(df,label)
        return ndf["count"].min()

    @dash_example.callback(
        Output('filter-slider', 'min'),
        [Input('label-dropdown', 'value')])
    def update_slider_min(label):
        df = Model().df
        ndf = getBubbleData(df,label)
        return ndf["count"].min()

    @dash_example.callback(
        Output('filter-slider', 'max'),
        [Input('label-dropdown', 'value')])
    def update_slider_max(label):
        df = Model().df
        ndf = getBubbleData(df,label)
        return ndf["count"].max()

    return dash_example
| true |
1ed7ca4305d146ca1b6cbeede695f1f63f178595 | Python | shivaummethala/recipe-app-api | /app/user/tests/test_user_api.py | UTF-8 | 5,885 | 2.765625 | 3 | [
"MIT"
] | permissive | from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient # test client to request to our API
from rest_framework import status # response status
CREATE_USER_URL = reverse('user:create') # create users
TOKEN_URL = reverse('user:token') # create authentication token
ME_URL = reverse('user:me') # manage the users(update the users)
def create_user(**params):
    """Test fixture helper: create a user directly via the ORM.

    `params` are forwarded to `create_user` (email, password, name, ...).
    """
    return get_user_model().objects.create_user(**params)
# separate class defined for public and private user authentication
# separate class defined for public and private user authentication
class PublicUserApiTests(TestCase):
    """Test the users API (public, i.e. unauthenticated requests)"""

    def setUp(self):
        self.client = APIClient()

    def test_create_valid_user_success(self):
        """Test creating user with valid payload is successful"""
        payload = {
            'email': 'shivaummethala@gmail.com',
            'password': 'testpass',
            'name': 'Shiva'
        }
        res = self.client.post(CREATE_USER_URL, payload)

        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
        # get the user details
        user = get_user_model().objects.get(**res.data)
        # check if password matched
        self.assertTrue(user.check_password(payload['password']))
        # check password not in data returned
        self.assertNotIn('password', res.data)

    def test_user_exists(self):
        """Test creating a user that already exists fails"""
        payload = {'email': 'shivaummethala@gmail.com', 'password': 'testpass', 'name': 'Shiva'}
        create_user(**payload)

        res = self.client.post(CREATE_USER_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_password_too_short(self):
        """Test that the password must be more than 5 characters"""
        payload = {
            'email': 'shivaummethala@gmail.com',
            'password': 'pw',
            'name': 'Test',
        }
        res = self.client.post(CREATE_USER_URL, payload)

        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
        user_exists = get_user_model().objects.filter(
            email=payload['email']
        ).exists()
        self.assertFalse(user_exists)

    def test_create_token_for_user(self):
        """Test that a token is created for existing user

        FIX: the method was named `create_token_for_user`, so unittest
        discovery never collected or ran it; renamed with the required
        `test_` prefix.
        """
        payload = {'email': 'shivaummethala@gmail.com', 'password': 'testpass'}
        # create a user
        create_user(**payload)
        res = self.client.post(TOKEN_URL, payload)

        self.assertIn('token', res.data)
        self.assertEqual(res.status_code, status.HTTP_200_OK)

    def test_create_token_invalid_credentials(self):
        """Test that token is not created if invalid credentials are given"""
        # create a user
        create_user(email='shivaummethala@gmail.com', password='test123')
        payload = {'email': 'shivaummethala@gmail.com', 'password': 'wrong'}
        # hit a token url with a payload
        res = self.client.post(TOKEN_URL, payload)

        self.assertNotIn('token', res.data)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_no_user(self):
        """Test that the token is not created if user doesn't exist"""
        payload = {'email': 'shivaummethala@gmail.com', 'password': 'testpass'}
        res = self.client.post(TOKEN_URL, payload)

        self.assertNotIn('token', res.data)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_missing_field(self):
        """Test that email and password are required"""
        res = self.client.post(TOKEN_URL, {'email': 'one', 'password': ''})
        self.assertNotIn('token', res.data)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve_user_unauthorized(self):
        """Test that authentication is required for users"""
        res = self.client.get(ME_URL)  # do get request to URL
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
# Update the users using put and patch, post is creating the users
# Private class is defined below and it requires authentication
class PrivateUserApiTests(TestCase):
    """Test API requests that require authentication"""

    def setUp(self):
        """setUp method defined for all tests and this setup method
        will run before every test"""
        self.user = create_user(
            email='shivaummethala@gmail.com',
            password='testpass',
            name='name'
        )
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)

    def test_retrieve_profile_success(self):
        """Test retrieving profile for logged in user"""
        res = self.client.get(ME_URL)

        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, {'name': self.user.name, 'email': self.user.email})

    def test_post_me_not_allowed(self):
        """Test that POST is not allowed on the me url"""
        res = self.client.post(ME_URL, {})

        self.assertEqual(res.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_update_me_not_allowed(self):
        """Test updating the user profile for authenticated users

        NOTE(review): despite its name this test verifies that PATCH *is*
        allowed -- consider renaming to test_update_user_profile.
        """
        # update user name and password
        payload = {'name': 'new name', 'password': 'newpassword123'}
        res = self.client.patch(ME_URL, payload)

        # FIX: reload the in-memory user from the DB; without this the
        # assertions below compared against the stale pre-PATCH values
        # (the original comment mentioned refresh_from_db but the call
        # was missing).
        self.user.refresh_from_db()
        self.assertEqual(self.user.name, payload['name'])
        self.assertTrue(self.user.check_password(payload['password']))
        self.assertEqual(res.status_code, status.HTTP_200_OK)
"""Note: Users created as part of one test are not going to be accessed in another test case
because every single test it runs, database is refreshed"""
| true |
688927be5ae032195cb5b28aee39399721a15cc6 | Python | concpetosfundamentalesprogramacionaa19/ejercicios-clases5-020519-davisalex22 | /miproyecto/run2.py | UTF-8 | 513 | 3.71875 | 4 | [] | no_license | """
file: misvariables.py
autor: @David Salazar
"""
from misvariables import *
# Declaracion de variables e ingreso de valores
def _reportar(nota):
    """Print the pass/fail message for one grade (pass threshold: 18).

    Uses `mensaje` (pass) / `mensaje2` (fail) from the star-import of
    `misvariables` above.
    """
    if nota >= 18:
        print("%s, su valor de nota es %d" % (mensaje, nota))
    else:
        print("%s, su valor de nota es %d" % (mensaje2, nota))


# Read both grades, then report each one.
# (Refactor: the original duplicated the same if/else for each grade.)
nota = int(input("Ingrese nota 1: "))
nota2 = int(input("Ingrese nota 2: "))

_reportar(nota)
_reportar(nota2)
| true |
55dea0990ce17b986d0cd94abc11ce4af5c8e688 | Python | chang-1/BrownUniversity | /ENGN2020/HW0/hw0_1a.py | UTF-8 | 309 | 2.609375 | 3 | [] | no_license | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 20 11:47:07 2019
@author: xumin
"""
from submission.client import submit, check_score
def hello(word):
    """Return a greeting for *word*.

    Note: there is deliberately no space after the comma, matching the
    output the submission checker expects.
    """
    return 'Hello,' + word + '!'
submit(hello, 'hw0_1a')
check_score()
| true |
5f49d28025f1f03927b8103558fdbb91f686ef96 | Python | Dan777678/consoleApp | /main.py | UTF-8 | 996 | 3.78125 | 4 | [] | no_license | # stała przyspieszenia ziemskiego
# Standard gravitational acceleration in m/s^2.
g = 9.780318


# H - height in metres
# m - mass in kg
def energia_kinetyczna(H: float, m: float):
    """Kinetic energy (J) of mass m [kg] falling from height H [m].

    From v^2 = 2*g*H, E = m*v^2 / 2 (numerically equal to m*g*H).
    """
    v_squared = 2 * g * H
    return m * v_squared / 2
# Wyświetlanie wyników.
def wyswietl_wynik(E: float, m: float, H: float):
    """Display the inputs (mass, height) and the computed kinetic energy."""
    print("Wynik")
    rows = (
        ("Masa: ", m, "kg"),
        ("Wysokość: ", H, "m"),
        ("Energia kinetyczna: ", E, "J"),
    )
    for label, value, unit in rows:
        print(label, value, unit)
# Funkcja kotrolująca pobranie danych, przypisanie danych do obliczeń i wyświetlanie wyników.
# Read inputs, compute the energy and display it; returns True on
# success, False when the input could not be parsed as a number.
def main():
    try:
        m: float
        H: float
        print("Dane")
        m = float(input("Podaj mase: "))
        H = float(input("Podaj wyoskość: "))
        E = energia_kinetyczna(H, m)
        wyswietl_wynik(E, m, H)
        return True
    except ValueError:
        # non-numeric input from either prompt
        print("Wystąpił bład w danych wejściowych")
        return False
if __name__ == "__main__":
    # Re-prompt until a run succeeds (main() returns False on bad input).
    t = main()
    while t == False:
        t = main()
| true |
6a86e3e66f6ab2e1cb5f23554d6e4c75a5f09144 | Python | xufeix/python_test | /set_inter.py | UTF-8 | 466 | 3.359375 | 3 | [] | no_license | # /usr/bin/env python3.8
# coding:utf-8
"""
@file: set_inter.py
@time: 2021/1/20 2:21 下午
@author:XF
"""
"""
set的交集
"""
# Three witness lists -- the one name common to all of them is the culprit.
a = ['dewei', 'xiaomu', 'xiaohua', 'xiaoguo']
b = ['xiaohua', 'dewei', 'xiaoman', 'xiaolin']
c = ['xiaoguang', 'xiaobai', 'dewei', 'xiaoyuan']

a_set, b_set, c_set = set(a), set(b), set(c)
print(a, b, c)

# Intersection via the & operator (equivalent to set.intersection).
result = a_set & b_set & c_set
print(result)

xiaotou = list(result)
print('{}谁是这个小偷'.format(xiaotou[0]))
102e7e0f6b36a98b2c00d8c451a559cacbb78590 | Python | Sutharso/E-bot | /e-bot.py | UTF-8 | 47,226 | 2.703125 | 3 | [] | no_license | import discord
import math as meth
import os
import dotenv
import requests
import json
import random
import datetime
from discord.ext import commands
import randfacts
dotenv.load_dotenv()  # load secrets (e.g. the Discord token) from a local .env file
client = commands.Bot(command_prefix='$')  # every command is invoked as "$<name>"
client.remove_command('help')  # drop the built-in help command (presumably replaced later -- outside this chunk)
def get_quote():
    """Fetch one random quote from zenquotes.io, formatted as 'text -author'."""
    raw = requests.get("https://zenquotes.io/api/random").text
    entry = json.loads(raw)[0]
    return entry['q'] + " -" + entry['a']
# Canned replies for $hello; one is chosen at random per invocation.
hello_words=[
'hola',
'hello random discord user',
'hello there e-bot here',
'hello i am a bot :robot:',
'hi',
':person_raising_hand:'
]
# Dictionary entries for $word: "headword : definition\nExample-...".
# Fix: two missing commas (after the 'arbitrary' and 'staid' entries) made
# Python silently concatenate adjacent string literals, fusing four entries
# into two and shrinking the list; they are restored as separate entries.
# NOTE(review): several headwords are misspelled in the content itself
# (e.g. 'abberation', 'saguine', 'martinate', 'phlogmatic', 'potenate') —
# fix the display strings separately if desired.
wordd=[
'abberation : a departure from what is normal, usual, or expected, typically an unwelcome one\nExample-they described the outbreak of violence in the area as an aberration',
'abnegation : the action of renouncing or rejecting something \nExample-abnegation of political power',
'apathetic : showing or feeling no interest, enthusiasm, or concern.\nExample-an apathetic electorate',
'arbitrary :based on random choice or personal whim, rather than any reason or system\nExample-an arbitrary decision',
'annex : add (territory) to ones own territory by appropriation\nExample-the left bank of the Rhine was annexed by France in 1797',
'belittle : dismiss (someone or something) as unimportant\nExample-she belittled his riding skills whenever she could',
'beguile : charm or enchant (someone), often in a deceptive way.\nExample-he beguiled the voters with his good looks',
'cogent : (of an argument or case) clear, logical, and convincing\nExample-they put forward cogent arguments for British membership',
'comply : act in accordance with a wish or command\nExample-we are unable to comply with your request',
'consign : deliver (something) to a persons keeping.\nExample-he consigned three paintings to Sotheby',
'construed : interpret (a word or action) in a particular way\nExample-his words could hardly be construed as an apology',
'contusion : a region of injured tissue or skin in which blood capillaries have been ruptured; a bruise\nExample-a dark contusion on his cheek was beginning to swell',
'defunct : no longer existing or functioning \nExample - the now defunct Somerset & Dorset railway line',
'dilatory : slow to act\nExample-he had been dilatory in preparing for his exams',
'dirge : a mournful song, piece of music, or sound.\nExample-singers chanted dirges',
'embezzlement : theft or misappropriation of funds placed in ones trust or belonging to ones employer\nExample-charges of fraud and embezzlement',
'emulate : match or surpass (a person or achievement)\nExample-most rulers wished to emulate Alexander the Great',
'enormity : the scale or extent of something percieved as bad or morally wrong\nExample-a thorough search disclosed the full enormity of the crime',
'equanimity : calmness and composure, especially in a difficult situation\nExample-she accepted both the good and the bad with equanimity',
'exhort : strongly encourage or urge (someone) to do something\nExample-I exhorted her to be a good child',
'exigent : pressing; demanding\nExample-the exigent demands of her contemporaries music took a toll on her voice',
'gullible : easily persuaded to believe something; credulous.\nExample-an attempt to persuade a gullible public to spend their money',
'hoi polloi : the masses; the common people\nExample-the politician decreased the taxes to appease the hoi polloi',
'ignominious : deserving or causing public disgrace or shame\nExample-no other party risked ignominious defeat',
'impetuous : acting or done quickly and without thought or care \nExample-she might live to regret this impetuous decision',
'inane : lacking sense or meaning; silly\nExample-dont badger people with inane questions',
'inchoate : just begun so not yet fully developed\nExample-a still inchoate democracy',
'indefatigable : persisting continuosly and tirelessly\nExample-an indefatigable defender of human rights',
'infringe : actively break the terms of (a law, agreement, etc.) \nExample-making an unauthorized copy would infringe copyright',
'martinate : a person who demands complete obedience; a strict disciplinarian\nExample-a martinant dictator ruled the kingdom',
'mores : the essential or characteristic customs and conventions of a society or community\nExample-an offence against social mores',
'munificent : characterized by or displaying great generosity\nExample-a munificent bequest',
'nonplussed : so surprised and confused that one is unsure how to react.\nExample-Henry looked completely nonplussed',
'noxious : harmful, poisonous, or very unpleasant\nExample-they were overcome by the noxious fumes',
'paradigm : a typical example or pattern of something,\nExampleThe object-oriented paradigm is a new and different way of thinking about programming',
'phlogmatic : having an unemotional and stolidly calm disposition\nExample-the phlegmatic British character',
'portent : a sign or warning that a momentous or calamitous event is likely to happen\nExample-many birds are regarded as being portents of death',
'potenate : a monarch or ruler, especially an autocratic one\nExample-Valdemar was now, after the king of England, the most powerful potentate in the north of Europe',
'relegate : assign an inferior rank or position to\nExample-they aim to prevent her from being relegated to a secondary role',
'remiss : lacking care or attention to duty; negligent\nExample-it would be very remiss of me not to pass on that information',
'saguine : optimistic or positive, especially in an apparently bad or difficult situation\nExample-he is sanguine about prospects for the global economy',
'staid : sedate, respectable, and unadventurous\nExample-staid law firms',
'unabashed : not embarrassed, disconcerted, or ashamed\nExample-he was unabashed despite failing in his test',
'uncanny : strange or mysterious, especially in an unsettling way\nExample-an uncanny feeling that she was being watched',
'veracity : conformity to facts; accuracy\nExamples-officials expressed doubts concerning the veracity of the story'
]
# Fact cards for $microbe, each "name\nType:...\nDisease:...".
# Fixes: the HIV/AIDS entry had the virus and the disease swapped (HIV is
# the virus, AIDS the disease); a duplicate Typhoid entry with the name and
# disease fields reversed is removed; "Disease-" normalized to "Disease:";
# "Tuberculsosis" typo corrected.
microbes=[
"Rhinovirus\nType:virus\nDisease:Common cold",
"HIV(Human Immunodeficiency Virus)\nType:virus\nDisease:AIDS(Acquired Immunodeficiency Syndrome)",
"Salmonella Typhi\nType:bacteria\nDisease:Typhoid",
"Vibrio Cholerae\nType:Bacteria\nDisease:Cholera",
"Bacillus Anthracis\nType:Bacteria\nDisease:Anthrax",
"Varicella Zoster virus\nType:virus\nDisease:Chickenpox",
"Yersinia Pestis\nType:bacteria\nDisease:Black Plague",
"Variola virus\nType:virus\nDisease:Smallpox",
"Epstein-Barr virus\nType:virus\nDisease:Mononucleosis",
"Mycobacterium Tuberculosis\nType:bacteria\nDisease:Tuberculosis"
]
# $rps reaction pools — "lost"/"won" are from the bot's point of view.
# Sent when the bot loses (the player's move beats the bot's).
lost_game=[
'noooooooooooo i lost',
'damm it u won',
'i lost :('
]
# Sent when the bot wins.
won_game=[
'i won!!!! :)',
'yay i wonnnnnnnn',
'i guess i am better than u in this game'
]
# Sent on a draw (both picked the same move).
tie_game=[
'its a tie bruh :/',
'tie,atleast i didnt lose.',
'ufffff close,its a tie'
]
# Attribution lines for $mathprob; sourcee[i] credits math_probs[i].
# Fix: "textboo" typo in the last entry -> "textbook".
sourcee=[
'source - mind your decisions (youtube)',
'source - michael penn (youtube)',
'source - michael penn (youtube)',
'source - general question',
'source - some math textbook',
'source - michael penn (youtube)',
'source - some math textbook'
]
# Problem images for $mathprob; index i pairs with sourcee[i] and
# math_answers[i] (the three lists are parallel).
math_probs=[
'math_probs/koink.png',
'math_probs/koinktwo.png',
'math_probs/koinkthree.png',
'math_probs/koinkfour.png',
'math_probs/koinkfive.png',
'math_probs/koinksix.png',
'math_probs/koinkseven.png'
]
# Answers wrapped in Discord spoiler markup (||...||).
math_answers=[
'answer- ||90 ||',
'answer - ||n=2,3||',
'answer - ||12-4e||',
'answer - ||pi ||',
'answer - ||root pi||',
'answer - ||ln(e^x/(e^x+1))||',
'answer - ||x= 1 or -1||'
]
# NOTE(review): a second `client = commands.Bot(command_prefix='$')` /
# `client.remove_command('help')` pair stood here, duplicating the setup
# already done right after the imports. The first instance was discarded
# for no reason; every @client.command/@client.event below binds to the
# one remaining instance, so removing the duplicate changes nothing.
@client.event
async def on_ready():
    """On connect: advertise the server count and prefix, then log startup."""
    presence = discord.Game(name=f"on {len(client.guilds)} servers |$help")
    await client.change_presence(activity=presence)
    print("Logged in")
@client.command()
async def help(ctx):
    # Custom help menu; the built-in help command is removed at startup.
    # NOTE(review): the text says "i integer form" (typo for "in") and the
    # "$event 14 6" example claims "23rd may" — confirm intended wording.
    await ctx.send("$hello \n$inspire(gives a random quote) \n$fact(gives a random fact)\n$word(gives a random english word)\n$microbe(gives basic information about a random microbe :microbe:) \n$event <date> <month>(gives the international events on given date)\nthe date and month should be i integer form for example-\n``$event 14 6`` gives the events on 23rd may. \n$game(shows the game commands) \n$math(shows the math commands)\n$extras")
@client.command()
async def hello(ctx):
    """Reply with a randomly chosen greeting."""
    greeting = random.choice(hello_words)
    await ctx.send(greeting)
@client.command()
async def inspire(ctx):
    """Send a freshly fetched random quote."""
    await ctx.send(get_quote())
@client.command()
async def word(ctx):
    """Send one random dictionary entry from `wordd`."""
    entry = random.choice(wordd)
    await ctx.send(entry)
@client.command()
async def microbe(ctx):
    """Send a fact card about a random microbe."""
    card = random.choice(microbes)
    await ctx.send(card)
@client.command()
async def fact(ctx):
    """Send a random fact from the randfacts package."""
    await ctx.send(randfacts.get_fact())
@client.command()
async def game(ctx):
    # Lists the available game commands (currently only rock-paper-scissors).
    await ctx.send('$rps <rock or paper or scissor> (plays a game of rockpaperscissor)')
@client.command()
async def rps(ctx, inputt):
    """Play rock-paper-scissors against the bot.

    Args:
        inputt: the player's move — must be 'rock', 'paper' or 'scissor'.

    Fixes vs. the original: input validation now comes first instead of
    being buried after the win/loss checks, and the unreachable trailing
    ``else: await ctx.send("an error occured")`` branch is removed. Every
    user-visible message is unchanged.
    """
    computer = random.choice(['rock', 'scissor', 'paper'])
    # Every branch first reveals the bot's pick, matching the original UX.
    await ctx.send(f"my choice was- {computer}")
    if inputt not in ('rock', 'paper', 'scissor'):
        await ctx.send('give valid inputs duhh')
    elif inputt == computer:
        await ctx.send(random.choice(tie_game))
    else:
        # Maps each move to the move it defeats.
        beats = {'rock': 'scissor', 'scissor': 'paper', 'paper': 'rock'}
        if beats[inputt] == computer:
            # Player's move beats the bot's: the bot lost.
            await ctx.send(random.choice(lost_game))
        else:
            await ctx.send(random.choice(won_game))
@client.command()
async def math(ctx):
    # Lists the calculator/trigonometry/problem commands.
    await ctx.send("$add/subtract/multiply/divide/exponent <number 1> <number 2>\n$randnum <number1> <number2> (for example- ``$randnum 5 10`` gives a random number between 5 and 10)(both numbers should be integers)\ntrigonometry comands - $cos/sin/tan/cot/cosec/sec <number>(works in radians)\n$mathprob(gives a random math problem)")
@client.command()
async def mathprob(ctx):
    """Post a random math problem: its source, the image, and a spoilered answer."""
    # The three lists are parallel, so one index selects a consistent triple.
    idx = random.randrange(len(math_probs))
    await ctx.send(sourcee[idx])
    await ctx.send(file=discord.File(math_probs[idx]))
    await ctx.send(math_answers[idx])
@client.command()
async def add(ctx, x: float, y: float):
    """Send the sum of x and y."""
    total = x + y
    await ctx.send(total)
@client.command()
async def subtract(ctx, x: float, y: float):
    """Send the difference x - y."""
    difference = x - y
    await ctx.send(difference)
@client.command()
async def multiply(ctx, x: float, y: float):
    """Send the product of x and y."""
    product = x * y
    await ctx.send(product)
@client.command()
async def divide(ctx, x: float, y: float):
    """Send x / y, or "not defined" when y is zero.

    The original wrapped the try/except in a ``while True`` loop that always
    broke after one iteration; the dead loop is removed — same behavior,
    simpler control flow.
    """
    try:
        await ctx.send(x / y)
    except ZeroDivisionError:
        await ctx.send("not defined")
@client.command()
async def exponent(ctx, x: float, y: float):
    """Send x ** y, or "exponent too high" on float overflow.

    The original wrapped the try/except in a ``while True`` loop that always
    broke after one iteration; the dead loop is removed — same behavior,
    simpler control flow.
    """
    try:
        await ctx.send(x ** y)
    except OverflowError:
        await ctx.send("exponent too high")
@client.command()
async def cos(ctx, x: float):
    """Send cos(x); x is in radians."""
    value = meth.cos(x)
    await ctx.send(value)
@client.command()
async def sin(ctx, x: float):
    """Send sin(x); x is in radians."""
    value = meth.sin(x)
    await ctx.send(value)
@client.command()
async def tan(ctx, x: float):
    """Send tan(x); x is in radians."""
    value = meth.tan(x)
    await ctx.send(value)
@client.command()
async def sec(ctx, x: float):
    """Send sec(x) = 1/cos(x); x is in radians."""
    value = 1 / meth.cos(x)
    await ctx.send(value)
@client.command()
async def cosec(ctx, x: float):
    """Send cosec(x) = 1/sin(x); x is in radians."""
    value = 1 / meth.sin(x)
    await ctx.send(value)
@client.command()
async def cot(ctx, x: float):
    """Send cot(x) = 1/tan(x); x is in radians."""
    value = 1 / meth.tan(x)
    await ctx.send(value)
@client.command()
async def randnum(ctx, x: float, y: float):
    """Send a random integer N with x <= N <= y.

    Bug fix: ``random.randint`` requires integer bounds, but the converter
    annotations deliver floats (``$randnum 5 10`` -> 5.0, 10.0), which
    raises on modern Python. The bounds are now truncated to int; the help
    text already instructs users to pass integers, so behavior for valid
    input is unchanged.
    """
    await ctx.send(random.randint(int(x), int(y)))
@client.command()
async def extras(ctx):
    # Lists the auxiliary informational commands.
    await ctx.send("$source - shows some of the sources for the commands\n$update-shows the recent updates for the bot")
@client.command()
async def source(ctx):
    # Credits the data sources used by $word and $fact.
    await ctx.send("$word-English Oxford Dictionary\n$fact-Randfacts package")
@client.command()
async def update(ctx):
    # Changelog message; embeds the live word-list size via len(wordd).
    await ctx.send(f"added one more english word to the $word list(current number of english words {len(wordd)})\nadded more microbes for microbe command\nfixed math help command\nfinally added mathprob command :partying_face:\n-developer of e-bot")
@client.command()
async def event(ctx,date:int,month:int):
# january
if month == 1:
if date == 4:
await ctx.send(f"the international days on {date}/{month} are:-\n World Braille Day")
elif date == 14:
await ctx.send(f"the international days on {date}/{month} are:-\n World Logic Day")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:-\n World Religion Day")
elif date == 24:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Education \n World Day for African and Afrodescendant Culture")
elif date == 27:
await ctx.send(f"the international days on {date}/{month} are:-\n International Holocaust Day")
elif date == 30:
await ctx.send(f"the international days on {date}/{month} are:-\n World Leprosy Eradication Day")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# february
elif month == 2:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:-\n World Interfaith Harmony Week")
elif date == 2:
await ctx.send(f"the international days on {date}/{month} are:-\n World Wetlands Day")
elif date == '04':
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Human Fraternity \n World Cancer Day")
elif date == 6:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Zero Tolerance to Female Genital Mutilation")
elif date == 10:
await ctx.send(f"the international days on {date}/{month} are:-\n World Pulses Day")
elif date == 11:
await ctx.send(f"the international days on {date}/{month} are:-\n International Day of Women and Girls in Science")
elif date == 13:
await ctx.send(f"the international days on {date}/{month} are:-\n World Radio Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:-\n World Day of Social Justice")
elif date == 21:
await ctx.send(f"the international days on {date}/{month} are:-\n International Mother Language Day")
elif date == 23:
await ctx.send(f"the international days on {date}/{month} are:-\n World Peace and Understanding Day")
elif date > 29 or date < 0 or date != int:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# march
elif month == 3:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:-\n Zero Discrimination Day")
elif date == 3:
await ctx.send(f"the international days on {date}/{month} are:-\n World Wildlife Day")
elif date == 4:
await ctx.send(f"the international days on {date}/{month} are:-\n World Engineering Day for Sustainable Development")
elif date == 8:
await ctx.send(f"the international days on {date}/{month} are:-\n International Women’s Day")
elif date == 10:
await ctx.send(f"the international days on {date}/{month} are:-\n International Day of Women Judges")
elif date == 14:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Mathematics \n International Day of Action for Rivers")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:-\n World Consumer Rights Day")
elif date == 20:
await ctx.send(
f"the international days on {date}/{month} are:-\n World Sparrow Day \n French Language Day \n International Francophonie Day \n International Day of Happiness")
elif date == 21:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Forests \n World Down Syndrome Day \n International Day of Nowruz \n World Poetry Day \n International Day for the Elimination of Racial Discrimination")
elif date == 22:
await ctx.send(f"the international days on {date}/{month} are:-\n World Water Day")
elif date == 23:
await ctx.send(f"the international days on {date}/{month} are:-\n World Meteorological Day")
elif date == 24:
await ctx.send(f"the international days on {date}/{month} are:-\n Right to Truth Day \n World Tuberculosis Day")
elif date == 25:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day of Remembrance of the Victims of Slavery and the Transatlantic Slave Trade \n International Day of Solidarity with Detained and Missing Staff Members")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# april
elif month == 4:
if date == 2:
await ctx.send(f"the international days on {date}/{month} are:-\n World Autism Awareness Day")
elif date == 4:
await ctx.send(
f"the international days on {date}/{month} are:-\n International Day for Mine Awareness and Assistance in Mine Action")
elif date == 5:
await ctx.send(f"the international days on {date}/{month} are:-\n International Day of Conscience")
elif date == 6:
await ctx.send(f"the international days on {date}/{month} are:-\n International Day of Sport for Development and Peace")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:-\n World Health Day ")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:-\n International Day of Human Space Flight")
elif date == 14:
await ctx.send(f"the international days on {date}/{month} are:-\n World Chagas Disease Day")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n World Art Day")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:- \n World Hemophilia Day")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n World Heritage Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n Chinese Language Day")
elif date == 21:
await ctx.send(f"the international days on {date}/{month} are:- \n World Creativity and Innovation Day")
elif date == 22:
await ctx.send(f"the international days on {date}/{month} are:- \n International Mother Earth Day")
elif date == 23:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Book and Copyright Day \n English Language Day \n Spanish Language Day \n International Girls in ICT Day")
elif date == 24:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Multilateralism and Diplomacy for Peace")
elif date == 25:
await ctx.send(f"the international days on {date}/{month} are:- \n International Delegate’s Day \n World Malaria Day")
elif date == 26:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Intellectual Property Day \n International Chernobyl Disaster Remembrance Day")
elif date == 28:
await ctx.send(f"the international days on {date}/{month} are:- \n World Day for Safety and Health at Work")
elif date == 30:
await ctx.send(f"the international days on {date}/{month} are:- \n International Jazz Day")
elif date > 30 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# may
elif month == 5:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:- \n Labour Day")
elif date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n World Tuna Day")
elif date == 3:
await ctx.send(f"the international days on {date}/{month} are:- \n World Press Freedom Day \n World Asthma Day ")
elif date == 5:
await ctx.send(
f"the international days on {date}/{month} are:- \n African World Heritage Day \n World Portuguese Language Day")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n “Vesak”, the Day of the Full Moon")
elif date == 8:
await ctx.send(
f"the international days on {date}/{month} are:- \n Time of Remembrance and Reconciliation for Those Who Lost Their Lives During the Second World War \n World Migratory Bird Day \n World Red Cross Day")
elif date == 10:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Argania")
elif date == 15:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Astronomy Day \n International Day of Families")
elif date == 16:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Light \n International Day of Living Together in Peace ")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n International Museum Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n World Metrology Day \n World Bee Day")
elif date == 21:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Tea Day \n World Day for Cultural Diversity for Dialogue and Development")
elif date == 22:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day for Biological Diversity")
elif date == 23:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day to End Obstetric Fistula")
elif date == 24:
await ctx.send(f"the international days on {date}/{month} are:- \n Commonwealth Day")
elif date == 28:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Action for Women’s Health")
elif date == 29:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of UN Peacekeepers \n International Mount Everest Day")
elif date == 31:
await ctx.send(f"the international days on {date}/{month} are:- \n World no tobacco Day")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# june
elif month == 6:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:- \n Global Day of Parents")
elif date == 3:
await ctx.send(f"the international days on {date}/{month} are:- \n World Bicycle Day")
elif date == 4:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Innocent Children Victims of Aggression")
elif date == 5:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Environment Day \n International Day for the Fight against Illegal, Unreported and Unregulated Fishing")
elif date == 6:
await ctx.send(f"the international days on {date}/{month} are:- \n Russian Language Day")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n World Food Safety Day")
elif date == 8:
await ctx.send(f"the international days on {date}/{month} are:- \n World Oceans Day")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:- \n World Day Against Child Labou")
elif date == 13:
await ctx.send(f"the international days on {date}/{month} are:- \n International Albinism Awareness Day")
elif date == 14:
await ctx.send(f"the international days on {date}/{month} are:- \n World Blood Donor Day")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n World Elder Abuse Awareness Day")
elif date == 16:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Family Remittances")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:- \n World Day to Combat Desertification and Drought")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n Sustainable Gastronomy Day")
elif date == 19:
await ctx.send(f"the international days on {date}/{month} are:- \n World Sickle Cell Day \n ")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n World Refugee Day")
elif date == 21:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Yoga \n International Day of the Celebration of the Solstice \n World Music Day")
elif date == 23:
await ctx.send(
f"the international days on {date}/{month} are:- \n United Nations Public Service Day \n International Widows’ Day \n International Olympic Day")
elif date == 25:
await ctx.send(f"the international days on {date}/{month} are:- \n Day of the Seafarer")
elif date == 26:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day against Drug Abuse and Illicit Trafficking \n United Nations International Day in Support of Victims of Torture ")
elif date == 27:
await ctx.send(f"the international days on {date}/{month} are:- \n Micro-, Small and Medium-sized Enterprises Day")
elif date == 29:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of the Tropics")
elif date == 30:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Parliamentarism \n International Asteroid Day")
elif date > 30 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# july
elif month == 7:
if date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n World Sports Journalists Day \n World UFO Day")
elif date == 3:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Cooperatives \n International Plastic Bag Free Day \n International Co-operative Day")
elif date == 5:
await ctx.send(f"the international days on {date}/{month} are:- \n Bikini Day")
elif date == 6:
await ctx.send(f"the international days on {date}/{month} are:- \n World Zoonoses Day \n International Kissing Day")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n Global Forgiveness Day \n World Chocolate Day")
elif date == 11:
await ctx.send(f"the international days on {date}/{month} are:- \n World Population Day")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:- \n Paper Bag Day")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n World Youth Skills Day")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:- \n World Emoji Day \n World Day for International Justice")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n Nelson Mandela International Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n World Chess Day \n apollo 11 moon lading anniversary ")
elif date == 22:
await ctx.send(f"the international days on {date}/{month} are:- \n Pi Approximation Day")
elif date == 25:
await ctx.send(f"the international days on {date}/{month} are:- \n World Drowning Prevention Day")
elif date == 26:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for the Conservation of the Mangrove Ecosystem")
elif date == 28:
await ctx.send(f"the international days on {date}/{month} are:- \n World Nature Conservation Day \n World Hepatitis Day")
elif date == 29:
await ctx.send(f"the international days on {date}/{month} are:- \n International Tiger Day")
elif date == 30:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Friendship \n World Day against Trafficking in Persons")
elif date == 31:
await ctx.send(f"the international days on {date}/{month} are:- \n World Ranger Day")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# august
elif month == 8:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week \n International Mahjong Day")
elif date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week")
elif date == 3:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week")
elif date == 4:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week")
elif date == 5:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week")
elif date == 6:
await ctx.send(f"the international days on {date}/{month} are:- \n Hiroshima Day \n World Breastfeeding Week")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n World Breastfeeding Week")
elif date == 8:
await ctx.send(f"the international days on {date}/{month} are:- \n International Infinity Day")
elif date == 9:
await ctx.send(
f"the international days on {date}/{month} are:- \n Nagasaki Day \n International Day of the World’s Indigenous Peoples \n International Coworking Day")
elif date == 10:
await ctx.send(f"the international days on {date}/{month} are:- \n World Lion Day")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:- \n International Youth Day \n World Elephant Day")
elif date == 13:
await ctx.send(f"the international days on {date}/{month} are:- \n International Lefthanders Day")
elif date == 14:
await ctx.send(f"the international days on {date}/{month} are:- \n World Lizard Day")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n Never Give Up Day")
elif date == 19:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Humanitarian Day \n World Photography Day \n International Orangutan Day \n International Bow Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n World Mosquito Day")
elif date == 21:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Remembrance and Tribute to the Victims of Terrorism \n World Senior Citizen Day")
elif date == 22:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day Commemorating the Victims of Acts of Violence Based on Religion or Belief")
elif date == 23:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for the Remembrance of the Slave Trade and Its Abolition \n Black Ribbon Day ")
elif date == 26:
await ctx.send(f"the international days on {date}/{month} are:- \n Women’s Equality Day \n International Dog Day")
elif date == 29:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day against Nuclear Tests")
elif date == 30:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Whale Shark Day \n International Day of the Victims of Enforced Disappearances \n ")
elif date == 31:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Overdose Awareness Day \n International Day for People of African Descent")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# september
elif month == 9:
if date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n World Coconut Day")
elif date == 5:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Charity")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Clean Air for Blue Skies")
elif date == 8:
await ctx.send(f"the international days on {date}/{month} are:- \n International Literacy Day")
elif date == 9:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day to Protect Education from Attack")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:- \n United Nations Day for South-South Cooperation")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Democracy")
elif date == 16:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for the Preservation of the Ozone Layer")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:- \n World Patient Safety Day")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n International Equal Pay Day")
elif date == 19:
await ctx.send(f"the international days on {date}/{month} are:- \n International Talk Like a Pirate Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of University Spor")
elif date == 21:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Peace \n World Alzheimer’s Day \n Biosphere Day")
elif date == 22:
await ctx.send(f"the international days on {date}/{month} are:- \n World Rhino Day")
elif date == 23:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Sign Languages")
elif date == 24:
await ctx.send(f"the international days on {date}/{month} are:- \n World Maritime Day")
elif date == '26':
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for the Total Elimination of Nuclear Weapons")
elif date == 27:
await ctx.send(f"the international days on {date}/{month} are:- \n World Tourism Day")
elif date == 28:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Awareness of Food Loss and Waste")
elif date == 29:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Awareness of Food Loss and Waste")
elif date == 30:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Translation Day \n International Blasphemy Rights Day")
elif date > 30 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# ocotber
elif month == 10:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Older Persons")
elif date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Non-Violence")
elif date == 4:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Animal Welfare Day \n World Habitat Day \n World Space Week")
elif date == 5:
await ctx.send(f"the international days on {date}/{month} are:- \n World Teachers’ Day \n World Space Week")
elif date == 6:
await ctx.send(f"the international days on {date}/{month} are:- \n World Space Week")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n World Space Week")
elif date == 8:
await ctx.send(f"the international days on {date}/{month} are:- \n World Space Week")
elif date == 9:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Post Day \n World Migratory Bird Day \n World Space Week")
elif date == '10':
await ctx.send(
f"the international days on {date}/{month} are:- \n World Mental Health Day \n World Migratory Bird Day \n World Space Week")
elif date == 11:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of the Girl Child")
elif date == 13:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day for Disaster Risk Reduction")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Rural Women")
elif date == 16:
await ctx.send(f"the international days on {date}/{month} are:- \n World Food Day")
elif date == 17:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day for the Eradication of Poverty")
elif date == 20:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Statistics Day \n International Day of the Air Traffic Controller")
elif date == 24:
await ctx.send(
f"the international days on {date}/{month} are:- \n United Nations Day \n World Development Information Day \n World Polio Day")
elif date == 27:
await ctx.send(f"the international days on {date}/{month} are:- \n World Day for Audiovisual Heritage")
elif date == 30:
await ctx.send(f"the international days on {date}/{month} are:- \n World Thrift Day")
elif date == 31:
await ctx.send(f"the international days on {date}/{month} are:- \n World Cities Day ")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# november
elif month == 11:
if date == 2:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day to End Impunity for Crimes against Journalists")
elif date == 5:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Tsunami Awareness Day \n World Day of Romani Language")
elif date == 6:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for Preventing the Exploitation of the Environment in War and Armed Conflict")
elif date == 10:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Immunization Day \n World Science Day for Peace and Development")
elif date == 12:
await ctx.send(f"the international days on {date}/{month} are:- \n World Pneumonia Day")
elif date == 13:
await ctx.send(f"the international days on {date}/{month} are:- \n World Kindness Day ")
elif date == 14:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Diabetes Day \n International Day against Illicit Trafficking in Cultural Property")
elif date == 15:
await ctx.send(f"the international days on {date}/{month} are:- \n World Day of Remembrance for Road Traffic Victims")
elif date == 16:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day for Tolerance ")
elif date == 17:
await ctx.sendd(f"the international days on {date}/{month} are:- \n International Students Day")
elif date == 18:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Islamic Art")
elif date == 19:
await ctx.send(
f"the international days on {date}/{month} are:- \n World Toilet Day \n World Philosophy Day \n International Men’s Day")
elif date == 20:
await ctx.send(
f"the international days on {date}/{month} are:- \n Africa Industrialization Day \n World Children’s Day \n Transgender Day of Remembrance")
elif date == 21:
await ctx.send(f"the international days on {date}/{month} are:- \n World Fisheries Day \n World Television Day")
elif date == 25:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day for the Elimination of Violence against Women \n National Day of Mourning")
elif date == 26:
await ctx.send(f"the international days on {date}/{month} are:- \n World Olive Tree Day")
elif date == 29:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Solidarity with the Palestinian People")
elif date == 30:
await ctx.send(
f"the international days on {date}/{month} are:- \n Day of Remembrance for all Victims of Chemical Warfare")
elif date > 30 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
# december
elif month == 12:
if date == 1:
await ctx.send(f"the international days on {date}/{month} are:- \n World AIDS Day ")
elif date == 2:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day for the Abolition of Slavery")
elif date == 3:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Persons with Disabilities")
elif date == 4:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Banks")
elif date == 5:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Volunteer Day for Economic and Social Development \n World Soil Day")
elif date == 7:
await ctx.send(f"the international days on {date}/{month} are:- \n International Civil Aviation Day")
elif date == 9:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Commemoration and Dignity of the Victims of the Crime of Genocide and of the Prevention of this \n International Anti-Corruption Day")
elif date == 10:
await ctx.send(f"the international days on {date}/{month} are:- \n Human Rights Day \n International Animal Rights Day")
elif date == 11:
await ctx.send(f"the international days on {date}/{month} are:- \n International Mountain Day")
elif date == 12:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Day of Neutrality \n International Universal Health Coverage Day")
elif date == 18:
await ctx.send(
f"the international days on {date}/{month} are:- \n International Migrants Day \n World Arabic Language Day")
elif date == 20:
await ctx.send(f"the international days on {date}/{month} are:- \n International Human Solidarity Day ")
elif date == 27:
await ctx.send(f"the international days on {date}/{month} are:- \n International Day of Epidemic Preparedness")
elif date > 31 or date < 0:
await ctx.send("no such dates exist in the given month")
else:
await ctx.send(f"no inportant international days on {date}/{month}")
else:
await ctx.send("non existent or non integer month given")
# Read the Discord bot token from the environment and start the client
# (blocks until the bot is shut down).
TOKEN=os.getenv('TOKEN')
client.run(TOKEN)
| true |
3d4ff831b495b457dbbb86743300bebb98190712 | Python | vigneshragul/python_complete_tutorial-beginners | /Generators/gen.py | UTF-8 | 1,603 | 4.34375 | 4 | [] | no_license | # GENERATORS:
# Generators simplifies creation of iterators. A generator is a function that produces a sequence of results
# instead of a single value.
def foo():
    """Generator that yields 0, 1, 2 while tracing execution order.

    Demonstrates that a generator body only runs between resumptions:
    "before yield" prints when a value is produced, "after yield" only
    when the consumer requests the next one.
    """
    # Fixed: the original used Python 2 print statements, which are a
    # SyntaxError in Python 3 (the rest of this file already uses print()).
    print("begin")
    for i in range(3):
        print("before yield", i)
        yield i
        print("after yield", i)
    print("end")
# Iterate the generator directly. The original used range(f), which
# raises TypeError because range() requires an integer, not a generator.
f = foo()
for i in f:
    print(i)
# EXAMPLE 2
def integers():
    """Yield the infinite sequence 1, 2, 3, ..."""
    n = 1
    while True:
        yield n
        n += 1
def squares():
    """Yield the infinite sequence of squares: 1, 4, 9, 16, ...

    The integer counter is inlined here instead of delegating to the
    integers() generator; the produced stream is identical.
    """
    n = 1
    while True:
        yield n * n
        n += 1
def norms():
    """Placeholder that always evaluates to 1."""
    result = 1
    return result
def take(n, seq):
    """Return a list of the first n values from iterable *seq*.

    Stops early (without raising) if seq yields fewer than n values.
    """
    # Fixed: iter() was left commented out (so plain lists failed with
    # AttributeError), and seq.next() is Python 2 only -- use next(seq).
    seq = iter(seq)
    result = []
    try:
        for _ in range(n):
            result.append(next(seq))
    except StopIteration:
        pass
    return result
print take(5, squares()) # prints [1, 4, 9, 16, 25]
# EXAMPLE 3
import random
def lottery():
    """Yield six draws in 1..40, then a seventh bonus draw in 1..15."""
    draws = 0
    while draws < 6:
        yield random.randint(1, 40)
        draws += 1
    # The bonus number comes from a smaller pool.
    yield random.randint(1, 15)
# Fixed: the loop drew random_number but never printed it.
for random_number in lottery():
    print("And the next number is... ", random_number)
# EXAMPLE FOR FIBONACCI SERIES USING GENERATORS
def flow():
    """Yield the infinite sequence 1, 2, 3, ...

    Fixed: the original evaluated ``i+1`` without assigning it, so the
    generator yielded 1 forever; ``i += 1`` makes it actually count.
    """
    i = 1
    while True:
        yield i
        i += 1
def operation():
    """Yield Fibonacci-style sums forever: 1, 2, 3, 5, 8, ...

    The loop runs unconditionally (the original used flow() purely as an
    infinite driver and ignored its values), starting from the pair (0, 1).
    """
    prev, curr = 0, 1
    while True:
        nxt = prev + curr
        yield nxt
        prev, curr = curr, nxt
def fib(n, jai):
    """Return a list of the first n values pulled from iterator *jai*.

    Raises if jai is exhausted before n values are produced.
    """
    # Fixed: jai.next() is Python 2 only; use the built-in next().
    result = []
    for _ in range(n):
        result.append(next(jai))
    return result
print(fib(10,operation()))
| true |
93f036d8ebfb14ffd4e1aaade913b6ba8ba2da20 | Python | codelooper75/UserInterviewAPI | /scripts/api_testing_with_requests.py | UTF-8 | 702 | 2.84375 | 3 | [] | no_license | import json
import requests
from requests.auth import HTTPBasicAuth
# Local dev server and the polls list endpoint under test.
BASE_URL= "http://127.0.0.1:8000/"
ENDPOINT = 'api/polls'
def get_polls():
    """Fetch the poll list, print each poll and its title, return the parsed JSON."""
    response = requests.get(BASE_URL + ENDPOINT, auth=HTTPBasicAuth('roman', '4826'))
    polls = response.json()  # endpoint returns a JSON list
    print(response.status_code)  # expect 200 when authenticated
    for poll in polls:
        # Dump the raw object, then just its title.
        print(poll)
        print(poll['title'])
        # Detail request (not implemented yet):
        # r2 = requests.get(BASE_URL + ENDPOINT + str(poll['id']), auth=HTTPBasicAuth('roman', '4826'))
        # print(r2.json())
    return polls
get_polls() | true |
d397b44439cadffc0ba3288a31203378dc0a1654 | Python | kaytech23/net | /tensor_minst_test.py | UTF-8 | 2,376 | 2.84375 | 3 | [] | no_license | import tensorflow as tf
import pickle
from tensorflow.examples.tutorials.mnist import input_data
# Download/cache MNIST with one-hot labels (TF1-era helper).
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
#with open('mnist_data', 'wb') as f:
#    pickle.dump(mnist, f)
# Hidden-layer widths and output size (10 digit classes).
n_nodes_hl1 = 500
n_nodes_hl2 = 500
n_nodes_hl3 = 500
n_classes = 10
batch_size = 1
# height x width
# Graph inputs: flattened 28x28 images and their labels.
x = tf.placeholder('float', [None, 28 * 28])
y = tf.placeholder('float')
def neural_network_model(data):
    """Build a fully connected graph 784 -> 500 -> 500 -> 500 -> 10.

    Returns the raw output logits (no activation on the final layer).
    """
    def dense_params(n_in, n_out):
        # Randomly initialised weight matrix and bias vector for one layer.
        return (tf.Variable(tf.random_normal([n_in, n_out])),
                tf.Variable(tf.random_normal([n_out])))

    w1, b1 = dense_params(784, n_nodes_hl1)
    w2, b2 = dense_params(n_nodes_hl1, n_nodes_hl2)
    w3, b3 = dense_params(n_nodes_hl2, n_nodes_hl3)
    w_out, b_out = dense_params(n_nodes_hl3, n_classes)

    # (input * weights) + biases -> ReLU activation per hidden layer.
    layer1 = tf.nn.relu(tf.add(tf.matmul(data, w1), b1))
    layer2 = tf.nn.relu(tf.add(tf.matmul(layer1, w2), b2))
    layer3 = tf.nn.relu(tf.add(tf.matmul(layer2, w3), b3))

    return tf.add(tf.matmul(layer3, w_out), b_out)
def train_neural_network(x, y):
    """Run a single forward pass and two loss evaluations on one MNIST batch.

    NOTE(review): despite its name, this builds no optimizer and updates no
    weights -- it only prints the untrained network's outputs and losses.
    """
    prediction = neural_network_model(x)
    # Per-example cross-entropy between logits and one-hot labels.
    softmax = tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y)
    #softmax = tf.nn.softmax(logits=prediction)
    sq_loss = tf.losses.mean_squared_error(labels=y, predictions=prediction)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # batch_size is 1 at module level, so this is one image/label pair.
        epoch_images, epoch_labels = mnist.train.next_batch(batch_size)
        res = sess.run(prediction, feed_dict={x: epoch_images})
        res1 = sess.run(softmax, feed_dict={x: epoch_images, y: epoch_labels})
        sq_loss1 = sess.run(sq_loss, feed_dict={x: epoch_images, y: epoch_labels})
        print(res)
        print(res1)
        print(sq_loss1)
        print(epoch_labels)
train_neural_network(x, y)
| true |
d3a0089bb8f8b67cebaa942264d1ee65d6d21ee2 | Python | will-a/Practice | /Python/mergetwosortedlinkedlists.py | UTF-8 | 695 | 3.765625 | 4 | [] | no_license | # https://leetcode.com/problems/merge-two-sorted-lists/
# Definition for singly-linked list.
class ListNode:
    """A node in a singly linked list."""

    def __init__(self, val=0, next=None):
        # Payload and pointer to the following node (None at the tail).
        self.val, self.next = val, next
class Solution:
    """Merge two sorted linked lists into one sorted list (LeetCode 21)."""

    def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
        # Guard clauses: once either list is exhausted, the remainder of
        # the other list is the answer.
        if l1 is None:
            return l2
        if l2 is None:
            return l1
        # Take the smaller head, recurse on the rest. Fresh nodes are
        # allocated for the merged list, as in the original implementation.
        if l1.val < l2.val:
            smaller, rest = l1, self.mergeTwoLists(l1.next, l2)
        else:
            smaller, rest = l2, self.mergeTwoLists(l1, l2.next)
        return ListNode(smaller.val, next=rest)
| true |
ed702f8cfc28dd7a8aa2949be8356e7539b3946d | Python | xuychen/Leetcode | /1-100/11-20/15-3Sum/3Sum.py | UTF-8 | 1,945 | 3.21875 | 3 | [
"MIT"
] | permissive | # a version that has O(n^2), too slow
class Solution(object):
    def threeSum(self, nums):
        """Return all unique triplets in nums that sum to zero.

        Splits nums into negatives and positives (zeros counted
        separately), then searches pairs from one side against single
        values from the other.

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        # Fixed: filter() returns a lazy iterator in Python 3, so the
        # original len(less)/len(greater) raised TypeError; wrap in list().
        less = list(filter(lambda x: x < 0, nums))
        greater = list(filter(lambda x: x > 0, nums))
        lessLen = len(less)
        greaterLen = len(greater)
        zeroLen = len(nums) - lessLen - greaterLen
        # One 0 is appended to the positives (when any zero exists) so
        # that triples of the form (a, 0, -a) are found.
        result = self.findingSum(greater + [0] * int(bool(zeroLen)), less)
        result += self.findingSum(less, greater)
        if zeroLen >= 3:
            result.append([0, 0, 0])
        return result

    def findingSum(self, less, greater):
        """Find unique pairs from `less` whose negated sum occurs in `greater`.

        Returns triples [a, b, -(a+b)] with (a, b) taken from `less`.
        """
        lessLen = len(less)
        result = []
        dictionary = {}
        uniq = {}
        # Map each candidate third value (negated) for O(1) membership tests.
        for num in greater:
            dictionary[-num] = True
        for i in range(lessLen):
            for j in range(i + 1, lessLen):
                # Canonical (sorted) pair key so duplicates are skipped once.
                pair = (less[i], less[j]) if less[i] < less[j] else (less[j], less[i])
                if uniq.get(pair, False) == False:
                    uniq[pair] = True
                else:
                    continue
                if dictionary.get(less[i] + less[j], False) == True:
                    result.append([less[i], less[j], -less[i] - less[j]])
        return result

    def threeSum3(self, nums):
        """Alternative solution using a value -> last-index map.

        Sorts nums in place; duplicates are eliminated via the result set.

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        nums.sort()
        nums_dict = {}
        result = set()
        length = len(nums)
        for i in range(length):
            nums_dict[nums[i]] = i
        for i in range(length):
            target = nums[i]
            for left in range(i + 1, length - 1):
                # Accept the complement only if it sits to the right of
                # `left`, which avoids reusing an element and duplicates.
                if -target - nums[left] in nums_dict and left < nums_dict[-target - nums[left]]:
                    result.add((target, nums[left], -target - nums[left]))
        return list(result)
b10f467d96b1b67d3ef636407c219aae875e8561 | Python | navnath-auti/College | /Sem4/OS/Scheduling algorithm/roundrobin.py | UTF-8 | 1,844 | 3.375 | 3 | [] | no_license | from prettytable import PrettyTable
def rr():
    """Interactive round-robin scheduling demo.

    Reads process ids, arrival times and burst times from stdin, simulates
    round-robin with the given quantum, and prints a PrettyTable of
    completion/turnaround/waiting times plus totals and averages.
    """
    pid,at,bt,tt,wt = [],[],[],[],[]
    print()
    z = int(input("Enter number of Process: "))
    ct = [0]*z
    quantum = int(input("Enter the quantum time: "))
    print()
    for i in range(0,z):
        pid.append(int(input("Enter Proccess id:")))
        print()
        at.append(int(input("Enter arrival time:")))
        print()
        bt.append(int(input("Enter burst time:")))
        print()
    # Insertion sort of the three parallel lists by arrival time.
    # NOTE: `min` shadows the builtin here; it is just the record being moved.
    for i in range(0,z):
        min = [pid[i],at[i],bt[i]]
        j = i-1
        while(j>=0 and at[j]>min[1]):
            at[j+1],pid[j+1],bt[j+1] = at[j],pid[j],bt[j]
            j = j-1
        pid[j+1],at[j+1],bt[j+1] = min[0],min[1],min[2]
    # Round-robin passes over the remaining burst times until all hit zero.
    # NOTE(review): arrival times are ignored during scheduling (every
    # process gets a slice each pass), and completion is recorded as
    # tot+1 -- looks like an off-by-one vs. the usual definition; confirm.
    rem_bt = bt.copy()
    tot = 0
    while(True):
        status = True
        for x in range(0,z):
            if rem_bt[x] > 0:
                status = False
                if (rem_bt[x]-quantum)>0:
                    rem_bt[x] -= quantum
                    tot += quantum
                else:
                    tot += rem_bt[x]
                    ct[x] = tot+1
                    rem_bt[x] = 0
        if status:
            break
    # Turnaround = completion - arrival; waiting = turnaround - burst.
    tt.append(ct[0]-at[0])
    wt.append(tt[0]-bt[0])
    for i in range(1,z):
        tt.append(ct[i]-at[i])
        wt.append(tt[i]-bt[i])
    x = PrettyTable()
    x.field_names = ["Process id","Arrival Time","Burst Time","Completion Time","Turnaround Time","Waiting Time"]
    for a,b,c,d,e,f in zip(pid,at,bt,ct,tt,wt):
        x.add_row([a,b,c,d,e,f])
    print(x)
    print("Total turnaround time: "+str(sum(tt))+"\nTotal waiting time: "+str(sum(wt)))
    print("Average turnaround time: "+str(sum(tt)/z)+"\nAverage waiting time: "+str(sum(wt)/z))
# Script entry point: print the author tag, then run the interactive demo.
if __name__ == "__main__":
    print("55_Adnan_Shaikh")
    rr()
7127a95e444eec604473f941fa4abaeac5bedf40 | Python | Eligijus112/python-app | /master.py | UTF-8 | 1,215 | 2.875 | 3 | [] | no_license | """
The main script that controls the pipeline
"""
### Loading modules
import numpy as np
import pandas as pd
import os
### Loading custom functions
from modules.photo_module import create_path_frame, img_read
from modules.utility_module import read_NN_model, construct_fit_frame
### Reading the class decoder
class_df = pd.read_csv('main_model/class_decoder.csv')
### Loading the model that is used in production
main_model = read_NN_model('main_model/model_specs.json',
'main_model/model_weights.h5')
### Reading and preprocesing all the photos
all_photo = create_path_frame('input', return_mapper = True)
if all_photo.empty is not True:
d = [img_read(x, h = 28, w = 28) for x in all_photo['path']]
d = np.asarray(d)
### Predicting the image label probabilities
fit = main_model.predict(d)
### Constructing a data frame to store the results in
fit_df = construct_fit_frame(fit, class_df)
fit_df = fit_df.merge(all_photo, on = 'image_nr')
fit_df = fit_df.sort_values(['image_nr'], ascending = True)
### Saving the results
os.mkdir('output')
fit_df.to_csv('output/fitted_clases.csv', index = False)
| true |
653d1e92c109400390fb67bf4e2f57a67ccee38f | Python | avin82/Programming_Data_Structures_and_Algorithms_using_Python | /list_rotation.py | UTF-8 | 744 | 4.5625 | 5 | [] | no_license | '''A list rotation consists of taking the last element and moving it to the front. For instance, if we rotate the list [1,2,3,4,5], we get [5,1,2,3,4]. If we rotate it again, we get [4,5,1,2,3].
Write a Python function rotatelist(l,k) that takes a list l and a positive integer k and returns the list l after k rotations. If k is not positive, your function should return l unchanged. Note that your function should not change l itself, and should return the rotated list.'''
def rotatelist(l, k):
    """Return a copy of list l rotated k times (each rotation moves the
    last element to the front). If k is not positive, return l itself,
    unchanged, as before.

    Improved: runs in O(len(l)) for any k by reducing k modulo the length
    and slicing once, instead of rotating one step at a time (O(k*n)).
    """
    if k <= 0:
        return l
    if not l:
        # k rotations of an empty list: still a (new) empty list.
        return l[:]
    r = k % len(l)
    if r == 0:
        # A whole number of full cycles leaves the order unchanged, but a
        # positive k must still return a new list, matching the original.
        return l[:]
    return l[-r:] + l[:-r]
# Demo runs; expected: [5, 1, 2, 3, 4], [3, 4, 5, 1, 2], [4, 5, 1, 2, 3]
print(rotatelist([1, 2, 3, 4, 5], 1))
print(rotatelist([1, 2, 3, 4, 5], 3))
print(rotatelist([1, 2, 3, 4, 5], 12))
| true |
980f503eceef0de2495f7b56af29249857ecee4e | Python | bylexus/adventofcode2017 | /18-duet.py | UTF-8 | 5,359 | 2.609375 | 3 | [] | no_license | import time
import lib
import math
import re
import itertools
from collections import deque
class CPU:
    """Interpreter for the AoC 2017 Day 18 part-1 assembly.

    Instructions are [op, arg1(, arg2)] lists; integer args are literals,
    strings are register names. 'snd' records a frequency in last_freq;
    'rcv' with a non-zero operand copies it into rec_freq.
    """
    def __init__(self, prg_mem):
        # Keep an own copy so callers can reuse their program list.
        self.prg_mem = list(prg_mem)
        self.reset()
    def reset(self):
        """Clear all registers and restart at instruction 0."""
        self.registers = dict()
        self.iptr = 0
        self.running = True
        self.last_freq = 0  # last value played by 'snd'
        self.rec_freq = 0   # frequency captured by a non-zero 'rcv'
    def get_reg_val(self, reg):
        # Unset registers read as 0.
        return self.registers.get(reg, 0)
    def set_reg_val(self, reg, val):
        self.registers[reg] = val
    def exec_next(self, stop_at_rcv = False):
        """Execute one instruction.

        Returns False once iptr leaves the program (setting running to
        False); otherwise True. With stop_at_rcv, a successful 'rcv'
        also halts the CPU (running=False) but still returns True, and
        leaves iptr on the 'rcv' instruction.
        """
        if self.iptr < 0 or self.iptr >= len(self.prg_mem):
            self.running = False
            return False
        instr = self.prg_mem[self.iptr]
        op = instr[0]
        val_1 = instr[1]
        val_2 = instr[2] if len(instr) > 2 else None
        # Operands: int literals pass through, register names dereference.
        real_val_1 = val_1 if isinstance(val_1, int) else self.get_reg_val(val_1)
        real_val_2 = val_2 if isinstance(val_2, int) else (self.get_reg_val(val_2) if val_2 else None)
        inc_ptr = 1
        if op == 'snd':
            # print("Sound: {}".format(real_val_1))
            self.last_freq = real_val_1
        elif op == 'set':
            self.set_reg_val(val_1, real_val_2)
        elif op == 'add':
            self.set_reg_val(val_1, self.get_reg_val(val_1) + real_val_2)
        elif op == 'mul':
            self.set_reg_val(val_1, self.get_reg_val(val_1) * real_val_2)
        elif op == 'mod':
            self.set_reg_val(val_1, self.get_reg_val(val_1) % real_val_2)
        elif op == 'rcv':
            if real_val_1 != 0:
                self.rec_freq = self.last_freq
                if stop_at_rcv:
                    # Early return: iptr is NOT advanced past this 'rcv'.
                    self.running = False
                    return True
        elif op == 'jgz':
            if real_val_1 > 0:
                # Relative jump when the first operand is positive.
                inc_ptr = real_val_2
        self.iptr += inc_ptr
        return True
class CPU2:
    """Interpreter for Day 18 part 2: two programs exchanging values.

    'snd' appends to the partner's queue (counting sends); 'rcv' pops
    from this program's own queue, or blocks (iptr unchanged,
    running=False) when the queue is empty. Register 'p' starts as the
    program number.
    """
    def __init__(self, prg_mem, nr):
        self.prg_mem = list(prg_mem)
        self.receiver_cpu = None  # partner program; must be wired up by the caller
        self.nr = nr              # program id (0 or 1)
        self.reset()
    def reset(self):
        """Clear state: empty inbox, register p = program id, iptr 0."""
        self.dataq = deque()
        self.registers = dict()
        self.registers['p'] = self.nr
        self.iptr = 0
        self.running = True
        self.send_counter = 0
    def get_reg_val(self, reg):
        # Unset registers read as 0.
        return self.registers.get(reg, 0)
    def set_reg_val(self, reg, val):
        self.registers[reg] = val
    def exec_next(self):
        """Execute one instruction.

        running is re-asserted each step; it ends up False either when
        iptr leaves the program (returns False) or when a 'rcv' finds an
        empty queue (blocked: iptr stays put, returns True). Both
        programs blocked at once means deadlock for the driver loop.
        """
        self.running = True
        if self.iptr < 0 or self.iptr >= len(self.prg_mem):
            self.running = False
            return False
        instr = self.prg_mem[self.iptr]
        op = instr[0]
        val_1 = instr[1]
        val_2 = instr[2] if len(instr) > 2 else None
        # Operands: int literals pass through, register names dereference.
        real_val_1 = val_1 if isinstance(val_1, int) else self.get_reg_val(val_1)
        real_val_2 = val_2 if isinstance(val_2, int) else (self.get_reg_val(val_2) if val_2 else None)
        inc_ptr = 1
        if op == 'snd':
            # Deliver to the partner program and count the send.
            self.receiver_cpu.dataq.append(real_val_1)
            self.send_counter += 1
        elif op == 'set':
            self.set_reg_val(val_1, real_val_2)
        elif op == 'add':
            self.set_reg_val(val_1, self.get_reg_val(val_1) + real_val_2)
        elif op == 'mul':
            self.set_reg_val(val_1, self.get_reg_val(val_1) * real_val_2)
        elif op == 'mod':
            self.set_reg_val(val_1, self.get_reg_val(val_1) % real_val_2)
        elif op == 'rcv':
            if len(self.dataq):
                data = self.dataq.popleft()
                self.set_reg_val(val_1, data)
            else:
                # Block: stay on this instruction until data arrives.
                inc_ptr = 0
                self.running = False
        elif op == 'jgz':
            if real_val_1 > 0:
                inc_ptr = real_val_2
        self.iptr += inc_ptr
        return True
def read_input():
    """Parse the instruction file into [op, arg1(, arg2)] lists.

    Arguments that look like integers are converted in place; register
    names (and missing second operands) are left as-is.
    """
    # Alternative sample inputs:
    # 'inputs/18-input-sample.txt', 'inputs/18-input-sample2.txt'
    ops = [line.split(' ') for line in lib.remove_empty(lib.readfile('inputs/18-input.txt'))]
    for op in ops:
        for idx in (1, 2):
            try:
                op[idx] = int(op[idx])
            except Exception:
                # Non-numeric register name, or no second operand.
                pass
    return ops
def problem1(input):
    """Run the program until the first successful 'rcv'; print that frequency."""
    machine = CPU(input)
    answer = 0
    while machine.running:
        stepped = machine.exec_next(stop_at_rcv=True)
        # A step that succeeded but left the CPU halted means rcv fired.
        if stepped and not machine.running:
            answer = machine.rec_freq
            break
    print("Solution 1: {}".format(answer))
def problem2(input):
    """Run two CPU2 programs in lockstep until both stall; print how many
    values program 1 sent."""
    prog_a = CPU2(input, 0)
    prog_b = CPU2(input, 1)
    # Wire each program's 'snd' output to the other's receive queue.
    prog_a.receiver_cpu = prog_b
    prog_b.receiver_cpu = prog_a
    sends = 0
    while prog_a.running or prog_b.running:
        prog_a.exec_next()
        prog_b.exec_next()
        sends = prog_b.send_counter
    print("Solution 2: {}".format(sends))
def main():
    """Print a banner, then time and run both puzzle parts."""
    title="Advent of Code 2017!"
    print("{title}\n{line}\n\n".format(title=title, line="="*len(title)))
    input = read_input()
    # lib.measure runs the callable and returns the elapsed seconds.
    t1=lib.measure(lambda: problem1(input))
    print("Problem 1 took {:.3f}s to solve.\n\n".format(t1))
    t2=lib.measure(lambda: problem2(input))
    print("Problem 2 took {:.3f}s to solve.".format(t2))
# Standard script entry point.
if __name__ == "__main__":
    main()
| true |
24e089ab34fc7c5414b4150648a178ae54ff4d66 | Python | minhazur9/minecraft-droprates | /sim.py | UTF-8 | 1,362 | 3.4375 | 3 | [] | no_license | import matplotlib.pyplot as plt
import numpy as np
def simulate(rateA, sampleA, rateB, sampleB, iterations=1000, y1label='SampleA', y2label='SampleB'):
    """Run `iterations` trials of two drop-rate experiments and plot them.

    Each trial draws `sampleA`/`sampleB` uniform numbers and counts how
    many fall under the corresponding success rate (given in percent).
    """
    hits_a, hits_b = [], []
    for _ in range(iterations):
        hits_a.append(getSuccesses(list(range(1, sampleA + 1)),
                                   generateNumbers(sampleA), rateA))
        hits_b.append(getSuccesses(list(range(1, sampleB + 1)),
                                   generateNumbers(sampleB), rateB))
    generatePlot(iterations, hits_a, hits_b, y1label, y2label)
def generateNumbers(sample):
    """Return `sample` uniform random floats in [0, 1)."""
    return np.random.random(sample)
def getSuccesses(population, successes, rate):
    """Count how many outcomes in `successes` fall at or under `rate` percent.

    `population` is unused but kept for interface compatibility with
    existing callers. (The original also computed unused `size` and
    `multiplier` locals; they are removed here.)
    """
    threshold = rate / 100
    return sum(1 for outcome in successes if outcome <= threshold)
def generatePlot(x, y1, y2, y1label, y2label):
    """Scatter both series against trial index and show the plot.

    Marker size shrinks proportionally once there are more than 500 points.
    """
    trials = list(range(0, x))
    marker_size = 50 if len(trials) <= 500 else 50 * (500 / len(trials))
    best_a, best_b = max(y1), max(y2)
    plt.scatter(trials, y1, s=marker_size, label=y1label, alpha=0.65)
    plt.scatter(trials, y2, s=marker_size, label=y2label, c='red', alpha=0.65)
    plt.legend([f'Most {y1label} - {best_a}', f'Most {y2label} - {best_b}'])
    plt.show()
simulate(rateA=3.7,sampleA=262,rateB=50,sampleB=305,iterations=1000,y1label='Enderpearls',y2label="Blazerods")
| true |
e1a5646cb2cf13af675b3423df1706ad1917bfdd | Python | jmunro94/PySnake | /Snake/Plays.py | UTF-8 | 1,629 | 2.875 | 3 | [] | no_license | import random
import copy
from Snake import *
def new_prize():
    # Pick a random free square for the prize and mark it occupied.
    co = random.choice(empty_squares)
    empty_squares.remove(co)
    # list.__init__ re-fills prize_coords in place, so existing references
    # to the list (e.g. from the star-imported Snake module) see the update.
    prize_coords.__init__(co)
def setup_snake():
    """Replace the initial body with a 3-segment vertical snake centred
    on the grid. `body`, `piece` and `vert` come from the Snake star-import."""
    body.pop(0)
    x = int(grid_size/2); y = int(grid_size/2)
    body.append(piece(vert, x, y))
    body.append(piece(vert, x, y + 1))
    body.append(piece(vert, x, y + 2))
    # NOTE(review): range(0, 2) only marks the first two of the three
    # segments as occupied -- confirm whether the tail square is meant
    # to stay in empty_squares.
    for i in range(0, 2): empty_squares.remove(body[i].coordinates)
def play(last_i_d):
    """Advance the snake one step.

    Returns [alive, last_index, freed_square]: alive is False on a wall or
    self collision; freed_square is the tail cell vacated this step
    ([0, 0] when nothing moved or the prize was eaten and the snake grew).
    `body`, `piece`, `find_new_front`, `remove_old_last` etc. come from
    the Snake star-import.
    """
    temp_old_back_pos = [0,0]
    # next_dir == 0 means "keep the current heading".
    if next_dir == 0:
        direction = body[0].direction
    else:
        direction = next_dir
    new_front = find_new_front()
    # Wall collision: outside the grid in either axis.
    if new_front[1] < 0 or new_front[1] >= grid_size or new_front[0] < 0 or new_front[0] >= grid_size:
        return [False, last_i_d, [0, 0]]
    # Self collision: new head lands on any existing segment.
    for bod in body:
        if new_front == bod.coordinates:
            return [False, last_i_d, [0, 0]]
    body.insert(0, piece(direction, new_front[0], new_front[1]))
    # No prize eaten: drop the tail segment and free its square.
    # (Eating the prize skips this, so the snake grows by one.)
    if prize_coords != new_front:
        empty_squares.remove(body[0].coordinates)
        temp_old_back_pos = copy.copy(body[last_i_d + 1].coordinates)
        new_back = remove_old_last(last_i_d + 1)
        body.pop(last_i_d + 1)
        for i in range(0, 2): new_back[i] = int(new_back[i])
        empty_squares.append(temp_old_back_pos)
        last_i_d = len(body) - 1
    return [True, last_i_d, temp_old_back_pos]
# --- Module-level game state ---
grid_size = 6
speed = 3.8 # number of squares per second
empty_squares = list()
prize_coords = [0, 0]
# Enforce a minimum playable grid size.
if grid_size < 5: grid_size = 5
# Every cell starts unoccupied.
for x in range(0, grid_size):
    for y in range(0, grid_size):
        empty_squares.append([x, y])
new_prize()
setup_snake()
# 0 means "keep current direction" (see play()).
next_dir = 0
| true |
8cd82ef1e8fd55acf6b63f22227e7f20bde10fe4 | Python | alexmihalyk23/SummerSchool2021 | /meetanalogvirtualcamera.py | UTF-8 | 2,583 | 2.59375 | 3 | [
"MIT"
] | permissive | import pyvirtualcam
from tkinter import *
import mediapipe as mp
import numpy as np
import tkinter as tk
from PIL import Image, ImageTk
import cv2
import tkinter.filedialog as tkFileDialog
mp_drawing = mp.solutions.drawing_utils
mp_selfie_segmentation = mp.solutions.selfie_segmentation
path = None
def select_image():
    """Open a file picker and store the chosen background image path
    in the module-level `path` (read by meet() on every frame)."""
    global path
    path = tkFileDialog.askopenfilename()
def meet(image):
    """Replace the background of a selfie frame.

    Uses MediaPipe selfie segmentation to keep the person and swap the
    background for the user-selected image (resized to 640x480) or, when
    none is selected, a Gaussian blur of the frame itself.

    NOTE(review): a new SelfieSegmentation model is constructed on every
    frame -- likely expensive; consider hoisting it. Assumes `image` is
    an RGB frame -- confirm against the caller.
    """
    with mp_selfie_segmentation.SelfieSegmentation(
            model_selection=1) as selfie_segmentation:
        bg_image = None
        if path is not None:
            bg_image = cv2.resize(cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB), (640, 480))
        else:
            bg_image = cv2.GaussianBlur(image, (25, 25), 0)
        results = selfie_segmentation.process(image)
        # Per-pixel mask, broadcast to 3 channels; >0.1 counts as "person".
        condition = np.stack(
            (results.segmentation_mask,) * 3, axis=-1) > 0.1
        # Dead branch: both paths above assign bg_image, so this grey
        # fallback can never trigger.
        if bg_image is None:
            bg_image = np.zeros(image.shape, dtype=np.uint8)
            bg_image[:] = (192, 192, 192)
        output_image = np.where(condition, image, bg_image)
        return output_image
class MainWindow():
    """Tk window that streams webcam frames (with background replacement)
    to a virtual camera.

    NOTE(review): __init__ enters an endless frame loop and never
    returns, so the Tk event loop of the caller is never reached.
    """
    def __init__(self, window, cap):
        self.window = window
        self.cap = cap
        self.width = self.cap.get(cv2.CAP_PROP_FRAME_WIDTH)
        self.height = self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
        self.interval = 20 # Interval in ms to get the latest frame
        # Create canvas for image
        self.canvas = tk.Canvas(self.window, width=self.width, height=self.height)
        self.canvas.grid(row=0, column=0)
        # Button to pick a replacement background (sets the global path).
        self.btn = Button(self.window, text="Select an image", command=select_image)
        self.btn.grid(row=1, column=0)
        # Stream frames to the virtual camera forever.
        with pyvirtualcam.Camera(width=640, height=480, fps=20) as cam:
            while True:
                self.update_image(cam)
    def update_image(self,cam):
        """Grab one frame, run background replacement, push it to the
        virtual camera."""
        # Get the latest frame and convert image format
        # self.image = Image.fromarray(self.image) # to PIL format
        # self.image = ImageTk.PhotoImage(self.image) # to ImageTk format
        # Update image
        # self.canvas.create_image(0, 0, anchor=tk.NW, image=self.image)
        # Repeat every 'interval' ms
        self.image = cv2.cvtColor(self.cap.read()[1], cv2.COLOR_BGR2RGB) # to RGB
        self.image = meet(self.image)
        # frame = cv2.cvtColor(, cv2.COLOR_BGR2RGB)
        # NOTE(review): waitKey(0) normally blocks for a keypress when a
        # HighGUI window exists -- confirm this is intended per frame.
        cv2.waitKey(0)
        cam.send(cv2.resize(self.image, (640, 480)))
        cam.sleep_until_next_frame()
# Entry point. NOTE(review): MainWindow.__init__ contains a `while True`
# loop with no break, so root.mainloop() below is unreachable unless
# that loop raises.
if __name__ == "__main__":
    root = tk.Tk()
    MainWindow(root, cv2.VideoCapture(0))
    root.mainloop()
| true |
715c34148eacb96d6c92ce45d758c2982e01ef5b | Python | archerckk/PyTest | /Ar_Script/ar_003_输入天数打印小时数.py | UTF-8 | 184 | 4.09375 | 4 | [
"MIT"
] | permissive | #输入你要转换的天数,打印出对应的小时数
days=input('请输入你要输入转换的天数:')
hours=24*int(days)
print(days+'天里面包含'+str(hours)+'小时') | true |
8277aa445b305feddbedc5455879459db981f475 | Python | ph4nt0mgui1d/ML_Projects | /Bibliography.py | UTF-8 | 672 | 3.3125 | 3 | [] | no_license | class book:
def __init__(self, last, first, title, place, publisher, year):
self.authorlast = last
self.authorfirst = first
self.title = title
self.place = place
self.publisher = publisher
self.year = year
def write_bib_entry(self):
return self.authorlast + "," + self.authorfirst + "," + self.title + "," + self.place + "," + self.publisher + "," + str(self.year)
# Sample entries; printing each shows the comma-joined citation line.
beauty = book( "Dubay", "Thomas", "The Evidential Power of Beauty", "San Francisco", "Ignatius Press", 1999 )
pynut = book( "Martelli", "Alex", "Python in a Nutshell", "Sebastopol, CA", "O'Reilly Media, Inc.", 2003 )
print(beauty.write_bib_entry())
print(pynut.write_bib_entry())
# DONE
a658ca179c0558cbaa5b04cf03fc2cab7fcb7451 | Python | jfairfie/InterpreterProject | /Interpreter/SymbolTable.py | UTF-8 | 7,845 | 3.3125 | 3 | [] | no_license | '''
@author: jfairfie
'''
import sys
class SymbolTable:
    """Symbol table implemented as a simple singly linked list of Symbol nodes."""

    def __init__(self):
        self.front = None

    def insertSymbol(self, name, category, type):
        """Append a new Symbol(name, category, type) at the tail of the list."""
        if (self.front == None):
            self.front = Symbol(name, category, type)
        else:
            head = self.front
            while (head.next):
                head = head.next
            head.next = Symbol(name, category, type)

    def emptyAll(self):
        """Drop every symbol."""
        self.front = None

    def lookUpName(self, name):
        """Return the Symbol node with this name, or None if absent."""
        head = self.front
        while (head != None):
            if (head.name == name):
                return head
            head = head.next
        return None

    def removeSymbol(self, name):
        """Unlink the first symbol with this name, wherever it sits.

        Fixed: the original only removed the symbol when it was the sole
        element of the list; entries in longer tables were never removed.
        """
        if self.front is None:
            return
        if self.front.returnName() == name:
            self.front = self.front.next
            return
        prev = self.front
        while prev.next is not None:
            if prev.next.returnName() == name:
                prev.next = prev.next.next
                return
            prev = prev.next

    def setAttribute(self, name, Type, Category):
        """Set the type and/or category on the named symbol (which must exist)."""
        if (Type != None):
            node = self.lookUpName(name)
            node.setType(Type)
        if (Category != None):
            node = self.lookUpName(name)
            node.setCategory(Category)

    def outputFile(self):
        """Write 'output.txt' listing every symbol (with value when set)."""
        head = self.front
        output = open('output.txt', 'w+')
        output.write('---Symbol Table---')
        while (head != None):
            if (head.returnValue() == None):
                output.write('\n' + head.returnName() + ' ' + head.returnType() + ' ' + head.returnCategory())
            elif (head.returnValue()):
                output.write('\n' + head.returnName() + ' ' + head.returnType() + ' ' + head.returnCategory() + ' ' + str(head.returnValue()))
            head = head.next
        output.close()

    def printList(self):
        """Print every symbol front-to-back."""
        head = self.front
        while (head != None):
            head.printData()
            head = head.next
class Symbol:
    """One entry (linked-list node) of the symbol table.

    name     -- identifier text
    category -- variable, procedure, etc.
    type     -- type used for type checking
    value    -- optional bound value (None until insertValue is called)
    next     -- link to the following Symbol in the table
    """

    def __init__(self, name, category, type):
        self.type = type
        self.name = name
        self.category = category
        self.value = None
        self.next = None

    def insertValue(self, value):
        self.value = value

    def returnValue(self):
        return self.value

    def returnCategory(self):
        return self.category

    def setCategory(self, category):
        self.category = category

    def returnName(self):
        return self.name

    def returnType(self):
        return self.type

    def setType(self, type):
        self.type = type

    def insertAfter(self, link):
        # Chain the next node of the linked list.
        self.next = link

    def printData(self):
        # Space-separated dump of the node's fields.
        print(self.name, self.category, self.type, self.value)
#Builds symbol table from root node of parse tree
class TableBuilder:
def __init__(self, root):
self.root = root
self.symbolTable = SymbolTable()
def printTable(self):
self.symbolTable.printList()
def addSymbols(self):
#Visiting Program
cursor = self.root
#Vising stmts
cursor = cursor.down
branches = cursor.branches
#Going to statement branches
for branch in branches:
self.addSymbolBranch(branch)
self.symbolTable.outputFile()
def addSymbolBranch(self, branch):
if (branch.value == '<VarAssign>' or branch.value == '<IdentifierAssign>'):
symbol = branch.left.returnType()
if (branch.value == '<IdentifierAssign>' and self.symbolTable.lookUpName(symbol[1]) == None):
sys.exit('Error:: variable not declared')
if (self.symbolTable.lookUpName(symbol[1]) == None):
self.symbolTable.insertSymbol(symbol[1], symbol[0], None)
name = symbol[1]
if (branch.right):
cursor = branch.right
list = []
#Taking potentially nested list and turning into flat list
self.output = []
list = cursor.returnType()
self.removeNested(list)
list = self.output
check = None
for x in range(len(list)):
if (type(list[x]) == int or type(list[x]) == float):
check = type(list[x])
if (check == None):
if (self.symbolTable.lookUpName(list[0]) != None):
if (self.symbolTable.lookUpName(list[0]).returnType() == 'Integer'):
check = type(1)
elif (self.symbolTable.lookUpName(list[0]).returnType() == 'Float'):
check = type(1.0)
else:
sys.exit('Error:: symbol ' + str(list[0]) + ' not initialized')
for symbol in list:
if (type(symbol) == str):
if (self.symbolTable.lookUpName(symbol) == None):
sys.exit('Variable ' + str(symbol) + ' unknown symbol')
elif (self.symbolTable.lookUpName(symbol) != None and self.symbolTable.lookUpName(symbol).returnType() == None):
sys.exit ('Variable ' + str(symbol + ' is not initialized'))
if (self.symbolTable.lookUpName(symbol).returnType() == 'Integer'):
symbol = 1
elif (self.symbolTable.lookUpName(symbol).returnType() == 'Float'):
symbol = 1.0
if (type(symbol) != check):
sys.exit('Error:: cannot have ' + str(check) + ' in a ' + str(type(symbol)) + ' expression')
if (check == int):
self.symbolTable.setAttribute(name, 'Integer', None)
elif (check == float):
self.symbolTable.setAttribute(name, 'Float', None)
elif (branch.value == '<IfStmt>' or branch.value == '<elif>'):
elifNodes = branch.elifBranches
for b in elifNodes:
branches = b.stmts.branches
for x in branches:
self.addSymbolBranch(x)
branches = branch.stmts.branches
for b in branches:
self.addSymbolBranch(b)
def removeNested(self, l):
for item in l:
if (type(item) == list):
self.removeNested(item)
else:
self.output.append(item[1])
'''
Returns what kind of type an expression is,
Integer, Double, etc.
'''
def evaluateExpr(self, root):
cursor = root
if (root.value == '<VarAssign>' or root.value == '<IdentifierAssign>'):
if (root.right):
cursor = cursor.right
if (cursor.left.value == '<constant>'):
token = cursor.left.returnType()
# print(type(token[1]), token[1])
else:
return None
return 'Integer'
def returnTable(self):
return self.symbolTable | true |
0bf8be23a87e0c43443903a96327a9849bbac631 | Python | howlfu/classify-photos-by-date | /photo_cl.py | UTF-8 | 2,060 | 3.328125 | 3 | [] | no_license | '''
Created on 2017年4月5日
@author: HL
'''
import os
from datetime import datetime
import shutil
class photo_classify(object):
'''
1. make object
path = path you want
obj = photo_classify(path)
2.use get_picture_info to get path and date information
3.use move_all_images_to_targate to move photos
4.Check all done.
'''
get_path = ""
photo_info = {}
def __init__(self, path_of_photo):
self.get_path = path_of_photo
def __str__(self, *args, **kwargs):
string = self.get_path + " have already been classified."
return string
def get_picture_info(self):
filenames = os.listdir(self.get_path)
for file in filenames:
path = os.path.join(self.get_path,file)
if not self.is_image(path):
#continue if dir
continue
else:
# save as image path and date
path = os.path.join(self.get_path,file)
timestamp = os.path.getmtime(path)
time = datetime.fromtimestamp(timestamp)
photo_time = time.strftime("%Y-%m-%d")
self.photo_info[path] = photo_time
#print(self.photo_info)
def is_image(self,img_file):
if os.path.isdir(img_file):
return False
return img_file[-4:] in ['.jpg','.dng','.png']
def move_all_images_to_targate(self):
for photo,date in self.photo_info.items():
targetPath = os.path.join(self.get_path,date)
#make new dir
if not os.path.exists(targetPath):
os.mkdir(targetPath)
#move photo to new place by date
try:
shutil.move(photo, targetPath)
except:
print(photo + ' is existed under ' + targetPath)
print('removed')
os.remove(photo)
if __name__ == '__main__':
test = photo_classify('D:\手機照片')
test.get_picture_info()
test.move_all_images_to_targate()
| true |
25f4b3e965df9f6110e82524163c193d58fb6f03 | Python | youzi-YJY/LeetCode-Practice | /lianbiao/fenge_Link/fenge.py | UTF-8 | 1,050 | 3.75 | 4 | [] | no_license | # Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def partition(self, head,x):
# 双链表实现,小于x的组成一链表,大于x的组成另一链表,然后两链表拼接
if not head:
return None
#初始化
fnode = ListNode(-1)
snode = ListNode(-1)
first = fnode
second = snode
while head:
if head.val < x:
first.next = head
first = first.next
else:
second.next = head
second = second.next
#在原始的列表中继续
head = head.next
#如果分配正确后,组合成一个列表并返回。
first.next = snode.next
#second的最后一个节点修改为节点的结束
second.next = None
return fnode.next
if __name__=="__main__":
S=Solution()
head=[1,2,2,3,5,2]
x=3
print(S.partition(head,x)) | true |
e98a4ec8efadb8f728afde70714b1460fe15e29f | Python | capalvarez/CheckConnection | /devices/devices_functions.py | UTF-8 | 1,915 | 2.828125 | 3 | [] | no_license | import re
from exceptions.exceptions import PingFailedException
# HP
def ping_a_caller(source, destination, pings):
return 'ping ' + ' -a ' + source + ' -c ' + pings + ' ' + destination
# Dell
def ping_source_ip_caller(source, destination, pings, vrf):
return 'ping vrf ' + vrf + ' ' + destination + ' count ' + pings + ' source ip ' + source
# Cisco
def ping_source_repeat_caller(source, destination, pings, vrf):
if vrf:
return 'ping vrf ' + vrf + ' ' + destination + ' repeat ' + pings + ' source ' + source
else:
return 'ping ' + destination + ' repeat ' + pings + ' source ' + source
# A10
def ping_reverse_order(source, destination, pings):
return 'ping source ' + source + ' repeat ' + pings + ' ' + destination
def ping_parser_dot(output):
results = re.compile('\n[\.!.]+\n')
match_results = results.search(output)
if match_results:
match_results.group()
start, end = match_results.span()
total_packets = output[start:end].replace('\n', '')
hits = total_packets.count('!')
fails = total_packets.count('.')
return hits, fails
else:
raise PingFailedException
def ping_parser_list(output):
total = re.compile('\d+\s*(packet\(?s\)?)*\s*transmitted')
hits = re.compile('\d+\s*(packet\(?s\)?)*\s*received')
match_total = total.search(output)
match_hits = hits.search(output)
total_packets = 0
hits_packets = 0
if match_total and match_hits:
match_total.group()
start_total, end_total = match_total.span()
total_packets = int(output[start_total:end_total].rstrip().split()[0])
match_hits.group()
start_hits, end_hits = match_hits.span()
hits_packets = int(output[start_hits:end_hits].rstrip().split()[0])
return hits_packets, (total_packets - hits_packets)
else:
raise PingFailedException
| true |
9d448626b0b67fcf7349de4fceb150421e824163 | Python | yukai-chiu/CodingPractice | /LeetCode/Problems/Python/221. Maximal Square.py | UTF-8 | 2,561 | 3.40625 | 3 | [] | no_license | #My first try
#Brute force with memo
class Solution:
def maximalSquare(self, matrix: List[List[str]]) -> int:
#brute force
#traverse the matrix
#if it is 1, find edge by go right and down, compare for the minimum
#if it is larger than 1, find in the area
def checkArea(i,j,edge):
for row in range(edge):
for col in range(edge):
#print(i+row,j+col, matrix[i+row][j+col],edge)
if i+row >= len(matrix) or j+col >= len(matrix[0]) or matrix[i+row][j+col] !="1":
return False
return True
if not matrix:
return 0
max_area = 0
dp = [[0] * len(matrix[0]) for _ in range(len(matrix))]
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j] =="1":
#start to find edge
col = j
row = i
while col < len(matrix[0]) and matrix[i][col] =="1":
col+=1
while row < len(matrix) and matrix[row][j] =="1":
row+=1
edge = min(row-i,col-j)
print(edge)
if edge >= 1:
if edge > dp[i][j]:
#find in the area
for e in range(dp[i][j]+1,edge+1):
if checkArea(i,j,e):
max_area = max(max_area,e**2)
for row in range(e):
for col in range(e):
dp[i+row][j+col] = e
return max_area
#Dynamic Programming
#Time: O(n*m)
#Space: O(n*m)
class Solution:
def maximalSquare(self, matrix: List[List[str]]) -> int:
if not matrix:
return 0
max_size = 0
dp = [[0] * len(matrix[0]) for _ in range(len(matrix))]
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j] =="1":
if i==0 or j==0:
dp[i][j] = 1
else:
dp[i][j] = min(dp[i-1][j], dp[i][j-1], dp[i-1][j-1])+1
max_size = max(max_size, dp[i][j])
return max_size**2
| true |
70de76de4ff80d37032279063a769744b0dc8607 | Python | rosspf/pytracks | /test/ex_track_biomass_lifetime.py | UTF-8 | 2,286 | 2.84375 | 3 | [] | no_license | import pytracks.input
import pytracks.track
import matplotlib.pyplot as plot
from matplotlib.path import Path
from matplotlib.collections import LineCollection
import numpy
# Add in start and end points
# add in biomass at start and end
def colorline(x, y, data, normalize=plot.Normalize(0.0, 1.0)):
z = numpy.asarray(data)
segments = make_segments(x, y)
lc = LineCollection(segments, array=z, cmap=plot.get_cmap('copper'), norm=normalize)
ax = plot.gca()
ax.add_collection(lc)
return lc
def make_segments(x, y):
points = numpy.array([x, y]).T.reshape(-1, 1, 2)
segments = numpy.concatenate([points[:-1], points[1:]], axis=1)
return segments
grid_wrapper = pytracks.input.GridWrapper("event_25/grid.out", extra_ids=[3, 4])
tracks_wrapper = pytracks.input.TrackWrapper("event_25/Event_5.out", id_column=2, x_column=5, y_column=7, extra_ids=[10, 11])
grid = grid_wrapper.gen_grid()
trackset = tracks_wrapper.gen_trackset()
plot_data = numpy.zeros(grid.size)
for cell in grid.cells:
plot_data[cell.y - 1][cell.x - 1] = (cell[0] - cell[1])
figure, axis = plot.subplots(figsize=(6, 7))
newset = trackset.get_tracks_random(1)
max_biomass = numpy.amax(newset.biomasses())
track = newset[0]
area = numpy.pi * (5)**2 # dot radius of 5
plot.scatter(track.x[0]/25, track.y[0]/25, c="green", s=area, zorder=3)
plot.scatter(track.x[-1]/25, track.y[-1]/25, c="red", s=area, zorder=3)
path = Path(numpy.column_stack([track.x/25, track.y/25]))
verts = path.interpolated(steps=3).vertices
x, y = verts[:, 0], verts[:, 1]
data = numpy.true_divide(track.biomasses, max_biomass)
axis.add_collection(colorline(x, y, data))
axis.set_title("Lifetime - Biomass")
axis.set_xlim([0, 100])
axis.set_ylim([0, 100])
figure.subplots_adjust(bottom=0.235)
colorbar_axis = figure.add_axes([0.15, .12, .73, .05])
grid_image = axis.imshow(plot_data, interpolation='none', origin="lower", cmap=plot.get_cmap("Blues_r"), vmin=-1, vmax=1, extent=[0, 100, 0, 100], aspect="equal")
colorbar = plot.colorbar(grid_image, cax=colorbar_axis, orientation='horizontal')
colorbar.set_ticks([-1, 0, 1])
colorbar.set_ticklabels([-1, 0, 1])
colorbar.set_label("Habitat Quality")
plot.savefig("export/tracks_lifetime.pdf", bbox_inches='tight', transparent=True)
plot.show()
| true |
39aff0851e407c54bdac75c36d9c498af8acbc98 | Python | JXQI/FCN_ResNet_Classifiar | /Loader/divide.py | UTF-8 | 4,028 | 3.21875 | 3 | [] | no_license | '''
Funtion: 主要用于分析原数据集,读取json文件,生成训练集和验证集、
Data: 2020.11.18
'''
import os
from os.path import join
import json
from pandas import DataFrame
import pandas as pd
import random
'''
Funtion: 读取json文件,获取文件名和标签
Args:
path:数据集所在的路径
despath:保存生成的image_list所在的路径
Return:
None
'''
def Source(path,despath):
# os.walk 返回 (dirpath, dirnames, filenames)
d={"image_name":[],"target":[]}
for file_path in os.walk(path):
print("数据集大小为:%d"%len(file_path[2]))
for file in file_path[2]:
try:
with open(join(path,file)) as f:
json_file=json.load(f)
target = json_file["meta"]["clinical"]["benign_malignant"]
d["image_name"].append(file)
d['target'].append(target)
except:
pass
DataFrame.from_dict(d).to_csv(join(despath,"image_list.csv"),index=False)
'''
Function: 获取数据集分布情况
Args: image_list
Return: None
'''
def Source_Length(file):
d=pd.read_csv(file)
benign,malignant=0,0 #统计各自的数目
for i in d["target"]:
if i=='benign':
benign+=1
else:
malignant+=1
print("数据大小为:%d, benign大小为:%d, malignant大小为:%d"%(len(d["target"]),benign,malignant))
'''
Function: 按照比例划分列表
Args:
full_list: 需要划分的列表
shuffle: 是否打乱数据集
ratio: 划分比例
'''
def split(full_list,shuffle=False,ratio=0.2):
total=len(full_list)
offset=int(total*ratio)
if total==0 or offset<1:
return [],full_list
if shuffle:
random.shuffle(full_list)
sublist_1=full_list[:offset]
sublist_2=full_list[offset:]
return sublist_1,sublist_2
'''
Function: 划分训练集和测试集:
Args:
image_list 数据集列表
des_path 目标文件夹
balance:是否平衡划分,True:正例:负例=1:1
Return:
目标文件夹下生成train.txt和val.txt
'''
def dataset(image_list,des_path='./',shuffle=True,ratio=0.8,balance=False):
d=pd.read_csv(image_list)
benign,malignant=[],[] #保存各自的文件名,先分成两类,再从中进行划分
for i in range(len(d["image_name"])):
if d["target"][i]=='benign':
benign.append(d["image_name"][i])
else:
malignant.append(d["image_name"][i])
print("begin数目:%d, malignant数目:%d"%(len(benign),len(malignant)))
if balance:
benign=benign[:len(malignant)] #TODO:这里简单的取前几个元素,可以更改
print("平衡数据集划分,正例:负例=%d:%d"%(len(benign),len(malignant)))
train1, vol1 = split(benign, shuffle=shuffle, ratio=ratio)
train2, vol2 = split(malignant, shuffle=shuffle, ratio=ratio)
train,val={"image_name":[],"target":[]},{"image_name":[],"target":[]} #保存划分的结果,并且保存对于的标签
for i in train1:
train["image_name"].append(i)
train["target"].append('benign')
for i in train2:
train["image_name"].append(i)
train["target"].append('malignant')
for i in vol1:
val["image_name"].append(i)
val["target"].append('benign')
for i in vol2:
val["image_name"].append(i)
val["target"].append('malignant')
DataFrame.from_dict(train).to_csv(join(des_path,"train.csv"),index=False)
DataFrame.from_dict(val).to_csv(join(des_path, "val.csv"),index=False)
print("训练集数目%d,验证集数目%d"%(len(train["target"]),len(val['target'])))
if __name__=='__main__':
##统计数据集,并且生成image_list.csv
# path='../Data/Descriptions'
# Source(path,despath='.')
# #查看数据集的大小
# file='./image_list.csv'
# Source_Length(file)
#划分测试集和验证集
file = './image_list.csv'
dataset(file,balance=True)
| true |
c223ba8c8852536409632ba79ea25ef6ed0952bc | Python | svrijenhoek/dart | /dart/handler/NLP/cosine_similarity.py | UTF-8 | 5,650 | 2.796875 | 3 | [] | no_license | from dart.handler.elastic.connector import ElasticsearchConnector
import math
import itertools
import numpy as np
import collections, functools, operator
from stop_words import get_stop_words
from statistics import StatisticsError
# basically copied from https://www.datasciencecentral.com/profiles/blogs/
# document-similarity-analysis-using-elasticsearch-and-python
class CosineSimilarity:
def __init__(self, language):
self.connector = ElasticsearchConnector()
self.stop_words = get_stop_words(language)
self.term_vectors = {}
def create_dictionary(self, doc):
output = {}
try:
# count the total number of terms in document
sum_terms = sum([v['term_freq'] for k, v in doc.get('term_vectors').get('text').get('terms').items()])
# gets the total number of documents with the text field
# this number seems to be wrong though? as if it performs on a subset of docs. However, for now we assume that
# proportions are similar to reality.
total_docs = doc.get('term_vectors').get('text').get('field_statistics')['doc_count']
for k, v in doc.get('term_vectors').get('text').get('terms').items():
if k not in self.stop_words:
term_freq = v['term_freq']/sum_terms
doc_freq = v['doc_freq']
inverse_document_freq = 1.0 + math.log(total_docs / doc_freq)
output[k] = term_freq * inverse_document_freq
except AttributeError:
pass
return output
def most_relevant_terms(self, doclist):
tv1 = [self.connector.get_term_vector('articles', doc) for doc in doclist]
dict1 = [self.create_dictionary(tv) for tv in tv1]
merged1 = dict(functools.reduce(operator.add,
map(collections.Counter, dict1)))
sorted_x = sorted(merged1.items(), key=lambda kv: kv[1], reverse=True)
output = [x[0] for x in sorted_x[:5]]
return output
@staticmethod
def cosine(vec1, vec2):
intersection = set(vec1.keys()) & set(vec2.keys())
numerator = sum([vec1[x] * vec2[x] for x in intersection])
sum1 = sum([vec1[x]**2 for x in vec1.keys()])
sum2 = sum([vec2[x]**2 for x in vec2.keys()])
denominator = math.sqrt(sum1) * math.sqrt(sum2)
if not denominator:
return 0.0
else:
return float(numerator) / denominator
def prepare_vector(self, doc):
if doc in self.term_vectors:
return self.term_vectors[doc]
else:
tv = self.connector.get_term_vector('articles', doc)
if 'term_vectors' in tv and 'text' in tv['term_vectors']:
vector = self.create_dictionary(tv)
self.term_vectors[doc] = vector
return vector
def calculate_cosine_similarity(self, doc1, doc2):
v1 = self.prepare_vector(doc1)
v2 = self.prepare_vector(doc2)
if v1 and v2:
return self.cosine(v1, v2)
else:
return 0
def prepare_vectors(self, doclist):
output = []
for doc in doclist:
if doc in self.term_vectors:
output.append(self.term_vectors[doc])
else:
tv = self.connector.get_term_vector('articles', doc)
if 'term_vectors' in tv and 'text' in tv['term_vectors']:
vector = self.create_dictionary(tv)
output.append(vector)
self.term_vectors[doc] = vector
return output
# def calculate_cosine_similarity(self, list1, list2):
# try:
# vectors1 = self.prepare_vectors(list1)
# vectors2 = self.prepare_vectors(list2)
#
# if vectors1 and vectors2:
# output = []
# for _, x in enumerate(vectors1):
# for _, y in enumerate(vectors2):
# cosine = self.cosine(x, y)
# output.append(cosine)
# return median(output)
# else:
# return 0
# except (AttributeError, TypeError):
# print("Error!")
# print(list1)
# print(list2)
# except StatisticsError:
# return 0
def calculate_all(self, doc_list):
try:
vectors = self.prepare_vectors(doc_list)
output = []
for x, y in itertools.combinations(vectors, 2):
cosine = self.cosine(x, y)
output.append(cosine)
return np.mean(output), np.std(output)
except StatisticsError:
return 0
# def calculate_all_distances(self, doc_list):
# dict_list = self.prepare_vectors(doc_list)
# output = []
# for ix, x in enumerate(dict_list):
# for iy, y in enumerate(dict_list):
# if ix > iy:
# cosine = self.cosine(x, y)
# output.append({'x': doc_list[ix], 'y': doc_list[iy], 'cosine': cosine})
# return output
# def calculate_cosine_experiment(self, list1, list2):
# vector1 = self.prepare_vectors(list1)
# merged1 = dict(functools.reduce(operator.add,
# map(collections.Counter, vector1)))
# vector2 = self.prepare_vectors(list2)
# merged2 = dict(functools.reduce(operator.add,
# map(collections.Counter, vector2)))
# return self.cosine(merged1, merged2)
| true |
b51a9032b9eb1715f4619bccaa025ebef01143a6 | Python | jocassid/JohnsUsefulPythonCode | /tests/test_jsonGet.py | UTF-8 | 2,383 | 3.53125 | 4 | [] | no_license |
# This file is part of https://github.com/jocassid/JohnsUsefulPythonCode
# This file is in the public domain, be excellent to one another, party on dudes.
from jsonGet import jsonGet
def testJsonGet():
assert jsonGet(None, 'default', None) == 'default'
assert jsonGet({'a':'alpha'}, 'default', None) == 'default'
# Miss on 1st level of 1 level dictionary
assert jsonGet({'a':'alpha'}, None, 'b') is None
# Hit on 1st level of 1 level dictionary
assert jsonGet({'a':'alpha'}, None, 'a') == 'alpha'
I_B = {
1:'I-B-1',
2:'I-B-2'
}
I = {
'A':{
1:'I-A-1',
2:'I-A-2'
},
'B':I_B
}
jsonData = {
'I':I,
'II':{
'A':{
1:'II-A-1',
2:'II-A-2'
},
'B':{
1:'II-B-1',
2:'II-B-2'
}
}
}
# Miss on 1st level of multi-level dictionary
assert jsonGet(jsonData, 42, 'III') == 42
# Hit on 1st level of multi-level dictionary
assert jsonGet(jsonData, 42, 'I') == I
# Miss on 2nd level of multi-level dictionary
assert jsonGet(jsonData, 42, 'I', 'C') == 42
# Hit on 2nd level of multi-level dictionary
assert jsonGet(jsonData, 42, 'I', 'B') == I_B
# Miss on 3rd level of multi-level dictionary
assert jsonGet(jsonData, 42, 'I', 'A', 3) == 42
# Hit on 3rd level of multi-level dictionary
assert jsonGet(jsonData, 42, 'I', 'A', 1) == 'I-A-1'
# data is not dictionary
assert jsonGet('something', 42, 'I') == 42
# 2nd level is not dictionary
assert jsonGet({'I':'foo'}, 42, 'I', 'A') == 42
# 3rd level is not dictionary
assert jsonGet({'I':{'A':'foo'}}, 42, 'I', 'A', 1) == 42
def testJsonGetWithLists():
# Top level is list
assert jsonGet([], 42, 0) == 42
# index outside of list bounds
assert jsonGet([2, 2], 42, 2) == 42
# 2nd level is list
assert jsonGet({'foo':[2,4,9]}, 42, 'foo', 1) == 4
# 3 levels of lists
data = [
[
[1, 1, 2, 3, 5],
[1, 1, 4, 9, 25]
],
[
[2, 3, 5, 7],
[4, 6, 10, 14],
[6, 9, 15, 21]
],
]
assert jsonGet(data, 42, 1, 2, 3) == 21
| true |
d66df1c71da5ee34b80f07b3710abebca2d50935 | Python | FASLADODO/TF_Transformer | /MultiHeadAttention.py | UTF-8 | 2,870 | 2.703125 | 3 | [
"Apache-2.0"
] | permissive | """
Copyright 2020 Yi Lin(Kyle) Gao
#@title Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License."""
import tensorflow as tf
class MultiHeadAttention(tf.keras.layers.Layer):
"""Implemented with tf.einsum(), is faster than using tf.transpose() with tf.matmul()"""
def __init__(self, d_model, num_heads):
super().__init__()
self.d_model = d_model
self.num_heads = num_heads
assert d_model % self.num_heads == 0
self.depth = d_model // self.num_heads
self.wq = tf.keras.layers.Dense(d_model)
self.wk = tf.keras.layers.Dense(d_model)
self.wv = tf.keras.layers.Dense(d_model)
self.dense = tf.keras.layers.Dense(d_model)
def split_heads(self, x, batch_size):
"""Split the last dimension into (num_heads,depth)
Arguments:
x -- A tokenized sequence (batch_size, seq_len, d_model)
Returns:
A tokenized sequence with dimensions (batch_size, seq_len, num_heads, depth)
"""
x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))
return x
def call(self, q, k, v, mask=None):
batch_size = tf.shape(q)[0]
q = self.wq(q) # (batch_size,len_q, dim_q)
k = self.wk(k) # (batch_size,len_v, dim_q)
v = self.wv(v) # (batch_size,len_v, dim_v)
q = self.split_heads(q, batch_size) # (batch_size, len_q, num_heads, depth_q) (m,l,h,d)
k = self.split_heads(k, batch_size) # (batch_size, len_v, num_heads, depth_q) (m,j,h,d)
v = self.split_heads(v, batch_size) # (batch_size, len_v, num_heads, depth_v) (m,j,h,e)
qk = tf.einsum("mlhd,mjhd->mljh", q, k) # (batch_size, len_q, len_v, num_heads) (m,l,j,h)
dk = tf.cast(tf.shape(k)[-1], tf.float32)
qk = qk / tf.math.sqrt(dk)
if mask is not None:
qk = qk - mask*1e9 # We are using a additive mask
qk = tf.nn.softmax(qk, axis=-2) # (batch_size,len_q,len_v, num_heads) (m,l,j,h)
dk = tf.cast(tf.shape(k)[-1], tf.float32)
qk = qk / tf.math.sqrt(dk)
output = tf.einsum("mljh, mjhe -> mlhe", qk, v) # (batch_size,len_q, heads, depth_v)
output = tf.reshape(output, (batch_size, -1, self.num_heads * self.depth)) # (batch_size,len_q, d_model)
return self.dense(output) | true |
0ed78d40e64d7079f12e7795eb7bad8c0ed174b5 | Python | codingJWilliams/Assignment_55_Ciphers | /ciphers/ceasar.py | UTF-8 | 993 | 3.765625 | 4 | [
"MIT"
] | permissive | import doctest
class Cipher:
name = "Ceasar Cipher"
desc = "Shifts a character by it's ascii value"
def encodeFriendly():
plaintext = input(" Text to encode > ")
inp = input(" Shift [13] > ")
shift = int(inp) if len(inp) else 13
return Cipher.encode(plaintext, shift)
def decodeFriendly():
plaintext = input(" Ciphertext to decode > ")
inp = input(" Shift [13] > ")
shift = inp if len(inp) else 13
return Cipher.decode(plaintext, shift)
def encode(string, shift=13):
OUT = ""
for c in string:
asciiPoint = ord(c) + shift
if (asciiPoint > 126): asciiPoint = (asciiPoint % 126) + 32
OUT += chr(asciiPoint)
return OUT
def decode(string, shift=13):
OUT = ""
for c in string:
asciiPoint = ord(c) - shift
if (asciiPoint < 32): asciiPoint = asciiPoint + 94
OUT += chr(asciiPoint)
return OUT
| true |
b07fed8d0f5b327319058cb823fde544d0fca2df | Python | JoneNash/improve_code | /JianZhi/27.py | UTF-8 | 539 | 3.125 | 3 | [] | no_license | #!/usr/bin/env python
# encoding: utf-8
"""
@author: leidelong
@contact: leidl8907@gmail.com
@time: 2019/2/12 11:43
"""
class Solution:
def Permutation(self, ss):
# write code here
res = []
if(len(ss)<2):
return ss
for i in range(len(ss)):
for n in map(lambda x: x+ss[i],self.Permutation(ss[:i]+ss[i+1:])):
if n not in res:
res.append(n)
return sorted(res)
if __name__ == '__main__':
array='abcad'
print Solution().Permutation(array) | true |
29669ae779b1ecd522ffd6d370c22cfb95053f56 | Python | rajammanabrolu/rajammanabrolu.github.io | /fold.py | UTF-8 | 1,652 | 2.984375 | 3 | [] | no_license | import re
import sys
import codecs
###
# argv[1]: input bib file
# argv[2]: input yaml file
# argv[3]: output yaml file
###
### Store a hash table of index: bib entry
bibs_hash = {}
del_keys = ['code', 'website', 'blog', 'media', 'talk']
### Get all the bibs into the bib_hash
bib = ''
for line in open(sys.argv[1], 'r'):
# clean up bibtex
for k in del_keys:
if k in line:
line = ''
if '@' in line:
# Ending a previous bib and starting a new one
if len(bib) > 0:
# get the index
match = re.match(r'\@[a-zA-Z]+\{([\w\:\-\_]+),', bib, re.DOTALL)
if match is not None and len(match.groups()) > 0:
# If we have an index, then we are done with an old bib
index = match.groups()[0].replace(':','').replace('-','')
bibs_hash[index] = bib.strip()
# Start a new bib
bib = line
else:
# continuing a bib
bib = bib + line
yaml = ''
for line in open(sys.argv[2], 'r'):
# Add the line to the yaml
yaml = yaml + line
# Look for a bibtexkey
match = re.match(r'([\s]*)\- id\:[\s]+([\w\:\-\_]+)', line)
if match is not None and len(match.groups()) > 1:
# We just found a BIBTEXKEY with a valid index
spaces = match.groups()[0]
index = match.groups()[1].replace(':','').replace('-','')
# Get the original bib entry
bib = bibs_hash[index].strip()
# Insert the original bib into the yaml
yaml = yaml + ' '*(len(spaces)+2) + 'bibtex: |\n'
for b in bib.split('\n'):
yaml = yaml + ' '*(len(spaces)+6) + b.strip() + '\n'
with open(sys.argv[3], 'w') as f:
f.write(yaml)
| true |
5b472b3a51fca8abebf09b493a004ce03020c477 | Python | Parvfect/Scientific-Programming | /pythImplementations/Oscillators.py | UTF-8 | 14,414 | 3.046875 | 3 | [] | no_license | import numpy as np
import matplotlib.pyplot as plt
import math
from matplotlib import animation
import os
from datetime import datetime
# Working directory, used as the root for saving figures; each solve() below
# creates a timestamped sub-directory under <path>/Figures/ for its plots.
path = os.getcwd()
"""Things to do ----
1) Create tests file
2) Double check runge kutta
3) Adaptive step
"""
def rounding(decimal_places, arr):
    """Round every element of *arr* in place to ``decimal_places`` digits.

    Used to clamp floating-point values whose growing precision otherwise
    causes them to blow up to infinity.

    Args:
        decimal_places: number of decimal digits to keep.
        arr: mutable sequence of numbers; modified in place.

    Returns:
        The same sequence, with each element rounded.
    """
    for i in range(len(arr)):
        # Round the element, not the loop index: the original assigned
        # round(i, decimal_places), replacing every value with its index.
        arr[i] = round(arr[i], decimal_places)
    return arr
class SimpleHarmonicOscillator:
    """Damped / driven harmonic oscillators integrated with explicit Euler steps.

    Each ``solve_*`` method advances the state in place, records the position
    history on the instance, and shows a matplotlib plot of it.
    """

    def __init__(self, mass, frequency, position, velocity):
        self.mass = mass            # oscillator mass
        self.frequency = frequency  # natural frequency, used directly in the ODEs
        self.position = position    # current displacement
        self.velocity = velocity    # current velocity
        # Histories are per-instance. They were previously class attributes,
        # so every instance (and every successive solve) shared and extended
        # the same lists.
        self.positions = []
        self.velocities = []
        self.times = []

    def solve_damped(self, dt, n, damp_coeff):
        """Integrate x'' = -w^2 x - 2 c w x' for n Euler steps and plot x."""
        for i in range(n):
            acceleration = -(self.frequency*self.frequency*self.position) - (2*damp_coeff*self.frequency*self.velocity)
            # Semi-implicit Euler: update velocity first, then position with
            # the new velocity.
            self.velocity += acceleration*dt
            self.position += self.velocity*dt
            self.positions.append(self.position)
        plt.plot(self.positions)
        plt.show()

    def solve_damped_driven(self, dt, n, damp_coeff, driving_force, driving_frequency):
        """Damped oscillator with a sinusoidal drive F*sin(w_d t)/m added.

        NOTE(review): np.radians() treats driving_frequency*t as degrees —
        confirm the driving frequency is really meant in degrees per unit time.
        """
        t = 0
        for i in range(n):
            acceleration = -(self.frequency*self.frequency*self.position) - (2*damp_coeff*self.frequency*self.velocity) + driving_force*np.sin(np.radians(driving_frequency*t))/self.mass
            self.velocity += acceleration*dt
            self.position += self.velocity*dt
            t += dt
            self.positions.append(self.position)
        plt.plot(self.positions)
        plt.show()

    def duffing_oscillator(self, gamma, driving_frequency, delta, alpha, beta, dt, n):
        """Integrate the Duffing equation
        x'' + delta x' + alpha x + beta x^3 = gamma cos(w_d t)
        with Euler steps and plot x against t.
        """
        t = 0
        for i in range(n):
            # Fixed: the Duffing restoring term is cubic (beta * x**3); the
            # original used beta * x**2, which is not the Duffing form even
            # though the parameters (gamma, delta, alpha, beta) match it.
            acceleration = gamma*np.cos(np.radians(driving_frequency*t)) - delta*self.velocity - alpha*self.position - beta*self.position**3
            self.velocity += acceleration*dt
            self.position += self.velocity*dt
            t += dt
            self.positions.append(self.position)
            self.times.append(t)
        plt.plot(self.times, self.positions)
        plt.show()
class SimplePendellum:
    """A simple pendulum driven by theta'' = -(g/L) sin(theta).

    NOTE(review): ``solve`` calls ``ode.euler_adaptive_step`` but no ``ode``
    module is imported anywhere in this file — confirm where that helper
    lives; as written, calling ``solve`` raises NameError.
    """

    g = 9.8  # gravitational acceleration (m/s^2)

    def __init__(self, length, theta, velocity, mass):
        """Initialise the pendulum state.

        theta appears to be in degrees (``f_z`` converts with np.radians) —
        TODO confirm against callers.
        """
        self.length = length
        self.theta = theta
        self.velocity = velocity
        self.acceleration = 0
        self.mass = mass
        # Small-angle natural frequency f = (1/2pi) * sqrt(g/L); 3.14 ~ pi.
        self.frequency = (1/(2*3.14))*(np.sqrt(self.g/self.length))
        # Per-instance histories. They were previously class attributes, so
        # every instance shared and extended the same lists.
        self.positions = []
        self.velocities = []

    def f_z(self):
        """Angular acceleration -g*sin(theta)/L, treating theta as degrees."""
        return (-self.g*np.sin(np.radians(self.theta))/self.length)

    def solve(self, dt, n):
        """Step the adaptive Euler integrator n times and plot theta history.

        NOTE(review): depends on the undefined name ``ode`` (see class
        docstring). The duplicated trailing plt.show() of the original was
        removed; the dead commented-out euler_step_damped string was dropped.
        """
        for i in range(n):
            self.velocity, self.theta = ode.euler_adaptive_step(self.f_z, self.velocity, self.theta, dt, 0.01)
            self.positions.append(self.theta)
            self.velocities.append(self.velocity)
        plt.plot(self.positions)
        plt.show()
class ElasticPendellum:
    """Elastic (spring) pendulum integrated with Euler or RK4 slope estimates.

    State: x1 = swing angle (radians), x2 = spring extension beyond the
    natural length l0, y1 / y2 = their time derivatives.

    NOTE(review): ``lengths``/``positions``/``v_lenghts`` are class
    attributes, so all instances and repeated solves share and extend the
    same lists; ``v_lenghts`` (misspelled) is never used.
    """
    g = 9.8  # gravitational acceleration (m/s^2)
    lengths = []
    positions = []
    v_lenghts = []
    def __init__(self, x1, y1, l0, m, x2, y2, spring_constant):
        """Store initial angle/extension, their velocities, and the spring parameters."""
        self.x1 = x1
        self.y1 = y1
        self.l0 = l0
        self.m = m
        self.k = spring_constant
        self.x2 = x2
        self.y2 = y2
    def f_z1(self, y):
        # Angular acceleration: (-g sin(x1) - 2*y2*y) / (l0 + x2), where y
        # is the angular-velocity sample supplied by the integrator.
        return (-self.g * np.sin(self.x1) - 2 * self.y2 * y ) / (self.l0 + self.x2)
    def f_z2(self, y):
        # Radial acceleration: centrifugal + spring restoring + gravity.
        # NOTE(review): the y argument is ignored, so the RK4 sub-steps for
        # z2 all evaluate to the same slope — confirm this is intended.
        return (self.l0 + self.x2) * self.y1**2 - (self.k * (self.x2) / self.m) + self.g * np.cos(self.x1)
    def runge_kutta(self, func, h, y):
        """Classical fourth-order Runge-Kutta slope estimate (not fifth order).

        Argument order here is (func, h, y); the caller multiplies the
        returned average slope by its own time step dt.
        """
        # y is the state sample fed to func; h is the internal sub-step size.
        k1 = func(y)
        k2 = func(y + h*k1/2)
        k3 = func(y + h*k2/2)
        k4 = func(y + h*k3)
        return (k1 + 2 * k2 + 2 * k3 + k4)/6
    def solve(self, dt, n, type):
        """Integrate for n steps of size dt and save four diagnostic figures.

        type == 1 selects plain Euler derivatives; any other value uses the
        RK4 slope estimate with a fixed internal sub-step of 0.01. Output
        (four figures plus an initial-conditions text file) is written to a
        timestamped directory under Figures/ElasticPendulum/.
        """
        now = datetime.now()
        # One directory per run, named by the current timestamp.
        os.mkdir(path + "/Figures/ElasticPendulum/{}".format(now))
        path_temp = path + "/Figures/ElasticPendulum/{}".format(now)
        #Creating a text file with the initial conditions
        file_name = path_temp + "/initial_conditions.txt"
        f = open(file_name, "w")
        f.write("x1 {} x2 {} y1 {} y2 {} l0{} m{} k{}".format(self.x1, self.x2, self.y1, self.y2, self.l0, self.m, self.k))
        f.close()
        #Initializing the time variables
        times = []
        t = 0
        y2_arr = []
        y1_arr = []
        if type == 1:
            # Plain Euler: velocities updated from instantaneous slopes,
            # then positions from the new velocities.
            for i in range(n):
                z1 = self.f_z1(self.y1)
                z2 = self.f_z2(self.y2)
                self.y1 += z1 * dt
                self.y2 += z2 * dt
                self.x1 += self.y1 * dt
                self.x2 += self.y2 * dt
                self.positions.append(180 * self.x1 / 3.14)
                self.lengths.append(self.x2)
                y1_arr.append(self.y1)
                y2_arr.append(self.y2)
                t += dt
                times.append(t)
        else:
            # Euler step using the RK4-averaged slope (internal h = 0.01).
            for i in range(n):
                z1 = self.runge_kutta(self.f_z1, 0.01, self.y1)
                z2 = self.runge_kutta(self.f_z2, 0.01, self.y2)
                self.y1 += z1 *dt
                self.y2 += z2 * dt
                self.x1 += self.y1 * dt
                self.x2 += self.y2 * dt
                self.positions.append(180 * self.x1 / 3.14)
                self.lengths.append(self.x2)
                y1_arr.append(self.y1)
                y2_arr.append(self.y2)
                t+=dt
                times.append(t)
        # Figure 1: angle (converted to degrees via 180/3.14) versus time.
        fig = plt.figure()
        plt.plot(times, self.positions)
        plt.xlabel("Time (s)")
        plt.ylabel("Theta (degrees)")
        plt.show()
        fig.savefig(path_temp + "/t_x1")
        # Figure 2: spring extension versus time.
        fig = plt.figure()
        plt.plot(times, self.lengths)
        plt.xlabel("Time (s)")
        plt.ylabel("Length of pendellum (x) ")
        plt.show()
        fig.savefig(path_temp + "/t_x2")
        # Figure 3: phase-style plot, angle versus angular velocity.
        fig = plt.figure()
        plt.plot(self.positions, y1_arr)
        plt.xlabel("Theta (degrees)")
        plt.ylabel("Velocities")
        plt.show()
        fig.savefig(path_temp + "/x1_y1")
        # Figure 4: extension versus extension velocity.
        fig = plt.figure()
        plt.plot(self.lengths, y2_arr)
        plt.xlabel("Lengths")
        plt.ylabel("Velocity of spring")
        plt.show()
        fig.savefig(path_temp + "/x2_y2")
class DoublePendellum():
g = 9.8
x1_arr = []
x2_arr = []
y1_arr = []
y2_arr = []
def __init__(self, m1, m2, l1, l2, x1, x2, y1, y2, z1=0, z2=0):
self.m1 = m1
self.m2 = m2
self.l1 = l1
self.l2 = l2
self.x1 = x1
self.x2 = x2
self.y1 = y1
self.y2 = y2
self.z1 = z1
self.z2 = z2
def runge_kutta(self, func, y, h):
"""Solves using the fifth order range kutta method"""
#Rememeber that y is just the input that you are feeding
k1 = func(y)
k2 = func(y + h*k1/2)
k3 = func(y + h*k2/2)
k4 = func(y + h*k3)
return (k1 + 2 * k2 + 2 * k3 + k4)/6
def fz2(self, y):
"""Returns acceleration of x2 for time instant dt"""
a = - self.l1 * self.z1 * np.cos(self.x1 - self.x2)/self.l2
b = self.l1 * self.y1 * self.y1 * np.sin(self.x1 - self.x2)/ self.l2
c = -self.g * np.sin(self.x2)/ self.l2
return (a + b + c)
def fz1(self, y):
"""Returns acceleration for x1 for time instant dt"""
a = -(self.m2 * self.l2 * self.z2 * np.cos(self.x1 - self.x2))/ (self.l1 * (self.m1+self.m2))
b = - self.m2 * self.l2 * self.y2 * self.y2 * np.sin(self.x1 - self.x2)/ (self.l1 * (self.m1+self.m2))
c = -self.g * np.sin(self.x1)/ self.l1
return (a + b + c)
def solve(self, dt, n, type):
"""Solves the differential equation and plots it"""
now = datetime.now()
os.mkdir(path + "/Figures/DoublePendulum/{}".format(now))
path_temp = path + "/Figures/DoublePendulum/{}".format(now)
#Creating a text file with the initial conditions
file_name = path_temp + "/initial_conditions.txt"
f = open(file_name, "w")
f.write("x1 {} x2 {} y1 {} y2 {} m1{} m2{} l1{} l2{} z1{} z2{}".format(self.x1, self.x2, self.y1, self.y2, self.m1, self.m2, self.l1, self.l2, self.z1, self.z2))
f.close()
#The time variables
t = 0
times = []
if type == 1:
for i in range(n):
#Euler method for integrating over small time steps
self.z1 = self.fz1(self.y1)
self.z2 = self.fz2(self.y2)
self.y1 += self.z1 * dt
self.y2 += self.z2 * dt
self.x1 += self.y1 * dt
self.x2 += self.y2 * dt
#Appending into arrays
self.x1_arr.append(self.x1 * 180 / 3.14)
self.x2_arr.append(self.x2 * 180 / 3.14)
self.y1_arr.append(self.y1)
self.y2_arr.append(self.y2)
times.append(t)
#Increasing the time
t += dt
else:
for i in range(n):
#Runge kutta method for integrating over small time steps
self.z1 = self.runge_kutta(self.fz1, self.y1, 0.001)
self.z2 = self.runge_kutta(self.fz2, self.y2, 0.001)
self.y1 += self.z1 * dt
self.y2 += self.z2 * dt
self.x1 += self.y1 * dt
self.x2 += self.y2 * dt
#Appending into arrays
self.x1_arr.append(self.x1 * 180 / 3.14)
self.x2_arr.append(self.x2 * 180 / 3.14)
self.y1_arr.append(self.y1)
self.y2_arr.append(self.y2)
times.append(t)
#Increasing the time
t += dt
#Position - time plot
fig = plt.figure()
plt.plot(times, self.x1_arr)
plt.xlabel("Times")
plt.ylabel("Coordinates of the pendulum 1")
plt.show()
fig.savefig(path_temp + "/x1_t.png")
fig = plt.figure()
plt.plot(times, self.x2_arr)
plt.xlabel("Times")
plt.ylabel("Theta 2 ")
plt.show()
fig.savefig(path_temp + "/x2_t")
#Phase diagram plots
fig = plt.figure()
plt.plot(self.x1_arr, self.y1_arr)
plt.xlabel("Positions x1")
plt.ylabel("Velocity")
plt.show()
fig.savefig(path_temp + "/x1_y1")
fig = plt.figure()
plt.plot(self.x2_arr, self.y2_arr)
plt.xlabel("Positions x2")
plt.ylabel("Velocity")
plt.show()
fig.savefig(path_temp + "/x2_y2")
class DuffingOscillator:
def __init__(self, x, y, a, b, w, g, d):
self.x = x
self.y = y
self.z = 0
self.a = a
self.b = b
self.g = g
self.d = d
self.w = w
def fz(self, t, y):
return self.g*np.cos(self.w*t) - (self.d*y) - (self.a*self.x) - (self.b*self.x*self.x*self.x)
def runge_kutta(self, func, t, y, h):
"""Solves using the fifth order range kutta method"""
#Rememeber that y is just the input that you are feeding
k1 = func(t, y)
k2 = func(t+ h/2, y + h*k1/2)
k3 = func(t + h/2, y + h*k2/2)
k4 = func(t+ h/2, y + h*k3/2)
k5 = func(t + h, y + h*k4)
return (k1 + k2 + k3 + k4 + k5)/5
def solve(self, dt, n, type):
now = datetime.now()
os.mkdir(path + "/Figures/DuffingOscillator/{}".format(now))
path_temp = path + "/Figures/DuffingOscillator/{}".format(now)
#Creating a text file with the initial conditions
file_name = path_temp + "/initial_conditions.txt"
f = open(file_name, "w")
f.write("Position {} Velocity {} A {} B {} w{} g{} D{}".format(self.x, self.y, self.a, self.b, self.w, self.g, self.d))
f.close()
vels = []
times = []
positions = []
t = 0
if type == 1:
for i in range(n):
self.z = self.fz(t, self.y)
self.y += self.z*dt
self.x += self.y*dt
positions.append(self.x)
vels.append(self.y)
t+=dt
times.append(t)
else:
for i in range(n):
self.z = self.runge_kutta(self.fz, t, self.y, 0.0001)
self.y += self.z*dt
self.x += self.y*dt
positions.append(self.x)
vels.append(self.y)
t+=dt
times.append(t)
fig = plt.figure()
plt.plot(positions, vels)
plt.xlabel("Positions")
plt.ylabel("Velocities")
fig.savefig(path_temp + "/x_v")
plt.show()
fig = plt.figure()
plt.plot(times, positions)
plt.xlabel("Time")
plt.ylabel("Positions")
fig.savefig(path_temp + "/t_x")
plt.show()
"""
Orderly and not so chaotic
"""
"""
t = DoublePendellum(2.5, 3.5, 6.8, 6.8, 0.05, 0.3, 0.002, 0.013)
t.solve(0.001, 100000, 1)
"""
"""
t = DuffingOscillator(0.1, 0.02, 1, 5, 0.5, 8, 0.02)
t.solve(0.01, 50000, 2)
"""
"""
t = ElasticPendellum(0.03, 0.01, 4.8, 1.5, 0.04, 0.01, 5.5)
t.solve(0.01, 100000, 1)
""" | true |
9e80e430d3c0823c57c68c688309b27e14e1c74c | Python | ShaneKent/PyEventLogViewer | /winlogtimeline/ui/tag_settings.py | UTF-8 | 14,444 | 2.578125 | 3 | [
"MIT"
] | permissive | from tkinter import *
from tkinter import messagebox
from tkinter.ttk import *
import re
class TagSettings(Toplevel):
def __init__(self, parent):
super().__init__(parent)
# Class variables
self.tags = dict()
self.changes_made = False
# Window Parameters
self.title('Record Highlight Settings')
self.resizable(width=False, height=False)
# Create and place the widgets
self._init_widgets()
self.populate_tags(parent.current_project.config.get('events', {}).get('colors', {}))
self._place_widgets()
def _init_widgets(self):
"""
Creates the elements of this window and sets configuration values.
:return:
"""
# Master container frame
self.container = Frame(self)
# Treeview for tags
self.listbox_container = Frame(self.container)
self.tag_list = Treeview(self.listbox_container, columns=('source', 'id'), show='headings')
# Set up the tree headings
self.tag_list.heading('source', text='Event Source', command=lambda: self.sort_column('source', False))
self.tag_list.heading('id', text='Event ID', command=lambda: self.sort_column('id', False))
# Set up the tree columns
self.tag_list.column('id', minwidth=0, width=60, stretch=NO)
self.tag_list.column('source', minwidth=0, width=100, stretch=YES)
self.tag_list.bind('<<TreeviewSelect>>', self.callback_update_select_background)
# Scrollbar settings
self.vsb = Scrollbar(self.listbox_container, orient='vertical', command=self.tag_list.yview)
self.hsb = Scrollbar(self.listbox_container, orient='horizontal', command=self.tag_list.xview)
self.tag_list.configure(yscrollcommand=self.vsb.set)
self.tag_list.configure(xscrollcommand=self.hsb.set)
# Color preview
self.color_block = Canvas(self.container, width=300, height=20, relief=SUNKEN)
self.color_block_rect = self.color_block.create_rectangle(0, 0, 301, 21, fill='#FFFFFF')
self.color_block_text = self.color_block.create_text(5, 5, anchor='nw',
text='The quick brown fox jumps over the lazy dog.')
# Sliders
self.slider_container = Frame(self.container)
# Red config
self.red = IntVar()
self.r_label = Label(self.slider_container, text='R: ')
self.r_slider = Scale(self.slider_container, from_=0, to=255, variable=self.red,
command=lambda *args: self.truncate(self.r_slider))
self.r_value_label = Label(self.slider_container, text='0')
self.red.trace('w', lambda *args: self.callback_update_label(self.red, self.r_value_label))
self.r_slider.set(255)
# Green config
self.green = IntVar()
self.g_label = Label(self.slider_container, text='G: ')
self.g_slider = Scale(self.slider_container, from_=0, to=255, variable=self.green,
command=lambda *args: self.truncate(self.g_slider))
self.g_value_label = Label(self.slider_container, text='0')
self.green.trace('w', lambda *args: self.callback_update_label(self.green, self.g_value_label))
self.g_slider.set(255)
# Blue config
self.blue = IntVar()
self.b_label = Label(self.slider_container, text='B: ')
self.b_slider = Scale(self.slider_container, from_=0, to=255, variable=self.blue,
command=lambda *args: self.truncate(self.b_slider))
self.b_value_label = Label(self.slider_container, text='0')
self.blue.trace('w', lambda *args: self.callback_update_label(self.blue, self.b_value_label))
self.b_slider.set(255)
# Buttons for editing tags
self.add_button = Button(self.container, text='Add', command=self.callback_add_tag, underline=0)
self.bind('<Alt-a>', self.callback_add_tag)
self.delete_button = Button(self.container, text='Delete', command=self.callback_remove_tag, underline=0)
self.bind('<Alt-d>', self.callback_remove_tag)
# Finish and cancel buttons
self.finish_button = Button(self.container, text='Finish', command=self.callback_finish, underline=0)
self.cancel_button = Button(self.container, text='Cancel', command=self.callback_cancel, underline=0)
self.bind('<Alt-f>', self.callback_finish)
self.bind('<Return>', self.callback_finish)
self.bind('<Alt-c>', self.callback_cancel)
self.bind('<Escape>', self.callback_cancel)
# Focus on window.
self.focus_set()
def _place_widgets(self):
"""
Lays out the elements in this window.
:return:
"""
padding = 3
# Listbox for tags
self.tag_list.grid(row=0, column=0, columnspan=4, sticky='NESW')
self.vsb.grid(row=0, column=4, sticky='NESW')
self.hsb.grid(row=1, column=0, sticky='NESW')
self.listbox_container.columnconfigure(0, weight=4)
self.listbox_container.grid(row=0, column=0, columnspan=5, padx=padding, pady=padding, sticky='NESW')
# Color box
self.color_block.grid(row=1, column=0, columnspan=5, padx=padding, pady=padding, sticky='NS')
# Red config
self.r_label.grid(row=2, column=0, sticky='EW')
self.r_slider.grid(row=2, column=1, columnspan=3, sticky='EW')
self.r_value_label.grid(row=2, column=4, sticky='EW')
# Green config
self.g_label.grid(row=3, column=0, sticky='EW')
self.g_slider.grid(row=3, column=1, columnspan=3, sticky='EW')
self.g_value_label.grid(row=3, column=4, sticky='EW')
# Blue config
self.b_label.grid(row=4, column=0, sticky='EW')
self.b_slider.grid(row=4, column=1, columnspan=3, sticky='EW')
self.b_value_label.grid(row=4, column=4, sticky='EW')
# Slider container
self.slider_container.columnconfigure(1, weight=4)
self.slider_container.columnconfigure(4, minsize=25)
self.slider_container.grid(row=2, column=0, columnspan=5, padx=padding, sticky='NESW')
# Buttons for editing tags
self.add_button.grid(row=5, column=1, padx=padding, pady=padding, sticky='E')
self.delete_button.grid(row=5, column=2, padx=padding, pady=padding, sticky='EW')
# Finish and cancel buttons
self.finish_button.grid(row=5, column=3, padx=padding, pady=padding, sticky='EW')
self.cancel_button.grid(row=5, column=4, padx=padding, pady=padding, sticky='EW')
# Master container frame
self.container.columnconfigure(1, minsize=100)
self.container.pack(side=LEFT, fill=BOTH)
@staticmethod
def truncate(slider):
"""
Used to truncate slider values since ttk doesn't support the resolution option.
:return:
"""
value = slider.get()
if int(value) != value:
slider.set(int(value))
def sort_column(self, col, reverse):
"""
Sorts the tag list based on a particular column.
:param col: The column to sort.
:param reverse: Whether or not to sort in reverse order.
:return:
"""
column_elements = [(self.tag_list.set(k, col), k) for k in self.tag_list.get_children('')]
if col == 'id':
column_elements = [(int(v), k) for v, k in column_elements]
column_elements.sort(reverse=reverse)
for index, (val, k) in enumerate(column_elements):
self.tag_list.move(k, '', index)
self.tag_list.heading(col, command=lambda _col=col: self.sort_column(_col, not reverse))
def callback_update_label(self, var, label):
"""
Callback used to update the label associated with a slider. Also updates the color associated with the tag.
:param var: The variable bound to the slider.
:param label: The label to update.
:return:
"""
label.config(text=str(int(var.get())))
self.update_tag()
def populate_tags(self, tags):
"""
Iterates over the tag dictionary and inserts each tag.
:param tags: A dictionary containing tag, color pairs. The color should be a hex string.
:return:
"""
tag_config = ((source, event, color) for source, events in tags.items() for event, color in events.items())
for source, event, color in tag_config:
self.insert_tag(source, event, color)
def insert_tag(self, source, event, color):
"""
Inserts a tag into the ui and the tag list.
:param source: The event source.
:param event: The event id as a string.
:param color: The color to associate with the tag as a string in hex format.
:return:
"""
tag = f'{source}::{event}'
self.tag_list.insert('', 'end', values=(source, int(event)), tags=(tag,))
self.tag_list.tag_configure(tag, background=color)
self.tags[source] = self.tags.get(source, dict())
self.tags[source][event] = color
def callback_update_select_background(self, event=None):
"""
Callback used to update the selection background and sliders to match the selection.
:return:
"""
selection = self.tag_list.focus()
if not selection:
return
source, event = (str(v) for v in self.tag_list.item(selection)['values'])
hex_color = self.tags[source][event]
# self.color_block.create_rectangle(0, 0, 301, 21, fill=hex_color)
self.color_block.itemconfigure(self.color_block_rect, fill=hex_color)
hex_color = hex_color.lstrip('#')
r, g, b = tuple(int(hex_color[i:i + 2], 16) for i in range(0, 5, 2))
self.r_slider.set(r)
self.g_slider.set(g)
self.b_slider.set(b)
def update_tag(self):
"""
Updates the colors associated with a tag
:return:
"""
selection = self.tag_list.focus()
if not selection:
return
source, event = (str(v) for v in self.tag_list.item(selection)['values'])
r, g, b = tuple(map(int, (self.r_slider.get(), self.g_slider.get(), self.b_slider.get())))
hex_color = f'#{r:02x}{g:02x}{b:02x}'
self.tags[source][event] = hex_color
self.color_block.itemconfigure(self.color_block_rect, fill=hex_color)
self.tag_list.tag_configure('::'.join((source, event)), background=hex_color)
self.changes_made = True
def callback_add_tag(self, event=None):
"""
Creates a dialog window for the user to enter a new tag.
:return:
"""
window = TagPrompt(self)
window.grab_set()
def callback_remove_tag(self, event=None):
selection = self.tag_list.focus()
if not selection:
return
source, event = (str(v) for v in self.tag_list.item(selection)['values'])
self.tags[source].pop(event)
if len(self.tags[source].keys()) == 0:
self.tags.pop(source)
self.tag_list.delete(selection)
self.changes_made = True
def callback_finish(self, event=None):
"""
Callback used to finish making changes to the tags and return to master.
:return:
"""
self.master.current_project.config['events'] = self.master.current_project.config.get('events', {})
self.master.current_project.config['events']['colors'] = self.tags
if self.master.timeline is not None:
self.master.timeline.update_tags(self.master.current_project.config['events']['colors'])
self.master.changes_made |= self.changes_made
self.destroy()
def callback_cancel(self, event=None):
"""
Callback used to discard changes made. Destroys the widget and returns control to the master
without making any changes.
:return:
"""
self.destroy()
def __destroy__(self):
"""
Returns focus and control to the master.
:return:
"""
self.grab_release()
class TagPrompt(Toplevel):
def __init__(self, parent):
super().__init__(parent)
# Window settings
self.title('New Tag')
self.resizable(width=False, height=False)
# Create and place the widgets
self._init_widgets()
self._place_widgets()
def _init_widgets(self):
self.container = Frame(self)
self.source_label = Label(self.container, text='Event Source')
self.source_entry = Entry(self.container)
self.id_label = Label(self.container, text='Event ID')
id_vcmd = (self.container.register(self.validate_command_id), '%d', '%P')
self.id_entry = Entry(self.container, validate='key', validatecommand=id_vcmd)
self.ok_button = Button(self.container, text='Ok', command=self.callback_ok)
def _place_widgets(self):
padding = 3
self.source_label.grid(row=0, column=0, columnspan=3, padx=padding, pady=padding, sticky='EW')
self.source_entry.grid(row=1, column=0, columnspan=3, padx=padding, pady=padding, sticky='EW')
self.id_label.grid(row=2, column=0, columnspan=3, padx=padding, pady=padding, sticky='EW')
self.id_entry.grid(row=3, column=0, columnspan=3, padx=padding, pady=padding, sticky='EW')
self.ok_button.grid(row=4, column=1, padx=padding, sticky='NESW')
self.container.pack()
@staticmethod
def validate_command_id(action, value):
"""
Restricts entry to only allow integers.
:return:
"""
if action != '1':
return True
if re.match(r'^[0-9]+$', value):
return True
return False
def callback_ok(self):
source, event = self.source_entry.get(), str(self.id_entry.get())
if not all((source, event)):
messagebox.showerror('Error', 'You must enter a value.')
return
if event in self.master.tags.get(source, {}):
messagebox.showerror('Error', 'That tag already exists.')
return
self.master.insert_tag(source, event, '#FFFFFF')
self.master.changes_made = True
self.destroy()
def __destroy__(self):
"""
Returns focus and control to the master.
:return:
"""
self.grab_release()
| true |
5a8ce74c73561bc58dd1d601e484f5d28bda1e6a | Python | RamsesCamas/193230 | /semaphores/sema_philo.py | UTF-8 | 1,331 | 3.21875 | 3 | [] | no_license | import threading
import time
class TenedorFilosofo(threading.Thread):
def __init__(self, tenedores, filosofosNum):
threading.Thread.__init__(self)
self.tenedores = tenedores
self.filosofosNum = filosofosNum
self.datoTemporal = (filosofosNum + 1) % 5
self.start()
def hilosFilosofos(self):
print("Filosofo iniciando", self.filosofosNum)
time.sleep(2)
self.tenedores[self.filosofosNum].acquire()
time.sleep(1)
print("Filosofo ", self.filosofosNum, "recoge tenedor del lado derecho")
time.sleep(1)
self.tenedores[self.datoTemporal].acquire()
print("Filosofo ", self.filosofosNum, "recoge tenedor del lado izquierdo")
time.sleep(0.5)
print("Filosofo ", self.filosofosNum, "libre izquierdo")
self.tenedores[self.datoTemporal].release()
time.sleep(0.5)
print("Filosofo ", self.filosofosNum, "libre derecho")
self.tenedores[self.filosofosNum].release()
time.sleep(2)
def run(self):
self.hilosFilosofos()
tenedorArray = [1,1,1,1,1]
if __name__ == '__main__':
for i in range(0,5):
tenedorArray[i] = threading.BoundedSemaphore(2)
for i in range(0,5):
total = TenedorFilosofo(tenedorArray, i)
time.sleep(2) | true |
76574d385201af0485e98ba38f862676f18aa47b | Python | Ganesh2611/player_level | /6.string_isomorphic.py | UTF-8 | 130 | 2.9375 | 3 | [] | no_license | N,N1=input().split()
for n in N:
s=N.count(n)
for j in N1:
s1=N1.count(j)
if(s==s1):
print("yes")
else:
print("no")
| true |
ae064d3b3ba302edf669838aeed2ac18f2b86078 | Python | LuizFelipeBG/CV-Python | /Mundo 2/ex37.py | UTF-8 | 365 | 4.25 | 4 | [] | no_license |
num = int(input('Digite um número: '))
con = int(input('Conversão -> 1-Binario / 2-octal / 3-hexadecimal: '))
if con == 1:
print('O número em binario é: {}'.format(bin(num)))
elif con == 2:
print('O número em octal é: {}'.format(oct(num)))
elif con == 3:
print('O número em octal é: {}'.format(hex(num)))
else:
print('Opção invalida')
| true |
7820ddd26d8676370106d608fea1452533c54c7e | Python | AdamZhouSE/pythonHomework | /Code/CodeRecords/2671/60700/255911.py | UTF-8 | 351 | 2.921875 | 3 | [] | no_license | def A(x, y):
a = 1
for i in range(x-y+1, x+1):
a *= i
return a
def C(x, y):
return A(x, y)//A(y, y)
tests = int(input())
nums = []
for i in range(tests):
nums.append(input())
for i in nums:
n = int(i)
invalidNum = 0
for j in range((n+1)//2+1):
invalidNum += C(n-j+1, j)
print(2**n - invalidNum)
| true |
13aa4d597eb101a1e750bf57d0878c5b065b1bc5 | Python | ravikantchauhan/python | /prime_number.py | UTF-8 | 262 | 4.03125 | 4 | [] | no_license | # prime numbers
# a number that is divisible only by itself and 1 (e.g. 2, 3, 5, 7, 11).
# "prime numbers are very useful in cryptography"
num = 10
for i in range(2,num):
if num % i == 0:
print("Not Prime")
# break
else:
print("Prime") | true |
489d7297a4b8d62aa0ad9cb201b98ecbfa8c3af9 | Python | tonyducvo000/pylit | /main.py | UTF-8 | 1,489 | 3.140625 | 3 | [] | no_license | #Given a line of text, find all the occurrence of alliteration
import sys
from pathlib import Path
#my_file = Path(sys.argv[1])
my_file = Path("/home/work/test/pycharm/in")
if not my_file.is_file():
print ("File does not exist!")
sys.exit()
con = str(my_file)
line_number = 0
with open(con, "r") as myfile:
for line in myfile:
string = line.lower().split()
string2 = string
index = 0
line_number += 1
result = []
while True:
if (len(string2) - index == 1 or len(string2) - index == 0):
break
if string2[index][0] == string2[index + 1][0] or \
string2[index][0:1] == string2[index + 1][0:1]:
result.append(string[index])
result.append(string[index+1])
index += 2
while True:
if (len(string2) - index == 1 or len(string2) - index == 0):
break
if string2[index][0] == string2[index - 1][0]:
result.append(string[index])
index += 1
else:
break
else:
index += 1
resultString = " ".join(result)
for ch in [',', '.']:
if ch in resultString:
resultString = resultString.replace(ch, "")
print("Alliterations for line #" + str(line_number) + ": " + resultString)
exit(0)
| true |
c3e8d3bd7c5ce8198fff3d98188c32cad0c0b64d | Python | roni-kemp/python_programming_curricula | /CS1/0410_pygame_colors/colors.py | UTF-8 | 435 | 2.578125 | 3 | [
"MIT"
] | permissive |
black = 0,0,0
white = 255,255,255
blue = 0, 0, 255
red = 255, 0, 0
green = 0, 255, 0
gray = 127,127,127
lightblue = 0, 255, 255
purple = 255, 0, 255 #bluered
yellow = 255, 255, 0 #redgreen
seagreen = 127, 255, 127
mauve = 255, 127, 127
violet = 127, 127, 255
lightgray = 190,190,190
bluegreen = 0, 127, 127
purplish = 127, 0, 127
gold = 127, 127, 0
orange = 255, 127, 0
royal_purple = 75, 36, 211
swampgreen = 103, 156, 51
| true |
1287b43d33a94b1dd00728cffa64b91b7e2b3bf1 | Python | alexanderboiko/My-training-in-Python | /Training/callable_types.py | UTF-8 | 428 | 3.5625 | 4 | [] | no_license | # def f():
# print('Hello world')
# d = lambda :'hi'
# print(d())
# print(callable(d))
# class Cat:
#
# def __call__(self, *args, **kwargs):
# print('may')
#
# def say_hello(self):
# print('hello')
#
# bob = Cat()
# print(bob)
# print(callable(Cat))
# print(callable(bob))
# print(callable(bob.say_hello()))
# bob()
def f():
n = 0
while True:
yield n
n+=1
print(callable(f)) | true |
9d5453eacd231f3766dce246a63fd5fecd10d73c | Python | 3chords/python-challenge | /PyPoll/main.py | UTF-8 | 2,431 | 3.578125 | 4 | [] | no_license | # jeff simonson, 5/4/19 python homework
# If I had more time I realize that I would need to do a better job of setting up the key-value dictionaries
# to iterate and update the vote counts based on the candidate key search, get the vote count associated with that key and add to the vote.
# Once I got that then the rest would be relatively easy and I could loop through the dictionary to calculate the percentages of total vote,
# find a max and declare the winner. Oh well... Pandas will likely solve this for me. :)
#project objectives
#The total number of votes cast
#A complete list of candidates who received votes
#The percentage of votes each candidate won
#The total number of votes each candidate won
#The winner of the election based on popular vote.
#--------------------------------------------------
# modules
import os
import csv
#declare and set variables equal to 0
vote_counter = 0
total_votes = 0
#name of cvs file
myfile = "election_data.csv"
#provide path (i was having legit trouble with resource so I had to hard code it, but it works!)
csvpath = os.path.join('C:\\dumbass_sandbox',myfile)
with open(csvpath, newline="") as csvfile:
#initialize csv.reader
csvreader = csv.reader(csvfile, delimiter=",")
#acknowledge headers in first row, skip them
file_header = next(csvreader)
#loop rows
for r in csvreader:
if vote_counter == 0:
# add candidate & votes to dictionary
c_name = r[2]
votes_by_candidate = {'c_name': c_name, 'c_votes': 1}
else:
# does candidate already exist?
for c_name, c_votes in votes_by_candidate.items():
#candidate exists
if c_name == r[2]:
# update the candidate-vote tally pair
add_to_old_votes = votes_by_candidate['c_votes'].values() + 1
votes_by_candidate = {'c_name': c_name, 'c_votes': add_to_old_votes}
else:
# add a new name to the dictionary votes = 1
votes_by_candidate = {'c_name': r[2], 'c_votes': 1}
#count all votes
vote_counter += 1
#out of the loop, verify values from above
#count votes but subtract 1 for headers
total_votes = vote_counter -1
print(total_votes)
#print out vote tallies by candidate
| true |
7d615c4f9e11477b506c326d43b261e7e38dbf08 | Python | SudhanshuBlaze/hacktoberfest2021- | /binary-decimal.py | UTF-8 | 367 | 3.890625 | 4 | [] | no_license | def binaryToNum(binary):
result = 0
for i in range(len(binary)):
digit = binary.pop()
if digit == '1':
result = result + pow(2, i)
print("Result: ", result)
if __name__ == '__main__':
try:
bin = list(input("Input Binary Number: "))
binaryToNum(bin)
except:
print("There's an error occurred") | true |
4592a6f0361f5b7ead2d10eb91c7da8cc6a66428 | Python | shinminki/Git-Tutorial | /my_module.py | WINDOWS-1253 | 131 | 2.6875 | 3 | [] | no_license | def add(a,b):
return a+b
def sub(a,b):
return a-b
def mul(a,b):
return a*b
def div(a,b):
<<<<<<< HEAD
return a//b #ּ | true |
3f8751455bff262e4091dfeeadaa7c7758fbaf7d | Python | daxile6/BookStore-System | /Book.py | UTF-8 | 1,138 | 3.578125 | 4 | [] | no_license | class Book:
'''
Class: Book contains the detail of the books. It allows comparing
two instances accoring to the rank.
for example b1 < b2 if b1.rank < b2.rank
'''
def __init__(self, key, title, group, rank, similar):
self.key = key
self.title = title
self.group = group
self.rank = int(rank)
self.similar = similar
def __lt__(self, a) :
'''
This function allows to make direct comparation using the operator <
'''
return self.rank < a.rank#CHANGED FROM RANK TO TITLE
def __gt__(self, a) :
'''
This function allows to make direct comparation using the operator >
'''
return self.rank > a.rank # CHANGED FROM RANK TO TITLE
def __str__(self):
'''
function returns a string containting the book information
'''
return f"\n\tBook: {self.key}\n\tTitle: {self.title}\n\tGroup: {self.group}\n\tRank: {self.rank}"
# def __le__(self, other):
# return self.rank <= other.rank
# def __ge__(self, other):
# return self.rank >= other.rank
| true |
04130a408e01b70e148d146f9c8d8e01b6d94bec | Python | lucasdamo/INF1771-Inteligencia-Artificial-T1 | /src/brute_force.py | UTF-8 | 4,873 | 2.875 | 3 | [] | no_license | from copy import deepcopy
from pathlib import Path
from tqdm import tqdm
import pandas as pd
from collections import Counter
class Node:
def __init__(self, pokemonList, time):
self.pokemonList = pokemonList
self.time = time #time used in battles
def __lt__(self, another):
return self.time < another.time
def __repr__(self):
return ('{0} Time: {1}'.format(self.pokemonList,self.time))
POKEMON_MAX_ENERGY = 5
input_path_dir = Path(__file__).parents[1].joinpath('input')
gym_level = pd.read_csv(input_path_dir.joinpath('gymnasiums.csv')).sort_values('id')['level'].values
pokemon = pd.read_csv(input_path_dir.joinpath('pokemons.csv')).sort_values('power')
pokemon_name = pokemon['name'].values
pokemon_power_list = pokemon['power'].values
n_pokemons = len(pokemon_name)
n_gyms = len(gym_level)
n_pokemons = len(pokemon_name)
n_gyms = len(gym_level)
already_explored = {}
def birthChildren(current_node:Node,pokemon_power:dict):
breed_children = []
battle_list = current_node.pokemonList
# for each pokemon, for each battle create children node with new pokemon on every possible position
for (name,_) in pokemon_power.items():
for i in range(n_gyms):
j = 0
while j < n_pokemons:
if battle_list[i][j] == None:
child = deepcopy(current_node)
#print(child.pokemonList[i][j])
child.pokemonList[i][j] = name
child.time = getTotalTime (child.pokemonList,pokemon_power,gym_level)
#print('child after ' + str(child.pokemonList))
if NodeIsValid(child):
breed_children.append(child)
j = n_pokemons
j = j + 1
#read about pyhton memory managment if is possible to delete child from memory if not valid
return breed_children
def bruteForce(pokemon_name,pokemon_power,gym_level):
empty_battles = []
for _ in range(len(gym_level)):
empty_battles.append([None,None,None,None,None])
working_node = Node(empty_battles, getTotalTime(empty_battles,pokemon_power,gym_level))
best_node = deepcopy(working_node)
open_nodes = []
open_nodes.append(working_node)
children = []
t = tqdm(total=len(open_nodes))
while open_nodes:
open_nodes.sort()
t.set_description(f"Best {best_node.time}")
t.total = len(open_nodes)
t.update(1)
t.refresh()
working_node = open_nodes.pop(0)
if working_node.time < best_node.time:
best_node = working_node
children = birthChildren(working_node,pokemon_power)
for _ in children:
open_nodes.append(children.pop(0))
return best_node
def NodeIsValid(current_node):
hashed_node = get_hash(current_node)
if already_explored.get(hashed_node): #already expanding on similar outcome elsewhere
return False
already_explored[hashed_node] = hashed_node
battle_list = current_node.pokemonList
all_fighters = []
for battle in battle_list:
battle = list(filter(None, battle)) #remove None values from list
battle_count = list(Counter(battle).values())
if battle_count:
if max(battle_count) > 1:
return False #pokemon cannot repeat in battle
for pokemon in battle:
all_fighters.append(pokemon)
pokemon_count = list(Counter(all_fighters).values())
if max(pokemon_count) > POKEMON_MAX_ENERGY or sum(pokemon_count) >= POKEMON_MAX_ENERGY * POKEMON_MAX_ENERGY:
return False # if pokemon is used more than max lives or no pokemon is alive at the end
return True
def get_hash(node):
hashing = []
for i in range(len(node.pokemonList)):
l = list(filter(None, node.pokemonList[i]))
l.sort()
hashing.append(l)
return str(hashing)
def getTotalTime (battles,pokemon_power,gym_level):
totalTime = 0
for i in range(len(battles)):
battlePower = 0
for j in range(len(battles[i])):
if battles[i][j] != None:
battlePower = battlePower + pokemon_power[battles[i][j]]
if battlePower == 0:
battlePower = 0.1
battleTime = gym_level[i]/battlePower
totalTime = totalTime + battleTime
return totalTime
empty_battles = []
for _ in range(len(gym_level)):
empty_battles.append([None,None,None,None,None])
pokemon_power = {}
for i in range(n_pokemons):
pokemon_power[pokemon_name[i]] = pokemon_power_list[i]
#test birthChildren
'''
children = birthChildren(Node(empty_battles, getTotalTime(empty_battles,pokemon_power,gym_level)),pokemon_power)
print(children[0])
'''
#DON'T RUN THIS CODE
best = bruteForce(pokemon_name,pokemon_power,gym_level)
print(best)
| true |
c78870e501b2a969b6d959898c67193371ef542d | Python | astsu-dev/chess | /chess/consts.py | UTF-8 | 357 | 2.65625 | 3 | [] | no_license | from typing import Final, Literal
from .typedefs import Letter, LetterNum, Num, TermColor
BLACK_PIECE_COLOR: Final[TermColor] = "blue"
WHITE_PIECE_COLOR: Final[TermColor] = "cyan"
letters: Final[list[Letter]] = ["a", "b", "c", "d", "e", "f", "g", "h"]
letters_nums: Final[list[LetterNum]] = [0, 1, 2, 3, 4, 5, 6, 7]
nums: Final[list[Num]] = letters_nums
| true |
caf95fd091631c8eecba565c7a2432f1c4119bed | Python | diaaahmed850/DRL | /Environments/ple_xteam/games/citycopter.py | UTF-8 | 11,896 | 2.828125 | 3 | [] | no_license | import math
import sys
import random
import os
import contextlib
with contextlib.redirect_stdout(None):
import pygame
from pygame.constants import K_w, K_s
from .utils.vec2d import vec2d
from .base.pygamewrapper import PyGameWrapper
_dir_ = os.path.dirname(os.path.abspath(__file__))
_asset_dir = os.path.join(_dir_, "assets/")
class Block(pygame.sprite.Sprite):
    """A rock obstacle that scrolls leftward across the screen.

    The sprite image is picked at random from the three bundled rock
    assets, and a pixel mask is kept for precise collision testing.
    """

    def __init__(self, pos_init, speed, SCREEN_WIDTH, SCREEN_HEIGHT):
        pygame.sprite.Sprite.__init__(self)

        self.pos = vec2d(pos_init)
        self.width = int(SCREEN_WIDTH * 0.05)
        self.height = int(SCREEN_HEIGHT * 0.2)
        self.speed = speed
        self.SCREEN_WIDTH = SCREEN_WIDTH
        self.SCREEN_HEIGHT = SCREEN_HEIGHT

        # Randomly select one of the three rock sprites (rock1 .. rock3).
        sprite_index = random.randrange(1, 4, 1)
        rock_path = os.path.join(_asset_dir, "rock" + str(sprite_index) + ".png")
        rock_image = pygame.image.load(rock_path).convert_alpha()
        rock_image = pygame.transform.scale(
            rock_image,
            (int(SCREEN_WIDTH / 10.909), int(SCREEN_HEIGHT / 13.333)))

        # Colliding block: rect for positioning, mask for collisions.
        self.image = rock_image
        self.rect = self.image.get_rect()
        self.mask = pygame.mask.from_surface(self.image)
        self.rect.center = pos_init

    def update(self, dt):
        """Move the block left by speed * dt and sync its rect center."""
        self.pos.x -= self.speed * dt
        self.rect.center = (self.pos.x, self.pos.y)
class HelicopterPlayer(pygame.sprite.Sprite):
    """The player's helicopter: climbs while the key is held, falls otherwise.

    Vertical motion integrates `momentum`, damped by 1% per frame.
    """
    def __init__(self, speed, SCREEN_WIDTH, SCREEN_HEIGHT):
        pygame.sprite.Sprite.__init__(self)
        # Fixed horizontal position 35% across the screen, vertically centered.
        pos_init = (int(SCREEN_WIDTH * 0.35), SCREEN_HEIGHT / 2)
        self.pos = vec2d(pos_init)
        self.speed = speed
        # Climb pulls up much harder than gravity pulls down.
        self.climb_speed = speed * -0.875 # -0.0175
        self.fall_speed = speed * 0.09 # 0.0019
        self.momentum = 0
        self.width = SCREEN_WIDTH * 0.05
        self.height = SCREEN_HEIGHT * 0.05
        heli_sprite_path = os.path.join(_asset_dir, "helicopter.png")
        self.image = pygame.image.load(heli_sprite_path).convert_alpha()
        self.image = pygame.transform.scale(self.image,(int(SCREEN_WIDTH/10.909),int(SCREEN_HEIGHT/20.818)))
        self.rect = self.image.get_rect()
        self.mask = pygame.mask.from_surface(self.image)
        self.rect.center = pos_init
    def update(self, is_climbing, dt):
        """Integrate one frame of vertical motion (screen y grows downward)."""
        self.momentum += (self.climb_speed if is_climbing else self.fall_speed) * dt
        self.momentum *= 0.99  # damping so momentum does not grow unbounded
        self.pos.y += self.momentum
        self.rect.center = (self.pos.x, self.pos.y)
class Terrain(pygame.sprite.Sprite):
    """One vertical slice of cave: a textured ceiling and floor segment.

    The sprite is 1.5 screen-heights tall with an open corridor in the
    middle; its center y is jittered by the caller to make the cave wind.
    """
    def __init__(self, pos_init, speed, SCREEN_WIDTH, SCREEN_HEIGHT):
        pygame.sprite.Sprite.__init__(self)
        self.pos = vec2d(pos_init)
        self.speed = speed
        self.width = int(SCREEN_WIDTH * 0.1)
        original_color = (0,0,0,0)
        # Random wall texture (block1..block3); top copy is flipped 180.
        num = random.randrange(1,4,1)
        block_path = os.path.join(_asset_dir, "block"+str(num)+".jpg")
        imageBlock = pygame.image.load(block_path).convert_alpha()
        imageTop = pygame.transform.scale(imageBlock, (int(self.width) , int(SCREEN_HEIGHT * 0.5)))
        imageTop = pygame.transform.rotate(imageTop,180)
        imageBot = pygame.transform.scale(imageBlock, (int(self.width) , int(SCREEN_HEIGHT * 0.5)))
        # Transparent canvas; black (0,0,0) is the colorkey so only the
        # drawn ceiling/floor rectangles collide.
        image = pygame.Surface((self.width, SCREEN_HEIGHT * 1.5))
        image.fill(original_color)
        image.set_colorkey((0,0,0))
        color = (0, 0, 0)
        # top rect
        pygame.draw.rect(
            image,
            color,
            (0, 0, self.width, SCREEN_HEIGHT * 0.5),
            0
        )
        image.blit(imageTop,(0,0))
        # bot rect
        pygame.draw.rect(
            image,
            color,
            (0, SCREEN_HEIGHT * 1.05, self.width, SCREEN_HEIGHT * 0.5),
            0
        )
        image.blit(imageBot,(0,SCREEN_HEIGHT * 1.05))
        self.image = image
        self.rect = self.image.get_rect()
        self.rect.center = pos_init
    def update(self, dt):
        """Scroll left by speed*dt and keep the rect in sync with pos."""
        self.pos.x -= self.speed * dt
        self.rect.center = (self.pos.x, self.pos.y)
class citycopter(PyGameWrapper):
    """
    Side-scrolling helicopter game (PLE environment): hold "up" to climb,
    release to fall, avoid rocks and the winding cave walls.

    Parameters
    ----------
    width : int
        Screen width.
    height : int
        Screen height, recommended to be same dimension as width.
    """

    def __init__(self, width=480, height=480):
        actions = {
            "up": K_w
        }
        PyGameWrapper.__init__(self, width, height, actions=actions)
        self.is_climbing = False
        self.state_size = 7  # number of entries returned by getGameState
        # Scroll speed in pixels/ms, proportional to the screen width.
        self.speed = 0.0004 * width

    def _handle_player_events(self):
        """Drain the pygame event queue; a fresh K_w press sets is_climbing."""
        self.is_climbing = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                key = event.key
                if key == self.actions['up']:
                    self.is_climbing = True

    def getGameState(self):
        """
        Gets a non-visual state representation of the game.

        Returns
        -------

        dict
            * player y position.
            * player velocity.
            * player distance to floor.
            * player distance to ceiling.
            * next block x distance to player.
            * next blocks top y location,
            * next blocks bottom y location.

            See code for structure.

        NOTE(review): if no block lies ahead of the player, min_block
        stays None and the dict construction raises AttributeError --
        confirm a block always exists ahead.
        """
        min_dist = 999
        min_block = None
        for b in self.block_group:  # Groups do not return in order
            dist_to = b.pos.x - self.player.pos.x
            if dist_to > 0 and dist_to < min_dist:
                min_block = b
                min_dist = dist_to

        # The terrain slice the player currently overlaps defines the
        # local ceiling/floor positions.
        current_terrain = pygame.sprite.spritecollide(
            self.player, self.terrain_group, False)[0]
        state = {
            "player_y": self.player.pos.y,
            "player_vel": self.player.momentum,
            "player_dist_to_ceil": self.player.pos.y - (current_terrain.pos.y - self.height * 0.25),
            "player_dist_to_floor": (current_terrain.pos.y + self.height * 0.25) - self.player.pos.y,
            "next_gate_dist_to_player": min_dist,
            "next_gate_block_top": min_block.pos.y,
            "next_gate_block_bottom": min_block.pos.y + min_block.height
        }

        return state

    def getScreenDims(self):
        return self.screen_dim

    def getActions(self):
        return self.actions.values()

    def getScore(self):
        return self.score

    def game_over(self):
        # lives is a float; any collision this frame drops it to <= 0.
        return self.lives <= 0.0

    def init(self):
        """(Re)create the player, one rock, and four screens of terrain."""
        self.score = 0.0
        self.lives = 1.0

        self.player = HelicopterPlayer(
            self.speed,
            self.width,
            self.height
        )

        self.player_group = pygame.sprite.Group()
        self.player_group.add(self.player)

        self.block_group = pygame.sprite.Group()
        self._add_blocks()

        self.terrain_group = pygame.sprite.Group()
        self._add_terrain(0, self.width * 4)

    def _add_terrain(self, start, end):
        """Fill [start, end) with Terrain slices on a sinusoidal centerline."""
        w = int(self.width * 0.1)
        # each block takes up 10 units.
        steps = range(start + int(w / 2), end + int(w / 2), w)
        y_jitter = []
        # Randomized spatial frequency + phase noise make each cave unique.
        freq = 4.5 / self.width + self.rng.uniform(-0.01, 0.01)
        for step in steps:
            jitter = (self.height * 0.125) * \
                math.sin(freq * step + self.rng.uniform(0.0, 0.5))
            y_jitter.append(jitter)

        y_pos = [int((self.height / 2.0) + y_jit) for y_jit in y_jitter]

        for i in range(0, len(steps)):
            self.terrain_group.add(Terrain(
                (steps[i], y_pos[i]),
                self.speed,
                self.width,
                self.height
            )
            )

    def _add_blocks(self):
        """Spawn one rock just off the right edge at a mid-screen height."""
        x_pos = self.rng.randint(self.width, int(self.width * 1.05))
        y_pos = self.rng.randint(
            int(self.height * 0.45),
            int(self.height * 0.55)
        )
        self.block_group.add(
            Block(
                (x_pos, y_pos),
                self.speed,
                self.width,
                self.height
            )
        )

    def reset(self):
        self.init()

    def step(self, dt):
        """Advance one frame: input, physics, collisions, scoring, drawing."""
        self.screen.fill((0, 0, 0))
        self._handle_player_events()
        self.score += self.rewards["tick"]

        self.player.update(self.is_climbing, dt)
        self.block_group.update(dt)
        self.terrain_group.update(dt)

        # Pixel-perfect (mask) collision with rocks costs a life each hit.
        hits = pygame.sprite.spritecollide(
            self.player, self.block_group, False,pygame.sprite.collide_mask)
        for creep in hits:
            self.lives -= 1

        # Hitting the ceiling or floor band of an overlapped terrain slice.
        hits = pygame.sprite.spritecollide(
            self.player, self.terrain_group, False,pygame.sprite.collide_mask)
        for t in hits:
            if self.player.pos.y - self.player.height <= t.pos.y - self.height * 0.25:
                self.lives -= 1

            if self.player.pos.y >= t.pos.y + self.height * 0.25:
                self.lives -= 1

        # Passing the (single) rock scores and spawns the next one;
        # off-screen rocks are culled.
        for b in self.block_group:
            if b.pos.x <= self.player.pos.x and len(self.block_group) == 1:
                self.score += self.rewards["positive"]
                self._add_blocks()

            if b.pos.x <= -b.width:
                b.kill()

        for t in self.terrain_group:
            if t.pos.x <= -t.width:
                self.score += self.rewards["positive"]
                t.kill()

        if self.player.pos.y < self.height * 0.125:  # its above
            self.lives -= 1

        if self.player.pos.y > self.height * 0.875:  # its below the lowest possible block
            self.lives -= 1

        # Keep roughly five screens of terrain queued up ahead.
        if len(self.terrain_group) <= (
                10 + 3):  # 10% per terrain, offset of ~2 with 1 extra
            self._add_terrain(self.width, self.width * 5)

        if self.lives <= 0.0:
            self.score += self.rewards["loss"]

        self.player_group.draw(self.screen)
        self.block_group.draw(self.screen)
        self.terrain_group.draw(self.screen)

        # Render the (non-negative) score as outlined text near the top.
        score = int(self.getScore())
        if(score<0):
            score =0
        font = pygame.font.SysFont("Arial",int(self.width/15),True)
        message = str(score)
        fontcolor = (1, 1, 1)
        outlinecolor = (255,255,255)
        scorescreen = textOutline(font, message, fontcolor, outlinecolor)
        self.screen.blit(scorescreen, [int(self.width/2),int(self.height/8)])
def textHollow(font, message, fontcolor):
    """Render `message` as hollow (outline-only) glyphs.

    The text is blitted at four diagonal offsets to thicken it, then the
    interior is knocked out via colorkeying, leaving only the outline.
    """
    # Per-channel XOR 0xFF gives the complementary color, guaranteed to
    # differ from fontcolor, used as the throwaway background/colorkey.
    notcolor = [c^0xFF for c in fontcolor]
    base = font.render(message, 0, fontcolor, notcolor)
    size = base.get_width() + 2, base.get_height() + 2
    # 16-bit surface; palette tricks below assume this pixel format.
    img = pygame.Surface(size, 16)
    img.fill(notcolor)
    base.set_colorkey(0)
    # Fatten the glyphs by stamping the text at the four corners.
    img.blit(base, (0, 0))
    img.blit(base, (2, 0))
    img.blit(base, (0, 2))
    img.blit(base, (2, 2))
    base.set_colorkey(0)
    # Recolor the glyph interior to notcolor and stamp it centered, so
    # the final colorkey wipes the inside and keeps only the outline.
    base.set_palette_at(1, notcolor)
    img.blit(base, (1, 1))
    img.set_colorkey(notcolor)
    return img
def textOutline(font, message, fontcolor, outlinecolor):
    """Render `message` in fontcolor with a 1px outline in outlinecolor."""
    base = font.render(message, 0, fontcolor)
    outline = textHollow(font, message, outlinecolor)
    # 24-bit canvas: filled text centered inside the hollow outline.
    img = pygame.Surface(outline.get_size(), 24)
    img.blit(base, (1, 1))
    img.blit(outline, (0, 0))
    img.set_colorkey(0)
    return img
if __name__ == "__main__":
    # Manual play loop: fixed RNG seed so terrain/rocks are reproducible.
    import numpy as np

    pygame.init()
    game = citycopter(width=480, height=480)
    game.screen = pygame.display.set_mode(game.getScreenDims(), 0, 32)
    #copterImage = pygame.image.load("helicopter.png").convert_alpha()
    #pygame.display.set_icon(copterImage)
    pygame.display.set_caption("CityCopter")
    game.clock = pygame.time.Clock()
    game.rng = np.random.RandomState(24)
    game.init()

    while True:
        # Restart immediately on death; cap the loop at 30 FPS.
        if game.game_over():
            game.reset()
        dt = game.clock.tick_busy_loop(30)
        game.step(dt)
        pygame.display.update()
| true |
d8d0a833707edd92028d8384b19ad16c98c2e6d0 | Python | abhidg/namcap-reports | /scripts/package-info.py | UTF-8 | 727 | 2.984375 | 3 | [
"LicenseRef-scancode-public-domain"
] | permissive | #!/usr/bin/env python
# Retrieves AUR package information and stores
# it in the form of a dictionary.
import urllib, pickle
def info(package):
    """Return the AUR RPC 'results' entry for *package*.

    The RPC endpoint returns JSON, so parse it with json.loads instead
    of eval(): eval chokes on JSON literals such as true/false/null and
    would execute arbitrary expressions fetched from the network.
    """
    import json
    # quote() the package name so special characters survive the URL.
    url = "http://aur.archlinux.org/rpc.php?type=info&arg=" + urllib.quote(package)
    d = json.loads(urllib.urlopen(url).read())
    return d["results"]
def getpkgnames(file="community-packages.txt"):
    """Return the sorted package names listed one per line in *file*.

    Fixes two defects of the original: `s[:-1]` chopped the final
    character of the last name when the file lacked a trailing newline
    (splitlines strips line endings correctly either way), and the file
    handle is now closed deterministically.
    """
    with open(file) as f:
        return sorted(f.read().splitlines())
def pkgdb(pkgnames):
    """Build a {package_name: info_dict} database via one RPC call each."""
    return dict((pkg, info(pkg)) for pkg in pkgnames)
if __name__ == "__main__":
    # Build the info database for every community package and cache it
    # to disk (network-heavy: one AUR RPC request per package).
    d = pkgdb(getpkgnames())
    pickle.dump(d, open('pkg.db','wb'))  # NOTE(review): file handle is never explicitly closed
| true |
c3e627c75d19faae88522c1a8fa8f68f9d28ccd6 | Python | linkel/algorithm-design-manual | /Chapter5_GraphTraversal/5-16_independent_set.py | UTF-8 | 4,066 | 3.6875 | 4 | [] | no_license | class Node:
def __init__(self, val):
self.val = val
self.left = None
self.right = None
# 1
# 2 3
# 6
# 7 8
#
#
l = [1,2,3,None,None,None,6,7,8]
def generate_tree_from_list(values):
    """Rebuild a binary tree from its list encoding.

    A Node is created for every non-None entry; children are then handed
    out in list order, with None placeholders consuming child slots.
    Returns the root Node, or None for an empty list.
    """
    if not values:
        return None
    nodes = [Node(v) if v is not None else None for v in values]
    feed = nodes[::-1]  # stack whose pop() yields entries in list order
    root = feed.pop()
    for parent in nodes:
        if not parent:
            continue
        if feed:
            parent.left = feed.pop()
        if feed:
            parent.right = feed.pop()
    return root
ex_tree = generate_tree_from_list(l)
# a) Give an algorithm to generate a max-size independent set if G is a tree.
# I think for a tree we can pick all the leaves, then exclude them and their parents and pick leaves again.
def independent_set_tree(root):
    """Greedy max-size independent set of a binary tree.

    Picks every leaf, then, walking upward, picks any node none of whose
    children was picked.  Returns the picked values in visit order.
    """
    picked_vals = []

    def visit(node):
        # Returns True exactly when *node* joins the independent set.
        if not node:
            return False
        if not (node.left or node.right):
            # Leaves are always safe to pick.
            picked_vals.append(node.val)
            return True
        left_taken = visit(node.left)
        right_taken = visit(node.right)
        if left_taken or right_taken:
            return False  # adjacent to a picked child: must skip
        picked_vals.append(node.val)
        return True

    visit(root)
    return picked_vals
print(independent_set_tree(ex_tree))
# b) Let G = (V, E) be a tree with weights associated with the vertices such that
# the weight of each vertex is equal to the degree of that vertex. Give an efficient
# algorithm to find a maximum independent set of G.
# What we did for the minimum weight vertex cover of G last time will work too, if this question wants a maximum weight independent set AND the weight of each vertex is equal to the degree.
# We'll pick a level and alternate from it.
# The wording's kinda bad. If it wants a maximum size independent set then it's the same as before without the weights.
# c) Let G = (V, E) be a tree with arbitrary weights associated with the vertices.
# Give an efficient algorithm to find a maximum independent set of G.
# 2
# 5 4
# 2 40 6
# 10 2 4 13
# 6
# It should pick [10,2,6,40,2,6]
# Weighted sample tree for part (c), matching the sketch above.
another = [2,5,4,2,40,None,6,10,2,4,13, None, None, None, None, None, None, None, 6]
ex_nother = generate_tree_from_list(another)
# When we pick the current node, we can get current node's value plus that of the children's nonpicked value.
# When we don't pick the current node, we can get either the children's picked value OR the children's nonpicked value. So grab the best out of that.
class Result:
    """DP record for one subtree: best (sum, path) with and without its root."""

    def __init__(self, picked_sum, picked_path, unpicked_sum, unpicked_path):
        # Best total and node list when the subtree root IS in the set.
        self.picked_sum, self.picked_path = picked_sum, picked_path
        # Best total and node list when the subtree root is NOT in the set.
        self.unpicked_sum, self.unpicked_path = unpicked_sum, unpicked_path
def max_weight_indep_set(root):
    """Return the values of a maximum-weight independent set of the tree.

    Classic tree DP: each subtree reports its best (sum, path) both when
    its root is picked (children must then be unpicked) and when it is
    not (each child contributes whichever of its two options is heavier).

    Fix: removed the stray ``print(left.unpicked_sum)`` debug line that
    spammed stdout once per node.
    """
    def helper(node):
        if not node:
            return Result(0, [], 0, [])
        left = helper(node.left)
        right = helper(node.right)
        # When the current node is skipped, each subtree contributes its
        # better option (picked or unpicked).
        if left.unpicked_sum > left.picked_sum:
            left_path_if_curr_unpicked = left.unpicked_path
        else:
            left_path_if_curr_unpicked = left.picked_path
        if right.unpicked_sum > right.picked_sum:
            right_path_if_curr_unpicked = right.unpicked_path
        else:
            right_path_if_curr_unpicked = right.picked_path
        return Result(
            node.val + left.unpicked_sum + right.unpicked_sum,
            [node.val] + left.unpicked_path + right.unpicked_path,
            max(left.unpicked_sum, left.picked_sum) + max(right.unpicked_sum, right.picked_sum),
            left_path_if_curr_unpicked + right_path_if_curr_unpicked)
    final = helper(root)
    if final.picked_sum > final.unpicked_sum:
        return final.picked_path
    else:
        return final.unpicked_path
print(max_weight_indep_set(ex_nother)) | true |
bbb8a81e244d21a84bbff9ce0203a74771a48b38 | Python | HwangDongJun/Study_Python-inflearn- | /pandas/study_pandas.py | UTF-8 | 1,403 | 3.921875 | 4 | [] | no_license | import pandas as pd #엑셀파일을 좀 더 자유롭게 다루기 위해서 사용한다.
#as pd는 pandas를 이제부터 pd라는 이름으로 부르겠다는 의미이다.
df = pd.DataFrame([[1, 2, 3], [4, 5, 6]]) #이 방법이 엑셀파일에서 1 2 3
#4 5 6 이렇게 입력과 똑같은 것이다.
#아래의 방법을 위에서 바로 할 수 있는데, 그 방법은 df = pandas.DataFrame([[1, 2, 3], [4, 5, 6]],
# columns=['aa', 'bb', 'cc'],
# index=['x', 'y']
# ) 와 같은 방법으로 가능하다.
df.columns = ['aa', 'bb', 'cc'] #행의 0 1 2 의 이름을 정해서 바꾸어 준다.
df.index = ['x', 'y'] #이번엔 첫번째 열의 이름을 바꾸는 방법
df['dd'] = df['aa'] - df['bb'] #새로운 열을 만들어서 그 열의 들어갈 값을 정해주는 방법이다.
df = df.append(df.sum(), ignore_index=True) #df.append로 행의 추가를 할 수 있으며, ignore_index=True로 인해 첫번째 aa~행은 무시하게 된다.
#sum()으로 인해 더한 값이 출력이 된다.
df.index = ['x', 'y', 'sum'] #원래의 값을 잃어버렸기 때문에 다시 설정한다.
print(df) | true |
e867e5de620b2b51153a7ad73b4819e2bcaa817c | Python | Kartik1801/VOCA-Vocabulary-Building-Tool | /article_scrap.py | UTF-8 | 1,536 | 3.28125 | 3 | [] | no_license | #following piece of code extracts article from brief section of TOI(Times of India)
from tts import t2s
from newspaper import Article
from bs4 import BeautifulSoup
import requests
import random
#to get link on news article from briefs section of toi and store all the brief article's link in list
def get_random_article():
    """Scrape the TOI briefs page and return one random brief as a parsed Article.

    NOTE(review): if no 'brief_box' div with a link is found, `links` is
    empty and random.choice raises IndexError -- confirm acceptable.
    """
    link=requests.get("https://timesofindia.indiatimes.com/briefs")
    soup=BeautifulSoup(link.content,'html.parser')
    #res=soup.find(id='content')
    #briefs = res.find_all('div', class_='brief_box')
    # Collect every brief's relative article URL (note: the loop variable
    # reuses and shadows the `link` response object above).
    links = []
    for link in soup.find_all('div',attrs={"class":"brief_box"}):
        if(link.find('a')):
            links.append(link.find('a')['href'])
    l=random.choice(links)
    #A new article from TOI
    url = "https://timesofindia.indiatimes.com"+l
    #For different language newspaper refer above table
    toi_article = Article(url, language="en") # en for English
    #To download the article
    toi_article.download()
    #To parse the article
    toi_article.parse()
    #To perform natural language processing ie..nlp:: yet to study and implement
    #toi_article.nlp()
    #extracts text
    return toi_article
def get_article_title(toi_article):
    """Return the headline of a downloaded/parsed Article."""
    return toi_article.title
def get_article_text(toi_article):
    """Return the body text of a downloaded/parsed Article."""
    return toi_article.text
#art=get_random_article()
#text=get_article_text(art)
#title=get_article_title(art)
# print(type(art))
# print(title.strip('\n'))
# print(text.strip('\n'))
# t2s(text.strip('\n')) | true |
f80f515f0161ed23997b55a277acb5ad4366657f | Python | everjoey/pygraph | /tsp.py | UTF-8 | 725 | 3.078125 | 3 | [] | no_license | #!/usr/bin/env python3
from .graph import Node
from .graph import Edge
from .graph import Graph
import itertools
def brute_force(graph):
    """Exhaustively search every Hamiltonian cycle of a complete graph.

    Walks all permutations of the node names, closing each tour back to
    its first node, and returns ``(min_path_weight, min_path)``.

    Fix: removed the two per-candidate debug prints -- with n nodes they
    wrote O(n!) lines to stdout and dominated the runtime.
    """
    min_path_weight = float('inf')
    min_path = None  # defensive init (permutations always yields >= 1 tuple)
    for path in itertools.permutations(graph.nodes.keys(), len(graph.nodes)):
        path_weight = 0
        for i, u_name in enumerate(path):
            # Close the cycle: the last hop returns to the starting node.
            v_name = path[0] if i == len(path) - 1 else path[i + 1]
            path_weight += graph.edges[(u_name, v_name)].weight
        if path_weight < min_path_weight:
            min_path_weight = path_weight
            min_path = path
    return min_path_weight, min_path
if __name__ == '__main__':
    # Demo: exhaustive TSP on a random complete graph.  15 nodes means
    # 15! permutations -- expect a very long run.
    from . import generator
    import cProfile  # NOTE(review): imported but never invoked here
    g = generator.complete_graph(15, False, (0,10))
    pw = brute_force(g)
    print('min', pw)
| true |
0bd46ff843916f2abe850efed963efcb27b8a782 | Python | podhmo/individual-sandbox | /daily/20190709/example_autolprofile/99wrap_decorator.py | UTF-8 | 1,801 | 2.625 | 3 | [] | no_license | import sys
import logging
from pycomment.parse import parse_string, node_name, PyTreeVisitor
from lib2to3.pgen2 import token
from lib2to3 import fixer_util as u
# todo: with lineno
class Visitor(PyTreeVisitor):
    """Collects every return_stmt node seen during tree traversal."""
    def __init__(self):
        self.r = []  # matched return_stmt nodes, in visit order
    def visit_return_stmt(self, node):
        # Invoked by PyTreeVisitor dispatch for each `return` statement.
        self.r.append(node)
logging.basicConfig(level=logging.DEBUG)
def run(t):
    """For each `return` statement in tree *t*, yield its enclosing funcdefs.

    Each yielded list is ordered innermost-first, gathered by following
    parent links from the return statement up to the tree root.
    """
    collector = Visitor()
    collector.visit(t)
    for ret_node in collector.r:
        cursor = ret_node
        enclosing = []
        while cursor:
            if node_name(cursor) == "funcdef":
                enclosing.append(cursor)
            elif cursor.parent is None:
                break
            cursor = cursor.parent
        yield enclosing
def lineno(x):
    """Sort-key helper: the node's source line number."""
    return x.get_lineno()
# Demo 1: decorate by overwriting each funcdef's prefix with "@profile\n".
code = """
@profile
def f(x):
    return x + 1
"""

# Sample parser trace emitted at DEBUG level:
# DEBUG:pycomment.parse: visit_decorator (prefix='\n')
# DEBUG:pycomment.parse: visit_AT (prefix='\n')
# DEBUG:pycomment.parse: visit_NAME (prefix='')
# DEBUG:pycomment.parse: visit_NEWLINE (prefix='')
t = parse_string(code)
for defs in run(t):
    # NOTE(review): overwriting prefix discards whatever whitespace /
    # comments preceded the funcdef.
    for node in defs:
        node.prefix = "@profile\n"
print(t)
print("----------------------------------------", file=sys.stderr)

# Demo 2 input: nested functions, decorated via node insertion below.
code = """
def f(x):
    def g(y):
        return y + y
    return g(x + 1)
"""
def Decorator(name):
    """Build a lib2to3 `@name` decorator node (AT, NAME, NEWLINE)."""
    # 278 is the lib2to3 grammar symbol number for `decorator` -- TODO
    # confirm against the grammar in use.
    at_sign = u.Leaf(token.AT, "@", prefix=None)
    children = [at_sign, u.Name(name), u.Newline()]
    return u.Node(278, children)
def insert_before(node, new_node):
    """Insert *new_node* immediately before *node* among its siblings.

    Returns True on success, False when *node* is not found in its
    parent's child list.
    """
    parent = node.parent
    for position, sibling in enumerate(parent.children):
        if sibling == node:
            parent.insert_child(position, new_node)
            return True
    return False
# Demo 2: decorate only the innermost funcdef enclosing the FIRST return
# (defs[0] is innermost-first; `break` stops after the first return).
t = parse_string(code)
for defs in run(t):
    insert_before(defs[0], Decorator("profile"))
    if not defs[0].prefix:
        # Preserve indentation when the funcdef had no prefix of its own.
        defs[0].prefix = u.find_indentation(defs[0])
    break
print(t)
| true |
c4de71a83842ad1f2913649f9e6e47172e515719 | Python | OScott19/TheMulQuaBio | /archived/silbiocomp/Practicals/Code/LongPrac/Code/Thermal_Response_Fits_skel1.py | UTF-8 | 2,276 | 2.78125 | 3 | [
"CC-BY-3.0",
"MIT"
] | permissive | #!/usr/bin/env python
""" Perform NLLS fitting of Phytoplankton performance curves... """
# You should figure out the imports you need!
#####################################################
# S C H O O L F I E L D F U N C T I O N S #
#####################################################
def schoolf(params, temps, traits):
"""Schoolfield model, to be called by schoolfield_model()"""
B0 = params['B0'].value
E = params['E'].value
E_D = params['E_D'].value
T_pk = params['T_pk'].value
model = B0 * np.exp(-E * ((1/(K*temps)) - (1/(K*283.15)))) \ ...?
model_log = np.array(map(np.log, model), dtype=np.float64)
return model_log - np.log(traits)
def schoolfield_model(temps, B0_start, E_start, T_pk_start, E_D_start, traits):
"""NLLS fitting to the Schoolfield model; this function will
contain the lmfit.minimize calls to the schoolf() function. This is
where you can constrain the parameters."""
params = Parameters()
params.add('B0', value = B0_start)
params.add('E', value = E_start, min = ?, max = ?)
params.add('E_D', value = E_D_start, min = ?, max = ?)
params.add('T_pk', value = T_pk_start, min = ?, max = ?)
results_sf = minimize(schoolf, params, args=(temps, traits))
return results_sf
########################
# F O R M A T D A T A #
########################
for u in UniqueID:
# Get the subset of the data for this ID, in appropriate format:
tmp_Datasubset = []
for i in original_dataset:
if i[0] == u:
tmp_Datasubset.append(i)
else:
pass
# Extract the starting values for the NLLS fitting:
T_pk_start = float(tmp_Datasubset[1][10])
B0_start = float(tmp_Datasubset[1][11])
E_start = float(tmp_Datasubset[1][12])
E_D_start = float(tmp_Datasubset[1][13])
# Extract temperature and trait data
temps = []
traits = []
for i in tmp_Datasubset:
temps.append(float(i[5]))
traits.append(float(i[6]))
# Convert list to array
temps = np.array(temps)
traits = np.array(traits)
###########################################
# F I T S C H O O L F I E L D M O D E L #
###########################################
try:
# Fit model
Schoolf_fit = schoolfield_model(temps, B0_start, E_start, T_pk_start, E_D_start, traits)
| true |
d9237f973a3be3560b9f5a21897c44dcc029ea19 | Python | Jagrut/Python_code | /common/functions.py | UTF-8 | 1,107 | 3.046875 | 3 | [] | no_license | import os
import yaml
from os.path import join
from common.constants import CONFIG_FILE_PATH
COMMON_BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_config(appliance, param, yaml_file_path=join(COMMON_BASE_DIR, CONFIG_FILE_PATH)):
    """This function gives the yaml value corresponding to the parameter

    sample Yaml file
    platforma_details:
        xtm_host: 10.100.26.90

    :param appliance: The header name as mentioned in the yaml file (ex:platforma_details)
    :param param: The parameter name who's value is to be determined (ex: xtm_host)
    :param yaml_file_path: Path of yaml file, Default will the config.yaml file
        (note: the default is evaluated once, at import time)
    :return: value corresponding to the parameter in yaml file
    :except: ValueError when the parameter's value is an empty string
    """
    with open(yaml_file_path, 'r') as f:
        # safe_load constructs only plain Python data types; bare
        # yaml.load without a Loader is deprecated and can instantiate
        # arbitrary objects from the config file.
        doc = yaml.safe_load(f)
        param_value = doc[appliance][param]
    if param_value == "":
        message = 'Value is not updated for the parameter:{} in the yaml config file'\
            .format(param)
        raise ValueError(message)
    return param_value
| true |
e849f29c2c10e633f0b79b8a143052b28634eef6 | Python | linamy85/md2016 | /hw3/test1src/step0.py | UTF-8 | 208 | 2.578125 | 3 | [] | no_license | import sys
if __name__ == '__main__':
    # Emit a libsvm-style dataset to argv[1]: 50,000 x 5,000 = 250M lines
    # of "0 i:1 j:1", with first-feature ids 0..49999 and second-feature
    # ids offset into 50000..54999.  Produces a very large file.
    with open(sys.argv[1], 'w') as file:
        for i in range(50000):
            for j in range(5000):
                file.write("0 %d:1 %d:1\n" % (i, j + 50000))
| true |
dd8328bffc3c2b724e6be9ad7580cd6c97b50d53 | Python | gaurav-kaushik/BostonBikr | /app/BostonBikr.py | UTF-8 | 19,137 | 3.171875 | 3 | [
"MIT"
] | permissive | # -*- coding: utf-8 -*-
"""
BostonBikr.py will allow the user to import a map (from a SQL database or local files) and run path-finding calculations on it.
"""
from math import sin, cos, sqrt, atan2, acos, radians
from Queue import Queue
from sets import Set
#import pymysql as mdb
import pickle
import networkx as nx
import matplotlib.pyplot as plt
from random import randint, sample, choice
import operator
from geneticAlgorithm import geneticPath
from geojson import Feature, Point, FeatureCollection
"""
DEFINE CONSTANTS
R = radius of Earth
meter* = estimate for Boston geocodes
"""
R = 6373000        # Earth radius in meters (spherical approximation)
maxVal = 999999.9  # sentinel "infinity" for distance comparisons
# Meters-per-degree scale factors near Boston.  NOTE(review): the values
# look swapped relative to the names (~82 km/deg is the LONGITUDE scale
# at ~42N; ~111 km/deg is the LATITUDE scale) -- confirm before reuse.
meterPerLat = 82190.6
meterPerLng = 111230
"""
DISTANCE CALCULATION FUNCTIONS
"""
def cor2ID(cor):
    """Convert an indexable coordinate pair into a hashable node key tuple."""
    return (cor[0], cor[1])
def distanceCal4par(lon1, lat1, lon2, lat2):
    """Haversine great-circle distance in meters between two points.

    Inputs are degrees; R is the module-level Earth radius.
    """
    lon1 = radians(lon1)
    lat1 = radians(lat1)
    lon2 = radians(lon2)
    lat2 = radians(lat2)
    half_dlon = (lon2 - lon1) / 2
    half_dlat = (lat2 - lat1) / 2
    a = (sin(half_dlat)) ** 2 + cos(lat1) * cos(lat2) * (sin(half_dlon)) ** 2
    # Central angle, then arc length.
    return R * (2 * atan2(sqrt(a), sqrt(1 - a)))
def distanceCal(cor1, cor2):
    """Distance in meters between two (lon, lat) coordinate pairs."""
    return distanceCal4par(cor1[0], cor1[1], cor2[0], cor2[1])
def calPathDisSlow(linCor):
    """Recompute a path's total length from its raw coordinate list.

    One haversine evaluation per edge; prefer Graph.calPathDis when the
    per-edge lengths are already cached on the graph.

    Fix: removed the unconditional debug print that fired on every call
    inside scoring loops.
    """
    pathLen = 0
    for idx in xrange(1, len(linCor)):
        pathLen += distanceCal(linCor[idx], linCor[idx - 1])
    return pathLen
def lenCal(vec):
    """Euclidean length of a 2-D (dx, dy) vector."""
    return sqrt(vec[0] ** 2 + vec[1] ** 2)
def directionalVec(u, v):
    """Unit direction vector in meter-scaled x/y between two geo points.

    NOTE(review): the components are u - v, i.e. the vector points from
    v toward u, despite the original comment saying "from pt u to pt v"
    -- confirm intended orientation.
    """
    dx = (u[0] - v[0]) * meterPerLng
    dy = (u[1] - v[1]) * meterPerLat
    norm = lenCal((dx, dy))
    return (dx / norm, dy / norm)
def innerProduct(u, v):
    """Dot product of two 2-D vectors (callers pass unit vectors)."""
    return u[0] * v[0] + u[1] * v[1]
###
#Scoring function
###
def distScore(curDis, targetDis):
    """Quadratic penalty for missing the target distance, normalized by it."""
    return (curDis - targetDis) ** 2 / targetDis ** 2
#p is onePath <type list>
def turnScore(p):
    """Sum of turn angles (radians) along path p: straighter is cheaper."""
    score = 0
    if len(p) < 3:
        return score  # fewer than two segments: no turns possible
    for i in xrange(len(p) - 2):
        heading_in = directionalVec(p[i], p[i + 1])
        heading_out = directionalVec(p[i + 1], p[i + 2])
        cosine = innerProduct(heading_in, heading_out)
        cosine = min(1, max(cosine, -1))  # clamp rounding drift before acos
        score = score + acos(cosine)
    return score
#p is onePath <type list>
def repScore(p, curDis):
    """Penalty for re-riding edges, as repeated meters over total meters.

    Both orientations of every traversed edge are recorded, so revisits
    are caught regardless of direction.  NOTE(review): a repeated edge
    trips both the forward and reverse membership tests, so each
    repetition is penalized twice -- confirm that weighting is intended.
    """
    score = 0
    edgeSet = Set()
    for idx in xrange(1, len(p)):
        forward = (p[idx - 1], p[idx])
        backward = (p[idx], p[idx - 1])
        if forward in edgeSet:
            score += distanceCal(p[idx], p[idx - 1])
        else:
            edgeSet.add(forward)
        if backward in edgeSet:
            score += distanceCal(p[idx], p[idx - 1])
        else:
            edgeSet.add(backward)
    return score / curDis
#p is onePath <type list>, curDis targDist double
def totScoreCal(path, curDis, targetDis):
    """Weighted sum of the three route penalties.

    The weights were chosen so penalties from the different sources have
    comparable variance.
    """
    turnRatio = 0.02
    disRatio = 10
    repRatio = 10
    tScore = turnScore(path)
    dScore = distScore(curDis, targetDis)
    rScore = repScore(path, curDis)
    return turnRatio * tScore + disRatio * dScore + repRatio * rScore
"""
STAY CLASSY
Here's where we define Vertex and its metaclass Graph.
We'll use methods in these classes to generate a clean map and more!
"""
class Vertex:
    """A road-network node keyed by its (lon, lat) tuple.

    `connectedTo` maps each neighbor's (lon, lat) key to a mutable
    [distance, score] pair.  Attribute names are part of the interface
    (Graph reads them directly), so they are preserved.
    """

    def __init__(self, cor):
        self.id = cor           # (lon, lat) key
        self.connectedTo = {}   # neighbor key -> [dist, score]

    def addNeighbor(self, nbrID, dist=0, score=1):
        """Attach (or overwrite) the edge toward nbrID with its metadata."""
        self.connectedTo[nbrID] = [dist, score]

    def __str__(self):
        # Human-readable dump: id followed by each neighbor's d/s values.
        parts = [str(self.id) + ' connectedTo: ']
        for nbr in self.connectedTo:
            metadata = self.connectedTo[nbr]
            parts.append(str(nbr) + ' d=' + str(metadata[0]))
            parts.append(', s=' + str(metadata[1]) + '; ')
        return ''.join(parts)

    def getConnections(self):
        """All neighbor keys."""
        return self.connectedTo.keys()

    def neighborNumber(self):
        """Vertex degree."""
        return len(self.connectedTo)

    def getID(self):
        return self.id

    def getLon(self):
        return self.id[0]

    def getLat(self):
        return self.id[1]

    def getLength(self, nbrID):
        """Edge length toward nbrID."""
        return self.connectedTo[nbrID][0]

    def getScore(self, nbrID):
        """Edge score toward nbrID."""
        return self.connectedTo[nbrID][1]
class Graph(Vertex):
    """Adjacency-map graph of road intersections keyed by (lon, lat).

    Maintains running vertex/edge counters; edges are stored in both
    directions unless oneWay is requested.  NOTE(review): written for
    Python 2 (removeMiddlePt indexes .keys(); xrange; print statement).
    """
    def __init__(self):
        self.vertList = {}    # (lon, lat) key -> Vertex
        self.numVertices = 0
        self.numEdges= 0
    def recountVandE(self):
        """Rebuild the counters from vertList (edges counted per direction)."""
        self.numVertices = 0
        self.numEdges = 0
        for u in self.getVertices():
            self.numVertices += 1
            self.numEdges += len(self.vertList[u].getConnections())
    def addVertex(self, v):
        """Create, register, and return a Vertex for key v."""
        self.numVertices += 1
        newVertex = Vertex(v)
        self.vertList[v] = newVertex
        return newVertex
    def getVertex(self,n):
        """The Vertex for key n, or None when absent."""
        if n in self.vertList:
            return self.vertList[n]
        else:
            return None
    def __contains__(self,n):
        return n in self.vertList
    #note that f, t are tuples cor(lon, lat) here
    def addEdge(self, f, t, dist=0, score=1, oneWay=False):
        """Add edge f->t (and t->f unless oneWay), creating missing vertices."""
        if f not in self.vertList:
            nv = self.addVertex(f)
        if t not in self.vertList[f].getConnections():
            self.numEdges += 1
            self.vertList[f].addNeighbor(t, dist, score)
        if not oneWay:
            if t not in self.vertList:
                nv = self.addVertex(t)
            if f not in self.vertList[t].getConnections():
                self.numEdges += 1
                self.vertList[t].addNeighbor(f, dist, score)
    def getVertices(self):
        """All vertex keys."""
        return self.vertList.keys()
    def __str__(self):
        # Prints every vertex as a side effect; returns '' so str() is a str.
        for v in self.vertList:
            print self.vertList[v]
        return ''
    def removeVertex(self, delVID):
        """Drop a vertex and its outgoing edges (incoming edges are NOT removed)."""
        if delVID in self.vertList:
            self.numVertices -= 1
            self.numEdges -= len(self.vertList[delVID].getConnections())
            del self.vertList[delVID]
    #Note this only delete the edge from u to v, not vice versa
    def removeEdge(self, u, v):
        """Remove directed edge u->v; returns its (dist, score), (-1, 0)
        when the edge is absent, or None when u itself is unknown."""
        if u in self.vertList:
            if v in self.vertList[u].getConnections():
                self.numEdges -= 1
                [dis, score] = self.vertList[u].connectedTo[v]
                del self.vertList[u].connectedTo[v]
                return (dis, score)
            else:
                return (-1, 0)
    #This function remove the middle point u and concatenate its
    #in and out edge
    def removeMiddlePt(self, u):
        """Splice out a degree-2 vertex, joining its two neighbors directly.

        NOTE(review): indexes getConnections() (a list only on Python 2)
        and assumes u has exactly two neighbors.
        """
        twoNeighbors = self.vertList[u].getConnections()
        for v in twoNeighbors:
            self.removeEdge(v, u)
        self.addEdge(twoNeighbors[0], twoNeighbors[1])
        self.removeVertex(u)
    #combine all nodes in the combineSet, return their COM combined newNode
    def combine(self, combineSet):
        """Merge every node in combineSet into one node at their centroid,
        rewiring edges that cross the set boundary; returns the new key."""
        x=0
        y=0
        for u in combineSet:
            x+=u[0]
            y+=u[1]
        newND = (x/len(combineSet), y/len(combineSet))
        self.addVertex(newND)
        for u in combineSet:
            for nb in self.vertList[u].getConnections():
                if nb not in combineSet:
                    self.removeEdge(nb, u)
                    self.addEdge(nb, newND)
                    self.addEdge(newND, nb)
            self.removeVertex(u)
        return newND
    def calPathDis(self, path):
        #Calculate the tot dis of entire path from preCalDist
        # (reads the cached per-edge lengths instead of re-running haversine)
        pathLen = 0
        for idx in xrange(1,len(path)):
            fNode = path[idx-1]
            tNode = path[idx]
            pathLen += self.vertList[fNode].connectedTo[tNode][0]
        return pathLen
    def findNearestNode(self, lookUpNode, NNnode):
        """Find the closest graph node to the geocoded location.

        Mutates NNnode (a 2-element list) in place with the winning
        coordinates and returns the minimum distance in meters.
        """
        minDist = maxVal
        for node in self.vertList:
            curDist = distanceCal(node, lookUpNode)
            if curDist < minDist:
                minDist = curDist
                NNnode[1] = node[1]
                NNnode[0] = node[0]
        return minDist
    def __iter__(self):
        return iter(self.vertList.values())
    def ccBFS(self, startN, visited, conComponent):
        """Breadth-first walk of startN's connected component.

        Counts the component's vertices (conComponent['ct']) and records
        them in conComponent['conND'] / visited, so isolated islands can
        be identified and removed by the caller.
        """
        visited.add(startN)
        conComponent['conND'].add(startN)
        conComponent['ct']+=1
        BFSqueue = Queue()
        BFSqueue.put(startN)
        while not BFSqueue.empty():
            nd = BFSqueue.get()
            if nd in self.vertList:
                for conND in self.vertList[nd].getConnections():
                    if conND not in conComponent['conND']:
                        visited.add(conND)
                        conComponent['conND'].add(conND)
                        conComponent['ct']+=1
                        BFSqueue.put(conND)
"""
WEB STUFF!
Here, we have methods to:
1. convert an 'address' <type string> to a geocoordinate (GeoCode)
2. take a 'geoItem' and turn it into a geoJSON type (GeoJsonify)
3. define a boundary and check that our coordinates are within it (inBounds)
4. build a dictionary from a set (buildDict)
5. create a 'MiniWorld' map, in which we query our sql database for a subset
of geolocation data for pathfinding (createMiniWorld)
6. put it all together! (PathTestMashUp)
"""
def GeoCode(address):
    """Geocode a street address via the Google Maps Geocoding API.

    Returns (longitude, latitude), or (None, None) when Google reports
    ZERO_RESULTS.  gAPI_key is expected at module scope -- TODO confirm.
    NOTE(review): other error statuses (e.g. OVER_QUERY_LIMIT) leave
    'results' empty and raise IndexError.

    BUGFIX: the URL template has two placeholders, but the original
    called .format(quote(address, gAPI_key)) -- passing the API key as
    quote()'s `safe` argument and leaving the key slot unfilled, which
    raised IndexError on every call.
    """
    import json
    from urllib2 import urlopen
    from urllib import quote
    # encode address query into URL
    url = 'https://maps.googleapis.com/maps/api/geocode/json?address={}&sensor=false&key={}'.format(quote(address), gAPI_key)
    # call API and extract json
    print('Calling Google for the following address: ' + address)
    jData = urlopen(url).read()
    jData = json.loads(jData.decode('utf-8'))
    # extract coordinates (latitude, longitude)
    if jData.get('status') == 'ZERO_RESULTS':
        latitude, longitude = None, None
        print('The following address was not found: ' + address)
    else:
        # 'location' holds {'lat': ..., 'lng': ...}; sorting the items by
        # key yields (lat, lng) order.
        latitude, longitude = (value for _, value in sorted(jData.get('results')[0].get('geometry').get('location').items()))
        print('Your location is at the following coordinates: {:f}, {:f}'.format(longitude, latitude))
    return (longitude, latitude)
def GeoJsonify(geoItem):
    """Wrap a route or point as a plain-dict GeoJSON Feature.

    Lists become purple-stroked LineStrings; tuples become Points with
    their two components swapped (the tuple is presumably (lat, lon) --
    TODO confirm).  Any other type raises UnboundLocalError, matching
    the original behavior.
    """
    if isinstance(geoItem, list):
        geometry = {
            'type': 'LineString',
            'coordinates': geoItem,
        }
        geoJSON = {
            'type': 'Feature',
            'properties': {'stroke': '#914791'},
            'geometry': geometry,
        }
    elif isinstance(geoItem, tuple):
        geometry = {
            'type': 'Point',
            'coordinates': [geoItem[1], geoItem[0]],
        }
        geoJSON = {
            'type': 'Feature',
            'geometry': geometry,
        }
    return geoJSON
def GeoJsonifyMarkers(markerList):
    """GeoJSON Features for POI markers; each m is [title, lon, lat].

    Every marker is purple and links to a Google search for its title
    (NOTE(review): the title is not URL-encoded).
    """
    features_list = []
    for m in markerList:
        title = str(m[0])
        m_url = 'https://www.google.com/search?espv=2&biw=1600&bih=791&site=webhp&q=' + title
        props = {'title': title, 'marker-color': '#751975', 'url': m_url}
        features_list.append(Feature(geometry=Point(tuple(m[1:])), properties=props))
    return features_list
def GeoJsonifyEndpoints(start, end):
    """Build GeoJSON point Features for the route start (green) and end (red)."""
    base = 'https://www.google.com/search?espv=2&biw=1600&bih=791&site=webhp&q='
    def endpoint_feature(pt, color):
        # pt is [title, x, y]; the title doubles as the search query
        props = {'title': str(pt[0]), 'marker-color': color, 'url': base + str(pt[0])}
        return Feature(geometry=Point(tuple(pt[1:])), properties=props)
    return endpoint_feature(start, '#47D147'), endpoint_feature(end, '#FF3300')
def inBounds(node, bounds):
    """Return True iff node lies strictly inside bounds = [[minX, minY], [maxX, maxY]]."""
    (min_x, min_y), (max_x, max_y) = bounds[0], bounds[1]
    lat_ok = min_y < node.getLat() < max_y
    lon_ok = min_x < node.getLon() < max_x
    return lat_ok and lon_ok
def buildDict(vSet, gDict):
    """Initialise gDict with an 'infinite' distance and no predecessor per vertex."""
    for vertex in vSet:
        gDict[vertex.getID()] = {'Dist': maxVal, 'pred': None}
    return
def getMapBoundary():
    """Return the mini-world bounding box loaded from the bundled pickle.

    Toy version only: the real implementation derives the bounds from
    the edge database instead of a static file.
    """
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open("./static/bostonMetroArea_bounds.p", "rb") as fh:
        return pickle.load(fh)
def findNearestNodeNX(graph, lookUpNode):
    """Return (distance, node) for the graph node closest to lookUpNode."""
    best_dist = maxVal
    for candidate in graph.nodes():
        d = distanceCal(candidate, lookUpNode)
        if d < best_dist:
            best_dist = d
            best_node = candidate
    return best_dist, best_node
def miniGraph2NX(miniGraph):
    """Convert the custom miniGraph into a weighted networkx Graph.

    Returns (nxG, nodes) where nodes maps each vertex key to itself.
    (Original author's TODO: build the NX graph straight from SQL.)
    """
    vertex_list = miniGraph.vertList
    node_keys = vertex_list.keys()
    nodes = dict(zip(node_keys, node_keys))
    nxG = nx.Graph()
    for node in nodes:
        nxG.add_node(node)
        for neighbor in vertex_list[node].getConnections():
            # edge weight is the geometric distance between endpoints
            nxG.add_edge(node, neighbor, weight=distanceCal(node, neighbor))
    return nxG, nodes
def nxPlot(nxGraph, nxPos):
    # Debug helper: draw the whole graph at the given node positions and
    # block until the plot window is closed.
    plt.figure(1, figsize=(12,12))
    nx.draw(nxGraph, pos=nxPos, node_size=5)
    plt.show()
def nxShortestPath(nxGraph, nxPos, startPt, endPt, Dijk=0):
    # Shortest path between startPt and endPt.
    #   Dijk=0 -> unweighted shortest path (hop count as score)
    #   Dijk=1 -> Dijkstra on the 'weight' edge attribute
    # Returns (path node list, subgraph induced by the path, score, dist).
    # NOTE(review): 'dist' is queried with weight='distance', but edges built
    # in miniGraph2NX only carry a 'weight' attribute -- confirm whether a
    # 'distance' attribute exists elsewhere or this silently counts hops.
    if Dijk == 0:
        nxList = nx.shortest_path(nxGraph, source=startPt, target=endPt)
        score = nx.shortest_path_length(nxGraph, source=startPt, target=endPt)
        dist = nx.shortest_path_length(nxGraph, source=startPt, target=endPt, weight='distance')
    elif Dijk == 1:
        nxList = nx.dijkstra_path(nxGraph, source=startPt, target=endPt, weight='weight')
        score = nx.dijkstra_path_length(nxGraph, source=startPt, target=endPt, weight='weight')
        dist = nx.dijkstra_path_length(nxGraph, source=startPt, target=endPt, weight='distance')
    nxH = nx.subgraph(nxGraph, nxList)
    return nxList, nxH, score, dist
def getRealPathLength(myPath):
    """Sum the geometric length of all consecutive segments along myPath."""
    segment_lengths = [distanceCal(a, b) for a, b in zip(myPath, myPath[1:])]
    pathLength = sum(segment_lengths)
    return pathLength
def plotPath(fullGraph, pathGraph, nodePos):
    # Overlay the found path (thick red) on top of the full graph.
    nxGraph = fullGraph
    nxH = pathGraph
    nxPos = nodePos
    fig = plt.figure(figsize=(16,16))
    ax = fig.add_subplot(111)
    nx.draw(nxGraph, pos=nxPos, node_size=2)
    nx.draw(nxH, pos=nxPos, node_size=40, width=5, edge_color='r')
    ax.plot()
def PathTestMashUp(startPt, endPt, runDis=3):
    """
    WHERE THE MAGIC HAPPENS!
    The website will call this function.

    Geocodes startPt/endPt, snaps them to the nearest graph nodes, runs the
    path finders and packages the result (bounds, endpoints, path, markers,
    message) as a dict of GeoJSON-ready objects.

    :param startPt: start address string
    :param endPt: end address string
    :param runDis: desired run distance in km (default 3)
    """
    ## Load up your necessary variables
    # In this toy version, you can load this pickle from the Static folder
    # The real version calls edges from a database upon each query and rebuilds the map around your start and end coordinates
    nxGraph = pickle.load(open("./static/bostonMetroArea_Weighted_Locs.p", "rb"))
    nxPos = pickle.load(open("./static/bostonMetroArea_pos.p", "rb"))
    targetDis = runDis*1000+1 # convert km to m
    # Use the Google to find geolocation ,type tuple> for your start/endPt <type string>
    startCor = GeoCode(startPt)
    endCor = GeoCode(endPt)
    startDist, startNode = findNearestNodeNX(nxGraph, startCor)
    endDist, endNode = findNearestNodeNX(nxGraph, endCor)
    # to prevent crashes, shift one node slightly to a neighbor
    if startNode == endNode:
        endNode = nx.neighbors(nxGraph, endNode)[0]
    print 'The closest node found to startPt is {} from dist {}'.format(startNode, startDist)
    print 'The closest node found to endPt is {} with dist {}'.format(endNode, endDist)
    # Ensure you're within the boundaries of your world
    bounds = getMapBoundary()
    print "Boundaries found: {}".format(bounds)
    ## PATHFINDERS
    # Calculate weighted and unweighted Dijkstras
    # NOTE(review): the two nxShortestPath results below are computed but
    # never used -- only the genetic-algorithm path is returned; confirm
    # whether they are kept for comparison/debugging.
    shortestPath_uw, nxH_uw, _, pathLength_uw = nxShortestPath(nxGraph, nxPos, startNode, endNode, Dijk=0)
    shortestPath_w, nxH_w, _, pathLength_w = nxShortestPath(nxGraph, nxPos, startNode, endNode, Dijk=1)
    # Run the genetic algorithm!  (geneticPath is defined elsewhere in the project)
    gene = geneticPath(startNode, endNode, targetDis)
    shortestPath_g, pathLength_g, error_g = gene.Evolution()
    nxH_g = nx.subgraph(nxGraph, gene.finalSpecies)
    nxH = nxH_g
    shortestPath = shortestPath_g
    shortestPath.append(list(endCor))
    pathLength = pathLength_g
    message = 'Here is a {:.0f} km path for you.'.format(pathLength_g/1000.0)
    # Get the locations for any interesting nodes (edges annotated with a 'location')
    pathLocations = []
    pathNodes = []
    pathLocales = []
    for edge in nxH.edges(data=True):
        if edge[2]['location'] is not None:
            pathLocations.append(edge[2]['location'])
            pathNodes.append(edge[1])
            pathLocales.append([edge[2]['location'], edge[1][0], edge[1][1]])
    # de-duplicate locations/nodes/locales
    pathLocations = list(set(pathLocations))
    pathNodes = list(set(pathNodes))
    pathLocales = [list(x) for x in set(tuple(x) for x in pathLocales)]
    unique_locales=[]
    unique_strings=[]
    for ls in pathLocales:
        if ls[0] not in unique_strings:
            unique_strings.append(ls[0])
            unique_locales.append(ls)
    pathLocales = unique_locales
    message += " Enjoy your ride!"
    # Create the new map layer with path and markers
    # turn locales in geojson object
    markers = GeoJsonifyMarkers(pathLocales)
    # add start and end
    startGeo, endGeo = GeoJsonifyEndpoints([startPt, startCor[0], startCor[1]], [endPt, endCor[0], endCor[1]])
    # add the path and endpoints as a geojson object
    markers.append(GeoJsonify(shortestPath))
    markers.append(startGeo)
    markers.append(endGeo)
    # create final layer as a Feature Collection
    geoMarkers = FeatureCollection(markers)
    # Create locales list of lists
    json = {
        'bounds': bounds,
        'startPt': GeoJsonify(startCor),
        'endPt': GeoJsonify(endCor),
        'dist': pathLength,
        'path': geoMarkers,
        'message': message,
        'locales': GeoJsonifyMarkers(pathLocales)
    }
    return json # FOR APP
    # return json, shortestPath, pathLength, nxH, pathLocales, pathNodes # FOR TESTING
if __name__ == "__main__":
    # Smoke test: route ~16 km between two Boston-area landmarks.
    start = 'Fenway, Boston, MA'
    end = 'Fresh Pond, MA'
    distance = 16
    json = PathTestMashUp(start, end, distance)
| true |
7d580e864653122ac7c7b40005111532ba8deaf8 | Python | howonlee/graph-isomorphism | /word_graphs.py | UTF-8 | 1,666 | 2.9375 | 3 | [] | no_license | import numpy as np
import numpy.random as npr
import numpy.linalg as npl
import scipy.sparse as sci_sp
import scipy.stats as sci_st
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import networkx as nx
from nltk.corpus import brown
import collections
import operator
import cPickle
import random
def word_mapping(words):
    """Assign each distinct word a 1-based id in order of first appearance."""
    state_map = {}
    next_id = 1  # start at 1 in deference to the Julia structure this feeds
    for token in words:
        if token not in state_map:
            state_map[token] = next_id
            next_id += 1
    return state_map
def get_bigrams(ls):
    # Adjacent pairs: [(ls[0], ls[1]), (ls[1], ls[2]), ...]
    # (under this file's Python 2, zip returns a list)
    return zip(ls, ls[1:])
def save_word_mapping(mapping, name):
    # Persist the word -> id mapping so saved graphs can be decoded later.
    with open(name, "w") as map_file:
        cPickle.dump(mapping, map_file)
    print "word mapping dumped to : ", name
def word_net(words, mapping):
    """Build an undirected word-adjacency graph over consecutive word pairs."""
    net = nx.Graph()
    for first, second in get_bigrams(words):
        # nodes are the integer ids from word_mapping
        net.add_edge(mapping[first], mapping[second])
    return net
def save_word_net(net, name):
    # Write the graph as a plain edge list (no edge data).
    nx.write_edgelist(net, name, data=False)
    print "word graph saved to : ", name
if __name__ == "__main__":
    brown_words = brown.words()
    # Two disjoint 500-word slices of the Brown corpus (the original
    # comment claimed "2000word" slices, but each slice below is 500 words).
    first, second = brown_words[10000:10500], brown_words[0:500]
    first_dict = word_mapping(first)
    first_net = word_net(first, first_dict)
    second_dict = word_mapping(second)
    second_net = word_net(second, second_dict)
    print "generated. saving...."
    save_word_mapping(first_dict, "first_dict.pickle")
    save_word_net(first_net, "first_net.edgelist")
    save_word_mapping(second_dict, "second_dict.pickle")
    save_word_net(second_net, "second_net.edgelist")
| true |
563f64785df7ec8e759d413fec4c3f3ae599383a | Python | SmirnovOleg/formal-languages | /wrappers/GrammarWrapper.py | UTF-8 | 6,079 | 2.84375 | 3 | [
"Apache-2.0"
] | permissive | from typing import List, Dict
from pyformlang.cfg import Variable, Terminal, CFG, Production
from pyformlang.finite_automaton import State
from pyformlang.regular_expression import Regex
class GrammarWrapper:
    """Convenience wrapper around a pyformlang CFG.

    Precomputes whether the grammar generates epsilon, its Chomsky normal
    form (CNF), and a "weak" CNF that may additionally derive epsilon.
    """
    # Monotonic counter giving regex-derived DFA states unique variable names.
    __var_state_counter = 0
    def __init__(self, cfg: CFG):
        self.cfg = cfg
        self.generate_epsilon = cfg.generate_epsilon()
        self.cnf = cfg.to_normal_form()
        self.wcnf = self.get_weak_cnf()
    @classmethod
    def from_text(cls, text: List[str], use_python_regexes_if_necessary=False, variables=None):
        """Parse grammar rules from lines of 'HEAD BODY ...' text.

        Bodies containing regex metacharacters ('|', '.', '?', '+', '-') are
        expanded into plain productions via a DFA.  When `variables` is given
        it decides which symbols are variables; otherwise any symbol with an
        uppercase letter is treated as one.  'eps' denotes epsilon.
        """
        vars, terms, prods = set(), set(), set()
        start_var = None  # the head of the first non-empty rule becomes the start symbol
        for line in text:
            if not line.strip():
                continue
            raw_head, *raw_body = line.strip().split(' ', 1)
            if raw_body and any([spec in raw_body[0] for spec in ['|', '.', '?', '+', '-']]):
                if '-' in raw_body[0] and use_python_regexes_if_necessary:
                    regex = Regex.from_python_regex(raw_body[0])
                else:
                    regex = Regex(raw_body[0])
                head = Variable(raw_head)
                if start_var is None:
                    start_var = head
                cur_cfg = cls._create_cfg_from_regex(head, regex, variables)
                vars.update(cur_cfg.variables)
                terms.update(cur_cfg.terminals)
                prods.update(cur_cfg.productions)
            else:
                raw_body = raw_body[0].split(' ') if raw_body else ''
                if start_var is None:
                    start_var = Variable(raw_head)
                head = Variable(raw_head)
                vars.add(head)
                body = []
                for element in raw_body:
                    if element == 'eps':
                        continue
                    elif (not variables and any(letter.isupper() for letter in element)
                          or variables and element in variables):
                        var = Variable(element)
                        vars.add(var)
                        body.append(var)
                    else:
                        term = Terminal(element)
                        terms.add(term)
                        body.append(term)
                prods.add(Production(head, body))
        cfg = CFG(vars, terms, start_var, prods)
        return cls(cfg)
    @classmethod
    def from_file(cls, path_to_file: str, use_python_regexes_if_necessary=False, variables=None):
        """Read grammar rules from a file; see from_text for the line format."""
        with open(path_to_file, 'r') as file:
            return cls.from_text(file.readlines(), use_python_regexes_if_necessary, variables)
    @classmethod
    def _create_cfg_from_regex(cls, head: Variable, regex: Regex, variables=None) -> CFG:
        """Turn `head -> regex` into an equivalent right-linear CFG.

        The regex is converted into a minimal DFA; each DFA state becomes a
        fresh variable, each transition a production, and every final state
        also gets an epsilon production.
        """
        dfa = regex.to_epsilon_nfa().to_deterministic().minimize()
        transitions = dfa._transition_function._transitions
        state_to_var: Dict[State, Variable] = {}
        productions, terms, vars = set(), set(), set()
        for state in dfa.states:
            # suffix each state with a global counter so names stay unique across calls
            state_to_var[state] = Variable(f'{state}:{cls.__var_state_counter}')
            cls.__var_state_counter += 1
        vars.update(state_to_var.values())
        for start_state in dfa.start_states:
            productions.add(Production(head, [state_to_var[start_state]]))
        for state_from in transitions:
            for edge_symb in transitions[state_from]:
                state_to = transitions[state_from][edge_symb]
                current_prod_head = state_to_var[state_from]
                current_prod_body = []
                if (not variables and edge_symb.value.isupper()
                        or variables and edge_symb.value in variables):
                    var = Variable(edge_symb.value)
                    vars.add(var)
                    current_prod_body.append(var)
                else:
                    term = Terminal(edge_symb.value)
                    terms.add(term)
                    current_prod_body.append(term)
                current_prod_body.append(state_to_var[state_to])
                productions.add(Production(current_prod_head, current_prod_body))
                if state_to in dfa.final_states:
                    productions.add(Production(state_to_var[state_to], []))
        if not productions:
            return CFG(vars, terms, head, {Production(head, [])})
        return CFG(vars, terms, head, productions)
    def get_weak_cnf(self) -> CFG:
        """Return the CNF extended, if needed, so the start symbol can derive epsilon."""
        wcnf = self.cnf
        if self.generate_epsilon:
            new_start_symbol = Variable("S'")
            new_variables = set(wcnf.variables)
            new_variables.add(new_start_symbol)
            new_productions = set(wcnf.productions)
            new_productions.add(Production(new_start_symbol, [wcnf.start_symbol]))
            new_productions.add(Production(new_start_symbol, []))
            return CFG(new_variables, wcnf.terminals, new_start_symbol, new_productions)
        return wcnf
    def accepts(self, word) -> bool:
        """CYK membership test: return True iff the grammar derives `word`."""
        size = len(word)
        if size == 0:
            return self.cfg.generate_epsilon()
        cnf = self.cfg.to_normal_form()
        # inference_matrix[i][j] holds the variables deriving word[i..j]
        inference_matrix = [[set() for _ in range(size)] for _ in range(size)]
        for i in range(size):
            term = Terminal(word[i])
            for prod in cnf.productions:
                if len(prod.body) == 1 and prod.body[0] == term:
                    inference_matrix[i][i].add(prod.head)
        for length in range(1, size):
            for pos in range(size):
                if pos + length >= size:
                    break
                for split in range(length):
                    first_part = inference_matrix[pos][pos + split]
                    second_part = inference_matrix[pos + split + 1][pos + length]
                    for prod in cnf.productions:
                        if len(prod.body) == 2:
                            if prod.body[0] in first_part and prod.body[1] in second_part:
                                inference_matrix[pos][pos + length].add(prod.head)
        return cnf.start_symbol in inference_matrix[0][size - 1]
| true |
3cbc9c970940c61ee195dd3cca87acda2f4c9463 | Python | Hirata-Kodai/2021_MatsuoLab_NLPSpringseminar | /Models.py | UTF-8 | 3,377 | 3.109375 | 3 | [
"MIT"
] | permissive | import torch
from torch import nn
import torch.nn.functional as F
def compute_loss(model, input, optimizer=None, is_train=True):
    """Compute the loss for one batch.

    Puts the model in train mode when is_train is True and in evaluation
    mode when is_train is False, then returns the scalar loss value.

    :param model: the model being trained; its forward pass returns the loss
    :param input: tuple of inputs unpacked into the model
    :param optimizer: optimizer (used only when is_train is True)
    :param is_train: bool, whether to backpropagate and update parameters
    """
    model.train(is_train)
    # Compute the loss.
    loss = model(*input)
    if is_train:
        # Reset all parameter gradients to zero before calling .backward().
        optimizer.zero_grad()
        # Compute the parameter gradients.
        loss.backward()
        # Update the parameters using those gradients.
        optimizer.step()
    return loss.item()
class CBOW(nn.Module):
    """Continuous bag-of-words model: predict the center word from its context."""

    def __init__(self, vocab_size, embedding_size):
        """
        :param vocab_size: int, total vocabulary size
        :param embedding_size: int, dimensionality of the word embeddings
        """
        super(CBOW, self).__init__()
        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        # index 0 is reserved for padding
        self.emb = nn.Embedding(self.vocab_size, self.embedding_size, padding_idx=0)
        # project the summed context embedding back onto the vocabulary (no bias)
        self.linear = nn.Linear(self.embedding_size, self.vocab_size, bias=False)

    def forward(self, batch_X, batch_Y):
        """
        :param batch_X: LongTensor (batch_size, window*2) of context word ids
        :param batch_Y: LongTensor of target word ids
        :return: CBOW negative log-likelihood loss
        """
        context = self.emb(batch_X).sum(dim=1)        # (batch_size, embedding_size)
        scores = self.linear(context)                 # (batch_size, vocab_size)
        log_probs = F.log_softmax(scores, dim=-1)     # (batch_size, vocab_size)
        return F.nll_loss(log_probs, batch_Y)
class Skipgram(nn.Module):
    def __init__(self, vocab_size, embedding_size):
        '''
        :pram vocab_size : int, total vocabulary size
        :pram embedding_size : int, dimensionality of the word embeddings
        '''
        super(Skipgram, self).__init__()
        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        self.embedding = nn.Embedding(self.vocab_size, self.embedding_size)
        self.linear = nn.Linear(self.embedding_size, self.vocab_size)
    def forward(self, batch_X, batch_Y):
        '''
        :pram batch_X : torch.Tensor(dtype=torch.long), (batch_size, ) center word ids
        :pram batch_Y : torch.Tensor(dtype=torch.long), (batch_size, window*2) context ids, 0 = padding
        :return loss : torch.Tensor(dtype=torch.float), Skipgram loss
        '''
        emb_X = self.embedding(batch_X) # (batch_size, embedding_size)
        lin_X = self.linear(emb_X) # (batch_size, vocab_size)
        log_prob_X = F.log_softmax(lin_X, dim=-1) # (batch_size, vocab_size)
        # pick out the log-probabilities of the true context words
        log_prob_X = torch.gather(log_prob_X, 1, batch_Y) # (batch_X, window*2)
        log_prob_X = log_prob_X * (batch_Y != 0).float() # mask out padding (=0) positions
        # negative mean (over the batch) of summed context log-probabilities
        loss = log_prob_X.sum(1).mean().neg()
        return loss
| true |
898c6efdaf6374e9d4610366af6a619a45a98fa3 | Python | Daiver/jff | /obsolete/py/arap_image_warp/tests.py | UTF-8 | 8,045 | 2.859375 | 3 | [] | no_license | import unittest
import numpy as np
from arap_image_warp1 import findCommonVertices, cellIndices, edgeLengths
from arap_image_warp1 import gMatrix, gMatrices, hMatrix
from arap_image_warp1 import composeA1Matrix, composeB1Matrix
from arap_image_warp1 import composeA2Matrix, composeB2Matrix
from arap_image_warp1 import normalizedTransformationFromPositions
class ARAPImageWarpTests01(unittest.TestCase):
    """Unit tests for the ARAP image-warp helpers in arap_image_warp1.

    The fixture is a unit square (vertices 0-3) plus a center vertex 4
    connected to all corners.
    """
    def setUp(self):
        self.adj = [
            [1, 3, 4],
            [0, 2, 4],
            [3, 4, 1],
            [2, 4, 0],
            [0, 1, 2, 3]
        ]
        self.pos = np.array([
            [0, 0],
            [1, 0],
            [1, 1],
            [0, 1],
            [0.5, 0.5]
        ])
    def testComposeA2Matrix01(self):
        cells = [
            [0, 1, 2],#0
            [1, 2, 0],#1
            [2, 0, 1],#2
            [1, 0, 2],#3
            [2, 1, 0],#4
            [0, 2, 1] #5
        ]
        constraints = [0]
        nVerts = 3
        weight = 100
        res = composeA2Matrix(cells, constraints, nVerts, weight)
        # each row encodes (-1, +1) for one directed edge; constraint rows
        # carry the weight on the constrained vertex column
        ans = np.array([
            [-1, 1, 0],
            [0, -1, 1],
            [1, 0, -1],
            [1, -1, 0],
            [0, 1, -1],
            [-1, 0, 1],
            [100, 0, 0]
        ])
        self.assertTrue(np.allclose(res, ans))
    def testComposeB2Matrix01(self):
        weight = 100
        edgeLens1D = [1, 2, 3]
        controlPointDesirePositions = [1, 2]
        res = composeB2Matrix(edgeLens1D, controlPointDesirePositions, weight)
        # edge lengths followed by weighted control-point targets
        ans = np.array([
            1, 2, 3, 100, 200
        ])
        self.assertTrue(np.allclose(res, ans))
    def testComposeA2Matrix02(self):
        # NOTE(review): this test only checks that composeA2Matrix does not
        # raise -- the result is never asserted.
        cells = [
            [0, 1, 2],#0
            [1, 2, 0],#1
            [2, 0, 1],#2
            [1, 0, 2],#3
            [2, 1, 0],#4
            [0, 2, 1] #5
        ]
        constraints = [0]
        nVerts = 3
        weight = 100
        A2 = composeA2Matrix(cells, constraints, nVerts, weight)
    def testNormalizedTransformationFromPositions01(self):
        # The fitted transformation should map every old position onto the
        # corresponding new position (a 90-degree rotation here).
        pos = np.array([
            [0, 0],
            [0, 1],
            [-1, 0],
            [1, 0]
        ], dtype=np.float32)
        cell = [0, 1, 2, 3]
        newPos = np.array([
            [0, 0],
            [1, 0],
            [0, 1],
            [0, -1]
        ], dtype=np.float32)
        g = gMatrix(pos, cell)
        trans = normalizedTransformationFromPositions(newPos, g, cell)
        for p, newp in zip(pos, newPos):
            self.assertTrue(np.allclose( np.dot(trans, p), newp))
    def testNormalizedTransformationFromPositions02(self):
        # Linearity check: T(a) - T(b) == T(a - b).
        pos = np.array([
            [0, 0],
            [0, 1],
            [-1, 0],
            [1, 0]
        ], dtype=np.float32)
        cell = [0, 1, 2, 3]
        newPos = np.array([
            [0, 0],
            [1, 0],
            [0, 1],
            [0, -1]
        ], dtype=np.float32)
        g = gMatrix(pos, cell)
        trans = normalizedTransformationFromPositions(newPos, g, cell)
        '''print np.dot(trans, pos[1])
        print np.dot(trans, pos[0])
        print np.dot(trans, pos[1] - pos[0]) '''
        self.assertTrue(np.allclose(np.dot(trans, pos[1]) - np.dot(trans, pos[0]),
                        np.dot(trans, pos[1] - pos[0])))
    def testfindCommonVertices01(self):
        res1 = findCommonVertices(self.adj, 0, 1)
        self.assertEqual(len(res1), 1)
        self.assertEqual(res1[0], 4)
        res2 = findCommonVertices(self.adj, 2, 4)
        self.assertEqual(len(res2), 2)
        self.assertEqual(res2[0], 3)
        self.assertEqual(res2[1], 1)
        # swapping the arguments should reverse the neighbour order
        res3 = findCommonVertices(self.adj, 4, 2)
        self.assertEqual(len(res3), 2)
        self.assertEqual(res3[0], 1)
        self.assertEqual(res3[1], 3)
    def testCellIndices01(self):
        res = cellIndices(self.adj)
        true = [[0, 1, 4], [0, 3, 4], [0, 4, 1, 3],
                [1, 0, 4], [1, 2, 4], [1, 4, 0, 2],
                [2, 3, 4], [2, 4, 3, 1], [2, 1, 4],
                [3, 2, 4], [3, 4, 2, 0], [3, 0, 4],
                [4, 0, 1, 3], [4, 1, 0, 2], [4, 2, 1, 3], [4, 3, 0, 2]]
        for x, y in zip(res, true):
            self.assertSequenceEqual(x, y)
    def testEdgeLength01(self):
        res = edgeLengths(self.pos, self.adj)
        ans = np.array([
            [1, 0],
            [0, 1],
            [0.5, 0.5],
            [-1, 0],
            [0, 1],
            [-0.5, 0.5],
            [-1, 0],
            [-0.5, -0.5],
            [0, -1],
            [1, 0],
            [0.5, -0.5],
            [0, -1],
            [-0.5, -0.5],
            [0.5, -0.5],
            [0.5, 0.5],
            [-0.5, 0.5],
        ])
        for x, y in zip(res, ans):
            self.assertEqual(x[0], y[0])
            self.assertEqual(x[1], y[1])
    def testGMatrix01(self):
        cell = [0, 1, 2]
        pos = np.array([
            [0, 10],
            [20, 30],
            [40, 50]])
        res = gMatrix(pos, cell)
        # expected layout: [x, y] followed by the rotated pair [y, -x]
        ans = np.array([
            [0, 10],
            [10, 0],
            [20, 30],
            [30, -20],
            [40, 50],
            [50, -40],
        ])
        # NOTE(review): only column 0 is compared here -- the second column
        # of the expected matrix is never checked; confirm whether that was
        # intentional.
        for x, y in zip(res, ans):
            self.assertEqual(x[0], y[0])
    def testHMatrix01(self):
        # NOTE(review): no assertions yet -- this only exercises hMatrix.
        #return #test later
        pos = np.array([
            [1, 2],
            [3, 2],
            [1.5, 1],
            [1.5, 3]
        ])
        e = np.array([2, 0])
        cell = [0, 1, 2, 3]
        g = gMatrix(pos, cell)
        res = hMatrix(e, len(cell), g)
        #print 'G'
        #print g
        #print 'H'
        #print res
    def testComposeA1(self):
        # Each per-cell H matrix is scattered into the big system matrix at
        # the columns given by its cell; constraint rows carry the weight.
        h = [
            np.array([
                [1, 1, 1, 1, 1, 1, 1, 1],
                [1, 1, 1, 1, 1, 1, 1, 1]
            ]),
            np.array([
                [2, 2, 2, 2, 2, 2, 2, 2],
                [2, 2, 2, 2, 2, 2, 2, 2]
            ]),
            np.array([
                [3, 3, 3, 3, 3, 3, 3, 3],
                [3, 3, 3, 3, 3, 3, 3, 3]
            ])
        ]
        cells = [
            [6, 3, 4, 2],
            [5, 4, 6],
            [0, 1, 6]
        ]
        nVerts = 7
        weight = 13
        conds = [6, 2]
        res = composeA1Matrix(h, cells, nVerts, weight, conds)
        ans = np.array([
            [[ 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1],
             [ 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2],
             [ 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3],
             [ 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13],
             [ 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0],
             [ 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0]]
        ])
        self.assertTrue(np.allclose(ans, res))
    def testComposeB1(self):
        nCells = 3
        weight = 13
        constraints = np.array([
            [1, 2],
            [-2, 3]
        ])
        res = composeB1Matrix(nCells, weight, constraints)
        # zeros for the cell rows, then the weighted constraint targets
        ans = np.array([
            0, 0, 0, 0, 0, 0,
            13*1, 13*2, -2*13, 3*13
        ]).transpose()
        self.assertTrue(np.allclose(res, ans))
if __name__ == '__main__':
    # Run the whole test suite when executed directly.
    unittest.main()
| true |
0934b14a59a43871e72e3d06f381304fee1e2794 | Python | canorve/GALFITools | /src/galfitools/shell/commands_sky.py | UTF-8 | 3,655 | 2.703125 | 3 | [
"MIT"
] | permissive |
import argparse
from galfitools.sky.GalfitSky import galfitSky
from galfitools.sky.Sky import sky
from galfitools.sky.SkyDs9 import SkyDs9
from galfitools.sky.SkyRing import SkyRing
from galfitools.shell.prt import printWelcome
def mainGalfitSky():
    """CLI entry point: fit the sky background with GALFIT.

    Parses image/mask paths plus plate scale, zero point, position and
    initial sky value, then delegates to galfitSky().
    """
    printWelcome()
    parser = argparse.ArgumentParser(description="computes the sky using GALFIT")
    parser.add_argument("image", help="the image file")
    parser.add_argument("mask", help="the GALFIT mask file")
    parser.add_argument("-s","--scale", type=float, help="the plate scale. default = 1", default=1)
    parser.add_argument("-zp","--mgzpt", type=float, help="the magnitud zero point. default=25",default = 25)
    parser.add_argument("-x","--xpos", type=float, help="the x position. default=1",default = 1)
    parser.add_argument("-y","--ypos", type=float, help="the y position. default=1",default = 1)
    parser.add_argument("-is","--initsky", type=float, help="the initial sky value default=0",default = 0)
    args = parser.parse_args()
    imgname = args.image
    maskfile = args.mask
    mgzpt = args.mgzpt
    scale = args.scale
    X = args.xpos
    Y = args.ypos
    initsky = args.initsky
    galfitSky(imgname, maskfile, mgzpt, scale, X, Y, initsky)
def mainSky():
    """CLI entry point: compute sky statistics inside a DS9 box region."""
    printWelcome()
    parser = argparse.ArgumentParser(description="computes sky from a ds9 region box file")
    parser.add_argument("image", help="the image file")
    parser.add_argument("maskfile", help="the GALFIT Mask image file ")
    parser.add_argument("Ds9regFile", help="the DS9 box region file")
    args = parser.parse_args()
    imgname = args.image
    maskimage = args.maskfile
    filereg = args.Ds9regFile
    # sky() returns the sigma-clipped mean and standard deviation
    mean, sig = sky(imgname, maskimage, filereg)
    print("Sky within 3 sigma:")
    print("mean sky: {:.3f} ".format(mean))
    print("std sky: {:.3f} ".format(sig))
def mainSkyDs9():
    """CLI entry point: compute sky from DS9 boxes, ellipses and polygons."""
    printWelcome()
    parser = argparse.ArgumentParser(description="SkyDs9: computes sky background from a Ds9 region file: Box, Ellipses and Polygons ")
    parser.add_argument("ImageFile", help="the image file where the photometry will be computed")
    parser.add_argument("RegFile", help="the DS9 region file")
    args = parser.parse_args()
    ImageFile = args.ImageFile
    RegFile = args.RegFile
    mean, sig = SkyDs9(ImageFile, RegFile)
    print("Sky with the top 80% and botton 20% removed.")
    print("mean sky: {:.3f} ".format(mean))
    print("std sky: {:.3f} ".format(sig))
def mainSkyRing():
    """CLI entry point: compute the sky using a gradient over concentric rings."""
    printWelcome()
    parser = argparse.ArgumentParser(description="SkyRing: computes the sky using gradient over rings")
    # required arguments
    parser.add_argument("Image",help="Fits image of the objects")
    parser.add_argument("MaskFile",help="Fits Mask image")
    parser.add_argument("Ds9regFile", help="the DS9 ellipse region file")
    parser.add_argument("-c","--center", action="store_true", help="use the center of the ellipse. Otherwise it will use the (x,y) position with the highest value of the ellipse")
    # arguments with inputs
    parser.add_argument("-w","--width", type=int, help="width of the ring for the grad method. ",default=20)
    args = parser.parse_args()
    image = args.Image
    mask = args.MaskFile
    ds9regfile = args.Ds9regFile
    width = args.width
    center = args.center
    ##end input
    # SkyRing returns statistics at the radius where the gradient criterion is met
    mean, std, median, rad = SkyRing(image, mask, ds9regfile, width, center)
    line="Total sky: mean = {:.2f}; std={:.2f}; median = {:.2f} at radius {:.2f} ".format(mean,std,median, rad)
    print(line)
| true |
10c0895011df59a665a77e8b74ced5007cc50aaf | Python | dancan-sandys/News-api | /app/models.py | UTF-8 | 710 | 2.734375 | 3 | [
"MIT"
class NewsArticle():
    '''
    Blueprint of a single news article object.
    '''
    def __init__(self, name, image, description, time, url_to_site):
        # Map the constructor arguments straight onto article attributes.
        (self.article_name,
         self.article_image,
         self.article_description,
         self.article_time,
         self.url_to_site) = (name, image, description, time, url_to_site)
class NewsSource():
    '''
    Blueprint of a news source object.
    '''
    def __init__(self, id, name, category, description):
        # 'id' shadows the builtin inside __init__ only; kept for API compatibility.
        (self.source_id,
         self.source_name,
         self.source_category,
         self.source_description) = (id, name, category, description)
| true |
987a7890fc0ecd04e9a882c81803f81cb9f7deae | Python | jweede/wedaman.com | /resume/generate_resume.py | UTF-8 | 770 | 2.546875 | 3 | [] | no_license | #!/usr/bin/env python3
from datetime import datetime
import os
import jinja2
import yaml
HERE = os.path.dirname(os.path.realpath(__file__))  # directory containing this script
YAML_FILE = HERE + "/resume.yml"  # resume data source
OUTPUT_FILE = HERE + "/../_site/resume/index.html"  # rendered HTML destination
def generate_resume():
    """Render resume.yml through the Jinja2 'index.html' template into OUTPUT_FILE."""
    with open(YAML_FILE) as fp:
        # safe_load: the resume data needs no arbitrary-object support, and
        # yaml.load without an explicit Loader is deprecated and unsafe.
        context = yaml.safe_load(fp)
    # StrictUndefined makes missing template variables fail loudly.
    env = jinja2.Environment(
        undefined=jinja2.StrictUndefined, loader=jinja2.FileSystemLoader(HERE)
    )
    env.globals["publish_date"] = datetime.now().isoformat(" ")
    env.globals.update(context)
    env.get_template("index.html").stream().dump(OUTPUT_FILE)
if __name__ == "__main__":
    # Build the resume when run as a script.
    generate_resume()
| true |
764d7b07a8085b84ecaa4739f46dd33aa0019455 | Python | joshlam123/optimization_algorithms | /greedy.py | UTF-8 | 2,137 | 3.609375 | 4 | [
"MIT"
] | permissive | import numpy as np
class GreedyTSP():
    """Nearest-neighbour ('greedy') heuristic over a TSP instance.

    Expects an object exposing .cities (id -> [coordinate pair]),
    .start_city and .init_tour, as produced by the Ncity helper.
    """
    def __init__(self, n_city):
        city_data = n_city
        self.cities = city_data.cities
        self.start_city = city_data.start_city
        self.init_tour = city_data.init_tour
        self.interval = []
        self.visited_cities = []  # ordered list of cities chosen so far
        self.greedy_tour = []
        # Euclidean distance between two coordinate pairs
        self.distance = lambda x, y: np.sqrt((x[0] - y[0]) ** 2 + (x[1] - y[1]) ** 2)
    def f(self, point, tour):
        '''
        Cost of travelling from `point` to each candidate city in `tour`.

        Returns an ordered list of (city, distance) pairs.  The start city is
        never offered as a candidate and -- as in the original implementation --
        the last element of `tour` is excluded by the i < len(tour)-1 bound
        (NOTE(review): possibly unintended; preserved for identical behaviour).
        '''
        candidates = []
        for idx in range(len(list(tour))):
            if idx < len(tour) - 1 and tour[idx] != self.start_city:
                hop = self.distance(self.cities[point][0], self.cities[tour[idx]][0])
                candidates.append((tour[idx], hop))
        return candidates
    def perform_greedy(self, tour):
        """Consume `tour` greedily; return (total distance, average distance)."""
        total_distance = 0.0
        city_keys = len(tour)
        next_node = (self.start_city, 0)
        while len(self.visited_cities) != city_keys:
            options = self.f(next_node[0], tour)
            if options:
                # nearest candidate wins; ties broken by the larger city id
                next_node = min(options, key=lambda o: (o[1], -o[0]))
                self.visited_cities.append(next_node[0])
                total_distance += next_node[1]
                tour.remove(next_node[0])
            else:
                # no candidates left: close the loop back to the start city
                last_xy = self.cities[self.visited_cities[-1]][0]
                start_xy = self.cities[self.start_city][0]
                total_distance += np.sqrt((last_xy[0] - start_xy[0]) ** 2 + (last_xy[1] - start_xy[1]) ** 2)
                self.visited_cities.append(self.start_city)
        return total_distance, total_distance / city_keys
    def greedy_this(self):
        """Run the heuristic on a copy of the initial tour."""
        return self.perform_greedy(self.init_tour.copy())
b2128df2be235dd5cd1fbb9637e342aeeb9849b3 | Python | mschuldt/misc | /make_ip_static.py | UTF-8 | 2,753 | 2.859375 | 3 | [] | no_license | #!/usr/bin/python
# Usage:
# ./make_ip_static.py [-d] [IP]
# options:
# -d Dry run
# IP IP address that will be made static
# Defaults to the current IP address
#
# Adapted from this tutorial:
# http://www.modmypi.com/blog/tutorial-how-to-give-your-raspberry-pi-a-static-ip-address
import re
import subprocess as sp
from sys import argv
from os import geteuid
# Line in /etc/network/interfaces that marks a DHCP-configured eth0
dhcp_line = "iface eth0 inet dhcp"
#TODO: There must be a better way to get this info
# Captures (addr, bcast, mask) from 'ifconfig' output for eth0
ifconfig_re = """eth0[ ]+Link encap:Ethernet HWaddr ..:..:..:..:..:..[ ]*
[ ]*inet addr:([0-9.]+) Bcast:([0-9.]+) Mask:([0-9.]+)"""
# Captures (destination, gateway) columns from 'netstat -nr' lines
netstat_re = """^([0-9.]+)[ ]+([0-9.]+)"""
dry_run = False
if "-d" in argv:
    dry_run = True
    argv.remove("-d")
# Writing /etc/network/interfaces requires root (not needed for a dry run)
if not dry_run and geteuid() != 0:
    print("I need to be run with root permissions!.")
    exit(0)
filename = "/etc/network/interfaces"
f = open(filename, "r")
interfaces = f.read()
f.close()
def die(msg, code=1):
    """Print msg plus a reassurance line, then exit with the given code."""
    print(msg)
    print("No changes were made.")  # fixed typo: "where made" -> "were made"
    exit(code)
# Bail out early when eth0 is not currently configured via DHCP.
if not re.search(dhcp_line, interfaces):
    die("hmmm...I can't find what I'm looking for")
# get info from 'ifconfig'
ifconfig = sp.Popen('ifconfig', stdout=sp.PIPE, stderr=sp.STDOUT).stdout.read()
m = re.search(ifconfig_re, ifconfig)
if not m:
    die("ifconfig output is unfamiliar. I can't find what I'm looking for")
addr, bcast, mask = m.group(1), m.group(2), m.group(3)
# An explicit IP on the command line overrides the current address.
if len(argv) == 2:
    addr = argv[1]
# get info from 'netstat -nr'
destination = gateway = None
netstat = sp.Popen(['netstat', '-nr'], stdout=sp.PIPE, stderr=sp.STDOUT).stdout.readlines()
for line in netstat:
    m = re.search(netstat_re, line)
    if m:
        # first non-zero value in each column wins; a second one is an error
        if m.group(1) != "0.0.0.0":
            if destination:
                die("I'm confused. found two netstat Destination candidates.")
            destination = m.group(1)
        if m.group(2) != "0.0.0.0":
            if gateway:
                die("I'm confused. found two netstat Gateway candidates.")
            gateway = m.group(2)
if not destination:
    die("I could not find the Destination address using netstat")
if not gateway:
    die("I could not find the Gateway address using netstat")
# Static stanza that replaces the single DHCP line in the interfaces file.
replacement = """iface eth0 inet static
address {}
netmask {}
network {}
broadcast {}
gateway {}""".format(addr, mask, destination, bcast, gateway)
new_interfaces = re.sub(dhcp_line, replacement, interfaces)
if dry_run:
    print("""The static IP address will be: {}
In file '{}' I will replace:
'{}'
with:
{}""".format(addr, filename, dhcp_line, replacement))
else:
    f = open(filename, "w")
    f.write(new_interfaces)
    f.close()
    print("static IP address is: " + addr)
    # Record the chosen address so other tooling can read it back.
    f = open("/home/pi/IP_ADDR", "w")
    f.write(addr)
    f.close()
exit(0)
| true |
91f504bbcba685cd734c825f537f73eba7709171 | Python | cmccluskey/misc | /FileInfoTools/getfileinfo.py | UTF-8 | 2,774 | 2.5625 | 3 | [] | no_license | #!/usr/bin/python
import argparse
import os
import magic
# brew install libmagic
# via pip install python-magic
# export PYTHONPATH=/usr/local/lib/python2.7/site-packages
import re
import stat
from magicfixup import magicfixup
# Don't navigate these root paths
exclude_paths = ['/dev','/tmp','/Volumes']
parser = argparse.ArgumentParser(description='Build a list of file extensions and Magic description')
parser.add_argument('-d', dest='debug', action='store_true', default=False, help='Enable debugging to standard out')
parser.add_argument('-p', dest='path', required=True, help='Root path to build list')
parser.add_argument('-o', dest='filename', required=True, help='Output filename for list')
# Ok to process based on exlude list
def excludeCheckPassed(checkfile, myList):
    # Return False when checkfile starts with any excluded root path,
    # True otherwise.  Reads the module-level 'args' for debug output.
    for x in myList:
        regex = '^' + re.escape(x)
        if re.search(regex,checkfile):
            if args.debug: print "Warning: Skipping file %s since path %s is in the exclude list" % (checkfile,x)
            return False
        else:
            None
    return True
# True out arg parser
args = parser.parse_args()
if args.debug: print(args)
# Open output file
outf = open(args.filename, 'w')
# Walk the tree and write out line
for root, dirs, files in os.walk(args.path):
for file in files:
testfile = os.path.join(root, file)
# Check for excluded paths
if excludeCheckPassed(testfile,exclude_paths):
# Check for a symlink which messes with the socket check
if os.path.islink(testfile):
if args.debug: print "Warning: Skipping file %s since it is a symlink" % testfile
else:
# Readable as the current user
try:
fp = open(testfile,'r')
except IOError:
if args.debug: print "Error: Skipping file %s due to access perimissions" % testfile
next
else:
fp.close
# Checking to see if file is a socket
mode = os.stat(testfile).st_mode
if stat.S_ISSOCK(mode):
if args.debug: print "Warning: Skipping file %s since it is a socket" % testfile
else:
extension = os.path.splitext(testfile)[1]
# Confirm there is an extension on the file
if extension:
# Cleanup description to strip out per-file data
description = magicfixup(magic.from_file(testfile,arg.debug).split(',')[0])
# Write out data: Extension <tab> Mime Type <tab> Magic Description <tab> Full filename
outf.write(extension.strip('.').lower())
outf.write('\t')
outf.write(description)
outf.write('\t')
outf.write(magic.from_file(testfile, mime=True))
outf.write('\t')
outf.write(testfile)
outf.write('\n')
else:
if args.debug: print "Warning: No extension detected for %s" % testfile
else:
None
# Close output filehandle
outf.close
| true |
class Item(object):
    """A named object the player can pick up."""
    def __init__(self, name):
        # Display name used in all pick-up messages.
        self.name = name
    def pick_up(self):
        """Announce that the player has collected this item."""
        print("You have picked up %s" % self.name)
class Statue(Item):
def __init__(self, name, desc):
super(Statue, self).__init__(name)
self.desc = desc
class PaintBrushStatue(Statue):
def __init__(self):
super(PaintBrushStatue, self).__init__("Pain Brush Statue", "This statue shows that you just started playing "
"this and mean that you are about to begin a probably easy maze but "
"we will see so good luck")
class BobRossStatue(Statue):
def __init__(self):
super(BobRossStatue, self).__init__("Bob Ross Statue", "A Bob Ross Statue look nice and"
" it is detailed greatly and it means you beat the game nice one"
"I hope you read the paintings descriptions of you grabbed them if not "
"that's fine but thanks for hanging out and beating the game now type "
"quit to end the game Bye")
class Paintings(Item):
def __init__(self, name, desc):
super(Paintings, self).__init__(name)
self.desc = desc
class WildernessDay(Paintings):
def __init__(self):
super(WildernessDay, self).__init__("Wilderness Day", "Wilderness Day was a painting and the "
"last one Bob Ross painted and it shows a forest with a sun "
"down as it time and a path way made of rocks")
class InTheMidstOfWinter(Paintings):
def __init__(self):
super(InTheMidstOfWinter, self).__init__("In The Midst Of Winter", "In The Midst of Winter really "
"speaks for it self in it nice winter forest and it"
"small cabin and the trees around it that are dead."
"The winter is harsh but nice")
class LakeAtTheRidge(Paintings):
def __init__(self):
super(LakeAtTheRidge, self).__init__("Late At The Ridge", "This painting has a view of snowy mountains "
" in the distant and a shining lake with a cloudy blue sky and green"
"trees and grasses")
class BalmyBeach(Paintings):
def __init__(self):
super(BalmyBeach, self).__init__("Balmy Beach", "You can see an ocean with a sun down and a nice pink sky"
"there are also some small waves crashing by and some palm trees")
class EvergreenValley(Paintings):
def __init__(self):
super(EvergreenValley, self).__init__("Evergreen Valley", "A nice snowy mountain in the distant and tall "
"green trees with a small trail amd a pink blue sky in this painting")
class TrailsEnd(Paintings):
def __init__(self):
super(TrailsEnd, self).__init__("Trail's End", "This painting is showing a tree with no leaves in front "
"but behind it there is a trail and a lot of orange autumn trees")
class BridgeToAutumn(Paintings):
def __init__(self):
super(BridgeToAutumn, self).__init__("Bridge To Autumn", "In This one there is a small wooden shelter "
"with a lake behind it and the autumn trees on the other side of the "
"lake and a small rock trail leading to the shelter")
class ViewFromClearCreek(Paintings):
def __init__(self):
super(ViewFromClearCreek, self).__init__("View From Clear Creek", "A site of a creek, a small river, tall"
"green grass and tall trees with mountains and hills in the distance"
"what a nice painting of this creek")
class CabinInTheHollow(Paintings):
def __init__(self):
super(CabinInTheHollow, self).__init__("Cabin In The Hollow", "A snowy forest with trees with pink leaves, "
"a frozen lake, some fences and a cabin with snow on top this "
"this painting really shows how it gets during winter in snowy areas")
class TranquilityCove(Paintings):
def __init__(self):
super(TranquilityCove, self).__init__("Tranquility Cove", "A painting with a river and some trees in the "
"beginning of autumn with orange grenns grass and tress with the same "
"color")
class WindingStream(Paintings):
def __init__(self):
super(WindingStream, self).__init__("Winding Stream", "This painting shows a trail, some tall trees next to "
"it and mountains in the distant background with a blue sky")
class ReflectionsOfCalm(Paintings):
def __init__(self):
super(ReflectionsOfCalm, self).__init__("Reflections Of Calm", "This painting speaks for itself with the nice"
"reflecting lake and the mountains in the back with trees and rocks "
"surrounding the lake and a blue sky to top it all off")
class BlueRidgeFalls(Paintings):
def __init__(self):
super(BlueRidgeFalls, self).__init__("Blue Ridge Falls", "A nice waterfall that is being split by a rock "
"surrounded but tall green trees and under it is the river current it"
"is making what a nice painting")
class EveningsGlow(Paintings):
def __init__(self):
super(EveningsGlow, self).__init__("Evening's Glow", "Another Painting that can speak for itself. This one"
"has a shining sun in the distant with a few mountains and its barely "
"becoming day. It has a trail leading to a small cabin with trees behind"
" it and a river as well next to it")
class SeasideHarmony(Paintings):
def __init__(self):
super(SeasideHarmony, self).__init__("Seaside Harmony", "A painting with the ocean and a cloudy pink sky with"
"waves crashing against some rocks on the shore")
class AWalkInTheWoods(Paintings):
def __init__(self):
super(AWalkInTheWoods, self).__init__("A Walk In The Woods", "The first every painting Bob Ross made and "
"it is pretty just like the rest. This painting has a small water "
"lake with a trail leading towards it and autumn trees surrounding "
"the trail and the small body of water with an yellow sky")
class You(object):
def __init__(self, name, desc):
self.name = name
self.desc = desc
def note(self):
print("%s " % self.name)
your_name = input("What is your name? ")
print("Nice to meet you %s" % your_name)
your_desc = input("Tell me a little something about yourself")
You = You("Desc Of You: %s is your name" % your_name, "and you are %s" % your_desc)
print(You.name, You. desc)
class Room(object):
def __init__(self, name, description, s, n, e, w, items):
self.name = name
self.desc = description
self.north = n
self.south = s
self.east = e
self.west = w
self.items = items
def move(self, direction):
global current_node
current_node = globals()[getattr(self, direction)]
wilderness_day = WildernessDay()
bob_ross_statue = BobRossStatue()
in_the_midst_of_winter = InTheMidstOfWinter()
lake_at_the_ridge = LakeAtTheRidge()
balmy_beach = BalmyBeach()
evergreen_valley = EvergreenValley()
trails_end = TrailsEnd()
bridge_to_autumn = BridgeToAutumn()
view_from_clear_creek = ViewFromClearCreek()
cabin_in_the_hollow = CabinInTheHollow()
tranquility_cove = TranquilityCove()
winding_stream = WindingStream()
reflections_of_calm = ReflectionsOfCalm()
blue_ridge_falls = BlueRidgeFalls()
evenings_glow = EveningsGlow()
seaside_harmony = SeasideHarmony()
a_walk_in_the_woods = AWalkInTheWoods()
paint_brush_statue = PaintBrushStatue()
painters_beginning = Room("Painters Beginning", "Well this is gonna ba a great ride with a few items "
"not much there only really is paintings you can collect and they have some "
"reason to be there but you have to play to find out and this is a maze so "
"it might take some time so good luck with the maze also there is a random door?"
"walk through it if you want to but also some tips type 'look' to see all your "
"routes you can take anyway good luck", "Wilderness", None, None, None,
paint_brush_statue)
Wilderness = Room("Wilderness", "You walked through the door and the sun is just going down and there is "
"is a forest around you and a painting and another two doors you can grab the "
"painting if you want to up to you just type pick up then the name of the "
"item you want to pick up ", "Winter", "painters_beginning", "Lake", None,
wilderness_day)
Winter = Room("Winter", "The door must have sent you to a very snowy place but hey there is a cabin go inside..."
"Well now you at least have shelter and there is a painting and if you want you can grab it also "
"there is a door inside the cabin you want to go in?", None, 'Wilderness', "Beach", None,
in_the_midst_of_winter)
Lake = Room("Lake", "It appears to be a lake and some mountains in the distant there is also another door and "
"painting so might as well go to the door or if you want you can pick up that painting", "Beach", None,
None, "Wilderness", lake_at_the_ridge)
Beach = Room("Beach", "You at the beach and it looks nice this place also has a sun about to sleep you can tell "
"by the pink sky and there are two doors and another painting you can always pick it up if you want to"
"that is", "Valley", "Lake", None, "Winter", balmy_beach)
Valley = Room("Valley", "You are on a trail after you got out the door and you can see the big snowy mountains"
"and a lot of tall trees but it is a little cold. There is the painting and the doors again which"
"you want ot go through and do you wan the painting?", "Autumn", "Beach", "Creek", "Trail",
evergreen_valley)
Trail = Room("Trail", "Its an autumn forest with a lot of trees and another painting but no other doors here "
"looks like your going ot hav to head back before you do, do you want the painting? Up to you.",
None, None, 'Valley', None, trails_end)
Autumn = Room("Autumn", "There is a small wooden shelter, a lake , and some trees but do you want to go in the "
"small shelter?.... You walked in and there is a door and a painting do you want the painting and "
"do you want ot head into the door?", None, "Valley", None, "Cabin", bridge_to_autumn)
Cabin = Room("Cabin", "Its freezing go into the cabin where its warm.... Your in and outside there are pink trees "
"with snow on them and a frozen river there is also another painting in the room wanna grab it? No"
"doors again well looks like you will have to head back again", None, None, "Autumn", None,
cabin_in_the_hollow)
Creek = Room("Creek", "You are at a creek and it looks beautiful with trees on one side of a river and hill on the"
" other there is also doors and a painting. who is leaving them behind? Anyway you wanna grab and go?",
"Cove", "Stream", None, "Valley", view_from_clear_creek)
Cove = Room("Cove", "There is a river in front of you and some trees around but there are more in the distance."
"There is the painting again, who would have guessed, anyway there is no doors again so we have to "
"head back again but do you want to grab the painting before you go?", None, "Creek", None, None,
tranquility_cove)
Stream = Room("Stream", "There is a trail in front of you and trees on the side of the trial there is also "
"mountain in the distance and a blue sky not too cloudy either there is that painting again and "
"some more doors so you wanna take the painting?", "Creek", "Reflection", "Seaside", None, winding_stream)
Reflection = Room("Reflection", "A big lake this time around and a closer mountain but still pretty far there are"
"also more trees and the painting also the door. So wanna grab the painting?", "Stream", None,
"Falls", None, reflections_of_calm)
Falls = Room("Falls", "Beautiful... a falls and a powerful stream and tons of trees and rocks following the stream"
"there's the painting as usual but no door so we have to head back and if you want grab the painting to",
None, None, None, "Reflection", blue_ridge_falls)
Seaside = Room("Seaside", "A beach and it looks pretty with the pink sky above and the clouds not to mention the "
"crashing waves hitting the rocks. Oh the painting and door well if you want grab the painting if "
"not don't up to you", "Evenings", None, None, "Stream", seaside_harmony)
Evenings = Room("Evenings", "A beautiful sun about to rise a nice stream some trees and a cabin... you walked in to"
"he cabin and as expected the door and the painting so you want to or not up to you", "Walk", "Seaside",
None, None, evenings_glow)
Walk = Room("Walk", "Well there is a trail and a small body of water in front of you and a nice autumn forest with a "
"yellow sky and tons of trees and one last door and painting well take it or not i don't mind", "Art",
"Evenings", None, None, a_walk_in_the_woods)
Art = Room("Art", "You made it to the end of the maze to a museum full of art and all kinds of it. It looks great"
"well congrats on beating the maze and thanks for playing the game hope to see you soon and on more thing"
"there is a statue grab it and read its description", None, "Walk", None, None, bob_ross_statue)
current_node = painters_beginning
directions = ["north", "south", "east", "west"]
short_directions = ["n", "s", "e", "w"]
bag_of_paintings = []
while True:
print("Name: %s" % current_node.name)
print("Desc: %s " % current_node.desc)
if current_node.items is not None:
print("Item: %s " % current_node.items.name)
command = input('>_'.lower())
if command == 'quit':
quit(0)
elif command in short_directions:
pos = short_directions.index(command)
command = directions[pos]
if command in directions:
try:
current_node.move(command)
except KeyError:
print("You can not go that way")
elif 'pick up' in command:
item_req = input("What item? ")
if item_req.lower() == current_node.items.name.lower():
bag_of_paintings.append(current_node.items)
print("Taken.")
else:
print("Item not Picked Up")
elif "look at" in command:
for Item in bag_of_paintings:
if Item.name in command:
print(Item.name)
print(Item.desc)
elif command == 'inventory':
for Item in bag_of_paintings:
print(Item.name)
if command == 'Inventory':
print(bag_of_paintings)
if command == 'Bob':
print("very art")
if command == 'Knock Knock':
print("Knock-knock - it's Knuckles - the bloat thrower ")
print("Independent flower ")
print("Magical Emerald holder ")
print("Give you the coldest shoulder ")
print("My spike goes through boulders ")
print("That's why I stay a loner ")
print("I was born by myself ")
print("I don't need a posse - I get it on by myself ")
print("Adversaries get shelft")
if command == "Kappa":
print("░░░░░░░░░")
print("░░░░▄▀▀▀▀▀█▀▄▄▄▄░░░░")
print("░░▄▀▒▓▒▓▓▒▓▒▒▓▒▓▀▄░░")
print("▄▀▒▒▓▒▓▒▒▓▒▓▒▓▓▒▒▓█░")
print("█▓▒▓▒▓▒▓▓▓░░░░░░▓▓█░")
print("█▓▓▓▓▓▒▓▒░░░░░░░░▓█░")
print("▓▓▓▓▓▒░░░░░░░░░░░░█░")
print("▓▓▓▓░░░░▄▄▄▄░░░▄█▄▀░")
print("░▀▄▓░░▒▀▓▓▒▒░░█▓▒▒░░")
print("▀▄░░░░░░░░░░░░▀▄▒▒█░")
print("░▀░▀░░░░░▒▒▀▄▄▒▀▒▒█░")
print("░░▀░░░░░░▒▄▄▒▄▄▄▒▒█░")
print("░░░▀▄▄▒▒░░░░▀▀▒▒▄▀░░")
print("░░░░░▀█▄▒▒░░░░▒▄▀░░░")
print("░░░░░░░░▀▀█▄▄▄▄▀")
if command == "look":
print("East: %s " % current_node.east)
print("North: %s " % current_node.north)
print("South: %s " % current_node.south)
print("West: %s " % current_node.west)
| true |
391b909f57556fb0d557f080bfc0a1273e73da07 | Python | tail95/Algorithm | /BreathFirstSearch/5567.py | UTF-8 | 591 | 3.078125 | 3 | [] | no_license | import sys
from collections import deque
n = int(sys.stdin.readline())
m = int(sys.stdin.readline())
friends = [[0]*(n+1) for _ in range(n+1)]
for _ in range(m):
a, b = map(int, sys.stdin.readline().split())
friends[a][b] = 1
friends[b][a] = 1
inviteds = [1]
queue = deque()
for i in range(n+1):
if friends[1][i] == 1 and i not in inviteds:
inviteds.append(i)
queue.append(i)
while len(queue):
front = queue.popleft()
for i in range(n+1):
if friends[front][i] == 1 and i not in inviteds:
inviteds.append(i)
print(len(inviteds)-1) | true |
6e2548d2e63ca93dda55efa16f0ef0ebc6a23d2d | Python | MohammadUsmanKhan/NCAI-MACHINE-LEARNING | /Assignment # 2/housing price/housing price(c).py | UTF-8 | 595 | 2.84375 | 3 | [] | no_license | import pandas as pd
import numpy as np
dataset=pd.read_csv("housing price.csv")
x=dataset.iloc[:,:-1].values
y=dataset.iloc[:, 1].values
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state = 3)
from sklearn.tree import DecisionTreeRegressor
regressor=DecisionTreeRegressor(random_state=15)
regressor.fit(x_train,y_train)
print("The Accuracuy of Decision Tree =",regressor.score(x_test,y_test))
print("Enter Housing ID=",regressor.predict([[1293]]))
y_pred=regressor.predict(x_test)
| true |
4c184bb8065e3a40f85e151b3f840d2ee737bdeb | Python | vstinner/check_python_vuln | /check_python_vuln/hash_dos.py | UTF-8 | 1,015 | 2.671875 | 3 | [
"MIT"
] | permissive | import sys
import subprocess
from vulntools import Test
SET_SIZE = 128
NVALUE = 16
class Check(Test):
NAME = "Hash DoS (CVE-2012-1150)"
SLUG = "hash-dos"
def run(self):
code = 'print(repr(set(str(i) for i in range(%s))))' % SET_SIZE
cmd = [sys.executable]
if sys.version_info < (3,):
cmd.append('-R')
cmd.extend(('-c', code))
results = []
for _ in range(NVALUE):
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if proc.returncode:
self.exit_error("python failed with exitcode %s"
% proc.returncode)
results.append(stdout.rstrip())
unique = len(set(results))
if unique == NVALUE:
self.exit_fixed()
else:
self.exit_vulnerable("set is not randomized (%s unique repr)"
% unique)
if __name__ == "__main__":
Check().main()
| true |
6a5122e98da24abdfab855fafb06e5def1998bf6 | Python | alexbuyanow/PyCryptoPro | /tests/test_provider.py | UTF-8 | 13,613 | 2.578125 | 3 | [] | no_license | """
PyCryptoPro
CryptoPro providers test
"""
from pathlib import Path
import unittest2 as unittest
import mock
from pycryptopro.provider import (
CryptoProviderInterface,
ConsoleProvider,
CryptoProviderFactory
)
from pycryptopro.entity import Certificate, CRL, Info, Config
from pycryptopro.exception import ProviderNotFoundException
class TestCryptoProviderFactory(unittest.TestCase):
"""
Providers factory tests
"""
def setUp(self):
self.__factory = CryptoProviderFactory(Config())
def tearDown(self):
del self.__factory
def test_get_provider(self):
"""
Tests provider getting
"""
provider = self.__factory.get_provider('console')
self.assertIsInstance(provider, CryptoProviderInterface)
self.assertIsInstance(provider, ConsoleProvider)
def test_get_provider_error(self):
"""
Tests absent provider getting
"""
with self.assertRaisesRegex(
ProviderNotFoundException,
'Provider "undefined" not exists'
):
self.__factory.get_provider('undefined')
@mock.patch('pycryptopro.provider.CryptoProviderInterface')
def test_add_provider(self, provider):
"""
Tests provider adding
"""
self.__factory.add_provider('test', provider)
self.assertEqual(
self.__factory.get_provider('test'),
provider
)
class TestConsoleProviderCertManager(unittest.TestCase):
"""
Console provider tests for cert manager
"""
def setUp(self):
self.__cert_fixture = Path(
'./tests/certificate_fixture.txt'
).read_text()
self.__crl_fixture = Path('./tests/crl_fixture.txt').read_text()
def tearDown(self):
del self.__crl_fixture
del self.__cert_fixture
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate_list(self, wrapper, list_filter):
"""
Tests certificate list getting
"""
wrapper.return_value.execute.return_value = self.__cert_fixture
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_certificate_list('', list_filter)
self.assertEqual(5, count)
self.assertEqual(5, len(result))
certificate = result[0]
self.__assert_cert(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate_list_filtered(self, wrapper, list_filter):
"""
Tests certificate list getting
"""
wrapper.return_value.execute.return_value = self.__cert_fixture
list_filter.search.return_value = 'search'
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_certificate_list('', list_filter)
self.assertEqual(5, count)
self.assertEqual(5, len(result))
certificate = result[0]
self.__assert_cert(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate_list_limited(self, wrapper, list_filter):
"""
Tests certificate list getting
"""
wrapper.return_value.execute.return_value = self.__cert_fixture
list_filter.limit.return_value = 2
list_filter.offset.return_value = 2
provider = ConsoleProvider(Config())
count, result = provider.get_certificate_list('', list_filter)
self.assertEqual(5, count)
self.assertEqual(2, len(result))
certificate = result[0]
self.__assert_cert(
certificate,
'5ed7a78b451f46fae96b8959023f640f146ef1d7'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate_list_empty(self, wrapper, list_filter):
"""
Tests empty certificate list getting
"""
wrapper.return_value.execute.return_value = ''
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_certificate_list('', list_filter)
self.assertEqual(0, count)
self.assertEqual(0, len(result))
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate(self, wrapper):
"""
Tests certificate getting
"""
wrapper.return_value.execute.return_value = self.__cert_fixture
provider = ConsoleProvider(Config())
certificate = provider.get_certificate('', '')
self.__assert_cert(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_certificate_absent(self, wrapper):
"""
Tests absent certificate getting
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
self.assertIsNone(provider.get_certificate('', ''))
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_add_certificate(cls, wrapper):
"""
Tests certificate adding
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
provider.add_certificate(Path(), '')
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_remove_certificate(cls, wrapper):
"""
Tests certificate removing
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
provider.remove_certificate('', '')
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl_list(self, wrapper, list_filter):
"""
Tests CRL list getting
"""
wrapper.return_value.execute.return_value = self.__crl_fixture
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_crl_list('', list_filter)
self.assertEqual(2, count)
self.assertEqual(2, len(result))
certificate = result[0]
self.__assert_crl(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl_list_filtered(self, wrapper, list_filter):
"""
Tests CRL list getting
"""
wrapper.return_value.execute.return_value = self.__crl_fixture
list_filter.search.return_value = 'search'
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_crl_list('', list_filter)
self.assertEqual(2, count)
self.assertEqual(2, len(result))
certificate = result[0]
self.__assert_crl(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl_list_limited(self, wrapper, list_filter):
"""
Tests CRL list getting
"""
wrapper.return_value.execute.return_value = self.__crl_fixture
list_filter.limit.return_value = 1
list_filter.offset.return_value = 1
provider = ConsoleProvider(Config())
count, result = provider.get_crl_list('', list_filter)
self.assertEqual(2, count)
self.assertEqual(1, len(result))
certificate = result[0]
self.__assert_crl(
certificate,
'511c2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.CertFilterInterface')
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl_list_empty(self, wrapper, list_filter):
"""
Tests empty CRL list getting
"""
wrapper.return_value.execute.return_value = ''
list_filter.limit.return_value = 0
list_filter.offset.return_value = 0
provider = ConsoleProvider(Config())
count, result = provider.get_crl_list('', list_filter)
self.assertEqual(0, count)
self.assertEqual(0, len(result))
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl(self, wrapper):
"""
Tests CRL getting
"""
wrapper.return_value.execute.return_value = self.__crl_fixture
provider = ConsoleProvider(Config())
certificate = provider.get_crl('', '')
self.__assert_crl(
certificate,
'5aac2b534b8d50306757bab8289886b755444e03'
)
wrapper.return_value.execute.assert_called_once()
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_get_crl_absent(self, wrapper):
"""
Tests absent CRL getting
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
self.assertIsNone(provider.get_crl('', ''))
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_add_crl(cls, wrapper):
"""
Tests CRL adding
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
provider.add_crl(Path(), '')
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_remove_crl(cls, wrapper):
"""
Tests CRL removing
"""
wrapper.return_value.execute.return_value = ''
provider = ConsoleProvider(Config())
provider.remove_crl('', '')
wrapper.return_value.execute.assert_called_once()
def __assert_cert(self, certificate: Certificate, check_id: str):
"""
Checks certificate data
"""
self.assertIsInstance(certificate, Certificate)
self.assertEqual(
check_id,
certificate.identifier
)
self.assertIsInstance(certificate.subject, Info)
self.assertIsInstance(certificate.subject, Info)
def __assert_crl(self, certificate: CRL, check_id: str):
"""
Checks CRL data
"""
self.assertIsInstance(certificate, CRL)
self.assertEqual(
check_id,
certificate.identifier
)
self.assertIsInstance(certificate.issuer, Info)
class TestConsoleProviderCryptoCP(unittest.TestCase):
"""
Console provider tests for cryptocp
"""
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_sign_attached(cls, wrapper):
"""
Tests signing with attached sign
"""
wrapper.return_value.execute.return_value = 'ErrorCode: 0x00000000'
provider = ConsoleProvider(Config())
provider.sign_attached(Path(), '', '')
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_sign_detached(cls, wrapper):
"""
Tests signing with detached sign
"""
wrapper.return_value.execute.return_value = 'ErrorCode: 0x00000000'
provider = ConsoleProvider(Config())
provider.sign_detached(Path(), '', '')
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_verify_attached(cls, wrapper):
"""
Tests attached sign validation
"""
wrapper.return_value.execute.return_value = 'ErrorCode: 0x00000000'
provider = ConsoleProvider(Config())
provider.verify_attached(Path(), Path())
wrapper.return_value.execute.assert_called_once()
@classmethod
@mock.patch('pycryptopro.provider.ConsoleWrapper')
def test_verify_detached(cls, wrapper):
"""
Tests detached sign validation
"""
wrapper.return_value.execute.return_value = 'ErrorCode: 0x00000000'
provider = ConsoleProvider(Config())
provider.verify_detached(Path(), Path())
wrapper.return_value.execute.assert_called_once()
if __name__ == '__main__':
unittest.main()
| true |
1bdae7408e4bf440e03653f5547726ed8f310eac | Python | ph03n1x13/Codemarshal | /upperLower.py | UTF-8 | 1,412 | 3.671875 | 4 | [] | no_license | import time
"""
Codemarshal problem upper lower
time : 0.0 sec
mem : 2 KB
[+]problem statement: https://algo.codemarshal.org/problems/556a0cb1a843fc851d47b4cd
"""
def lower_upper_ascii(st):
"""converting case using ASCII values"""
result = ''
for letters in xrange(len(st)):
if ord(st[letters]) >= 97 and ord(st[letters]) <= 122:
result += chr(ord(st[letters])-32) # changing lower ASCII orders into capitals
else:
result += st[letters]
return result
def up_low_swap_ascii(stng): # using ASCII values
result = ''
start = time.time()
for letters in xrange(len(stng)):
if ord(stng[letters]) >= 97 and ord(stng[letters]) <= 122:
result += chr(ord(stng[letters])-32)
elif ord(stng[letters]) >= 65 and ord(stng[letters]) <= 90:
result += chr(ord(stng[letters])+32)
else:
result += stng[letters]
end = time.time()
print 'execution time : %d sec' % (end-start)
return result
def lower_upper(st): # using standard library
"""converting case using ASCII values"""
result = ''
for letters in xrange(len(st)):
result += st[letters].upper()
return result
def low_up(st): # using standard library
return st.upper()
def up_low(st): # using standard library
return st.lower()
tst = raw_input(">>> ")
out = up_low(tst)
print out
| true |
cf9bc04f2c2638d32bfa0cbee2aebf0abf848eac | Python | bpolgardy/Python-snippets | /to_query_string.py | UTF-8 | 402 | 3.34375 | 3 | [] | no_license | def to_query_string(data):
'''
Converts in put to query string.
'''
key_value_pairs = []
for key, value in data.items():
if type(value) == list:
for element in value:
key_value_pairs.append(f'{key}={element}')
else:
key_value_pairs.append(f'{key}={value}')
query_string = '&'.join(key_value_pairs)
return query_string
| true |
35376d07c61ed266b915147fef1d958f4c19df15 | Python | maxgold/tps_normals | /tn_visualization/mayavi_utils.py | UTF-8 | 9,991 | 2.671875 | 3 | [] | no_license | import numpy as np
from mayavi import mlab
from tn_utils.colorize import colorize
def disp_pts(points, normals, color1=(1,0,0), color2=(0,1,0), scale_factor=0.01):
    """
    Ankush's plotting code.

    Render two point clouds as sphere glyphs in the current mayavi figure
    and install a mouse-pick callback: clicking a sphere draws an outline
    box around it and displays its row index as on-screen text.

    points       -- array of xyz rows, drawn in color1
                    (assumed (N,3) float -- TODO confirm at call sites)
    normals      -- second array of xyz rows, drawn in color2
    color1/color2-- RGB tuples for the two clouds
    scale_factor -- size of the sphere glyphs
    """
    figure = mlab.gcf()
    mlab.clf()
    # Suppress intermediate redraws while the scene is being populated.
    figure.scene.disable_render = True
    points_glyphs = mlab.points3d(points[:,0], points[:,1], points[:,2], color=color1, resolution=20, scale_factor=scale_factor)
    normals_glyphs = mlab.points3d(normals[:,0], normals[:,1], normals[:,2], color=color2, resolution=20, scale_factor=scale_factor)
    # Tessellated vertices of a single glyph; used below to map a picked
    # vertex id back to the index of the data point it belongs to.
    glyph_points1 = points_glyphs.glyph.glyph_source.glyph_source.output.points.to_array()
    glyph_points2 = normals_glyphs.glyph.glyph_source.glyph_source.output.points.to_array()
    # Half-size of the highlight outline box drawn around a picked point.
    dd = 0.001

    # Outline + text label for the first cloud, initialized on its first point.
    outline1 = mlab.outline(points_glyphs, line_width=3)
    outline1.outline_mode = 'full'
    p1x, p1y, p1z = points[0,:]
    outline1.bounds = (p1x-dd, p1x+dd,
                       p1y-dd, p1y+dd,
                       p1z-dd, p1z+dd)
    pt_id1 = mlab.text(0.8, 0.2, '0 .', width=0.1, color=color1)

    # Outline + text label for the second cloud, initialized on its first point.
    outline2 = mlab.outline(normals_glyphs, line_width=3)
    outline2.outline_mode = 'full'
    p2x, p2y, p2z = normals[0,:]
    outline2.bounds = (p2x-dd, p2x+dd,
                       p2y-dd, p2y+dd,
                       p2z-dd, p2z+dd)
    pt_id2 = mlab.text(0.8, 0.01, '0 .', width=0.1, color=color2)

    figure.scene.disable_render = False

    def picker_callback(picker):
        """ Picker callback: this gets called during pick events.
        """
        if picker.actor in points_glyphs.actor.actors:
            # Each data point is rendered as glyph_points1.shape[0] vertices;
            # divide the picked vertex id to recover the data-point index.
            # NOTE(review): relies on Python 2 integer division -- under
            # Python 3 this yields a float index; verify before porting.
            point_id = picker.point_id/glyph_points1.shape[0]
            if point_id != -1:
                ### show the point id
                pt_id1.text = '%d .'%point_id
                # Move the highlight box to the picked point.
                x, y, z = points[point_id,:]
                outline1.bounds = (x-dd, x+dd,
                                   y-dd, y+dd,
                                   z-dd, z+dd)
        elif picker.actor in normals_glyphs.actor.actors:
            # Same vertex-id -> point-index mapping for the second cloud.
            point_id = picker.point_id/glyph_points2.shape[0]
            if point_id != -1:
                ### show the point id
                pt_id2.text = '%d .'%point_id
                x, y, z = normals[point_id,:]
                outline2.bounds = (x-dd, x+dd,
                                   y-dd, y+dd,
                                   z-dd, z+dd)

    picker = figure.on_mouse_pick(picker_callback)
    picker.tolerance = dd/2.
def gen_grid(f, mins, maxes, ncoarse=10, nfine=30):
    """
    Generate a 2d or 3d grid of lines and warp it through the function f.

    f       : maps an (nfine, dim) array of points to warped points.
    mins    : per-axis lower bounds; its length (2 or 3) sets the dimension.
    maxes   : per-axis upper bounds (same length as mins).
    ncoarse : number of grid lines per axis.
    nfine   : number of sample points along each line.

    Returns a list of f-warped (nfine, dim) line arrays.
    Raises NotImplementedError for dimensions other than 2 or 3 (fix: the
    original raised the non-exception sentinel NotImplemented, which itself
    produced a TypeError).
    """
    dim = len(mins)
    if dim == 3:
        xmin, ymin, zmin = mins
        xmax, ymax, zmax = maxes
    elif dim == 2:
        xmin, ymin = mins
        xmax, ymax = maxes
    else:
        raise NotImplementedError("only 2d and 3d grids are supported")

    xcoarse = np.linspace(xmin, xmax, ncoarse)
    ycoarse = np.linspace(ymin, ymax, ncoarse)
    if dim == 3:
        zcoarse = np.linspace(zmin, zmax, ncoarse)

    xfine = np.linspace(xmin, xmax, nfine)
    yfine = np.linspace(ymin, ymax, nfine)
    if dim == 3:
        zfine = np.linspace(zmin, zmax, nfine)

    lines = []
    if dim == 3:
        if len(zcoarse) > 1:
            # One finely-sampled line per coarse (x, y), (y, z) and (z, x)
            # pair, varying along the remaining axis.
            for x in xcoarse:
                for y in ycoarse:
                    xyz = np.zeros((nfine, dim))
                    xyz[:,0] = x
                    xyz[:,1] = y
                    xyz[:,2] = zfine
                    lines.append(f(xyz))
            for y in ycoarse:
                for z in zcoarse:
                    xyz = np.zeros((nfine, dim))
                    xyz[:,0] = xfine
                    xyz[:,1] = y
                    xyz[:,2] = z
                    lines.append(f(xyz))
            for z in zcoarse:
                for x in xcoarse:
                    # fix: was np.zeros((nfine, 3)); use dim for consistency
                    xyz = np.zeros((nfine, dim))
                    xyz[:,0] = x
                    xyz[:,1] = yfine
                    xyz[:,2] = z
                    lines.append(f(xyz))
    else:
        # 2d: horizontal lines (fixed y) followed by vertical lines (fixed x).
        for y in ycoarse:
            xyz = np.zeros((nfine, dim))
            xyz[:,0] = xfine
            xyz[:,1] = y
            lines.append(f(xyz))
        for x in xcoarse:
            xyz = np.zeros((nfine, dim))
            xyz[:,0] = x
            xyz[:,1] = yfine
            lines.append(f(xyz))
    return lines
def gen_grid2(f, mins, maxes, xres = .01, yres = .01, zres = .01):
    """
    Generate a 2d or 3d grid of lines, spaced by resolution rather than by
    line count, and warp it through the function f.

    f          : maps an (n, dim) array of points to warped points.
    mins/maxes : per-axis bounds; their length (2 or 3) sets the dimension.
    xres/yres/zres : spacing between coarse grid lines; each line is then
        sampled at one fifth of that spacing.

    Returns a list of warped line arrays.
    Raises NotImplementedError for dimensions other than 2 or 3 (fix: the
    original raised the non-exception sentinel NotImplemented, which itself
    produced a TypeError).
    """
    dim = len(mins)
    if dim == 3:
        xmin, ymin, zmin = mins
        xmax, ymax, zmax = maxes
    elif dim == 2:
        xmin, ymin = mins
        xmax, ymax = maxes
    else:
        raise NotImplementedError("only 2d and 3d grids are supported")

    # The +res/10 nudge on the stop value makes the upper bound inclusive.
    xcoarse = np.arange(xmin, xmax+xres/10., xres)
    ycoarse = np.arange(ymin, ymax+yres/10., yres)
    if dim == 3:
        zcoarse = np.arange(zmin, zmax+zres/10., zres)

    xfine = np.arange(xmin, xmax+xres/10., xres/5.)
    yfine = np.arange(ymin, ymax+yres/10., yres/5.)
    if dim == 3:
        zfine = np.arange(zmin, zmax+zres/10., zres/5.)

    lines = []
    if dim == 3:
        if len(zcoarse) > 1:
            for x in xcoarse:
                for y in ycoarse:
                    xyz = np.zeros((len(zfine), 3))
                    xyz[:,0] = x
                    xyz[:,1] = y
                    xyz[:,2] = zfine
                    lines.append(f(xyz))
            for y in ycoarse:
                for z in zcoarse:
                    xyz = np.zeros((len(xfine), 3))
                    xyz[:,0] = xfine
                    xyz[:,1] = y
                    xyz[:,2] = z
                    lines.append(f(xyz))
            for z in zcoarse:
                for x in xcoarse:
                    xyz = np.zeros((len(yfine), 3))
                    xyz[:,0] = x
                    xyz[:,1] = yfine
                    xyz[:,2] = z
                    lines.append(f(xyz))
    else:
        # 2d: horizontal lines (fixed y) followed by vertical lines (fixed x).
        for y in ycoarse:
            xyz = np.zeros((len(xfine), dim))
            xyz[:,0] = xfine
            xyz[:,1] = y
            lines.append(f(xyz))
        for x in xcoarse:
            xyz = np.zeros((len(yfine), dim))
            xyz[:,0] = x
            xyz[:,1] = yfine
            lines.append(f(xyz))
    return lines
def plot_lines(lines, color=(1,1,1), line_width=1, opacity=0.4):
    """
    input :
        - lines : a LIST of m matrices of shape n_ix3
                  each matrix is interpreted as one line
        - color : (r,g,b) values for the lines
        - line_width : width of the lines
        - opacity : opacity of the lines
    output : plot each line in mayavi
    adapted from : http://docs.enthought.com/mayavi/mayavi/auto/example_plotting_many_lines.html
    call
    mlab.show() to actually display the grid, after this function returns
    """
    # Cumulative point counts per line; after the slice below, Ns holds the
    # last vertex index of every line except the final one.
    Ns = np.cumsum(np.array([l.shape[0] for l in lines]))
    Ntot = Ns[-1]
    Ns = Ns[:-1]-1
    # Segment connectivity (i -> i+1) over all stacked points, then cut the
    # segments that would bridge the end of one line to the start of the next.
    connects = np.vstack([np.arange(0, Ntot-1.5), np.arange(1,Ntot-0.5)]).T
    connects = np.delete(connects, Ns, axis=0)
    pts = np.vstack(lines)
    dim = pts.shape[1]
    # 2d input is padded with a zero z column so mayavi can render it.
    if dim == 2:
        pts = np.c_[pts,np.zeros((pts.shape[0],1))]
    s = np.ones(pts.shape[0])
    # Create the points
    src = mlab.pipeline.scalar_scatter(pts[:,0], pts[:,1], pts[:,2], s)
    src.mlab_source.dataset.lines = connects
    lines = mlab.pipeline.stripper(src)
    # Finally, display the set of lines
    surf = mlab.pipeline.surface(lines, line_width=line_width, opacity=opacity)
    # set the color of the lines via a two-entry constant lookup table
    r,g,b = color
    color = 255*np.array((r,g,b, 1))
    surf.module_manager.scalar_lut_manager.lut.table = np.array([color, color])
def plot_transform(T, size=0.1):
    """
    plots the transform represented by
    the 4x4 transformation matrix T.

    Each rotation column of T is drawn as an arrow of length `size`
    anchored at T's translation column: x axis red, y green, z blue.
    (Fix: removed the unused `origin_mat` local; the three near-identical
    quiver3d calls are collapsed into a loop.)
    """
    assert T.shape==(4,4)
    origin = np.c_[T[0:3,3]]
    axis_colors = ((1,0,0), (0,1,0), (0,0,1))
    for axis, axis_color in enumerate(axis_colors):
        mlab.quiver3d(np.c_[origin[0]], np.c_[origin[1]], np.c_[origin[2]],
                      np.c_[T[0,axis]], np.c_[T[1,axis]], np.c_[T[2,axis]],
                      color=axis_color, line_width=3, scale_factor=size)
def plot_warping(f, src, target, fine=True, draw_plinks=True):
    """
    function to plot the warping as defined by the function f.
    src : nx3 array
    target : nx3 array
    fine : if fine grid else coarse grid.
    draw_plinks : if True, draw a red segment linking each source point
        to its warped image.

    NOTE: uses Python 2 print statements; `target` is only touched when the
    input is 2d (its scatter call below is commented out).
    """
    print colorize("Plotting grid ...", 'blue', True)
    mean = np.mean(src, axis=0)
    print '\tmean : ', mean
    print '\tmins : ', np.min(src, axis=0)
    print '\tmaxes : ', np.max(src, axis=0)
    # Grid bounds follow the source cloud's bounding box.
    mins = np.min(src, axis=0)#mean + [-0.1, -0.1, -0.01]
    maxes = np.max(src, axis=0)#mean + [0.1, 0.1, 0.01]
    grid_lines = []
    if fine:
        grid_lines = gen_grid2(f.transform_points, mins=mins, maxes=maxes, xres=0.005, yres=0.005, zres=0.002)
    else:
        grid_lines = gen_grid(f.transform_points, mins=mins, maxes=maxes)
    plot_lines(grid_lines, color=(0,0.5,0.3))
    warped = f.transform_points(src)
    # 2d inputs are padded with a zero z column so mayavi can draw them.
    if src.shape[1] == 2:
        src = np.c_[src,np.zeros((src.shape[0],1))]
        target = np.c_[target,np.zeros((target.shape[0],1))]
        warped = np.c_[warped,np.zeros((warped.shape[0],1))]
    mlab.points3d (src[:,0], src[:,1], src[:,2], color=(1,0,0), scale_factor=0.01)
    #mlab.points3d (target[:,0], target[:,1], target[:,2], color=(0,0,1), scale_factor=0.01)
    mlab.points3d (warped[:,0], warped[:,1], warped[:,2], color=(0,1,0), scale_factor=0.01)
    if draw_plinks:
        plinks = [np.c_[ps, pw].T for ps,pw in zip(src, warped)]
        plot_lines (lines=plinks, color=(0.5,0,0), line_width=2, opacity=1)
| true |
23b2bd751a879217b9acc607a069c2137bf99aa1 | Python | Scrolen/flappyBird | /flappyBird.py | UTF-8 | 7,199 | 2.71875 | 3 | [] | no_license | import pygame
import sys
import random
# Variables
gravity = 0.23        # per-frame downward acceleration applied to the bird
bird_velocity = 0     # current vertical speed; negative means moving upward
game_state = False    # True while a round is being played, False on the menu
score = 0             # current-round score, incremented a little every frame
high_score = 0        # best score seen this session (updated on game over)
class Bird:
    """The player sprite: three flap frames plus a collision rect.

    Fix: set_index() now also swaps the active surface, so the flap
    animation actually cycles. Previously `bird_surface` stayed frozen on
    the frame chosen in __init__, even though the BIRDANIM timer advanced
    the index (see the commented-out legacy `bird_animation` below the
    class, which shows the intended frame swap).
    """

    def __init__(self, bird_down, bird_mid, bird_up):
        # Load and 2x-scale the three flap frames (file paths).
        self.bird_down = pygame.transform.scale2x(pygame.image.load(bird_down)).convert_alpha()
        self.bird_mid = pygame.transform.scale2x(pygame.image.load(bird_mid)).convert_alpha()
        self.bird_up = pygame.transform.scale2x(pygame.image.load(bird_up)).convert_alpha()
        self.bird_states = [self.bird_down, self.bird_mid, self.bird_up]
        self.index = 0
        self.bird_surface = self.bird_states[self.index]
        self.bird_col_rect = self.bird_surface.get_rect(center=(100, 512))

    def set_index(self, value):
        """Select animation frame *value* (0-2) and activate its surface."""
        self.index = value
        # Bug fix: keep the drawn surface in sync with the frame index.
        self.bird_surface = self.bird_states[value]

    def get_index(self):
        return self.index

    def get_bird_surface(self):
        return self.bird_surface

    def get_col_rect(self):
        return self.bird_col_rect

    def animate_bird(self):
        """Return the current frame tilted in proportion to the fall speed.

        NOTE: reads the module-level ``bird_velocity`` global.
        """
        anim_bird = pygame.transform.rotozoom(self.bird_surface, -bird_velocity * 2.3, 1)
        return anim_bird

    def bird_animation(self):
        """Return (surface, rect) for the currently active frame."""
        bird = self.bird_surface
        bird_rect = self.bird_col_rect
        return bird, bird_rect

    def set_center(self, x, y):
        self.bird_col_rect.center = (x, y)

    def add_centerY(self, value):
        self.bird_col_rect.centery += value
def spawn_pipe():
    """Create one top/bottom pipe rect pair at a random gap height.

    Reads the module-level ``pipe_surface`` and ``pipe_y`` globals; the pair
    spawns just off the right edge (x=700) with a 280-px vertical gap.
    """
    gap_y = random.choice(pipe_y)
    top_rect = pipe_surface.get_rect(midbottom=(700, gap_y - 280))
    bottom_rect = pipe_surface.get_rect(midtop=(700, gap_y))
    return top_rect, bottom_rect
def pipe_move(pipe_list):
    """Shift every pipe rect 4 px to the left and return the same list."""
    for rect in pipe_list:
        rect.centerx -= 4
    return pipe_list
def draw_pipes(pipe_list):
    """Blit every pipe onto the screen; upper pipes are drawn flipped.

    A rect whose bottom reaches past the window height (1024) is a lower
    pipe and is drawn as-is; anything else is an upper pipe and gets the
    vertically mirrored surface.
    """
    for rect in pipe_list:
        if rect.bottom >= 1024:
            screen.blit(pipe_surface, rect)
        else:
            mirrored = pygame.transform.flip(pipe_surface, False, True)
            screen.blit(mirrored, rect)
def floor_cycle(x):
    """Draw two adjacent floor tiles at offset *x* for seamless scrolling."""
    for offset in (0, 576):
        screen.blit(floor_surface, (x + offset, 850))
def display_score(game_st):
    """Render the live score during play, or the round/high-score summary
    on the menu screen. Reads the module-level font/screen/score globals."""
    if game_st:
        live_surface = font.render(str(int(score)), True, (255, 255, 255))
        screen.blit(live_surface, live_surface.get_rect(center=(288, 100)))
    if game_st is False:
        round_surface = font.render(f'Score: {int(score)}', True, (255, 255, 255))
        screen.blit(round_surface, round_surface.get_rect(center=(288, 100)))
        best_surface = font.render(f'High Score: {int(high_score)}', True, (255, 255, 255))
        screen.blit(best_surface, best_surface.get_rect(center=(288, 800)))
def update_hs(score, high_score):
    """Return the new high score: *score* if it beats the stored best,
    otherwise the stored best unchanged."""
    return score if score > high_score else high_score
def collision_checker(pipe_list):
    """Return False (ending the round, with a hit sound) when the bird has
    left the playfield or touches any pipe; True otherwise."""
    bird_rect = _bird.get_col_rect()
    if bird_rect.top <= -100 or bird_rect.bottom >= 850:
        die_sound.play()
        return False
    for pipe_rect in pipe_list:
        if bird_rect.colliderect(pipe_rect):
            die_sound.play()
            return False
    return True
#
# def animate_bird(bird):
# anim_bird = pygame.transform.rotozoom(bird, -bird_velocity*2.3, 1)
# return anim_bird
# def bird_animation():
# new_bird = bird_states[index]
# new_bird_rect = new_bird.get_rect(center=(100, bird_col_rect.centery))
# return new_bird, new_bird_rect
pygame.init()
font = pygame.font.Font('04B_19.ttf', 40)
screen = pygame.display.set_mode((576, 1024))
clock = pygame.time.Clock()
# bird_down = pygame.transform.scale2x(pygame.image.load('assets/bluebird-downflap.png')).convert_alpha()
# bird_mid = pygame.transform.scale2x(pygame.image.load('assets/bluebird-midflap.png')).convert_alpha()
# bird_up = pygame.transform.scale2x(pygame.image.load('assets/bluebird-upflap.png')).convert_alpha()
# bird_states = [bird_down, bird_mid, bird_up]
# bird_img = bird_states[index]
# bird_col_rect = bird_img.get_rect(center=(100, 512))
_bird = Bird('assets/bluebird-downflap.png', 'assets/bluebird-midflap.png', 'assets/bluebird-upflap.png')
background_surface = pygame.image.load('assets/background-day.png').convert()
background_surface = pygame.transform.scale2x(background_surface)
game_start_surface = pygame.transform.scale2x(pygame.image.load('assets/message.png').convert_alpha())
game_start_rect = game_start_surface.get_rect(center=(288, 512))
floor_surface = pygame.image.load('assets/base.png').convert()
floor_surface = pygame.transform.scale2x(floor_surface)
floor_x = 0
pipe_surface = pygame.image.load('assets/pipe-green.png')
pipe_surface = pygame.transform.scale2x(pipe_surface)
pipes = []
# Candidate y-positions for the pipe gap, picked at random in spawn_pipe().
pipe_y = [450, 500, 600, 700, 750]
# Custom timer events: spawn a pipe pair every 1.2 s, advance the flap
# animation frame every 300 ms.
SPAWNPIPE = pygame.USEREVENT
pygame.time.set_timer(SPAWNPIPE, 1200)
BIRDANIM = pygame.USEREVENT + 1
pygame.time.set_timer(BIRDANIM, 300)
# Sounds
flap_sound = pygame.mixer.Sound('sound/sfx_wing.wav')
die_sound = pygame.mixer.Sound('sound/sfx_hit.wav')
point_sound = pygame.mixer.Sound('sound/sfx_point.wav')
point_sound_cd = 1000  # frame countdown between point-sound plays
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
            sys.exit()
        if event.type == pygame.KEYDOWN and game_state is True:
            if event.key == pygame.K_SPACE:
                # Flap: cancel the current fall and apply an upward impulse.
                bird_velocity = 0
                bird_velocity -= 9.5
                flap_sound.play()
        if event.type == pygame.KEYDOWN and game_state is False:
            # Any key on the menu screen starts a fresh round.
            game_state = True
            pipes.clear()
            _bird.set_center(100, 512)
            bird_velocity = 0
            score = 0
            point_sound_cd = 1000
        if event.type == SPAWNPIPE:
            pipes.extend(spawn_pipe())
        if event.type == BIRDANIM:
            # Cycle the flap frame 0 -> 1 -> 2 -> 0.
            if _bird.get_index() < 2:
                _bird.set_index(_bird.get_index() + 1)
            else:
                _bird.set_index(0)
            # bird_img, bird_col_rect = bird_animation()
    # Displaying Background Sky
    screen.blit(background_surface, (0, 0))
    if game_state:
        # Displaying Bird
        bird_velocity += gravity
        animated_bird = _bird.animate_bird()
        _bird.add_centerY(bird_velocity)
        screen.blit(_bird.animate_bird(), _bird.get_col_rect())
        game_state = collision_checker(pipes)
        # Displaying Pipes
        pipes = pipe_move(pipes)
        draw_pipes(pipes)
        score += 0.005
        display_score(game_state)
        point_sound_cd -= 5
        if point_sound_cd == 0 and score != 0:
            point_sound.play()
            point_sound_cd = 1000
    else:
        screen.blit(game_start_surface, game_start_rect)
        high_score = update_hs(score, high_score)
        display_score(game_state)
    # Displaying the Floor (scrolls left, wrapping every tile width)
    floor_x -= 1
    if floor_x <= -576:
        floor_x = 0
    floor_cycle(floor_x)
    pygame.display.update()
    clock.tick(120)
| true |
a78dd8b32a0305a1ec3ee181ae69764fce784563 | Python | bmsleight/openshift-splendidsnap | /libs/_tweetss.py | UTF-8 | 704 | 2.75 | 3 | [] | no_license | import tweepy, os
def tweet(photo_path, text):
    """Post the image at *photo_path* to Twitter with *text* as the caption.

    Credentials come from a sibling ``_secret_key.py`` module; a ValueError
    is raised when that file does not exist.
    """
    key_file = os.path.join(os.path.dirname(__file__), '_secret_key.py')
    if not os.path.isfile(key_file):
        raise ValueError('Not twitter keys')
    from _secret_key import consumer_key, consumer_secret, access_token, access_token_secret
    # OAuth process, using the keys and tokens
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    # Creation of the actual interface, using authentication
    api = tweepy.API(auth)
    api.update_with_media(str(photo_path), status=str(text))
    # api.update_status(status=text)
| true |
c8dc95b46231f3a79f5d17c55a86d411556244cd | Python | marceloMiotto/UdacityWebFullStackProject3 | /python/report_tool.py | UTF-8 | 839 | 3.03125 | 3 | [] | no_license | #!/usr/bin/env python para Python 2
import psycopg2
import db_log
def connect(database_name):
    """Connect to the database. Returns a database connection.

    On failure, prints a message and exits the process with status 1.
    """
    # Local import: the module header never imports sys, so the original's
    # error path raised NameError instead of exiting cleanly.
    import sys
    try:
        db = psycopg2.connect(dbname=database_name)
        return db
    except psycopg2.Error:
        # Report the failure and stop the report run entirely.
        print("Unable to connect to database")
        sys.exit(1)
# Open connection to database
conn = connect("news")
# Create a cursor to handle the data
cur = conn.cursor()
# Data_Log (from the local db_log module) wraps the three report queries.
report = db_log.Data_Log(cur)
# Call the function to answer the first question
report.print_most_articles()
# Call the function to answer the second question
report.print_most_authors()
# Call the function to answer the third question
report.print_most_error_date()
# Close the connection
conn.close()
| true |
bde358a46ba1e4dddadbb25556f0112012848360 | Python | EsaZul/Python | /Gates/and_led.py | UTF-8 | 1,348 | 3.4375 | 3 | [] | no_license | #################################################
# #
# Name: Eduardo Saul Ruiz #
# Date: 10/27/2017 #
# Title: and_led.py #
# Function: Implement an AND gate in #
# HW and SW to switch LED #
# #
#################################################
##### Import Libraries #####
import RPi.GPIO as GPIO
import time
# Warnings Off
GPIO.setwarnings(0)
##### Initialize GPIO #####
# GPIO5 = output to LED
# GPIO3 = input from switch
GPIO.setmode(GPIO.BOARD)
# Both switch inputs use internal pull-ups, so a pressed switch reads LOW.
GPIO.setup(3, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(5, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(7, GPIO.OUT)
##### Runtime Variables #####
printed = 0 #Chill out w/the rick and morty broh
##### Main function #####
#
# Implement your logic function in SW here:
#
try:
    while True: #outer loop
        # AND gate: both inputs LOW (both switches pressed) lights the LED.
        if not GPIO.input(3) and not GPIO.input(5): #logic statement
            # make some mag #do this
            GPIO.output(7,1)
            if not printed: # Don't touch these lines
                print "Look Morty! We turned the light on with science!"
                printed = 1
        else:
            GPIO.output(7,0) #shut off
            printed = 0
# ^C exit
except KeyboardInterrupt:
    print "\n"
    print "Exiting program!"
    #clean up program
    GPIO.cleanup()
#error exit
# NOTE(review): bare except hides all other errors; it only prints a
# generic message and cleans up the GPIO state.
except:
    print "Oops! You have another error/exception."
    GPIO.cleanup()
#all else
finally:
    GPIO.cleanup()
| true |
eb87ece6224e3bd8f76424c248a54586265894bf | Python | krasserm/bayesian-machine-learning | /noise-contrastive-priors/utils.py | UTF-8 | 4,765 | 2.9375 | 3 | [
"Apache-2.0"
] | permissive | import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# ------------------------------------------
# Data
# ------------------------------------------
def select_bands(x, y, mask):
    """Keep only the contiguous bands of (x, y) flagged True in *mask*.

    The first dimension of x/y is split into len(mask) equal bands; each
    mask entry keeps or drops its whole band. Raises ValueError when the
    data length is not a multiple of the mask length.
    """
    assert x.shape[0] == y.shape[0]
    n_bands = len(mask)
    if x.shape[0] % n_bands != 0:
        raise ValueError('size of first dimension must be a multiple of mask length')
    keep = np.repeat(mask, x.shape[0] // n_bands)
    return [arr[keep] for arr in (x, y)]
def select_subset(x, y, num, rng=np.random):
    """Draw *num* aligned rows from x and y, sampled without replacement."""
    assert x.shape[0] == y.shape[0]
    idx = rng.choice(range(x.shape[0]), num, replace=False)
    return [arr[idx] for arr in (x, y)]
# ------------------------------------------
# Training
# ------------------------------------------
def data_loader(x, y, batch_size, shuffle=True):
    """Build a batched tf.data pipeline over (x, y), optionally shuffled
    over the full dataset length."""
    dataset = tf.data.Dataset.from_tensor_slices((x, y))
    if shuffle:
        dataset = dataset.shuffle(x.shape[0])
    return dataset.batch(batch_size)
def scheduler(decay_steps, decay_rate=0.5, lr=1e-3):
    """Exponential learning-rate decay schedule starting at *lr* and
    halving (by default) every *decay_steps* steps."""
    return tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate=lr,
        decay_steps=decay_steps,
        decay_rate=decay_rate,
    )
def optimizer(lr):
    """Adam optimizer with the given (possibly scheduled) learning rate."""
    return tf.optimizers.Adam(learning_rate=lr)
def backprop(model, loss, tape):
    """Return (gradient, variable) pairs ready for an optimizer's
    apply_gradients call."""
    variables = model.trainable_variables
    grads = tape.gradient(loss, variables)
    return zip(grads, variables)
def train(model, x, y,
          batch_size,
          epochs,
          step_fn,
          optimizer_fn=optimizer,
          scheduler_fn=scheduler,
          verbose=1,
          verbose_every=1000):
    """Generic mini-batch training loop.

    step_fn(model, optimizer, x_batch, y_batch) performs one optimization
    step and returns (loss, y_pred); optimizer_fn/scheduler_fn build the
    optimizer and its learning-rate schedule sized to the total step count.
    Progress is printed every *verbose_every* epochs.
    """
    steps_per_epoch = int(np.ceil(x.shape[0] / batch_size))
    steps = epochs * steps_per_epoch

    scheduler = scheduler_fn(steps)
    optimizer = optimizer_fn(scheduler)

    loss_tracker = tf.keras.metrics.Mean(name='loss')
    mse_tracker = tf.keras.metrics.MeanSquaredError(name='mse')

    loader = data_loader(x, y, batch_size=batch_size)

    for epoch in range(1, epochs + 1):
        for x_batch, y_batch in loader:
            loss, y_pred = step_fn(model, optimizer, x_batch, y_batch)
            loss_tracker.update_state(loss)
            mse_tracker.update_state(y_batch, y_pred)

        if verbose and epoch % verbose_every == 0:
            print(f'epoch {epoch}: loss = {loss_tracker.result():.3f}, mse = {mse_tracker.result():.3f}')

    # NOTE(review): the trackers are reset only once, after all epochs, so
    # the printed values are running means over the whole run -- confirm
    # whether a per-interval reset (inside the verbose branch) was intended.
    loss_tracker.reset_states()
    mse_tracker.reset_states()
# ------------------------------------------
# Visualization
# ------------------------------------------
# Shared matplotlib styling presets, consumed by the plot_* helpers below
# via **style['...'] keyword expansion.
style = {
    'bg_line': {'ls': '--', 'c': 'black', 'lw': 1.0, 'alpha': 0.5},
    'fg_data': {'marker': '.', 'c': 'red', 'lw': 1.0, 'alpha': 1.0},
    'bg_data': {'marker': '.', 'c': 'gray', 'lw': 0.2, 'alpha': 0.2},
    'pred_sample': {'marker': 'x', 'c': 'blue', 'lw': 0.6, 'alpha': 0.5},
    'pred_mean': {'ls': '-', 'c': 'blue', 'lw': 1.0},
    'a_unc': {'color': 'lightgreen'},
    'e_unc': {'color': 'orange'},
}
def plot_data(x_train, y_train, x=None, y=None):
    """Scatter the training points; optionally draw the true function f
    as a dashed background line when both x and y are given."""
    have_curve = x is not None and y is not None
    if have_curve:
        plt.plot(x, y, **style['bg_line'], label='f')
    plt.scatter(x_train, y_train, **style['fg_data'], label='Train data')
    plt.xlabel('x')
    plt.ylabel('y')
def plot_prediction(x, y_mean, y_samples=None, aleatoric_uncertainty=None, epistemic_uncertainty=None):
    """Plot the predictive mean, optional samples, and +/-2-sigma bands for
    the aleatoric and epistemic uncertainties (each skipped when None)."""
    x, y_mean, y_samples, epistemic_uncertainty, aleatoric_uncertainty = \
        flatten(x, y_mean, y_samples, epistemic_uncertainty, aleatoric_uncertainty)

    plt.plot(x, y_mean, **style['pred_mean'], label='Expected output')

    if y_samples is not None:
        plt.scatter(x, y_samples, **style['pred_sample'], label='Predictive samples')

    # Shaded +/-2-sigma bands, aleatoric first to match the original z-order.
    for band, style_key, band_label in (
        (aleatoric_uncertainty, 'a_unc', 'Aleatoric uncertainty'),
        (epistemic_uncertainty, 'e_unc', 'Epistemic uncertainty'),
    ):
        if band is not None:
            plt.fill_between(x,
                             y_mean + 2 * band,
                             y_mean - 2 * band,
                             **style[style_key], alpha=0.3, label=band_label)
def plot_uncertainty(x, aleatoric_uncertainty, epistemic_uncertainty=None):
    """Plot the aleatoric uncertainty over x, plus the epistemic curve
    when provided."""
    plt.xlabel('x')
    plt.ylabel('Uncertainty')
    plt.plot(x, aleatoric_uncertainty, **style['a_unc'], label='Aleatoric uncertainty')
    if epistemic_uncertainty is not None:
        plt.plot(x, epistemic_uncertainty, **style['e_unc'], label='Epistemic uncertainty')
def flatten(*ts):
    """Reshape each tensor argument to 1-D; None entries pass through."""
    return [None if t is None else tf.reshape(t, -1) for t in ts]
| true |
64d79f244213f9b117069398057f0a807e3771f0 | Python | lukaszrozej/project-euler | /p612.py | UTF-8 | 1,562 | 3.171875 | 3 | [] | no_license | # https://projecteuler.net/problem=612
import numpy as np
# N = 3 -> 289665
# N = 4 -> 39235977
# N = 5 -> 4528635021
# N = 6 -> 481858831665
# Number of digits considered per number.
N = 18

# Pascal's triangle cache: combinations[n, k] = C(n, k) for 0 <= k <= n <= 9.
combinations = np.zeros((10, 10), dtype=int)
combinations[:,0] = 1
for n in range(1,10):
    for k in range(1, n+1):
        combinations[n, k] = combinations[n-1, k] + combinations[n-1, k-1]

def comb(n,k):
    # Binomial coefficient looked up from the precomputed table.
    return int(combinations[n,k])

# k - how many distinct digits in a number
# n - how many digits in a number
# sum_over_j - how many n-digit numbers with k distinct digits are there
# (10**N - (10-k)**N) or (10**N - sub -1)
# - how many numbers are friends above mentioned n-digit numbers

# Case 1: 0 is not among the k digits. Inclusion-exclusion over j counts
# the n-digit strings using exactly the chosen k digits.
sum_over_k = 0
for k in range(1,10):
    sum_over_n = 0
    for n in range(1, N+1):
        sum_over_j = 0
        for j in range(0, k+1):
            if j % 2 == 0:
                sum_over_j += comb(k, j) * (k-j)**n
            else:
                sum_over_j += -comb(k, j) * (k-j)**n
        sum_over_n += sum_over_j
    sum_over_k += comb(9, k) * sum_over_n * (10**N - (10-k)**N)

# Case 2: 0 is among the k digits (leading digit must be one of the other
# k-1, hence the extra (k-1) factor and the shifted binomials).
for k in range(2,11):
    sum_over_n = 0
    for n in range(1, N+1):
        sum_over_j = 0
        for j in range(0, k):
            if j % 2 == 0:
                sum_over_j += comb(k-1, j) * (k-j)**(n-1) * (k-1)
            else:
                sum_over_j += -comb(k-1, j) * (k-j)**(n-1) * (k-1)
        sum_over_n += sum_over_j
    # sub counts the friend numbers excluded for this k.
    if k == 10:
        sub = 0
    elif k == 9:
        sub = N
    else:
        sub = (10-k) * ((10-k)**N - 1) // (10 - k - 1)
    sum_over_k += comb(9, k-1) * sum_over_n * (10**N - sub -1)

# substract pairs x,x, don't count each pair twice
result = ((sum_over_k - 10**N + 1) // 2)
result %= 1000267129
print(result)
def ArithGeo(arr):
    """Classify *arr* as "Arithmetic", "Geometric", or -1 (neither).

    NOTE: this definition is immediately shadowed by the second ArithGeo
    defined below it in the original file.
    """
    diff = arr[-1] - arr[-2]
    ratio = arr[-1] / arr[-2]
    is_arith = all(b - a == diff for a, b in zip(arr, arr[1:]))
    is_geo = all(b / a == ratio for a, b in zip(arr, arr[1:]))
    if is_arith:
        return "Arithmetic"
    if is_geo:
        return "Geometric"
    return -1
#print ArithGeo(raw_input())
def ArithGeo(arr):
    """Classify *arr* as 'Arithmetic', 'Geometric', or the string '-1'.

    Sequences shorter than two elements are classified as '-1'. Note that
    this overload returns the string '-1', unlike the earlier definition,
    which returned the integer -1.
    """
    if len(arr) < 2:
        return '-1'
    diff = arr[1] - arr[0]
    ratio = arr[1] / arr[0]
    is_arith = all(arr[i] == arr[i - 1] + diff for i in range(1, len(arr)))
    is_geo = all(arr[i] == arr[i - 1] * ratio for i in range(1, len(arr)))
    if is_arith:
        return 'Arithmetic'
    if is_geo:
        return 'Geometric'
    return '-1'
# keep this function call here
# to see how to enter arguments in Python scroll down
#print ArithGeo(raw_input())
def ArrayAdditionI(arr):
    """Return 'true' if some subset of the other elements sums to the
    largest element of *arr*, else 'false'.

    Fixes over the original: the caller's list is no longer mutated (the
    original del'd the max element in place), and subset sums are tracked
    in a set instead of materializing the full power set.
    """
    items = list(arr)          # work on a copy; never mutate the input
    target = max(items)
    items.remove(target)       # drop one occurrence of the maximum
    subset_sums = {0}
    for value in items:
        new_sums = {s + value for s in subset_sums}
        if target in new_sums:
            return 'true'
        subset_sums |= new_sums
    return 'false'
#print ArrayAdditionI(raw_input())
def NumberAddition(s):
    """Return the sum of every maximal run of digits embedded in *s*.

    Example: "Won90 8" -> 90 + 8 = 98.

    Fix: the original index-juggling implementation crashed with
    int('') whenever no digit appeared at a low string index (including
    the file's own demo call); this version scans the string once.
    """
    total = 0
    current = ''
    for ch in s:
        if ch.isdigit():
            current += ch          # extend the digit run in progress
        elif current:
            total += int(current)  # run ended: add its value
            current = ''
    if current:
        total += int(current)      # string ended inside a digit run
    return total
# Parenthesized print: valid in both Python 2 and Python 3 (the original
# Python 2 statement form was a syntax error under Python 3).
print(NumberAddition("Won90 8"))
| true |
a0da4b2aeab8bfa457a282352dfd47358b85515b | Python | KirtimanS/Jmeter-Cloud | /secure_connection.py | UTF-8 | 3,650 | 2.5625 | 3 | [] | no_license | import paramiko
from subprocess import Popen, PIPE, STDOUT
import sys
import traceback
from misc import Misc
import time
class SecureConn:
    """SSH/SCP helper for driving JMeter on a remote EC2 instance.

    Args:
        key_path: path to the PEM private key used for authentication.
        private_ip: IP address / hostname of the target instance.
        metric: transfer-progress display style, "percentage" or "absolute".
    """

    def __init__(self, key_path, private_ip, metric="percentage"):
        self.disp_metric = metric
        self.key_path = key_path
        self.private_ip = private_ip

    def move_files_scp(self, file_paths, direction):
        """Copy each of *file_paths* via scp in the given *direction*.

        Fixes over the original: raises the built-in LookupError instead of
        the undefined name ``LookUpError``; each path is actually placed on
        its own command line; and the argument list is executed without
        ``shell=True`` (a list combined with shell=True runs only the first
        element on POSIX).

        NOTE(review): like the original, no remote ``user@host:path`` target
        is specified -- confirm the intended scp destination before relying
        on this helper.
        """
        if direction not in ('upload', 'download'):
            raise LookupError("File movement direction invalid. Choose from 'Upload' and 'Download'.")
        for path in file_paths:
            command = ["scp", "-i", self.key_path, path]
            p = Popen(command, stdout=PIPE, stderr=STDOUT)
            while True:
                line = p.stdout.readline().decode('utf-8')
                if not line:
                    break
                print(line)
        ## TODO: add logger

    def send_commands(self, cmds, test_plan, test_data, result_log_name='log.jtl', log_name='jmeter.log', default_username="ec2-user"):
        """Upload the JMeter plan (and optional CSV data), run *cmds* over
        SSH, then download the JMeter log and results files.

        When the default result log name is used, downloaded files are
        prefixed with '<plan-name>_<timestamp>_'.
        """
        prefix = ''
        if result_log_name == 'log.jtl':
            prefix = Misc.remove_extensions(Misc.get_filename_from_path(test_plan)) + '_' + Misc.time_now() + '_'
        with paramiko.SSHClient() as client:
            key = paramiko.RSAKey.from_private_key_file(self.key_path)
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            try:
                client.connect(hostname=self.private_ip, username=default_username, pkey=key)
                self.execute_commands(client, ["echo 'Connected to EC2 instance'"])  # connection acknowledgement
                self.execute_commands(client, ["mkdir jmeter"])  # create separate directory
                ftp_client = client.open_sftp()
                print("Starting Jmeter config file upload:", Misc.get_filename_from_path(test_plan))
                ftp_client.put(test_plan, '/home/ec2-user/jmeter/' + Misc.get_filename_from_path(test_plan), callback=self.print_transfer_status)
                print("Jmeter config (.jmx) file uploaded.")
                if test_data:
                    print("Starting test data CSV file upload:", Misc.get_filename_from_path(test_data))
                    ftp_client.put(test_data, '/home/ec2-user/jmeter/' + Misc.get_filename_from_path(test_data), callback=self.print_transfer_status)
                    print("Test Data (.csv) uploaded.")
                self.execute_commands(client, cmds)
                print('Starting Jmeter log (.log) file download')
                ftp_client.get('/home/ec2-user/' + log_name, prefix + log_name, callback=self.print_transfer_status)
                print("JMeter log (.log) file downloaded as :", prefix + log_name)
                print("Starting Jmeter results file (.jtl) download.")
                ftp_client.get('/home/ec2-user/jmeter/' + result_log_name, prefix + result_log_name, callback=self.print_transfer_status)
                print("JMeter results (.jtl) file downloaded as :", prefix + result_log_name)
                ftp_client.close()
            except Exception as e:
                # Broad catch at this top-level boundary: report the error and
                # dump the traceback instead of crashing the caller.
                if hasattr(e, 'message'):
                    print("Error occured: ", e.message)
                else:
                    print("Error occured: ", e)
                traceback.print_exc(file=sys.stdout)

    def execute_commands(self, client, cmds):
        """Run each shell command on *client* (with a PTY) and stream its
        output; returns the stdout handle of the last command."""
        for cmd in cmds:
            stdin, stdout, stderr = client.exec_command(cmd, get_pty=True)
            while True:
                line = stdout.readline()
                if not line:
                    break
                print(line)
        return stdout

    def print_transfer_status(self, transferred, toBeTransferred):
        """SFTP progress callback, formatted per ``self.disp_metric``.

        Fix: raises the built-in LookupError (the original referenced the
        undefined name ``LookUpError``).
        """
        if self.disp_metric == "percentage":
            print("File transferred: {0:.0f} %".format((transferred / toBeTransferred) * 100), end="\r", flush=True)
        elif self.disp_metric == "absolute":
            print("File transferred: ", transferred, "out of ", toBeTransferred)
        else:
            raise LookupError("Status display metric not found. Choose from 'percentage' and 'absolute'")
if __name__ == "__main__":
    # NOTE(review): `send_commands` is an instance method of SecureConn and
    # `sys` is never imported at module level, so executing this file
    # directly raises NameError -- confirm the intended CLI entry point.
    send_commands(sys.argv)
| true |
e858f0187e654fd2c362f784e7945ae33055f2f2 | Python | fahmisalman/AI-Python | /NearestNeighbors/__init__.py | UTF-8 | 1,485 | 3 | 3 | [
"MIT"
] | permissive | import numpy as np
import os
class KNN(object):
    """k-nearest-neighbour classifier.

    NOTE: despite the historical variable naming, neighbours are ranked by
    cosine similarity (higher = more similar), not Euclidean distance.
    """

    def __init__(self, kneighbors):
        # Number of neighbours consulted per prediction.
        self.k = kneighbors
        self.x_train = []
        self.y_train = []
        self.y_list = []
        # Side effect kept from the original: move one directory up so that
        # load_dataset() can resolve the Sample-Datasets folder.
        os.chdir('..')

    def load_dataset(self, data):
        """Load '<cwd>/Sample-Datasets/<data>.csv' -> (features, labels)."""
        raw = np.loadtxt(open("%s/Sample-Datasets/%s.csv" % (os.path.abspath(os.curdir), data), "r"),
                         delimiter=",")
        return raw[:, 0:2], raw[:, 2]

    def fit(self, x, y):
        """Memorise the training data and the set of distinct labels."""
        self.x_train = x
        self.y_train = y
        self.y_list = list(set(self.y_train))

    def predict(self, x):
        """Predict one label per row of x by majority vote among the k
        training rows with the highest cosine similarity."""
        predictions = []
        for sample in x:
            similarities = [
                np.dot(sample, train_row) / (np.linalg.norm(sample) * np.linalg.norm(train_row))
                for train_row in self.x_train
            ]
            ranked = sorted(range(len(similarities)), key=lambda j: similarities[j], reverse=True)
            neighbour_labels = [self.y_train[j] for j in ranked[:self.k]]
            votes = [neighbour_labels.count(label) for label in self.y_list]
            predictions.append(self.y_list[votes.index(max(votes))])
        return predictions

    def score(self, x, y):
        """Fraction of rows of x whose predicted label matches y."""
        predictions = self.predict(x)
        correct = 0
        for i in range(len(predictions)):
            if predictions[i] == y[i]:
                correct += 1
        return correct / len(predictions)
| true |
d7b1acd368793a7b9d10d78abbbd19b2e8209748 | Python | ray-project/ray | /rllib/examples/learner/train_w_bc_finetune_w_ppo.py | UTF-8 | 5,805 | 2.859375 | 3 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | """
This example shows how to pretrain an RLModule using behavioral cloning from offline
data and, thereafter training it online with PPO.
"""
import gymnasium as gym
import shutil
import tempfile
import torch
from typing import Mapping
import ray
from ray import tune
from ray.air import RunConfig, FailureConfig
from ray.rllib.algorithms.ppo import PPOConfig
from ray.rllib.algorithms.ppo.torch.ppo_torch_rl_module import PPOTorchRLModule
from ray.rllib.algorithms.ppo.ppo_catalog import PPOCatalog
from ray.rllib.core.models.base import ACTOR, ENCODER_OUT
from ray.rllib.core.rl_module.rl_module import SingleAgentRLModuleSpec
# Environment used both for reading the obs/action spaces during BC
# pretraining and as the target for online PPO fine-tuning.
GYM_ENV_NAME = "CartPole-v1"
GYM_ENV = gym.make(GYM_ENV_NAME)  # instantiated at import time for its spaces
class BCActor(torch.nn.Module):
    """A wrapper for the encoder and policy networks of a PPORLModule.

    Chains the actor path (encoder -> policy head) and exposes it as a
    single module whose forward pass yields an action distribution.

    Args:
        encoder_network: The encoder network of the PPORLModule.
        policy_network: The policy network of the PPORLModule.
        distribution_cls: The distribution class to construct with the logits
            outputed by the policy network.
    """

    def __init__(
        self,
        encoder_network: torch.nn.Module,
        policy_network: torch.nn.Module,
        distribution_cls: torch.distributions.Distribution,
    ):
        super().__init__()
        self.encoder_network = encoder_network
        self.policy_network = policy_network
        self.distribution_cls = distribution_cls

    def forward(
        self, batch: Mapping[str, torch.Tensor]
    ) -> torch.distributions.Distribution:
        """Return an action distribution output by the policy network.

        batch: A dict containing the key "obs" mapping to a torch tensor of
            observations.
        """
        # The encoder emits outputs for both the actor and critic heads of
        # the PPORLModule; keep only the actor branch, then turn its logits
        # into a distribution instance.
        actor_latent = self.encoder_network(batch)[ENCODER_OUT][ACTOR]
        logits = self.policy_network(actor_latent)
        return self.distribution_cls(logits=logits)
def train_ppo_module_with_bc_finetune(
    dataset: ray.data.Dataset, ppo_module_spec: SingleAgentRLModuleSpec
) -> str:
    """Train an Actor with BC finetuning on dataset.

    Args:
        dataset: The dataset to train on.
        ppo_module_spec: The module spec of the PPORLModule that will be
            trained after its encoder and policy networks are pretrained
            with BC.

    Returns:
        The path to the checkpoint of the pretrained PPORLModule.
    """
    batch_size = 512
    learning_rate = 1e-3
    num_epochs = 10

    module = ppo_module_spec.build()
    # We want to pretrain the encoder and policy networks of the RLModule. We don't want
    # to pretrain the value network. The actor will use the Categorical distribution,
    # as its output distribution since we are training on the CartPole environment which
    # has a discrete action space.
    BCActorNetwork = BCActor(module.encoder, module.pi, torch.distributions.Categorical)
    optim = torch.optim.Adam(BCActorNetwork.parameters(), lr=learning_rate)

    for epoch in range(num_epochs):
        for batch in dataset.iter_torch_batches(
            batch_size=batch_size, dtypes=torch.float32
        ):
            action_dist = BCActorNetwork(batch)
            # BC loss: negative log-likelihood of the logged expert actions.
            loss = -torch.mean(action_dist.log_prob(batch["actions"]))
            optim.zero_grad()
            loss.backward()
            optim.step()
        # Reports the loss of the last batch of the epoch only.
        print(f"Epoch {epoch} loss: {loss.detach().item()}")

    checkpoint_dir = tempfile.mkdtemp()
    module.save_to_checkpoint(checkpoint_dir)
    return checkpoint_dir
def train_ppo_agent_from_checkpointed_module(
    module_spec_from_ckpt: SingleAgentRLModuleSpec,
) -> float:
    """Train a checkpointed RLModule using PPO.

    Args:
        module_spec_from_ckpt: The module spec of the checkpointed RLModule.

    Returns:
        The best reward mean achieved by the PPO agent.
    """
    config = (
        PPOConfig()
        .training()
        .rl_module(rl_module_spec=module_spec_from_ckpt)
        .environment(GYM_ENV_NAME)
        .debugging(seed=0)  # fixed seed for reproducibility
    )

    # Run 10 PPO training iterations via Tune; fail_fast surfaces worker
    # errors immediately instead of retrying.
    tuner = tune.Tuner(
        "PPO",
        param_space=config.to_dict(),
        run_config=RunConfig(
            stop={"training_iteration": 10},
            failure_config=FailureConfig(fail_fast="raise"),
            verbose=2,
        ),
    )
    results = tuner.fit()
    best_reward_mean = results.get_best_result().metrics["episode_reward_mean"]
    return best_reward_mean
if __name__ == "__main__":
    ray.init()
    ray.data.set_progress_bars(False)

    # You can use Ray Data to load a dataset from pandas or from a JSON file.
    # The columns of the dataset are ["obs", "actions"].
    ds = ray.data.read_json("s3://rllib-oss-tests/cartpole-expert")
    # Spec for a small two-hidden-layer PPO torch module on CartPole.
    module_spec = SingleAgentRLModuleSpec(
        module_class=PPOTorchRLModule,
        observation_space=GYM_ENV.observation_space,
        action_space=GYM_ENV.action_space,
        model_config_dict={"fcnet_hiddens": [64, 64]},
        catalog_class=PPOCatalog,
    )
    # Run supervised training on a PPO Module with behavioral cloning loss.
    module_checkpoint_path = train_ppo_module_with_bc_finetune(ds, module_spec)
    # Modify the load_state_path attribute of module_spec to indicate the checkpoint
    # path for the RLModule. This allows us to resume RL fine-tuning after loading the
    # pre-trained model weights.
    module_spec.load_state_path = module_checkpoint_path
    best_reward = train_ppo_agent_from_checkpointed_module(module_spec)
    assert (
        best_reward > 300
    ), "The PPO agent with pretraining should achieve a reward of at least 300."
    # clean up the checkpoint directory
    shutil.rmtree(module_checkpoint_path)
| true |
866ea25c3e610d2715c036b054c9103a8595c93e | Python | OR2513/PseUdeep | /One_hot_feature.py | UTF-8 | 1,384 | 2.84375 | 3 | [] | no_license | import pandas as pd
import numpy as np
def read_fasta_file(path='E:/PseUdeep_master/data/S_627.txt'):
    """Read RNA sequences from a FASTA-style file into a character matrix.

    Header lines (starting with ``>``) are skipped; every other line is kept
    as one sequence with newline characters stripped.

    Args:
        path: Location of the input file. Defaults to the original
            hard-coded dataset path for backward compatibility.

    Returns:
        numpy.ndarray: One row per sequence, one single-character string per
        column. NOTE(review): numpy needs all sequences to be the same
        length for a regular 2-D array — presumably true for this dataset.
    """
    seq = []
    # 'with' guarantees the file handle is closed even if parsing raises.
    with open(path, 'r') as fh:
        for line in fh:
            if line.startswith('>'):
                continue
            seq.append(line.replace('\n', '').replace('\r', ''))
    matrix_data = np.array([list(e) for e in seq])
    print(matrix_data)
    print(len(matrix_data))
    return matrix_data
def extract_line(data_line):
    """One-hot encode a single RNA sequence.

    Each recognized base (A, C, G, U) contributes its 4-bit one-hot vector;
    any other character is silently skipped.

    Args:
        data_line: Iterable of single-character strings (one base each).

    Returns:
        Flat list of 0/1 ints, 4 entries per recognized base.
    """
    encoding = {
        "A": [0, 0, 0, 1],
        "C": [0, 1, 0, 0],
        "G": [1, 0, 0, 0],
        "U": [0, 0, 1, 0],
    }
    # Unknown bases map to the empty list and therefore add nothing.
    return [bit for base in data_line for bit in encoding.get(base, [])]
def feature_extraction(matrix_data):
    """One-hot encode every sequence in the matrix.

    Args:
        matrix_data: Iterable of sequences (each an iterable of bases).

    Returns:
        List of flat one-hot feature lists, one per sequence.
    """
    return list(map(extract_line, matrix_data))
# Script entry: read the dataset, one-hot encode it, and persist the result.
matrix_data = read_fasta_file()
#print(matrix_data)
final_feature_matrix = feature_extraction(matrix_data)
#print(final_feature_matrix)
# Shape sanity check: (num_sequences, 4 * sequence_length).
print(np.array(final_feature_matrix).shape)
# Persist twice: CSV (no header/index) for inspection, and .npy for fast
# loading by downstream model code.
pd.DataFrame(final_feature_matrix).to_csv('E:/PseUdeep_master/feature/one-hot/S_627_one_hot.csv',header=None,index=False)
final_feature_matrix1 = np.array(final_feature_matrix)
np.save("E:/PseUdeep_master/feature/one-hot/S_627_onehot.npy",final_feature_matrix1)
| true |
fa02ff7b4425060b6a0700e5b9ff05f7851ff62a | Python | vishwajeet-hogale/Referenceandqna | /qna.py | UTF-8 | 305 | 2.515625 | 3 | [
"MIT"
] | permissive |
import requests
def get_answer(text, question):
    """Send a context passage and question(s) to the remote QnA API.

    Args:
        text: The context passage to extract answers from.
        question: One question, or several questions joined with the
            literal '","' separator (the string is split on that token).

    Returns:
        The raw response body text from the API.

    Raises:
        requests.exceptions.Timeout: If the service does not answer in time.
    """
    payload = {
        "question_texts": question.split('","'),
        "context_text": text,
    }
    # NOTE(review): ngrok URLs are ephemeral — this host will need updating
    # whenever the tunnel is restarted.
    host = "http://a89df97acfc3.ngrok.io"
    url = f"{host}/api"
    # A timeout keeps the caller from blocking forever if the tunnel is down.
    response = requests.post(url, json=payload, timeout=30)
    print(response.text)
    return response.text
2b7bda798d7d5a1316334a0a4ee3263887689eb7 | Python | Galaxia5987/2020Vision | /realsense.py | UTF-8 | 4,808 | 2.9375 | 3 | [] | no_license | import logging
import time
import numpy as np
import constants
class RealSense:
    """
    Handler for Intel RealSense cameras. Uses functions accessed from pyrealsense2.
    RealSense cameras have a user interface installed when plugging in a camera, and it is the preferred method for
    debugging most methods found in this class.
    Attributes
    ----------
    name : str
        - the name of the camera in use
        - default: 'RealSense'
    serial_number : str
        - the serial number of camera, used in multiple camera setups
    pipeline : pyrealsense2.pipeline
        - the pipeline through which frames are received from the camera
    align : pyrealsense2.align
        - used for resizing the depth frame
    prof : pyrealsense2.pipeline.start
        - used for accessing camera settings such as exposure
    """
    def __init__(self, serial_number: str = None, rotated_vertical: bool = False, rotated_horizontal: bool = False,
                 name: str = 'RealSense'):
        """
        Import the RealSense library and start the pipeline for the camera. Configure various preferences, such as
        rotation of the camera and frame.
        :param serial_number: Must be filled with the camera's actual serial number, has no default.
        :param rotated_vertical: Whether the camera is mounted rotated 90 degrees.
        :param rotated_horizontal: Whether the camera is mounted rotated 180 degrees.
        :param name: Human-readable camera name used in log messages.
        """
        # Imported lazily so the module can be loaded on machines without
        # the pyrealsense2 SDK installed.
        import pyrealsense2 as rs
        config = rs.config()
        self.name = name
        if serial_number:
            config.enable_device(serial_number)
            self.serial_number = serial_number
        # Depth at 480x270 and color at 424x240, both at 60 FPS; the depth
        # frame is aligned to the color frame's size in `frame`.
        config.enable_stream(rs.stream.depth, 480, 270, rs.format.z16, 60)
        config.enable_stream(rs.stream.color, 424, 240, rs.format.bgr8, 60)
        self.pipeline = rs.pipeline()
        self.align = rs.align(rs.stream.color)
        start = time.perf_counter()
        self.prof = self.pipeline.start(config)
        logging.info('[{}] Took {:.3f} seconds to start pipeline'.format(self.name, time.perf_counter() - start))
        self.rs_options = rs.option
        self.exit = False
        self.depth_frame = None
        self.rotated_vertical = rotated_vertical
        self.rotated_horizontal = rotated_horizontal
        self.color_frame = None
    @property
    def frame(self):
        """
        The frame of the camera, treated as a variable, retrieved through an algorithm.
        Receives both the coloured frame and the depth frame from the pipeline and stores them in class variables.
        :return: The coloured frame as a numpy BGR image.
        """
        frames = self.pipeline.wait_for_frames()
        frames = self.align.process(frames)  # Align the depth frame to the color frame's resolution
        depth_frame = frames.get_depth_frame()
        # Stored for later per-pixel lookups in get_distance().
        self.depth_frame = depth_frame.as_depth_frame()
        color_frame = frames.get_color_frame()
        color_image = np.asanyarray(color_frame.get_data())
        self.color_frame = color_image
        return color_image
    def start(self):
        """
        Dry implementation of Thread run method, to match those in other cameras.
        """
        pass
    def release(self):
        """
        Release the camera and stop the loop.
        """
        self.exit = True
        self.pipeline.stop()
    @staticmethod
    def get_resolution():
        # (width, height) of the color stream configured in __init__.
        return 424, 240
    def set_exposure(self, exposure: int):
        """
        Set the exposure to a desired value. May not set the camera to the exact value, so the actual exposure of the
        camera is logged.
        :param exposure: Exposure to set the camera to.
        """
        # NOTE(review): query_sensors()[1] is presumably the RGB sensor on
        # this camera model — confirm for the specific device in use.
        s = self.prof.get_device().query_sensors()[1]
        # Auto exposure / auto white balance must be disabled before a
        # manual exposure value takes effect.
        s.set_option(self.rs_options.enable_auto_exposure, 0)
        s.set_option(self.rs_options.enable_auto_white_balance, 0)
        s.set_option(self.rs_options.exposure, exposure)
        logging.info('Current exposure: {}'.format(s.get_option(self.rs_options.exposure)))
    def get_distance(self, x, y):
        """
        Matches a coloured pixel to its distance recorded in the depth frame.
        Coordinates are remapped when the camera is mounted rotated.
        :param x: X coordinate of the pixel.
        :param y: Y coordinate of the pixel.
        :return: The real life distance of the object the pixel.
        """
        if self.rotated_horizontal:
            # 180-degree mount: mirror both axes.
            return self.depth_frame.get_distance(self.get_resolution()[0] - x, self.get_resolution()[1] - y)
        elif self.rotated_vertical:
            # NOTE(review): both serial-number branches below are identical;
            # likely a placeholder for per-camera calibration differences.
            if self.serial_number == constants.REALSENSE_CAMERAS[0]['hatch']:
                # (y, 480 - x)
                return self.depth_frame.get_distance(y, self.get_resolution()[1] - x)
            elif self.serial_number == constants.REALSENSE_CAMERAS[0]['cargo']:
                # (y, 480 - x)
                return self.depth_frame.get_distance(y, self.get_resolution()[1] - x)
        return self.depth_frame.get_distance(x, y)
if __name__ == "__main__":
help(RealSense)
| true |
360e8555876f42b38bea6bc71196d3bde393d296 | Python | Igorxp5/sklearn_transforms | /my_custom_sklearn_transforms/sklearn_transformers.py | UTF-8 | 1,325 | 3.3125 | 3 | [] | no_license | from sklearn.base import BaseEstimator, TransformerMixin
# All sklearn Transforms must have the `transform` and `fit` methods
class DropColumns(BaseEstimator, TransformerMixin):
    """Transformer that removes a fixed set of columns from a DataFrame."""

    def __init__(self, columns):
        # Column labels to drop on every transform() call.
        self.columns = columns

    def fit(self, X, y=None):
        # Stateless transformer: nothing to learn from the data.
        return self

    def transform(self, X):
        """Return a copy of ``X`` without the configured columns."""
        # Copy first so the caller's DataFrame is left untouched.
        frame = X.copy()
        return frame.drop(labels=self.columns, axis='columns')
class SplitColumns(BaseEstimator, TransformerMixin):
    """Transformer that one-hot encodes integer-valued columns.

    For each configured column, an indicator column named
    ``"<value>_<column>"`` is created for every integer between the column's
    min and max (inclusive); the original columns are then dropped.
    """

    def __init__(self, columns):
        # Names of the integer columns to expand into indicator columns.
        self.columns = columns

    def fit(self, X, y=None):
        # Stateless: the value range is derived from the data at transform time.
        return self

    def transform(self, X):
        """Return a copy of ``X`` with ``self.columns`` one-hot encoded."""
        # Copy so the caller's DataFrame is not mutated.
        data = X.copy()
        for col in self.columns:
            min_ = data[col].min()
            max_ = data[col].max()
            # One indicator column per value in the observed range.
            # NOTE(review): range() requires integer columns — floats break here.
            for i in range(min_, max_ + 1):
                # Bug fix: the original assigned this exact column three times
                # in a row; a single assignment is equivalent.
                data['{}_{}'.format(i, col)] = (data[col] == i).apply(int)
        return data.drop(columns=self.columns)
| true |