index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
def multiple_parameters(name, location):
    """Print a greeting for *name* followed by the *location* they live in."""
    greeting = f"Hello {name}"
    residence = f"I live in {location}"
    print(greeting)
    print(residence)


# Called with positional arguments
multiple_parameters("Selva", "Chennai")
# Called with keyword arguments (order no longer matters)
multiple_parameters(location="Chennai", name="Selva")
|
#!/usr/bin/env python
import pathlib

from blastsight.view.viewer import Viewer

"""
In this demo, we'll show how you can create a basic animation.
An animation is interpreted as changing the state of the viewer one frame at the time.
That means we'll define a function that makes a change in one single frame.
The function must receive a single argument, of the same type of the 'start' and 'end' values.
"""

v = Viewer()

# Load the demo mesh shipped in the repository's test files.
path = f'{pathlib.Path(__file__).parent.parent}/test_files/caseron.off'
mesh = v.load_mesh(path, highlight=True)


def autorotate(angle):
    # One frame of the animation: rotate the scene about the Y axis.
    v.set_rotation_angle([0.0, -angle, 0.0])


"""
The animate() method receives a 'start' value, an 'end' value, a 'method' (the function that changes
one frame in the viewer), and two optional kwargs: 'milliseconds' (how much time should the
animation last) and 'steps' (smoothness of the animation depends on this).
"""
# Start animation: one full revolution over 3 seconds in 100 steps.
v.animate(0, 360, autorotate, milliseconds=3000, steps=100)
# Show viewer (blocks until the window is closed)
v.show()
|
3,102 | 342063b37038c804c2afa78091b1f1c2facbc560 | import base64
import json
from werkzeug.exceptions import Unauthorized
from ab import app
from ab.utils import logger
from ab.plugins.spring import eureka
def _login(username, password):
    """Log in against the auth center and return the access token.

    Only for test use.
    :param username: login name
    :param password: plain-text password forwarded to the auth service
    :return: the access token string
    :raises Exception: re-raises whatever the auth service calls raise
    """
    try:
        logger.info('login as user {username}'.format(username=username))
        eureka_client = eureka.get_instance()
        login_resp = eureka_client.do_service('GOVBRAIN-AUTHCENTER', '/commonuser/login', method='post',
                                              json={'username': username, 'password': password})
        ticket = login_resp['data']['ticket']
        if app.config.TESTING:
            # Use %-style lazy formatting: the original passed extra positional
            # args with no placeholders, which breaks stdlib-style loggers.
            logger.debug('ticket for user %s is: %s', username, ticket)
        resp = eureka_client.do_service('GOVBRAIN-AUTHCENTER',
                                        '/commonuser/ticket_login?ticket={ticket}'.format(ticket=ticket),
                                        method='get')
        if app.config.TESTING:
            logger.debug('access_token for user %s is: %s', username, resp['data']['access_token'])
        return resp['data']['access_token']
    except Exception:
        logger.error('login fail, please check username/password')
        raise
def get_current_user(s: str = None, required=True):
    """Get the current user from a request auth header token.

    :param s: token of the form 'Bearer <jwt>'; when omitted it is fetched
        from the eureka client's auth header
    :param required: when True, a missing token raises Unauthorized
    :return: the decoded 'user_info' payload, e.g.
        {'code': 'SUCCESS', 'nickName': 'gs1', 'appName': '__base__',
         'tenantId': '650', 'tenantCode': 'gs', 'userName': 'gs1', 'userId': '10318'}
    """
    if not s:
        # Only consult eureka when no token was passed explicitly; the
        # original fetched the client unconditionally even for explicit tokens.
        s = eureka.get_instance().get_auth_token()
    if not s:
        if required:
            raise Unauthorized('login required')
        else:
            return None
    # Format not checked: assumes a 'Bearer ' prefix (7 chars) and a JWT whose
    # second dot-separated segment is the base64url payload.  The extra '==='
    # padding is tolerated by the lenient base64 decoder.
    b64encoded = s[7:].split('.')[1]
    decoded = base64.urlsafe_b64decode(b64encoded + '===').decode('utf-8')
    return json.loads(decoded)['user_info']
|
3,103 | e2d8a1e13a4162cd606eec12530451ab230c95b6 | from unittest.mock import MagicMock
import pytest
from charpe.mediums.email_handler import EmailHandler
from charpe.errors import InsuficientInformation
def test_send_requirements(config):
    """Publishing a payload with missing fields raises InsuficientInformation."""
    handler = EmailHandler(config)
    incomplete_payloads = [
        {},
        {
            'recipient': 'charpe@mailinator.com',
        },
        {
            'recipient': 'charpe@mailinator.com',
            'subject': 'The subject',
        },
    ]
    for payload in incomplete_payloads:
        with pytest.raises(InsuficientInformation):
            handler.publish(payload)
def test_send(config, caplog, mocker):
    """A complete payload is rendered (text + HTML) and sent over SMTP."""
    the_mock = MagicMock()
    smoke = MagicMock(return_value=the_mock)
    # Patch smtplib.SMTP so no real connection is made; `the_mock` stands in
    # for the connected SMTP instance.
    mocker.patch('smtplib.SMTP', new=smoke)

    handler = EmailHandler(config)
    handler.publish({
        'recipient': 'charpe@mailinator.com',
        'subject': 'The subject',
        'data': {
            'content': 'El mensaje',
        },
    })

    the_mock.send_message.assert_called_once()
    # First positional arg of send_message is the composed email message.
    msg = the_mock.send_message.call_args[0][0]
    assert msg.get('From') == config['MAIL_DEFAULT_SENDER']
    assert msg.get('To') == 'charpe@mailinator.com'
    # multipart/alternative payload: HTML part and plain-text part.
    html, text = msg.get_payload()
    assert 'El mensaje' in text.get_payload()
    assert '<p>El mensaje</p>' in html.get_payload()
    the_mock.quit.assert_called_once()
|
3,104 | 71eadf5073b5ed13c7d4a58b2aeb52f550a32238 | #!/usr/bin/env python3
import argparse
import json
import os
import random
import timeit
from glob import glob
import numpy as np
def parse_args():
    """Parse command-line options for the SpaceNet-7 train/val split script.

    Returns:
        argparse.Namespace: parsed arguments with `train_dir`, `mask_dir`,
        `out_dir` and `split_num` attributes.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--train_dir',
                        help='directory containing spacenet7 train dataset',
                        default='/data/spacenet7/spacenet7/train/')
    parser.add_argument('--mask_dir',
                        help='directory containing building mask image files',
                        default='/data/spacenet7/building_masks/')
    parser.add_argument('--out_dir',
                        help='output root directory',
                        default='/data/spacenet7/split/')
    parser.add_argument('--split_num',
                        help='number of split',
                        type=int,
                        default=5)
    return parser.parse_args()
def dump_file_paths(aois, output_path, train_dir, mask_dir):
    """Write a JSON index of image/mask paths for the given AOIs.

    Each record holds the masked image, its building mask, and the previous /
    next frames in the AOI's time series (clamped at both ends).

    Args:
        aois: iterable of AOI directory names under *train_dir*
        output_path: path of the JSON file to write
        train_dir: root directory of the SpaceNet-7 train set
        mask_dir: root directory holding the building mask images
    """
    results = []
    for aoi in aois:
        image_paths = glob(
            os.path.join(train_dir, aoi, 'images_masked', '*.tif'))
        # sort so prev/next neighbors follow temporal order of the filenames
        image_paths.sort()
        N = len(image_paths)
        for i in range(N):
            # get path to mask (same basename under mask_dir/aoi)
            image_path = image_paths[i]
            filename = os.path.basename(image_path)
            mask_path = os.path.join(mask_dir, aoi, filename)
            assert os.path.exists(mask_path)
            # previous frame (first frame is its own predecessor)
            image_prev_path = image_paths[0] if i == 0 \
                else image_paths[i - 1]
            # next frame (last frame is its own successor)
            image_next_path = image_paths[N - 1] if i == N - 1 \
                else image_paths[i + 1]
            result = {}
            result['image_masked'] = image_path
            result['building_mask'] = mask_path
            result['image_masked_prev'] = image_prev_path
            result['image_masked_next'] = image_next_path
            results.append(result)
    with open(output_path, 'w') as f:
        json.dump(results,
                  f,
                  ensure_ascii=False,
                  indent=4,
                  sort_keys=False,
                  separators=(',', ': '))
if __name__ == '__main__':
    t0 = timeit.default_timer()

    args = parse_args()
    # intentionally fails if out_dir already exists (avoid clobbering a split)
    os.makedirs(args.out_dir)

    aois = sorted([
        d for d in os.listdir(args.train_dir)
        if os.path.isdir(os.path.join(args.train_dir, d))
    ])
    # fixed seed so the split is reproducible
    random.seed(777)
    random.shuffle(aois)

    # Split AOIs into n folds.  Plain Python lists are used instead of the
    # original np.array of ragged rows: folds can have unequal lengths, and
    # NumPy >= 1.24 raises on implicit ragged-array creation.
    n = args.split_num
    aois_divided = [aois[i::n] for i in range(n)]

    for val_idx in range(n):
        # dump file paths for val split
        val_aois = aois_divided[val_idx]
        dump_file_paths(val_aois,
                        os.path.join(args.out_dir, f'val_{val_idx}.json'),
                        args.train_dir, args.mask_dir)
        # dump file paths for train split: every fold except the val fold,
        # concatenated in fold order (same order as the original np.concatenate)
        train_aois = [aoi for fold_idx, fold in enumerate(aois_divided)
                      if fold_idx != val_idx for aoi in fold]
        dump_file_paths(train_aois,
                        os.path.join(args.out_dir, f'train_{val_idx}.json'),
                        args.train_dir, args.mask_dir)

    elapsed = timeit.default_timer() - t0
    print('Time: {:.3f} min'.format(elapsed / 60.0))
|
3,105 | 6fd4df7370de2343fe7723a2d8f5aacffa333835 | import pickle
import pytest
from reader import EntryError
from reader import FeedError
from reader import SingleUpdateHookError
from reader import TagError
from reader.exceptions import _FancyExceptionBase
def test_fancy_exception_base():
    """Exercise _FancyExceptionBase: message handling, cause chaining,
    pickle round-trip, and subclass customisation via `_str`."""
    exc = _FancyExceptionBase('message')
    assert str(exc) == 'message'

    exc = _FancyExceptionBase(message='message')
    assert str(exc) == 'message'

    cause = Exception('cause')

    exc = _FancyExceptionBase('message')
    exc.__cause__ = cause
    # pickle before str() to prove the chained cause survives a round-trip
    pickled_exc = pickle.dumps(exc)
    assert str(exc) == 'message: builtins.Exception: cause'
    assert str(exc) == str(pickle.loads(pickled_exc))

    class WithURL(_FancyExceptionBase):
        message = 'default message'

        def __init__(self, url, **kwargs):
            super().__init__(**kwargs)
            self.url = url

        @property
        def _str(self):
            # subclass hook: contributes the upper-cased URL to str(exc)
            return self.url.upper()

    exc = WithURL('url')
    assert str(exc) == 'default message: URL'

    exc = WithURL('url', message='another message')
    exc.__cause__ = cause
    assert str(exc) == 'another message: URL: builtins.Exception: cause'
def _all_classes(cls):
yield cls
for subclass in cls.__subclasses__():
yield from _all_classes(subclass)
def all_classes(*args, **kwargs):
return list(_all_classes(*args, **kwargs))
@pytest.mark.parametrize('exc_type', all_classes(FeedError))
def test_feed_error_str(exc_type):
    # every FeedError subclass embeds the repr of the feed URL in str()
    exc = exc_type('url')
    assert repr('url') in str(exc)


@pytest.mark.parametrize('exc_type', all_classes(EntryError))
def test_entry_error_str(exc_type):
    # every EntryError subclass embeds the repr of the (url, id) pair
    exc = exc_type('url', 'id')
    assert repr(('url', 'id')) in str(exc)


@pytest.mark.parametrize('exc_type', all_classes(TagError))
def test_tag_error_str(exc_type):
    exc = exc_type(('object',), 'key')
    assert "'object': 'key'" in str(exc)


@pytest.mark.parametrize(
    'args, expected',
    [
        (
            ('before_feeds_update', 'myhook'),
            "unexpected hook error: before_feeds_update: 'myhook'",
        ),
        (
            ('before_feeds_update', 'myhook', ()),
            "unexpected hook error: before_feeds_update: 'myhook': ()",
        ),
        (
            ('before_feed_update', 'myhook', ('feed',)),
            "unexpected hook error: before_feed_update: 'myhook': 'feed'",
        ),
        (
            ('after_entry_update', 'myhook', ('feed', 'entry')),
            "unexpected hook error: after_entry_update: 'myhook': ('feed', 'entry')",
        ),
    ],
)
def test_single_update_hook_error_str(args, expected):
    # the message is stable both with and without a chained cause
    exc = SingleUpdateHookError(*args)
    assert str(exc) == expected

    exc = SingleUpdateHookError(*args)
    exc.__cause__ = Exception('cause')
    assert str(exc) == expected + ": builtins.Exception: cause"
|
3,106 | 5663ded291405bcf0d410041485487bb17560223 |
"""
《Engineering a Compiler》
即《编译器设计第二版》
https://www.clear.rice.edu/comp412/
"""
# 《parsing-techniques》 讲前端
## http://parsing-techniques.duguying.net/ebook/2/1/3.html
"""
前端看Parsing Techniques,后端看鲸书,都是最好的。
"""
# 《Essentials of Programming Languages》(EOPL)
# sicp
"""
如果对编程语言设计方面感兴趣,想对编程语言和编译器设计有大概的概念,可以看看PLP。
想快速实践可以看《自制脚本语言》,《engineer a compiler》和《编程语言实现模式》。
还是那句话,多做少说,实现一遍,比啥都好使。
"""
"""
Flex&Bison 算是《lex与yacc》的后一版。很详细的介绍了Flex与Bison这两个工具。
书的后面章节从无到有的构造了一个SQL的解释器。
"""
## 有何用
"""
作者:蓝色
链接:https://www.zhihu.com/question/21755487/answer/30574966
你现在觉得枯燥,我想既跟编译原理本身比较抽象的知识有关,也跟讲述者有关。
一个好的讲述者会试着化抽象为形象,以丰富生动的例子来为你解释。而编译原理是否有用?
我认为这门课是一门真正与代码做斗争的课程,对于一个有至于追求技术的人是不容错过的课程,
而且编译原理可以说是一个计算机科学的缩影。你学习它更多的是去追寻程序设计语言的本质,
如它在寄存器分配中将会使用到贪心算法,死代码消除中将会使用到图论算法,
数据流分析中使用到的Fixed-Point Algorithm,词法分析与语法分析中使用到有限状态机与递归下降这样的
重要思想等等,也许你以后不会成为一个编译器开发工作者,但是编译原理的学习中所获,所思的东西足以让你
终生获益。同时,学完这门课程,对于一个有Geek精神的开发者,他会开始运用自己的所学开发享受“上帝”的感觉,
去尝试创造一门语言,我想这种感觉不是每门课程都能带给你的。
我相信,当你真正完成这个过程后,你对你所写的程序、程序语言都会有更深的本质认识,
这样的认识也会让你站的高度完全不同,如果你真的学的好,我想别人看到的是语法,
你看到的是背后的实现,这样的感觉真的很好的,不信你试试。
有了这么多好处,无论如何都有足够的理由支撑你好好学习了。
自从学会了编译原理,我用编译原理的眼光来看带我自己的代码,写出了优秀的单元测试。
文本编辑器的代码高亮功能,代码提示功能就是运用编译原理知识,如果自己写一个是不是很好玩?
"""
## 学习目标
"""
对于普通程序员,我认为编译原理这门课主要掌握几点就够用了:
1. 词法分析方面,掌握正则表达式,了解dfa/nfa。
2. Parsing 方面,能读懂BNF,知道AST,会写简单的递归下降parser,会用antlr之类的parser generator。
3. 优化方面,知道现代编译器的优化能力有多强,知道如何配合编译器写出高效易读的代码,
避免试图outsmart编译器。
4. 会实现简单的虚拟机(stack-based,不带GC),并把四则运算表达式翻译为虚拟机指令。
作者:陈硕
链接:https://www.zhihu.com/question/21755487/answer/30585811
"""
|
3,107 | 29abcfc010453e3a67346ea2df238e07b85502a8 | """
Estructuras que extraen valores de una función y se almacenan en objetos iterables (que se pueden recorrer)
Son mas eficientes que las funciones tradicionales
muy útiles con listas de valores infinitos
Bajos determinados escenarios, será muy útil que un generador devuelva los valores de uno en uno
Un generador usar la palabra reservada yield
"""
# We'll build a function that produces even numbers.
def generarPares(limite):
    """Return the evens 2*1 .. 2*(limite-1) as an eager list."""
    return [numero * 2 for numero in range(1, limite)]


print(generarPares(10))
def generarPares2(limite):
    """Generator version: lazily yield 2*num for num = 1 .. limite-1."""
    for numero in range(1, limite):
        yield numero * 2


devuelvePares = generarPares2(10)
for valor in devuelvePares:
    print(valor)
# Now with the `yield from` instruction.
# It simplifies generator code when nested loops would otherwise be needed.
# The asterisk means the function accepts any number of arguments, packed
# into a tuple.
def devuelveCiudades(*ciudades):
    """Yield each city name in the order it was passed."""
    yield from ciudades


ciudadesDevueltas = devuelveCiudades("Madrid","Barcelona","Bilbao","Valencia")
# next() retrieves the values one at a time
print(next(ciudadesDevueltas))
print(next(ciudadesDevueltas))
# If we want to access the individual letters instead:
def devuelveCiudades2(*ciudades):
    """Yield every character of every city, in order."""
    for ciudad in ciudades:
        yield from ciudad


ciudadesDevueltas2 = devuelveCiudades2("Madrid","Barcelona","Bilbao","Valencia")
print(next(ciudadesDevueltas2))
print(next(ciudadesDevueltas2))
def devuelveCiudades3(*ciudades):
    """Yield every character of every city (explicit-loop spelling;
    same output as devuelveCiudades2)."""
    for ciudad in ciudades:
        for letra in ciudad:
            yield letra


ciudadesDevueltas3 = devuelveCiudades3("Madrid","Barcelona","Bilbao","Valencia")
print(next(ciudadesDevueltas3))
print(next(ciudadesDevueltas3))
# Minimal launcher: construct the project's Editor and show its window.
from editor.editor import Editor

e = Editor()
e.showWindow()
3,109 | 1829bd8e87c470a71fea97dd3a47c30477b6e6f1 | """"Pirata barba Negra ( màs de 2 pasos a las izquierda o a la derecha y se cae):
rampa para subir a su barco (5 pasos de ancho y 15 de largo")leer por teclado un valor entero.
a) si el entero es par 1 paso hacia adelante
b)si el entero es impar , pero el entero - 1 es divisible por 4, el pirata da un paso a la derecha
c)En otro caso , el pirata da un paso a la izquierda
d)utilizar un generador de numeros pseudo aleatorios para generar un nuevo entero y repetir a la partir del paso a
Condiciones de terminacion:
** introducciòn de un nùmero negativo ( es de suponer que el pirata se durmiò sobre la rampa)
**El pirata cae por un costado de la rampa y se ahoga
**El pirata logra abordar a salvo su barco
Haga un programa que exhiba el avance del pirata en cada paso"""
from random import randint
numero_usuario =int(input("Ingrese un nùmero para empezar su tambaleada aventura "))
while numero_usuario<0:
print("Parece que el pirata se ha quedado dormido en la rampa intenta despertarlo ingresando otro nùmero ")
numero_usuario =int(input("Ingrese un nùmero para empezar su tambaleada aventura "))
pasos_izq =3 #por la posicion inicial en la tabla
pasos_der= 3
pasos_adelante=0
#considerar punto en la tabla
while pasos_adelante <15 and pasos_der<5 and pasos_izq<5:
if numero_usuario%2 ==0:
pasos_adelante =pasos_adelante+1
#para el while validar que iguale o supere lo pasos_adelante >=15
print("El pirata avanzó" ,pasos_adelante, "pasos hacia adelante")
elif numero_usuario %2 !=0 and (numero_usuario-1)%4==0:
pasos_der= pasos_der+1
pasos_izq=pasos_izq-1
#para el while validar que iguale o supere lo pasos_der>2
print("El pirata hizo" ,pasos_der, "pasos a la derecha ")
elif numero_usuario %2 !=0 and (numero_usuario-1)%4!=0:
pasos_izq=pasos_izq+1
pasos_der= pasos_der-1
#para el while validar que iguale o supere lo pasos_izq>2
print("El pirata hizo" ,pasos_izq, "pasos a la izquierda ")
aleatorio=randint(-10,1000)
print("nùmero aleatorio",aleatorio)
numero_usuario=aleatorio
if pasos_adelante >=15:
print(" Este viaje tambaleado ha sido un èxito! El Pirata llegó a su Barco!")
elif pasos_der>=5:
print("El pirata se ha caído de la rampa por el lado derecho y se ha ahogado :(")
elif pasos_izq>=5:
print("El pirata se ha caído de la rampa por el lado izquierdo y se ha ahogado :(") |
3,110 | a1c1f18e7b95f36a214a1a16f2434be2825829c3 | import numpy as np
import matplotlib.pyplot as plt
import sympy as sp
import matplotlib.pyplot as plt
from sympy import sympify, Symbol
curr_pos = 0
import numpy as np
def bisection(st,maxnum,maxer,xlf,xuf):
    """Find a root of the sympy expression *st* (in x) by bisection on
    [xlf, xuf], print/log an iteration table to 'test.txt', and show an
    interactive matplotlib view (left/right arrows step through iterations).

    :param st: expression string understood by sympify
    :param maxnum: maximum number of iterations
    :param maxer: absolute-error stopping tolerance
    :param xlf: lower bracket
    :param xuf: upper bracket
    """
    # NOTE(review): 'test.txt' is written with "Hello World" and immediately
    # reopened in 'w' mode below, discarding that content -- looks like
    # leftover scaffolding; confirm before removing.
    file2 = open("test.txt","w")
    file2.write("Hello World")
    file2.close()
    fi = open("test.txt", "w")
    x=sp.Symbol('x')
    y=sp.Symbol('y')
    H = sympify(st)
    print(H)
    table = []
    x1=[]
    y1=[]
    xu=[]
    xl=[]
    xks=[]
    ys=[]
    errors=[]
    plots=[]
    print(float(H.subs(x,0)))
    # seed the y-range used later for the plot axis limits
    ys.append(float(H.subs(x,xuf)))
    ys.append(float(H.subs(x,xlf)))
    i=0.0
    err=1
    maxsize=maxnum
    print(maxnum)
    for i in range(0, maxsize, 1):
        # record the bracket at the start of this iteration
        xl.append(xlf)
        xu.append(xuf)
        print('xl ='+ str(xlf))
        print('xu ='+ str(xuf))
        if(err<=maxer):
            break
        # midpoint of the current bracket
        xk=xlf+xuf
        xk=xk/2
        print('xk ='+ str(xk))
        x2=[xk,xk]
        y2=[-100,100]
        plots.append((x2,y2))
        xks.append(xk)
        if i==0:
            # no previous midpoint: error defined as 100%
            errors.append(1.0)
            print(i)
        else:
            err=abs((xks[i]-xks[i-1]))
            print(str((xks[i]-xks[i-1])))
            errors.append(err)
        f=float(H.subs(x,xk))
        print("fk ="+str(f))
        f2=float(H.subs(x,xlf))
        print("fl ="+str(f2))
        # sign test: root lies in the half-bracket where f changes sign
        f3=f*f2
        ys.append(f)
        print (xl[0],xu[0])
        print(f)
        table.append([xuf,xlf,xk])
        if f3<0:
            xuf=xk
        else:
            xlf=xk
    # sample H over the original bracket (100 points) for the curve plot
    i=min([xl[0],xu[0]])
    add=(abs((xu[0])-(xl[0]))/100)
    print ("min = "+str(i)+" add = "+str(add)+ "max = "+str(max([xl[0],xu[0]])))
    while i <= max([xl[0],xu[0]]):
        x1.append(i)
        print("x="+str(i)+ " y = "+str(float(H.subs(x,i))))
        y1.append(float(H.subs(x,i)))
        i=i+add
    # dump the iteration table both to stdout and to 'test.txt'
    teams_list = ["Xu", "Xl", "Xr"]
    row_format ="{:>15}" * (len(teams_list) + 1)
    fi.write(row_format.format("", *teams_list))
    print (row_format.format("", *teams_list))
    for row in table:
        print (row_format.format("", *row))
        fi.write(row_format.format("", *row))
    fi.close()
    def key_event(e):
        # arrow-key handler: step the displayed iteration forward/backward
        global curr_pos
        if e.key == "right":
            curr_pos = curr_pos + 1
        elif e.key == "left":
            curr_pos = curr_pos - 1
        else:
            return
        curr_pos = curr_pos % len(plots)
        axes = plt.gca()
        ax.cla()
        axes.set_xlim([xl[0],xu[0]])
        axes.set_ylim([min(ys),max(ys)])
        # red: xl marker, green: sampled curve, blue: xu marker, yellow: x-axis
        ax.plot([xl[curr_pos],xl[curr_pos]], [-200,200],'r',plots2[0][0], plots2[0][1],'g',[xu[curr_pos],xu[curr_pos]],[-200,200],'b',[-200,200],[0,0],'y')
        plt.title("Iteration "+str(curr_pos+1)+" xr= "+str(xks[curr_pos])+" errors= "+str(errors[curr_pos]*100)+"%")
        fig.canvas.draw()
    plots2 = [(x1,y1)]
    curr_pos = 0
    print(xl)
    fig = plt.figure()
    axes = plt.gca()
    axes.set_xlim([xl[0],xu[0]])
    axes.set_ylim([min(ys),max(ys)])
    fig.canvas.mpl_connect('key_press_event', key_event)
    ax = fig.add_subplot(111)
    plt.title("Iteration "+str(curr_pos+1)+" xr= "+str(xks[curr_pos])+" errors= "+str(errors[curr_pos]*100)+"%")
    ax.plot([xl[curr_pos],xl[curr_pos]], [-200,200],'r',plots2[0][0], plots2[0][1],'g',[xu[curr_pos],xu[curr_pos]],[-200,200],'b',[-200,200],[0,0],'y')
    plt.show()
bisection('(3/2)*(x)-6-(1/2)*sin(2*x)',50,1*10**-3,4,5)
|
3,111 | 3272296bca0d6343540597baebef8d882a1267c0 | from ..core import promise, rule
# JSON-LD @context: schema.org default vocabulary plus the FAIRsharing
# term used to tag raw HTML payloads.
_context = {
    '@vocab': 'https://schema.org/',
    'fairsharing': 'https://fairsharing.org/',
    'html': 'fairsharing:bsg-s001284',
}


@promise
def resolve_html(url):
    """Lazily fetch *url* and return the response body as text.

    NOTE(review): decode() assumes the server responds with UTF-8 --
    confirm content-type handling upstream.
    """
    from urllib.request import urlopen
    return urlopen(url).read().decode()


@rule({
    '@context': _context,
    '@type': 'WebSite',
    '@id': {},
    'url': {},
})
def html_resolver(ld):
    """Rule: for any WebSite node with an @id and url, attach its raw HTML."""
    return dict(ld, **{
        'html': str(resolve_html(ld['url'])),
    })
|
3,112 | 01f4d097cc5f4173fa5a13268b91753566a9f7e1 | #!/usr/bin/env python
#-------------------------------------------------------------------------------
# Name: Sequitr
# Purpose: Sequitr is a small, lightweight Python library for common image
# processing tasks in optical microscopy, in particular, single-
# molecule imaging, super-resolution or time-lapse imaging of cells.
# Sequitr implements fully convolutional neural networks for image
# segmentation and classification. Modelling of the PSF is also
# supported, and the library is designed to integrate with
# BayesianTracker.
#
# Authors: Alan R. Lowe (arl) a.lowe@ucl.ac.uk
#
# License: See LICENSE.md
#
# Created: 23/03/2018
#-------------------------------------------------------------------------------
__author__ = "Alan R. Lowe"
__email__ = "code@arlowe.co.uk"
import os
import re
from dataio import tifffile as t
from pipeline import ImagePipeline, ImageWeightMap2
import utils
import numpy as np
class ImageLabels(object):
    """ ImageLabels

    A class to deal with image labels.

    Loads a 2D or 3D label image from *filename* and flattens it into a
    single uint8 label plane using *thresh_fn* (default: pixel > 0).
    """
    def __init__(self,
                 filename,
                 thresh_fn=lambda x:x>0,):
        self._raw_data = t.imread(filename)
        print self._raw_data.shape

        # make sure we have a reasonable number of dimensions
        assert(self._raw_data.ndim > 1 and self._raw_data.ndim < 4)

        # preprocess the data here
        if self._raw_data.ndim == 3:
            # one slice per class: pixels passing thresh_fn in slice l
            # are assigned label l+1 (0 stays background)
            l_data = np.zeros(self._raw_data.shape[1:], dtype='uint8')
            for l in range(self._raw_data.shape[0]):
                l_data[thresh_fn(self._raw_data[l,...])] = l+1
            raw_labels = range(self._raw_data.shape[0]+1)
        else:
            # binary case: background/foreground only
            l_data = thresh_fn(self._raw_data).astype('uint8')
            raw_labels = [0, 1]

        # convert the label file into an unpacked version? no, but we may
        # need to change so that the labels are 0,1,2...
        # raw_labels = np.unique(l_data)
        self._outputs = len(raw_labels)
        if self.outputs > 5:
            raise ValueError('More that five output classes!')

        print 'Compressing labels from {0:s} to {1:s}'.format(np.unique(l_data), str(raw_labels))
        self._labels = l_data

    def labels(self):
        """ return the labels """
        return self._labels

    @property
    def outputs(self):
        # number of output classes, including background
        return self._outputs
class ImageWeightMap2(object):
    """ ImageWeightMap2

    Calculate a per-pixel weight map to prioritise learning of certain pixels
    within an image. Here, the weight map is calculated from the distance
    between objects in the foreground for binary images.

    The algorithm proceeds as:
        1. Create a list of xy points that represent the boundaries of the
           foreground objects
        2. Create a Delaunay graph connecting each of the xy points
        3. For each background pixel, calculate the mean length of the edges of
           the simplex in which the pixel lies
        4. Set the pixel of the background to be the mean length value
        5. Calculate an exponential decay of the weight map

    Effectively, the algorithm generates a map of the 'narrowness' of regions
    separating foreground objects. Where objects are separated by only a single
    pixel, the value is high; larger separations decay to zero.

    Params:
        w0: the weighting amplitude
        sigma: the decay of the exponential function

    Notes:
        Expects a single-channel image, shape (H, W, 1).
        NOTE(review): this definition shadows the ImageWeightMap2 imported
        from 'pipeline' at the top of the file -- confirm which is intended.
        TODO(arl): clean up the code!
    """
    def __init__(self, w0=10., sigma=5.):
        # BUGFIX: the original called ImagePipe.__init__(self); no ImagePipe
        # exists in this module and this class derives from object, so every
        # construction raised NameError.
        self.w0 = w0
        self.sigma = sigma

    def __call__(self, image):
        # make a von Neumann structuring element to create the boundaries
        s = np.array([[0,1,0],[1,1,1],[0,1,0]])
        b = np.squeeze(image.astype('bool'))
        b_erode_outline = np.logical_xor(binary_erosion(b, iterations=1, structure=s), b)

        # make the sentinels
        b_dilate = binary_dilation(b, iterations=3, structure=s)
        b_dilate_outline = np.logical_xor(binary_erosion(b_dilate, iterations=1, structure=s), b_dilate)

        # combine object outlines with the dilated sentinel outlines
        b_erode = np.logical_xor(b_erode_outline, b_dilate_outline)

        # assign xy points to the boundary pixels, then a Delaunay triangulation
        x,y = np.where(b_erode)
        points = np.column_stack((x,y))
        tri = Delaunay(points)
        self.tri = tri

        # find the pixels of the background
        free_space_x, free_space_y = np.where(np.logical_not(b))
        # BUGFIX: wrap zip in list() -- under Python 3, np.array(zip(...))
        # silently builds a useless 0-d object array.
        free_space = np.array(list(zip(free_space_x.tolist(), free_space_y.tolist())))

        # calculate the weight map: each background pixel takes the longest
        # edge length of the simplex it falls in (sentinel value outside hull)
        simplices = tri.find_simplex(free_space)
        weight_map = np.zeros(image.shape)
        weight_map[free_space_x, free_space_y,...] = np.array([np.max(self.edist(s,p)) for s,p in zip(simplices, free_space)]).reshape((-1,1))

        # smooth, then apply the exponential decay; foreground keeps weight 1+mask
        mask = b[...,np.newaxis].astype('float32')
        weight_map = gaussian_filter(weight_map, 1.)
        weight_map = self.w0 * (1.-mask) * np.exp(- (weight_map*weight_map) / (2.*self.sigma**2+1e-99) )
        weight_map = weight_map + 1. + mask
        return weight_map

    def edist(self, i, pt):
        """ Edge lengths of simplex *i* (sentinel lengths when i == -1,
        i.e. the point lies outside the triangulation). """
        if i == -1: return [1024.,1024.,1024.]
        s = self.tri.simplices[i]
        # close the triangle by repeating the first vertex, then diff
        p = np.zeros((4,2))
        p[0:3,:] = self.tri.points[s]
        p[3,:] = p[0,:]
        d = np.diff(p, axis=0)
        d = np.sqrt(d[:,0]**2+d[:,1]**2)
        return d

    def _tri_area(self, edist):
        """ Heron's formula..."""
        s = np.sum(edist) / 2.
        return np.sqrt(s*(s-edist[0])*(s-edist[1])*(s-edist[2]))
def create_weightmaps(path,
                      folders,
                      w0=10.,
                      sigma=3.,
                      thresh_fn=lambda x:x>0,
                      name_weights_folder=True):
    """ Generate weightmaps for the images using the binary masks.

    Params:
        path: root directory containing the data folders
        folders: sub-folder names, each expected to hold a 'label/' directory
        w0, sigma: parameters forwarded to ImageWeightMap2
        thresh_fn: threshold used when loading label images
        name_weights_folder: when True, encode w0/sigma in the output folder
    """
    # set up some pipelines
    w_pipe = ImageWeightMap2(w0=w0, sigma=sigma)

    for d in folders:
        r_dir = os.path.join(path, d)
        f_labels = os.listdir(os.path.join(r_dir,'label/'))
        f_labels = [l for l in f_labels if l.endswith('.tif')]

        w_dir_base = 'weights'
        if name_weights_folder:
            # encode the parameters in the output folder name
            w_dir_base += '_w0-{0:2.2f}_sigma-{1:2.2f}'.format(w0, sigma)

        w_dir = os.path.join(r_dir, w_dir_base)
        utils.check_and_makedir(w_dir)

        for f in f_labels:
            print 'Calculating weights for {0:s} in folder \'{1:s}\''.format(f,d)
            # output name: leading underscore-separated tokens of the label name
            w_label = re.match('([a-zA-Z0-9()]+)_([a-zA-Z0-9()]+_)*', f).group(0)
            w_label += 'weights.tif'

            label_filename = os.path.join(r_dir,'label/',f)
            im_label = ImageLabels(label_filename).labels()
            im_weights = np.squeeze(w_pipe(im_label.astype('bool')))
            t.imsave(os.path.join(w_dir, w_label), im_weights.astype('float32'))
if __name__ == '__main__':
    # Command-line entry point: compute weightmaps for the given sub-folders.
    import argparse

    DEFAULT_WORKDIR = "/media/lowe-sn00/TrainingData/"

    p = argparse.ArgumentParser(description='Sequitr: weightmap calculation')
    p.add_argument('-p','--workdir', default=DEFAULT_WORKDIR,
                   help='Path to the image data')
    p.add_argument('-f', '--folders', nargs='+', required=True,
                   help='Specify the sub-folders of image data')
    p.add_argument('--w0', type=float, default=30.,
                   help='Specify the amplitude')
    p.add_argument('--sigma', type=float, default=3.,
                   help='Specify the sigma')
    args = p.parse_args()
    print args

    # path = '/media/lowe-sn00/TrainingData/competition_fCNN/'
    # folders = [f for f in os.listdir(path) if os.path.isdir(os.path.join(path,f))]
    # print folders

    create_weightmaps(args.workdir, args.folders, w0=args.w0, sigma=args.sigma)
|
3,113 | 8397dcdcb9ec2f35dac0c26b8878a23f9149512b |
import os

# Requires SoX's `play` binary: install with `sudo apt install sox`
# (the `sox` pip package alone is not enough).
duration = .2  # seconds
freq = 550  # Hz
# Play a short sine-wave beep through ALSA via SoX.
os.system('play -nq -t alsa synth {} sine {}'.format(duration, freq))
|
3,114 | e045dc348fb2e9de51dbeada1d1826211cf89eae | from terminaltables import AsciiTable
import copy
table_data = [
['WAR', 'WAW'],
['S1 -> S2: R1', 'row1 column2'],
['row2 column1', 'row2 column2'],
['row3 column1', 'row3 column2']
]
table = AsciiTable(table_data)
def getDependenceStr(ins1, ins2, reg):
    """Format a dependence edge as 'S1 -> S2: R1'."""
    return "{0} -> {1}: {2}".format(ins1, ins2, reg)


def getInstructionStr(ins, reg1, reg2, reg3):
    """Join an opcode and three registers into one instruction string."""
    return "{0} {1} {2} {3}".format(ins, reg1, reg2, reg3)


def getInstructionArr(ins):
    """Split an instruction string on single spaces into its tokens."""
    tokens = ins.split(' ')
    return tokens
def validateInput(instruction):
    """Return True for blank input (the终 terminator) or an instruction with
    exactly 4 whitespace-separated tokens ('ins Reg1 Reg2 Reg3').

    The parameter was renamed from ``str``, which shadowed the builtin.
    """
    if instruction.strip() == '':
        return True
    return len(instruction.split()) == 4
def getInstructionFromUser(insNum):
    """Prompt for instruction number *insNum* until it is blank or well-formed.

    :param insNum: 1-based index used in the 'S<n>:' prompt
    :return: the raw instruction string ('' means the user is done)
    """
    ins = input(f"S{insNum}: ")
    while not validateInput(ins):
        print("The value instruction you entered is invalid. Please try again")
        print("Remember the instruction must be in the format:"
              "ins Reg1 Reg2 Reg3 ")
        ins = input(f"S{insNum}: ")
    return ins
def findDependencies(instructions):
    """Collect all dependence classes for *instructions* in a single dict."""
    return {
        'waw': findWAWs(instructions),
        'war': findWARs(instructions),
        'trueDeps': findTrueDependencies(instructions),
    }
def findWAWs(instructions):
    """Find write-after-write dependencies.

    For each instruction, record only the FIRST later instruction that writes
    the same destination register, keyed as 'S<i> -> S<j>'.
    """
    labeled = [(f'S{idx}', text) for idx, text in enumerate(instructions, start=1)]
    waws = {}
    for pos, (label, text) in enumerate(labeled):
        dest = text.split()[1]
        for later_label, later_text in labeled[pos + 1:]:
            if later_text.split()[1] == dest:
                waws[f'{label} -> {later_label}'] = dest
                break  # only the first occurrence of a WAW
    return waws
def findWARs(instructions):
    """Find write-after-read (anti) dependencies.

    A WAR exists when a later instruction writes a register that an earlier
    instruction reads (source operands are tokens 2 and 3).

    BUGFIX: the original signature took ``ins`` but silently iterated the
    module-level ``instructions`` global; the parameter is now actually used,
    so this works when called as a library function.
    """
    wars = {}
    insDict = {}
    i = 1
    for ins in instructions:
        insDict[f'S{i}'] = ins
        i += 1
    workingIns = copy.deepcopy(insDict)
    for (key, value) in insDict.items():
        insParts = value.split()
        del workingIns[key]
        for (key2, otherIns) in workingIns.items():
            otherDest = otherIns.split()[1]
            if insParts[2] == otherDest:
                wars[f'{key} -> {key2}'] = insParts[2]
            if insParts[3] == otherDest:
                wars[f'{key} -> {key2}'] = insParts[3]
    return wars
def findTrueDependencies(ins):
    """Find read-after-write (true) dependencies.

    For each source register of each instruction (scanned last-to-first),
    record the NEAREST earlier instruction that writes it, keyed
    'S<writer> -> S<reader>'.
    """
    trueDeps = {}
    for reader in reversed(range(len(ins))):
        parts = ins[reader].split()
        for reg in parts[2:]:
            for writer in reversed(range(reader)):
                if ins[writer].split()[1] == reg:
                    trueDeps[f'S{writer+1} -> S{reader+1}'] = reg
                    break
    return trueDeps
def resolveDependencies(instructions, dependencies):
    """Rename destination registers to T0, T1, ... to remove the false
    (WAW and WAR) dependencies found by findDependencies().

    :param instructions: list of 'ins Reg1 Reg2 Reg3' strings
    :param dependencies: dict with 'waw', 'war' and 'trueDeps' mappings
    :return: dict mapping 'S<n>' labels to the rewritten instruction strings
    """
    waws = dependencies['waw']
    wars = dependencies['war']
    trueDeps = dependencies['trueDeps']
    insDict = {}
    i = 1
    for ins in instructions:
        insDict[f'S{i}'] = ins
        i += 1
    tNum = 0
    # Resolve WAWs
    for (dependence, reg) in waws.items():
        depParts = dependence.split()
        insParts = insDict[depParts[0]].split()
        try:
            # If the renamed destination is also read later (true dependence),
            # rewrite both the writer and the reader so the value still flows.
            trueDepsExist, trueDep = checkTrueDep(dependence, trueDeps, reg)
            if trueDepsExist:
                trueDepParts = trueDep.split()
                ins1 = insDict[trueDepParts[0]].split()
                ins2 = insDict[trueDepParts[2]].split()
                ins1ChangeIndex = ins1.index(reg)
                ins2ChangeIndex = [i for i, x in enumerate(ins2) if x == reg]
                ins1[ins1ChangeIndex] = f'T{tNum}'
                for index in ins2ChangeIndex:
                    # index 1 is the reader's own destination -- leave it alone
                    if index != 1:
                        ins2[index] = f'T{tNum}'
                insDict[trueDepParts[0]] = ' '.join(ins1)
                insDict[trueDepParts[2]] = ' '.join(ins2)
            else:
                changeIndex = insParts.index(reg)
                insParts[changeIndex] = f'T{tNum}'
                insDict[depParts[0]] = ' '.join(insParts)
            tNum += 1
        except ValueError:
            # register already renamed by an earlier pass -- nothing to do
            pass
    # Resolve WARs: rename the later writer's clashing register
    for (dependence, reg) in wars.items():
        depParts = dependence.split()
        insParts = insDict[depParts[0]].split()
        try:
            changeIndex = insParts.index(reg)
            insParts[changeIndex] = f'T{tNum}'
            insDict[depParts[0]] = ' '.join(insParts)
            tNum += 1
        except ValueError:
            pass
    return insDict
def checkTrueDep(falseDep, trueDeps, reg):
    """Return (True, dep) for the first true dependence that shares the false
    dependence's source instruction and register, else (None, None).
    (Used when resolving WAWs.)
    """
    source = falseDep.split()[0]
    for (dep, depReg) in trueDeps.items():
        if dep.split()[0] == source and depReg == reg:
            return (True, dep)
    return (None, None)
def parseDepDictToTableData(dependenciesDict):
    """Lay the three dependence dicts out as terminaltables row data:
    header row ['WAW', 'WAR', 'True'], then one 'key -> reg' cell per entry,
    padded with '' where a column runs out of entries.
    """
    columns = [dependenciesDict['waw'],
               dependenciesDict['war'],
               dependenciesDict['trueDeps']]
    keyLists = [list(column.keys()) for column in columns]
    tableData = [['WAW', 'WAR', 'True']]
    depth = max(len(column) for column in columns)
    for rowIdx in range(depth):
        row = []
        for column, keys in zip(columns, keyLists):
            if rowIdx < len(keys):
                row.append(f'{keys[rowIdx]} -> {column[keys[rowIdx]]}')
            else:
                row.append('')
        tableData.append(row)
    return tableData
if __name__ == '__main__':
    # Interactive driver: read up to 5 instructions, show the dependence
    # table, then show the register-renamed program.
    numIns = 0
    maxNumIns = 5
    stop = False
    instructions = []
    print("Enter up to 5 MIPs instructions below. When you're done simply"
          "press enter without typing in any input")
    print("Instructions must be in the format: ins Reg1 Reg2 Reg3")
    print("i.e. add R1 R2 R3")
    while numIns < maxNumIns and not stop:
        ins = getInstructionFromUser(numIns+1)
        if ins != '':
            instructions.append(ins)
            numIns += 1
        else:
            stop = True
    # Generate the table data needed to show the instructions given
    table_data = [
        ['Given Instructions'],
    ]
    i = 1
    for ins in instructions:
        table_data.append([f'S{i} - ' + ins])
        i += 1
    table = AsciiTable(table_data)
    print("Here are the instructions provided:")
    print('\n' + table.table + '\n')
    input("Press Enter find any existing false dependencies\n")
    dependenciesDict = findDependencies(instructions)
    table = AsciiTable(parseDepDictToTableData(dependenciesDict))
    print('\n' + table.table + '\n')
    input("\nPress Enter to begin renaming registers")
    resolvedInstructions = resolveDependencies(instructions, dependenciesDict)
    resolvedInstructionsArr = []
    for (key, value) in resolvedInstructions.items():
        resolvedInstructionsArr.append(f'{key} - {value}')
    resolvedTableData = [
        ['Resolved Instructions']
    ]
    for ins in resolvedInstructionsArr:
        resolvedTableData.append([ins])
    table = AsciiTable(resolvedTableData)
    print(table.table + '\n')
    input('Press Enter to continue')
    print('DONE!\n')
|
# -*- coding: utf-8 -*-
# Solve A*x^2 + B*x + C = 0 (quadratic-formula exercise).
# Reads the three coefficients from one whitespace-separated stdin line.
num = input().split()
A = float(num[0])
B = float(num[1])
C = float(num[2])
# A == 0 (not a quadratic) or a negative discriminant: no real roots.
if A == 0:
    print("Impossivel calcular")
else:
    delta = B**2 - (4*A*C)
    if delta < 0.0:
        print("Impossivel calcular")
    else:
        raiz = delta ** 0.5
        r1 = (-B+raiz)/(2*A)
        r2 = (-B-raiz)/(2*A)
        print("R1 = {:.5f}".format(r1))
        print("R2 = {:.5f}".format(r2))
3,116 | 1811c0c5aca9d209638e2221cad2c30e80ee5199 | #Takes - Contact Name(Must be saved in phone's contact list), Message, Time as input
# and sends message to the given contact at given time
# Accuracy Level ~ Seconds. (Also depends on your network speed)
from selenium import webdriver
PATH = 'C:\Program Files (x86)\chromedriver.exe'
driver = webdriver.Chrome(PATH)
from selenium.webdriver.common.keys import Keys
import time
from threading import Timer
from datetime import datetime
driver.get("https://web.whatsapp.com/")
print("Scan the QR code to Log in...")
time.sleep(10)
nameofcontact = input('Give name of contact: ')
msg = input("Type the message you want to send: ")
print("Enter Time of sending Message (Hrs, Min & Sec...)")
hrs = int(input("Hrs: "))
mins = int(input("Min: "))
secs = int(input("Sec: "))
x=datetime.today()
y=x.replace(day=x.day+1, hour=hrs, minute=mins, second=secs, microsecond=0)
delta_t=y-x
secs=delta_t.seconds+1
def send_msg():
global nameofcontact, msg
css_path = 'span[title="' + nameofcontact + '"]'
nameofcontact = driver.find_element_by_css_selector(css_path)
nameofcontact.click()
chatbox = driver.find_element_by_xpath('//*[@id="main"]/footer/div[1]/div/div/div[2]/div[1]/div/div[2]')
chatbox.send_keys(msg)
chatbox.send_keys(Keys.RETURN)
t = Timer(secs, send_msg)
t.start()
|
3,117 | e50feccd583d7e33877d5fcc377a1d79dc247d3a |
import pickle
class myPickle:
    """Thin convenience wrapper around pickle for file serialization."""

    def make(self, obj, fileName):
        """Serialize obj to fileName with pickle.

        Fix: use a context manager so the file handle is closed even on
        error (the original left it open until garbage collection).
        """
        print("myPickle make file", fileName)
        with open(fileName, 'wb') as f:
            pickle.dump(obj, f)
        print(" DONE")

    def load(self, fileName):
        """Deserialize and return the object stored in fileName."""
        print("myPickle load file", fileName)
        with open(fileName, 'rb') as f:
            tr = pickle.load(f)
        print(" DONE")
        return tr
|
3,118 | 61085eecc8fd0b70bc11e5a85c3958ba3b905eaf | # Jython/Walk_comprehension.py
import os
# Collect every .java file under the current directory whose contents
# mention "PythonInterpreter", then print the matches.
# Fix: the original comprehension called open() without ever closing the
# handles; use a context manager so each file is closed promptly.
restFiles = []
for dirpath, _dirnames, filenames in os.walk("."):
    for fname in filenames:
        if fname.endswith(".java"):
            full = os.path.join(dirpath, fname)
            with open(full) as fh:
                if "PythonInterpreter" in fh.read():
                    restFiles.append(full)
for r in restFiles:
    print(r)
|
3,119 | 0aa0fcbb0ec1272bea93574a9287de9f526539c8 | import torch
def DiceLoss(pred, target, smooth=2):
    """Soft Dice loss: 1 - Dice coefficient of pred vs. target.

    Parameters
    ----------
    pred : torch.Tensor
        Predicted values (typically probabilities in [0, 1]).
    target : torch.Tensor
        Ground-truth values, same shape as ``pred``.
    smooth : float, optional
        Additive smoothing that avoids division by zero when both tensors
        sum to zero (default 2).

    Returns
    -------
    torch.Tensor
        Scalar loss in [0, 1]; 0 when pred == target element-wise.
    """
    # (Dead commented-out debug prints removed.)
    intersection = torch.sum(pred * target)
    dice = (2 * intersection + smooth) / (torch.sum(pred) + torch.sum(target) + smooth)
    return 1 - dice
3,120 | c233ce4e14e9a59a9fb0f29589ced947efeb73a9 | """Tests for flatten_me.flatten_me."""
import pytest

# (input, expected) pairs: each input is a shallowly nested list and the
# expected value is its fully flattened form, element order preserved.
ASSERTIONS = [
    [[1, [2, 3], 4], [1, 2, 3, 4]],
    [[['a', 'b'], 'c', ['d']], ['a', 'b', 'c', 'd']],
    [['!', '?'], ['!', '?']],
    [[[True, False], ['!'], ['?'], [71, '@']], [True, False, '!', '?', 71, '@']]
]

@pytest.mark.parametrize("n, result", ASSERTIONS)
def test_flatten_me(n, result):
    """Test flatten_me() for proper output in test cases."""
    # Imported inside the test so collection works even if the module
    # under test is missing.
    from flatten_me import flatten_me
    assert flatten_me(n) == result
|
3,121 | ebe79cf1b54870055ce8502430f5fae833f3d96d | import matplotlib.pyplot as plt
def visualize_data(positive_images, negative_images):
    """Display positive (label=1) and negative (label=0) images side by side.

    Both arguments are arrays of shape (N, H, W); one subplot per image,
    titled with its label. Blocks until the plot window is closed.
    """
    # INPUTS
    # positive_images - Images where the label = 1 (True)
    # negative_images - Images where the label = 0 (False)
    figure = plt.figure()
    count = 0
    # NOTE(review): the loop is bounded by positive_images.shape[0] only, so
    # it assumes both arrays have the same length — confirm with callers.
    for i in range(positive_images.shape[0]):
        count += 1
        figure.add_subplot(2, positive_images.shape[0], count)
        plt.imshow(positive_images[i, :, :])
        plt.axis('off')
        plt.title("1")
        # NOTE(review): this second grid uses (1, len(negatives)) while the
        # first uses (2, len(positives)) — the mixed geometries overlap
        # axes; likely intended index is count + positive_images.shape[0]
        # on the same 2-row grid. Confirm before changing.
        figure.add_subplot(1, negative_images.shape[0], count)
        plt.imshow(negative_images[i, :, :])
        plt.axis('off')
        plt.title("0")
    plt.show()
3,122 | a0086a9d27a091776378cd8bde31c59899fc07ac | """Tools for working with Scores."""
from typing import List, Optional
from citrine._serialization import properties
from citrine._serialization.polymorphic_serializable import PolymorphicSerializable
from citrine._serialization.serializable import Serializable
from citrine._session import Session
from citrine.informatics.constraints import Constraint
from citrine.informatics.objectives import Objective
__all__ = ['Score', 'LIScore', 'EIScore', 'EVScore']
class Score(PolymorphicSerializable['Score']):
    """[ALPHA] A Citrine Score is used to rank materials according to objectives and constraints.

    Abstract type that returns the proper type given a serialized dict.
    """

    @classmethod
    def get_type(cls, data):
        """Return the subtype."""
        # Dispatch on the serialized 'type' tag; raises KeyError for
        # unrecognized tags.
        return {
            'MLI': LIScore,
            'MEI': EIScore,
            'MEV': EVScore
        }[data['type']]
class LIScore(Serializable['LIScore'], Score):
    """[ALPHA] Evaluates the likelihood of scoring better than some baselines for given objectives.

    Parameters
    ----------
    name: str
        the name of the score
    description: str
        the description of the score
    objectives: list[Objective]
        objectives (e.g., maximize, minimize, tune, etc.)
    baselines: list[float]
        best-so-far values for the various objectives (there must be one for each objective)
    constraints: list[Constraint]
        constraints limiting the allowed values that material instances can have
    """

    # Serialization descriptors: attribute name -> JSON key.
    name = properties.String('name')
    description = properties.String('description')
    baselines = properties.List(properties.Float, 'baselines')
    objectives = properties.List(properties.Object(Objective), 'objectives')
    constraints = properties.List(properties.Object(Constraint), 'constraints')
    # 'MLI' is the platform type tag for this score (see Score.get_type).
    typ = properties.String('type', default='MLI')

    def __init__(self,
                 name: str,
                 description: str,
                 objectives: List[Objective],
                 baselines: List[float],
                 constraints: Optional[List[Constraint]] = None,
                 session: Optional[Session] = None):
        self.name: str = name
        self.description: str = description
        self.objectives: List[Objective] = objectives
        self.baselines: List[float] = baselines
        # Constraints are optional; default to no constraints.
        self.constraints: List[Constraint] = constraints or []
        self.session: Optional[Session] = session

    def __str__(self):
        return '<LIScore {!r}>'.format(self.name)
class EIScore(Serializable['EIScore'], Score):
    """
    [ALPHA] Evaluates the expected magnitude of improvement beyond baselines for given objectives.

    Parameters
    ----------
    name: str
        the name of the score
    description: str
        the description of the score
    objectives: list[Objective]
        objectives (e.g., maximize, minimize, tune, etc.)
    baselines: list[float]
        best-so-far values for the various objectives (there must be one for each objective)
    constraints: list[Constraint]
        constraints limiting the allowed values that material instances can have
    """

    # Serialization descriptors: attribute name -> JSON key.
    name = properties.String('name')
    description = properties.String('description')
    baselines = properties.List(properties.Float, 'baselines')
    objectives = properties.List(properties.Object(Objective), 'objectives')
    constraints = properties.List(properties.Object(Constraint), 'constraints')
    # 'MEI' is the platform type tag for this score (see Score.get_type).
    typ = properties.String('type', default='MEI')

    def __init__(self,
                 name: str,
                 description: str,
                 objectives: List[Objective],
                 baselines: List[float],
                 constraints: Optional[List[Constraint]] = None,
                 session: Optional[Session] = None):
        self.name: str = name
        self.description: str = description
        self.objectives: List[Objective] = objectives
        self.baselines: List[float] = baselines
        # Constraints are optional; default to no constraints.
        self.constraints: List[Constraint] = constraints or []
        self.session: Optional[Session] = session

    def __str__(self):
        return '<EIScore {!r}>'.format(self.name)
class EVScore(Serializable['EVScore'], Score):
    """
    [ALPHA] Evaluates the expected value for given objectives.

    Parameters
    ----------
    name: str
        the name of the score
    description: str
        the description of the score
    objectives: list[Objective]
        objectives (e.g., maximize, minimize, tune, etc.)
    constraints: list[Constraint]
        constraints limiting the allowed values that material instances can have
    """

    # Serialization descriptors: attribute name -> JSON key.
    # Note: unlike LIScore/EIScore, EVScore has no baselines field.
    name = properties.String('name')
    description = properties.String('description')
    objectives = properties.List(properties.Object(Objective), 'objectives')
    constraints = properties.List(properties.Object(Constraint), 'constraints')
    # 'MEV' is the platform type tag for this score (see Score.get_type).
    typ = properties.String('type', default='MEV')

    def __init__(self,
                 name: str,
                 description: str,
                 objectives: List[Objective],
                 constraints: Optional[List[Constraint]] = None,
                 session: Optional[Session] = None):
        self.name: str = name
        self.description: str = description
        self.objectives: List[Objective] = objectives
        # Constraints are optional; default to no constraints.
        self.constraints: List[Constraint] = constraints or []
        self.session: Optional[Session] = session

    def __str__(self):
        return '<EVScore {!r}>'.format(self.name)
|
3,123 | 6782761bcbf53ea5076b6dfb7de66d0e68a9f45d | import json
import requests
import config
class RequestAnnotation:
    """Client for the local annotation service (POST JSON over HTTP)."""

    def schedule(self,
                 command: str,
                 **kwargs):
        """POST kwargs as JSON to http://localhost:<port>/<command>.

        Returns the decoded JSON response body as a Python object.
        Raises requests exceptions on connection failure and ValueError
        if the response body is not valid JSON.
        """
        response = requests.post(url=f"http://localhost:{config.annotation_port}/{command}",
                                 json=kwargs)
        # not 'text' for annotating, but 'text' of response is meant here:
        return json.loads(response.text)
3,124 | 65aa27addaec6014fe5fd66df2c0d3632231a314 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from connect import Connect
class Resource:
    """Wraps one database row containing media file locations."""

    def __init__(self, row: tuple):
        # Row layout: (video_path, pic_path, ...); extra columns ignored.
        video, picture = row[0], row[1]
        self.video_path = video
        self.pic_path = picture
|
3,125 | fda73b5dac038f077da460d6ebfb432b756909d9 | #
# linter.py
# Linter for SublimeLinter version 4.
#
# Written by Brian Schott (Hackerpilot)
# Copyright © 2014-2019 Economic Modeling Specialists, Intl.
#
# License: MIT
#
"""This module exports the D-Scanner plugin class."""
from SublimeLinter.lint import Linter, STREAM_STDOUT
class Dscanner(Linter):
    """Provides an interface to dscanner."""

    # Invoke dscanner in style-check mode on the current file.
    cmd = ("dscanner", "-S", "${file}")
    # Parses lines like: path(12:3)[warn]: message
    regex = r'^.+?\((?P<line>\d+):(?P<col>\d+)\)\[((?P<warning>warn)|(?P<error>error))\]: (?P<message>.+)$'
    multiline = False
    # "-" means lint stdin content instead of writing a temp file.
    tempfile_suffix = "-"
    word_re = None
    defaults = {
        "selector": "source.d"
    }
    name = "D-Scanner"
|
3,126 | f8c85f34fb55ee1c3b3020bcec87b60ae80e4ed2 | import sqlite3
class DatabaseHands(object):
    """SQLite-backed store of 3-card hands keyed by an autoincrement id."""

    def __init__(self, database):
        self.conn = sqlite3.connect(database)
        self.cur = self.conn.cursor()
        self.cur.execute("CREATE TABLE IF NOT EXISTS hands"
                         + "(id INTEGER PRIMARY KEY, first INTEGER,"
                         + "second INTEGER, third INTEGER)")
        self.conn.commit()

    def count(self):
        """Return the number of stored hands."""
        self.cur.execute("SELECT count(*) FROM hands")
        rows = self.cur.fetchone()
        return rows[0]

    def insert(self, hands):
        """Bulk-insert an iterable of (first, second, third) tuples."""
        self.cur.executemany("INSERT INTO hands VALUES (NULL,?,?,?)", hands)
        self.conn.commit()

    def search(self, id):
        """Return the (first, second, third) cards for row ``id``.

        Note: the parameter shadows the builtin ``id``; name kept for
        backward compatibility with keyword callers.
        """
        self.cur.execute("SELECT * FROM hands WHERE id=?", (id,))
        row = self.cur.fetchone()
        return (row[1], row[2], row[3])

    def __del__(self):
        # Best-effort close on GC; close explicitly if determinism matters.
        self.conn.close()
class DatabaseProbability(object):
    """SQLite-backed store of per-card (win, draw, lose) probabilities."""

    def __init__(self, database):
        self.conn = sqlite3.connect(database)
        self.cur = self.conn.cursor()
        self.cur.execute("CREATE TABLE IF NOT EXISTS probabilities"
                         + "(id INTEGER PRIMARY KEY, card INTEGER,"
                         + "win REAL, draw REAL, lose REAL)")
        self.conn.commit()

    def insert(self, probabilities):
        """Bulk-insert an iterable of (card, win, draw, lose) tuples."""
        self.cur.executemany("INSERT INTO probabilities VALUES (NULL,?,?,?,?)",
                             probabilities)
        self.conn.commit()

    def search(self, card):
        """Return the (win, draw, lose) probabilities for ``card``."""
        self.cur.execute("SELECT * FROM probabilities WHERE card=?", (card,))
        row = self.cur.fetchone()
        return (row[2], row[3], row[4])

    def __del__(self):
        # Best-effort close on GC; close explicitly if determinism matters.
        self.conn.close()
class DatabaseGames(object):
    """SQLite-backed store of game outcomes for 3-card combinations."""

    def __init__(self, database):
        self.conn = sqlite3.connect(database)
        self.cur = self.conn.cursor()
        self.cur.execute("CREATE TABLE IF NOT EXISTS games"
                         + "(id INTEGER PRIMARY KEY,"
                         + " card1 INTEGER, card2 INTEGER, card3 INTEGER,"
                         + " win REAL, draw REAL, lose REAL)")
        self.conn.commit()

    def count(self):
        """Return the number of stored games."""
        self.cur.execute("SELECT count(*) FROM games")
        rows = self.cur.fetchone()
        return rows[0]

    def insert(self, card1, card2, card3, win, draw, lose):
        """Insert one game record (cards plus outcome probabilities)."""
        self.cur.execute("INSERT INTO games VALUES"
                         + " (NULL,?,?,?,?,?,?)",
                         (card1, card2, card3, win, draw, lose))
        self.conn.commit()

    def view(self):
        """Return all game rows."""
        self.cur.execute("SELECT * FROM games")
        rows = self.cur.fetchall()
        return rows

    def search(self, card1="", card2="", card3=""):
        """Return the first full row matching the exact card triple, or None."""
        self.cur.execute("SELECT * FROM games WHERE"
                         + " card1=? AND card2=? AND card3=?",
                         (card1, card2, card3))
        row = self.cur.fetchone()
        return row

    def __del__(self):
        # Best-effort close on GC; close explicitly if determinism matters.
        self.conn.close()
|
3,127 | a7d8efe3231b3e3b9bfc5ef64a936816e8b67d6c | """
Copyright © 2017 Bilal Elmoussaoui <bil.elmoussaoui@gmail.com>
This file is part of Authenticator.
Authenticator is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Authenticator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Authenticator. If not, see <http://www.gnu.org/licenses/>.
"""
from gettext import gettext as _
from gi.repository import Gio, Gtk, GObject, Handy
from Authenticator.models import Settings, Keyring
__all__ = ['SettingsWindow']
@Gtk.Template(resource_path='/com/github/bilelmoussaoui/Authenticator/settings.ui')
class SettingsWindow(Handy.PreferencesWindow):
    """Preferences window: theme switches plus the password-lock settings."""

    __gtype_name__ = 'SettingsWindow'

    # Widgets resolved from the GtkTemplate resource.
    dark_theme_switch: Gtk.Switch = Gtk.Template.Child()
    night_light_switch: Gtk.Switch = Gtk.Template.Child()
    lock_row: Handy.ExpanderRow = Gtk.Template.Child()
    lock_timeout_row: Handy.ActionRow = Gtk.Template.Child()
    lock_timeout_spinbtn: Gtk.SpinButton = Gtk.Template.Child()

    def __init__(self):
        super(SettingsWindow, self).__init__()
        self.__init_widgets()
        self.__bind_signals()

    def __init_widgets(self):
        # Embed the password editor inside the lock expander row.
        self._password_widget = PasswordWidget()
        self._password_widget.parent = self
        self.lock_row.add(self._password_widget)

    def _on_lock_row_expanded(self, *_):
        # Persist the expanded state as the password-enabled state, but only
        # once a password actually exists.
        keyring = Keyring.get_default()
        if keyring.has_password():
            keyring.set_password_state(self.lock_row.props.expanded)
            self.lock_row_toggle_btn.props.active = False

    def __on_lock_switch_toggled(self, toggle_btn: Gtk.ToggleButton, *_):
        # Collapse the row again if the lock is disabled before any password
        # was ever saved.
        toggled = toggle_btn.props.active
        expansion_enabled = self.lock_row.props.enable_expansion
        if not Keyring.get_default().has_password() and not toggled and expansion_enabled:
            self.lock_row.props.enable_expansion = False

    def __bind_signals(self):
        # Wire the switches and rows to Settings/Keyring state.
        settings = Settings.get_default()
        # Night light overrides the manual dark-theme choice.
        self.dark_theme_switch.set_active(settings.dark_theme and not settings.night_light)
        self.night_light_switch.set_active(settings.night_light)
        settings.bind("night-light", self.night_light_switch,
                      "active", Gio.SettingsBindFlags.DEFAULT)
        keyring = Keyring.get_default()
        # Hackish solution to get the expander from HdyExpanderRow
        self.lock_row.props.enable_expansion = keyring.has_password()
        self.lock_row_toggle_btn = self.lock_row.get_children()[0].get_children()[3]
        self.lock_row.props.enable_expansion = Keyring.get_default().is_password_enabled()
        self.lock_row.connect("notify::enable-expansion", self.__on_enable_password)
        self.lock_row_toggle_btn.connect("notify::active", self.__on_lock_switch_toggled)
        self.lock_row.connect("notify::expanded", self._on_lock_row_expanded)
        keyring.bind_property("can-be-locked", self.lock_timeout_row, "sensitive",
                              GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE)
        self.lock_timeout_spinbtn.props.value = settings.auto_lock_timeout
        settings.bind("auto-lock-timeout", self.lock_timeout_spinbtn, "value",
                      Gio.SettingsBindFlags.DEFAULT)
        self._password_widget.connect("password-updated", self.__on_password_updated)
        self._password_widget.connect("password-deleted", self.__on_password_deleted)

        def on_night_light_switch(switch: Gtk.Switch, _):
            # Set the application to use Light theme
            if switch.get_active() and self.dark_theme_switch.get_active():
                self.dark_theme_switch.set_active(False)
        self.night_light_switch.connect("notify::active", on_night_light_switch)

        def on_dark_theme_switch(switch: Gtk.Switch, _):
            # Set the application to use Light theme
            if settings.night_light and switch.get_active():
                switch.set_state(False)
            elif not settings.night_light:
                settings.dark_theme = switch.get_active()
        self.dark_theme_switch.connect("notify::active", on_dark_theme_switch)

    def __on_enable_password(self, *_):
        # Mirror the expansion toggle into the keyring's enabled flag and
        # show/hide the "current password" entry accordingly.
        keyring = Keyring.get_default()
        keyring.set_password_state(self.lock_row.props.enable_expansion)
        if not keyring.has_password():
            self._password_widget.set_current_password_visibility(False)
        else:
            self._password_widget.set_current_password_visibility(True)

    def __on_password_updated(self, *_):
        self.lock_row_toggle_btn.props.active = False

    def __on_password_deleted(self, *__):
        # self.notification.send(_("The authentication password was deleted."))
        self.lock_row.set_enable_expansion(False)
        self.lock_row_toggle_btn.props.active = False
@Gtk.Template(resource_path='/com/github/bilelmoussaoui/Authenticator/password_widget.ui')
class PasswordWidget(Gtk.Box):
    """Editor for the app lock password: set, change, or delete it."""

    __gtype_name__ = 'PasswordWidget'

    __gsignals__ = {
        # Emitted after a password was saved/changed.
        'password-updated': (
            GObject.SignalFlags.RUN_LAST,
            None,
            ()
        ),
        # Emitted after the password was removed.
        'password-deleted': (
            GObject.SignalFlags.RUN_LAST,
            None,
            ()
        ),
    }

    # Widgets resolved from the GtkTemplate resource.
    delete_password_btn: Gtk.Button = Gtk.Template.Child()
    change_password_btn: Gtk.Button = Gtk.Template.Child()
    password_entry: Gtk.Entry = Gtk.Template.Child()
    confirm_password_entry: Gtk.Entry = Gtk.Template.Child()
    current_password_entry: Gtk.Entry = Gtk.Template.Child()
    current_password_box: Gtk.Box = Gtk.Template.Child()

    def __init__(self):
        super(PasswordWidget, self).__init__()
        # Set by SettingsWindow; used as transient parent for dialogs.
        self.parent = None

    def reset_widgets(self):
        """Reset widgets state."""
        self.password_entry.set_text("")
        self.confirm_password_entry.set_text("")
        self.current_password_entry.set_text("")
        self.password_entry.get_style_context().remove_class("error")
        self.confirm_password_entry.get_style_context().remove_class("error")
        self.current_password_entry.get_style_context().remove_class("error")
        self.change_password_btn.set_sensitive(False)

    def set_current_password_visibility(self, visibilty: bool):
        # Show "change/delete" UI when a password exists, plain "save"
        # UI when one doesn't.
        if not visibilty:
            self.current_password_box.hide()
            self.delete_password_btn.hide()
            self.change_password_btn.set_label(_("Save Password"))
        else:
            self.current_password_box.show()
            self.delete_password_btn.show()
            self.change_password_btn.set_label(_("Change Password"))

    @Gtk.Template.Callback('password_entry_changed')
    def __validate_password(self, *_):
        # Validate on every keystroke: non-empty password, matching
        # confirmation, and (when one exists) a correct current password.
        # The save button is enabled only when all checks pass.
        keyring = Keyring.get_default()
        password = self.password_entry.get_text()
        repeat_password = self.confirm_password_entry.get_text()
        if not password:
            self.password_entry.get_style_context().add_class("error")
            valid_password = False
        else:
            self.password_entry.get_style_context().remove_class("error")
            valid_password = True
        if not repeat_password or password != repeat_password:
            self.confirm_password_entry.get_style_context().add_class("error")
            valid_repeat_password = False
        else:
            self.confirm_password_entry.get_style_context().remove_class("error")
            valid_repeat_password = True
        to_validate = [valid_password, valid_repeat_password]
        if keyring.has_password():
            old_password = self.current_password_entry.get_text()
            if old_password != keyring.get_password():
                self.current_password_entry.get_style_context().add_class("error")
                valid_old_password = False
            else:
                self.current_password_entry.get_style_context().remove_class("error")
                valid_old_password = True
            to_validate.append(valid_old_password)
        self.change_password_btn.set_sensitive(all(to_validate))

    @Gtk.Template.Callback('update_password_clicked')
    def __save_password(self, *__):
        # Button sensitivity doubles as the validity check (set above).
        if self.change_password_btn.get_sensitive():
            keyring = Keyring.get_default()
            password = self.password_entry.get_text()
            keyring.set_password(password)
            self.reset_widgets()
            self.set_current_password_visibility(True)
            self.emit("password-updated")

    @Gtk.Template.Callback('reset_password_clicked')
    def __reset_password(self, *args):
        # Confirm with the user before deleting the stored password.
        dialog = Gtk.MessageDialog(buttons=Gtk.ButtonsType.YES_NO)
        dialog.props.message_type = Gtk.MessageType.QUESTION
        dialog.props.text = _("Do you want to remove the authentication password?")
        dialog.props.secondary_text = _("Authentication password enforces the privacy of your accounts.")
        dialog.set_transient_for(self.parent)
        response = dialog.run()
        if response == Gtk.ResponseType.YES:
            Keyring.get_default().remove_password()
            self.reset_widgets()
            self.set_current_password_visibility(False)
            self.emit("password-deleted")
        dialog.destroy()
|
3,128 | 20637e41df8a33e3837905a4729ae0b4a9f94dbb | """to get the all the module and its location"""
import sys

# Print the mapping of every module imported so far to its module object.
print(sys.modules)
|
3,129 | b4267612e7939b635542099e1ba31e661720607a | #from getData import getRatings
import numpy as np
num_factors = 10       # latent factors per user/movie
num_iter = 75          # SGD epochs
regularization = 0.05  # L2 penalty (lambda)
lr = 0.005             # learning rate
folds = 5              # cross-validation folds
#to make sure you are able to repeat results, set the random seed to something:
np.random.seed(17)
def split_matrix(ratings, num_users, num_movies):
    """Convert rating triples into a dense (num_users x num_movies) matrix.

    Parameters
    ----------
    ratings : ndarray of shape (N, 3)
        Rows of (user_id, movie_id, rating) with 1-based ids.
    num_users, num_movies : int
        Matrix dimensions.

    Returns
    -------
    ndarray
        Dense matrix with ratings at (user-1, movie-1); unrated cells are 0.
    """
    X = np.zeros((num_users, num_movies))
    for r in np.arange(len(ratings)):
        X[ratings[r, 0] - 1, ratings[r, 1] - 1] = ratings[r, 2]
    return X
def mf_gd(ratings, num_users, num_movies):
    """Factorize the rating matrix by SGD and return the predicted matrix U @ M.

    Uses the module-level hyperparameters num_factors, num_iter, lr and
    regularization. Prints the RMSE over all cells after each epoch.
    """
    X_data = split_matrix(ratings, num_users, num_movies)
    # BUG FIX: the original called np.zeros(num_users, num_movies); the shape
    # must be a single tuple, so that raised TypeError. The preallocated
    # X_hat/err buffers were also unused until the end, so they are dropped.
    # Randomly initialize weights in U and M
    U = np.random.rand(num_users, num_factors)
    M = np.random.rand(num_factors, num_movies)
    for nr in np.arange(num_iter):
        for i in np.arange(len(ratings)):
            userID = ratings[i, 0] - 1
            movieID = ratings[i, 1] - 1
            actual = ratings[i, 2]
            prediction = np.sum(U[userID, :] * M[:, movieID])
            error = actual - prediction  # e(i,j)
            # The original's U_prime/M_prime were plain aliases of U/M (no
            # copy), so the updates were effectively in place; do that
            # explicitly. Note the M update sees the already-updated U entry,
            # exactly as before.
            for k in np.arange(num_factors):
                U[userID, k] += lr * (2 * error * M[k, movieID] - regularization * U[userID, k])
                M[k, movieID] += lr * (2 * error * U[userID, k] - regularization * M[k, movieID])
        # Intermediate RMSE over every cell (unrated cells count as 0).
        X_hat = np.dot(U, M)
        err = X_data - X_hat
        e = err[np.where(np.isnan(err) == False)]
        ir = np.sqrt(np.mean(e ** 2))
        print("Error for iteration #", nr, ":", ir)
    # Return the result
    return np.dot(U, M)
def mf():
    """Run 5-fold cross-validated matrix factorization on MovieLens ratings.

    Prints train/test RMSE for each fold. Uses the module-level hyper-
    parameters (folds, etc.) and mf_gd/split_matrix defined above.
    """
    #Read dataset
    #ratings = getRatings()
    # NOTE(review): hard-coded absolute Windows path — parameterize for
    # portability.
    ratings = np.genfromtxt("D:/Leiden/Semester 1_Sept/Assignment1/AiDM/ml-1m/ratings.dat", usecols=(0,1,2), delimiter='::',dtype='int')
    #number of users and movies in data.
    num_users= np.max(ratings[:,0])
    num_movies= np.max(ratings[:,1])
    print(num_users, num_movies)
    print(len(ratings))
    #5-fold cross validation
    for f in np.arange(folds):
        print ("Fold #", f)
        #shuffle data for train and test
        # NOTE(review): reshuffling every fold means the index-mod split below
        # does not give disjoint folds across iterations — confirm intent.
        np.random.shuffle(ratings)
        train_set = np.array([ratings[x] for x in np.arange(len(ratings)) if (x%folds) !=f])
        test_set = np.array([ratings[x] for x in np.arange(len(ratings)) if (x%folds) == f])
        #Matrix fact
        X_hat = mf_gd(train_set, num_users, num_movies)
        X_train = split_matrix(train_set, num_users, num_movies)
        X_test = split_matrix(test_set, num_users, num_movies)
        err_train = X_train- X_hat
        err_test = X_test - X_hat
        #RMSE over all cells (unrated cells compare against 0)
        e_mf = err_train[np.where(np.isnan(err_train)==False)]
        error_train_mf = np.sqrt(np.mean(e_mf**2))
        e2_mf = err_test[np.where(np.isnan(err_test)==False)]
        error_test_mf = np.sqrt(np.mean(e2_mf**2))
        print ('Matrix Factorization Error -> training set: ', error_train_mf)
        print ('Matrix Factorization Error -> test set: ', error_test_mf)
#Still getting a high error rate, not comparable to the website mentioned in the assignment doc.
# I need to check the logic again.
#https://medium.com/coinmonks/recommendation-engine-python-401c080c583e; followed this blogpost |
3,130 | 11163dc99ee65ab44494c08d81e110e9c42390ae | # -*- coding: utf-8 -*-
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.template.context import Context
from django.utils.translation import ugettext_lazy as _
from account.models import Employee
from amortization import settings
from models import MenuItem
from task.forms import RequestForm
from task.models import Request, Task
__author__ = 'cm'
from django.template.loader import get_template
def base_context(request):
    """Build the common template context (app info, menu, current employee).

    Returns locals(), i.e. every local name below becomes a context entry.
    NOTE(review): `return locals()` makes the contract implicit — listing the
    context keys explicitly would be safer against refactors.
    """
    author = settings.ADMINS[0][0]
    version = settings.VERSION
    csrf_token = get_token(request)
    media_url = settings.MEDIA_URL
    app_name = _('Amortization & Expertise')
    path = request.path
    # NOTE(review): this local flag shadows the imported auth `logout`
    # function within this function; here it is only a template flag.
    logout = False
    employee = None
    usr = request.user
    # Non-staff users only see the public menu items.
    menu = MenuItem.objects.filter(for_staff=False).order_by('order')
    if usr.is_authenticated():
        logout = True
        employee = Employee.objects.filter(user=usr)
        if employee:
            employee = employee[0]
            if employee.user.is_staff:
                menu = MenuItem.objects.order_by('order')
    return locals()
def main(request):
    """Landing view: show the request form; on POST, create the Employee
    (and Django user) if needed, log them in and save the Request.
    """
    c = base_context(request)
    template = get_template("index.html")
    c['title'] = _('Request')
    form = RequestForm()
    # if user is authenticated, prefill the form from the Employee record
    user = request.user
    c['user'] = user
    if user.is_authenticated():
        e = Employee.objects.filter(user=user)
        if e:
            empl = e[0]
            form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl.tab_number, 'post': empl.post, 'cabinet': empl.cabinet})
        c['logout'] = True
    c['form'] = form
    if request.method == 'POST':
        postdata = request.POST.copy()
        form = RequestForm(request.POST)
        if form.is_valid():
            # An Employee is looked up by tab_number (personnel number).
            empl = Employee.objects.filter(tab_number = postdata.get('tab_number', 0))
            if not empl:
                # django user ---
                if user.is_authenticated():
                    # logout the current session before creating the new user
                    logout(request)
                # NOTE(review): the tab number doubles as the password here —
                # confirm this is acceptable security-wise.
                username = postdata.get('fio', 'error!')
                password = postdata.get('tab_number', 0)
                User.objects.create_user(username, 'empty@surgpu.ru', password)
                # login
                new_user = authenticate(username=username, password=password)
                if new_user:
                    login(request, new_user)
                # amortization user
                empl = Employee()
                empl.tab_number = postdata.get('tab_number', 0)
                empl.fio = postdata.get('fio', "error!")
                empl.user = new_user
                empl.post = postdata.get('post', '')
                empl.cabinet = postdata.get('cabinet', '0-000')
                empl.save()
                uid = empl
            else:
                uid = empl[0]
                user = authenticate(username=uid.user.username, password=uid.tab_number)
                if user:
                    login(request, user)
            # Persist the amortization request itself.
            req = Request()
            req.user = uid
            req.number = postdata.get('number', '000000000000')
            req.device = postdata.get('device', 'NoName')
            req.serial = postdata.get('serial', '')
            req.year = postdata.get('year', '----')
            req.save()
            c['saved'] = True
        else:
            c['form'] = form
    return HttpResponse(template.render(Context(c)))
3,131 | 8186b7bddbdcdd730a3f79da1bd075c25c0c3998 | import uuid
from website.util import api_v2_url
from django.db import models
from osf.models import base
from website.security import random_string
from framework.auth import cas
from website import settings
from future.moves.urllib.parse import urljoin
def generate_client_secret():
    """Return a random 40-character OAuth2 client secret."""
    return random_string(length=40)
class ApiOAuth2Scope(base.ObjectIDMixin, base.BaseModel):
    """
    Store information about recognized OAuth2 scopes. Only scopes registered under this database model can
    be requested by third parties.
    """
    name = models.CharField(max_length=50, unique=True, db_index=True, null=False, blank=False)
    description = models.CharField(max_length=255, null=False, blank=False)
    is_active = models.BooleanField(default=True, db_index=True)  # TODO: Add mechanism to deactivate a scope?
    is_public = models.BooleanField(default=True, db_index=True)

    def absolute_url(self):
        # NOTE(review): unlike the sibling models, this is a plain method,
        # not a @property — callers must invoke it; confirm before unifying.
        return urljoin(settings.API_DOMAIN, '/v2/scopes/{}/'.format(self.name))
def generate_client_id():
    """Return a random 32-character hex OAuth2 client identifier."""
    return uuid.uuid4().hex
class ApiOAuth2Application(base.ObjectIDMixin, base.BaseModel):
    """Registration and key for user-created OAuth API applications

    This collection is also used by CAS to create the master list of available applications.
    Any changes made to field names in this model must be echoed in the CAS implementation.
    """
    # Client ID and secret. Use separate ID field so ID format doesn't
    # have to be restricted to database internals.
    # Not *guaranteed* unique, but very unlikely
    client_id = models.CharField(default=generate_client_id,
                                 unique=True,
                                 max_length=50,
                                 db_index=True)
    client_secret = models.CharField(default=generate_client_secret, max_length=40)
    is_active = models.BooleanField(default=True,  # Set to False if application is deactivated
                                    db_index=True)
    owner = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.SET_NULL)
    # User-specified application descriptors
    name = models.CharField(db_index=True, blank=False, null=False, max_length=200)
    description = models.CharField(blank=True, null=True, max_length=1000)
    home_url = models.URLField(blank=False, null=False)
    callback_url = models.URLField(blank=False, null=False)

    def deactivate(self, save=False):
        """
        Deactivate an ApiOAuth2Application

        Does not delete the database record, but revokes all tokens and sets a
        flag that hides this instance from API
        """
        client = cas.get_client()
        # Will raise a CasHttpError if deletion fails, which will also stop setting of active=False.
        resp = client.revoke_application_tokens(self.client_id, self.client_secret)  # noqa
        self.is_active = False
        if save:
            self.save()
        return True

    def reset_secret(self, save=False):
        """
        Reset the secret of an ApiOAuth2Application

        Revokes all tokens
        """
        client = cas.get_client()
        client.revoke_application_tokens(self.client_id, self.client_secret)
        self.client_secret = generate_client_secret()
        if save:
            self.save()
        return True

    @property
    def url(self):
        # Relative path to this app's settings page in the web UI.
        return '/settings/applications/{}/'.format(self.client_id)

    @property
    def absolute_url(self):
        return urljoin(settings.DOMAIN, self.url)

    # Properties used by Django and DRF "Links: self" field
    @property
    def absolute_api_v2_url(self):
        path = '/applications/{}/'.format(self.client_id)
        return api_v2_url(path)

    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url
def generate_token_id():
    """Return a random 70-character personal access token value."""
    return random_string(length=70)
class ApiOAuth2PersonalToken(base.ObjectIDMixin, base.BaseModel):
    """Information for user-created personal access tokens

    This collection is also used by CAS to create the master list of available tokens.
    Any changes made to field names in this model must be echoed in the CAS implementation.
    """
    # Name of the field being `token_id` is a CAS requirement.
    # This is the actual value of the token that's used to authenticate
    token_id = models.CharField(max_length=70, default=generate_token_id,
                                unique=True)
    owner = models.ForeignKey('OSFUser', db_index=True, blank=True, null=True, on_delete=models.SET_NULL)
    name = models.CharField(max_length=100, blank=False, null=False, db_index=True)
    scopes = models.ManyToManyField('ApiOAuth2Scope', related_name='tokens', blank=False)
    is_active = models.BooleanField(default=True, db_index=True)

    def deactivate(self, save=False):
        """
        Deactivate an ApiOAuth2PersonalToken

        Does not delete the database record, but hides this instance from API
        """
        client = cas.get_client()
        # Will raise a CasHttpError if deletion fails for any reason other than the token
        # not yet being created. This will also stop setting of active=False.
        try:
            resp = client.revoke_tokens({'token': self.token_id})  # noqa
        except cas.CasHTTPError as e:
            if e.code == 400:
                pass  # Token hasn't been used yet, so not created in cas
            else:
                raise e
        self.is_active = False
        if save:
            self.save()
        return True

    @property
    def url(self):
        # Relative path to this token's settings page in the web UI.
        return '/settings/tokens/{}/'.format(self._id)

    @property
    def absolute_url(self):
        return urljoin(settings.DOMAIN, self.url)

    # Properties used by Django and DRF "Links: self" field
    @property
    def absolute_api_v2_url(self):
        path = '/tokens/{}/'.format(self._id)
        return api_v2_url(path)

    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url
|
3,132 | 1d4df09256324cce50fad096cdeff289af229728 | from PyQt5.QtCore import QObject, pyqtSlot
from Controllers.BookController import BookController
from Model.BookModel import BookModel
from Controllers.DatabaseController import DatabaseController
#Issuance Controller class contains the issuance properties and performs database operations for the issuance
class IssuanceController(QObject):
    """Holds issuance form state and performs the issuance database operations."""

    def __init__(self, model):
        super().__init__()
        self._database_controller = DatabaseController()
        self._model = model

    @pyqtSlot(str)
    def change_issuance_id(self, value):
        self._model.issuance_id = value

    @pyqtSlot(str)
    def change_student_id(self, value):
        self._model.student_id = value

    @pyqtSlot(str)
    def change_staff_id(self, value):
        self._model.staff_id = value

    @pyqtSlot(str)
    def change_book_id(self, value):
        self._model.book_id = value

    @pyqtSlot(str)
    def change_release_date(self, value):
        self._model.release_date = value

    @pyqtSlot(str)
    def change_due_date(self, value):
        self._model.due_date = value

    @pyqtSlot(bool)
    def add(self, value):
        # bool(value) replaces the redundant `True if value else False`.
        self._model.is_add_click = bool(value)

    def GetIssuedBooks(self):
        """Return all not-yet-returned issuances joined with book/student/staff info."""
        try:
            mycursor = self._database_controller.CursorTuple()
            # BUG FIX: the original called mycursor.execute(mycursor.execute(sql)),
            # running the query and then re-executing with the first call's
            # return value as the statement. Execute exactly once.
            mycursor.execute("SELECT issuance_id, CONCAT(student.first_name, ' ', student.middle_name, ' ', student.last_name) AS full_name, student.student_id, book.title, issuance.book_id, issuance.release_date, issuance.due_date, CONCAT(staff.first_name, ' ', staff.middle_name, ' ', staff.last_name) AS staff_name FROM issuance LEFT JOIN book ON book.book_id = issuance.book_id LEFT JOIN student ON student.student_id = issuance.student_id LEFT JOIN staff ON staff.staff_id = issuance.staff_id WHERE issuance.is_returned = 0")
            books = mycursor.fetchall()
            return books
        except mysql.connector.Error as err:
            # NOTE(review): mysql.connector is never imported in this module,
            # so reaching this handler raises NameError — confirm the
            # intended import at file level.
            print("Something went wrong: {}".format(err))
        finally:
            self._database_controller.Close()

    def BorrowBook(self):
        """Insert a new issuance row from the current model state."""
        try:
            mycursor = self._database_controller.Cursor()
            sql = "INSERT INTO issuance (student_id, staff_id, book_id, release_date, due_date, is_returned) VALUES (%s, %s, %s, %s, %s, %s)"
            val = (
                self._model.student_id,
                self._model.staff_id,
                self._model.book_id,
                self._model.release_date,
                self._model.due_date,
                '0',
            )
            mycursor.execute(sql, val)
            self._database_controller.Commit()
        except mysql.connector.Error as err:
            # NOTE(review): see GetIssuedBooks — mysql.connector is not imported.
            print("Something went wrong: {}".format(err))
        finally:
            self._database_controller.Close()
        return mycursor.rowcount, "record inserted."

    def UpdateIssuance(self):
        """Mark the current issuance as returned."""
        try:
            mycursor = self._database_controller.Cursor()
            sql = "UPDATE issuance SET is_returned = 1 WHERE issuance_id = %s"
            val = (
                self._model.issuance_id,
            )
            mycursor.execute(sql, val)
            self._database_controller.Commit()
        except mysql.connector.Error as err:
            # NOTE(review): see GetIssuedBooks — mysql.connector is not imported.
            print("Something went wrong: {}".format(err))
        finally:
            self._database_controller.Close()
        return mycursor.rowcount, "record inserted."
|
3,133 | be7fb94c3c423b67aa917a34328acda5926cf78a | from django.urls import path
from .views import PostListView, PostDetailView
# Blog index and detail routes (class-based views).
urlpatterns = [
    path('blog/', PostListView.as_view()),
    path('blog/<pk>/', PostDetailView.as_view()),
]
3,134 | 07a172c28057dc803efdbdc10a9e2e11df4e527b | from room import Room
from player import Player
from item import Item
# Declare all the items that can appear in rooms (display name + effect text).
items = {
    'scimitar': Item('Scimitar', '+7 Attack'),
    'mace': Item('Mace', '+13 Attack'),
    'tower_shield': Item('Tower Shield', '+8 Block'),
    'heraldic_shield': Item('Heraldic Shield', '+12 Block'),
    'chainmail': Item('Chainmail', '+15 Defense'),
    'gold_plate': Item('Gold Plate', '+25 Defense'),
    'health_potion': Item('Health Potion', 'Heal 10 HP'),
    'mana_potion': Item('Mana Potion', 'Restore 20 Mana'),
    'gold': Item('Gold', 'Currency for other items from vendors'),
    'demon_heart': Item('Demon Heart', 'Bestows owner with great power')
}
# Declare all the rooms: name, description and the items found in each.
room = {
    'outside': Room("Outside Cave Entrance",
                    """North of you, the cave mount beckons""",
                    [items['scimitar'], items['health_potion']]),
    'foyer': Room("Foyer", """Dim light filters in from the south. Dusty
passages run north and east.""",
                  [items['tower_shield'], items['chainmail']]),
    'overlook': Room("Grand Overlook", """A steep cliff appears before you, falling
into the darkness. Ahead to the north, a light flickers in
the distance, but there is no way across the chasm.""",
                    [items['mace'], items['mana_potion']]),
    'narrow': Room("Narrow Passage", """The narrow passage bends here from west
to north. The smell of gold permeates the air.""",
                   [items['gold_plate'], items['heraldic_shield']]),
    'treasure': Room("Treasure Chamber", """You've found the long-lost treasure
chamber! Sadly, it has already been completely emptied by
earlier adventurers. The only exit is to the south.""",
                     [items['gold'], items['demon_heart']]),
}
# Link rooms together (n_to/s_to/e_to/w_to point at the neighbouring Room).
room['outside'].n_to = room['foyer']
room['foyer'].s_to = room['outside']
room['foyer'].n_to = room['overlook']
room['foyer'].e_to = room['narrow']
room['overlook'].s_to = room['foyer']
room['narrow'].w_to = room['foyer']
room['narrow'].n_to = room['treasure']
room['treasure'].s_to = room['narrow']
# Main
#
# Simple REPL: print the current room, read a one-letter command,
# move in that direction or quit with "q".
player = Player(room['outside'])

suppressRoomPrint = False

while True:
    if suppressRoomPrint:
        # Skip reprinting the room after a failed move.
        suppressRoomPrint = False
    else:
        print (player.location)
        print (f'\n{player.location.name}\n {player.location.description}\n {player.location.getItems()}\n')

    inp = input("What is your command: ")

    if inp == "q":
        break
    # Idiomatic membership test instead of a chain of ORs.
    if inp in ("n", "s", "w", "e"):
        newRoom = player.location.getRoomInDirection(inp)
        if newRoom is None:  # identity comparison with None (PEP 8)
            print('\x1b[1;37;41m + \nImpossible, try again.\n\x1b[0m')
            suppressRoomPrint = True
        else:
            player.change_location(newRoom)
3,135 | b53b0e6ff14750bbba3c2e5e2ea2fc5bb1abccec | import math as m
import functions_by_alexandra as fba
import funs
from functions_by_alexandra import User, a
from pkg import bps, geom
# Demonstrate the different ways modules and their members can be imported.
print(type(funs))       # a plain imported module object
print(type(funs.add ))  # function accessed through the module
#
# print(add(2,3))
print("Result: ", funs.add(10, 20))
print("Result: ", fba.add(10,20))  # same call via an aliased import
print(type(fba ))
print(a )    # name imported directly with `from ... import a`
print(m.pi)  # stdlib math via alias
p = User()   # class imported from a module
print(p)
#print(functions_by_alexandra.add(10,20))
print(bps.happy(10,20))  # function from a package submodule
3,136 | ff1bb2634ffec6181a42c80a4b2a19c2c27a8f9f | import socket
from Server.MachineClient.Identification import Identification
from Server.SQL import DataBase
import threading
import time
from Server.Connection.AcceptClients import Accept
from Server.Connection.ConnectionCheck import ConnectionCheck
from Server.Clients_Data import Clients
class MachineClient:
    """TCP server that accepts machine clients, tracks them in a Clients
    container, and periodically reconciles connection state against the
    SQLite database."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): hard-coded absolute DB path — confirm it exists on the
        # machine this runs on.
        self.db = DataBase(r"C:\Users\user\Documents\RemoteControl\Server\pythonsqlite.db")
        self.data = ""
        self.clients = Clients()
        self.remote_client = []

    def connection_check(self):
        """Poll loop: every 2 seconds refresh client state from the database.

        Returns immediately if check_database_update() is truthy —
        presumably meaning there is nothing to reconcile; TODO confirm intent.
        """
        connection_check = ConnectionCheck(self.s, self.clients)
        if connection_check.check_database_update():
            return
        while True:
            time.sleep(2)
            connection_check.connect_db()
            self.clients = connection_check.start()

    def connection(self):
        """(Currently disabled in start()) poll connected clients and print
        any data they send; a 0.5s timeout keeps the loop responsive."""
        while True:
            for c in self.clients.data:
                if c[1] is not None:
                    print("connection")
                    self.s.settimeout(0.5)
                    try:
                        print(c[0].recv(10000).decode())
                    except socket.timeout:
                        pass
                    # c[1].send()

    def accept(self):
        """Accept loop: delegate each incoming connection to an Accept helper
        and take back its updated clients container."""
        while True:
            a = Accept(self.s, self.clients, self.host)
            a.accept()
            self.clients = a.clients

    def start(self):
        """Bind, listen, and launch the accept and connection-check threads."""
        self.s.bind((self.host, self.port))
        self.s.listen(5)
        accept = threading.Thread(target=self.accept)
        accept.start()
        conn_check = threading.Thread(target=self.connection_check)
        conn_check.start()
        # connection = threading.Thread(target=self.connection)
        # connection.start()
if __name__ == "__main__":
    # Run the server on localhost:8080 when executed as a script.
    server = MachineClient("localhost", 8080)
    server.start()
|
3,137 | b32784bf398a58ba4b6e86fedcdc3ac9de0e8d51 | from django import forms
from django.core.exceptions import ValidationError
from django.db import connection
from customer.helper_funcs import dictfetchall
class OrderForm(forms.Form):
    """Validates a customer order: the item must exist and have enough stock.

    Raises ValidationError("Id does not exist") for an unknown item id and
    ValidationError("Not enough units in stock.") when quantity exceeds stock.
    """
    item_id = forms.IntegerField(required=True)
    quantity = forms.IntegerField(required=True)

    def clean(self):
        cleaned_data = super().clean()
        item_id = cleaned_data.get("item_id")
        quantity = cleaned_data.get("quantity")
        if item_id and quantity:
            # IMPROVED: fetch only the row we need (parameterized, so also
            # injection-safe) instead of scanning the whole item table, and
            # close the cursor via the context manager.
            with connection.cursor() as cursor:
                cursor.execute(
                    "SELECT item_id, quantity FROM item WHERE item_id = %s;",
                    [item_id],
                )
                items = dictfetchall(cursor)
            if not items:
                raise ValidationError("Id does not exist")
            if items[0]["quantity"] - quantity < 0:
                raise ValidationError("Not enough units in stock.")
        # Django convention: clean() returns the cleaned data.
        return cleaned_data
3,138 | 16db443642746af4ae45862627baaa9eca54a165 | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 3 18:45:08 2020
@author: Neeraj
"""
import cv2
import numpy as np
# Cartoonify an image: smooth colours with repeated bilateral filtering,
# extract bold edges with adaptive thresholding, then AND the two together.
num_down=2       # number of pyrDown halving steps (and pyrUp doubling steps)
num_bilateral=50 # number of bilateral-filter passes
# NOTE(review): cv2.imread returns BGR, but COLOR_RGB2GRAY is used below —
# the channel order is swapped; confirm whether this is intentional.
img_rgb=cv2.imread("stunning-latest-pics-of-Kajal-Agarwal.jpg") #image path
img_rgb=cv2.resize(img_rgb,(800,800))
img_color=img_rgb
# Downsample, smooth heavily at the reduced size, then upsample back.
for _ in range(num_down):
    img_color=cv2.pyrDown(img_color)
for _ in range(num_bilateral):
    img_color=cv2.bilateralFilter(img_color,d=9,
                                  sigmaColor=9,
                                  sigmaSpace=7)
for _ in range(num_down):
    img_color=cv2.pyrUp(img_color)
# Build a binary edge mask from the median-blurred grayscale image.
img_gray=cv2.cvtColor(img_rgb,cv2.COLOR_RGB2GRAY)
img_blur=cv2.medianBlur(img_gray,7)
img_edge=cv2.adaptiveThreshold(img_blur,255,
                               cv2.ADAPTIVE_THRESH_MEAN_C,
                               cv2.THRESH_BINARY,
                               blockSize=9,
                               C=2)
img_edge=cv2.cvtColor(img_edge,cv2.COLOR_GRAY2RGB)
img_sketch=cv2.bitwise_and(img_color,img_edge)
#displaying the actual and sketched images side by side
stack=np.hstack([img_rgb,img_sketch])
# cv2.imshow("stacked",stack)
# cv2.waitKey(0)
cv2.imwrite("cartoon1.jpg",stack) #to save the image
3,139 | 87c413051ed38b52fbcc0b0cf84ecd75cd1e3f0c | import sys, getopt
import sys, locale
import httplib
import json
#sys.argv = [sys.argv[0], '--id=275', '--ofile=275.json']
def getRouteId(routeName, out_filename):
    """Query the New Taipei City open-data API for the bus route named
    routeName and write the raw JSON response to out_filename. (Python 2)"""
    conn = httplib.HTTPConnection("data.ntpc.gov.tw")
    # OData filter: nameZh eq <routeName> (%20 encodes the spaces)
    qryString = "/od/data/api/67BB3C2B-E7D1-43A7-B872-61B2F082E11B?$format=json&$filter=nameZh%20eq%20" + routeName
    conn.request("GET",qryString.encode('utf8'))
    response = conn.getresponse()
    print response.status, response.reason
    data = response.read()
    print len(data)
    ofile = open(out_filename, "w")
    ofile.write(data)
    ofile.close()
def main(argv):
    """Parse -i/--id and -o/--ofile options, then fetch and save the route data.

    Usage: cliGetRouteID.py -i <route id> -o <outputfile>  (Python 2)
    """
    route_id = ''
    outputfile = ''
    try:
        opts, args = getopt.getopt(argv,"hi:o:",["id=","ofile="])
    except getopt.GetoptError:
        print 'cliGetRouteID.py -i <route id> -o <outputfile>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'cliGetRouteID.py -i <route id> -o <outputfile>'
            sys.exit()
        elif opt in ("-i", "--id"):
            route_id = arg
        elif opt in ("-o", "--ofile"):
            outputfile = arg
    getRouteId(route_id, outputfile)
    print 'Route ID is', route_id
    print 'Output file is', outputfile

if __name__ == "__main__":
    main(sys.argv[1:])
3,140 | 46b1991bba83968466390d306a4415b362b6a868 | #!/usr/bin/env python
import sys
import json
import time
import random
import pathlib
import argparse
import subprocess
# Handle to the most recently spawned swaybg process (killed on exit/restart).
proc = None
def get_wallpaper(FOLDER):
    """Return a randomly chosen regular file from FOLDER (non-recursive)."""
    entries = pathlib.Path(FOLDER).iterdir()
    candidates = list(filter(pathlib.Path.is_file, entries))
    return random.choice(candidates)
def get_outputs():
    """Return the names of all outputs known to sway (via `swaymsg -t get_outputs`)."""
    raw = subprocess.run(['swaymsg', '-t', 'get_outputs'],
                         capture_output=True).stdout.decode()
    return [output['name'] for output in json.loads(raw)]
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Set a random wallpaper per output')
    parser.add_argument('--folder', metavar='D', type=str, nargs=1,
                        help='folder to search for images')
    parser.add_argument('--delay', metavar='S', type=int,
                        help='How many seconds to wait before changing the wallpaper')
    # NOTE(review): both options are effectively required — args.folder[0] and
    # time.sleep(args.delay) raise if they are omitted; confirm intended defaults.
    args = parser.parse_args()

    while True:
        try:
            outputs = get_outputs()
            # Build one swaybg command assigning a random image to each output.
            cmd = 'swaybg'
            for output in outputs:
                image = get_wallpaper(args.folder[0])
                cmd = f'{cmd} --image={image} --output={output}'
            print(cmd)
            proc = subprocess.Popen(cmd, shell=True)
            time.sleep(args.delay)
            proc.kill()
        except Exception as e:
            print(e, file=sys.stderr)
        finally:
            # Defensive second kill; killing an already-dead process is harmless.
            if proc:
                proc.kill()
|
3,141 | 11ad3e1ab4ffd491e27998a7235b7e18857632ed | # Generated by Django 3.0.4 on 2020-03-29 09:27
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the 'date' and 'product' fields from the feedback model."""

    dependencies = [
        ('portfolio_app', '0008_feedback_product'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='feedback',
            name='date',
        ),
        migrations.RemoveField(
            model_name='feedback',
            name='product',
        ),
    ]
|
3,142 | cb13011def8fc7ed6a2e98a794343857e3e34562 | import pickle
from sklearn import linear_model
from sklearn.model_selection import train_test_split
import random
from sklearn.manifold import TSNE
import matplotlib
def loadXY():
    """Load pickled (features, label) pairs and return them as X, Y tuples."""
    zippedXY = pickle.load(open("../Vectorizer/zippedXY_wff_2k.p","rb"))
    #random.shuffle(zippedXY)
    X,Y = zip(*zippedXY)
    return X,Y
def outliers(X,Y):
    """Fit an IsolationForest on X and print its per-sample inlier/outlier
    predictions (+1 inlier, -1 outlier). (Python 2)"""
    from sklearn.ensemble import IsolationForest
    out = IsolationForest()
    out.fit(X,Y)
    outliers = list(out.predict(X))
    print "Total outliers : ",outliers
if __name__ == "__main__":
    X,Y = loadXY()
    print "X and Y loaded"
    Ynum = []
    # converting labels to num (fixed mapping of the five emotion classes)
    label2num = {}
    label2num["ANGER"],label2num["SADNESS"],label2num["JOY"],label2num["FEAR"],label2num["SURPRISE"] = 0,1,2,3,4
    for yy in range(len(Y)):
        Ynum.append(label2num[Y[yy]])
    # Print the first index at which each class appears.
    print Ynum.index(0)
    print Ynum.index(1)
    print Ynum.index(2)
    print Ynum.index(3)
    print Ynum.index(4)
    # The triple-quoted strings below are disabled code paths kept for reference.
    """
    ########## 2D PLOT ####################
    # Fitting the tsne with data
    tsne = TSNE(n_components=2, verbose=1)
    tsne_fit = tsne.fit_transform(X)
    # Saving and loading the fitted tsne
    import pickle
    pickle.dump(tsne_fit,open("tsne_fit_wff_2k.p","wb"))
    tsne_fit = pickle.load(open("tsne_fit_wff_2k.p","rb"))
    """
    """
    # Visualize the data
    from matplotlib import pyplot as plt
    xx = tsne_fit[:, 0]
    yy = tsne_fit[:, 1]
    colors = ['red','green','blue','black','yellow']
    plt.scatter(xx, yy, c=Ynum, edgecolors='none',cmap=matplotlib.colors.ListedColormap(colors))
    #plt.show()
    # Saving the plot in Plots/ folder
    plt.draw()
    plt.savefig("wff_2k_visualise.png")
    #outliers(X,Ynum)
    """
    ################## 3D PLOT #############################
    # Fitting the tsne with data
    tsne = TSNE(n_components=3, verbose=1)
    tsne_fit = tsne.fit_transform(X)
    # Saving and loading the fitted tsne
    import pickle
    pickle.dump(tsne_fit,open("tsne_fit_wff_2k_3d.p","wb"))
    tsne_fit = pickle.load(open("tsne_fit_wff_2k_3d.p","rb"))
    """
    """
    # Visualize the data in 3D and save the scatter plot.
    from matplotlib import pyplot as plt
    xx = tsne_fit[:, 0]
    yy = tsne_fit[:, 1]
    zz = tsne_fit[:, 2]
    colors = ['red','green','blue','black','yellow']
    from mpl_toolkits.mplot3d import Axes3D
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    print Ynum
    ax.scatter(xx, yy,zz, c=Ynum, edgecolors='none',cmap=matplotlib.colors.ListedColormap(colors))
    #plt.show()
    # Saving the plot in Plots/ folder
    plt.draw()
    plt.savefig("wff_2k_visualise_3d__new.png")
    #outliers(X,Ynum)
|
3,143 | 8dfb1312d82bb10f2376eb726f75a4a596319acb | #!/usr/local/bin/python
import requests as rq
import sqlite3 as sq
from dateutil import parser
import datetime
import pytz
import json
from os.path import expanduser
import shutil
from os.path import isfile
import time
#FRED Config — endpoints and default request arguments for the FRED API.
urls = {'FRED':"http://api.stlouisfed.org/fred"}
urls['FRED_SER'] = urls['FRED'] + "/series"
urls['FRED_OBS'] = urls['FRED_SER'] + "/observations"
# NOTE(review): API key committed in source — consider moving to config/env.
api_key = "fc359838e2193d76d75f8a850c41fbd7"
args = {"api_key":api_key, "series_id":0, "file_type":"json", "frequency":"sa", "aggregation_method" : "avg"} #initial arguments for FRED requests
home = expanduser("~")
#change this DB location
#db = "/Volumes/Pylos/Projects/FED/projection.db"
#
# Back up the existing database (timestamped copy) before touching it.
bu = home+"/exhibit/unemployment"+str(time.time())+".db"
db = home+"/exhibit/unemployment.db"
if isfile(db):
    print "making backup at "+bu
    shutil.copyfile(db,bu)
#DB config
#db = 'unemployment.db'
conn = sq.connect(db) #connection is open
conn.row_factory = sq.Row
force = True;  # when True, all cached rows are deleted and re-fetched
#setup vars
today = datetime.datetime.now()
today = pytz.utc.localize(today);
stamp = today.strftime("%Y-%m-%d %H:%M:%S%z")
#get string date for one decade ago (observation window start)
tmpStamp = today.strftime("%Y-%m-%d")
lDate = tmpStamp.split("-")
lDate[0] = str(int(lDate[0]) - 10);
startDate = datetime.date(int(lDate[0]),int(lDate[1]),int(lDate[2]))
startStr = lDate[0]+"-"+lDate[1]+"-"+lDate[2]
args["observation_start"] = startStr
def get_ids():
    """Return all series-id rows from the ser_id table."""
    c = conn.cursor()
    c.execute("SELECT series_id FROM ser_id");
    rows = c.fetchall()
    return rows
#check that all series are present, and up to date.
def check_series():
    """Sync every known series: create missing ones, refresh stale ones.

    When the module-level `force` flag is set, all cached rows are deleted
    first so everything is re-fetched. (Python 2)
    """
    if force == True:
        delete_rows()
        print "Forced, deleting rows"
    ids = get_ids() #get all ids from db
    #print ids
    c = conn.cursor()
    for id in ids:
        i = (id["series_id"],)
        if i[0] != "N/A":  # skip placeholder ids
            c.execute("SELECT * FROM ser_data WHERE ser_id=?",i)
            data = c.fetchone();
            if data is None or force == True: #this id is not in db
                print('There is no series named %s in database, syncing with FRED...'%i)
                create_series(i)
            else: #id is found
                date_check = check_date(data["date"]) #check if up to date
                if date_check:
                    update_series(i)
def get_series(id):
    """Fetch series metadata from FRED.

    Returns {'series': full json payload, 'date': last_updated string}."""
    args["series_id"] = id;
    r = rq.get(urls["FRED_SER"], params=args)
    j = r.json();
    _date = j["seriess"][0]["last_updated"]
    return {"series":j, 'date':_date}
def get_obs(id):
    """Fetch the observations for a series and strip missing ('.') values.

    NOTE(review): removal is two-pass — '.'-valued items are first replaced
    by the string "null", then removed one per original null; confirm this
    matches the intended behaviour.
    """
    args["series_id"] = id;
    r = rq.get(urls["FRED_OBS"], params=args)
    j = r.json();
    _obs = j["observations"]
    nullItems = []
    for (oi, ob) in enumerate(_obs):
        if ob["value"] == ".":
            nullItems.append(oi)
            print("Null Items found at "+str(oi))
            _obs[oi] = "null"
    for (ni, nn) in enumerate(nullItems):
        _obs.remove("null")
    # print _obs
    return _obs
def create_series(id):
    """Fetch series metadata + observations from FRED and INSERT a new
    ser_data row (updated-stamp, id, series json, observations json, date)."""
    c = conn.cursor()
    obs = get_obs(id)
    ser = get_series(id)
    date = ser["date"]
    ser = ser["series"]
    q = (id,ser,obs,date);
    c.execute("INSERT INTO ser_data VALUES(?,?,?,?,?)", (stamp,str(id[0]),json.dumps(ser),json.dumps(obs),date))
    conn.commit()
def delete_rows():
    """Delete every cached row from ser_data."""
    c = conn.cursor()
    c.execute("DELETE FROM ser_data")
    conn.commit()
def check_date(d):
    """Return True if the stored last-updated date string d, normalized to
    UTC, is later than `today` — presumably meaning the cached row should be
    refreshed (TODO confirm intent)."""
    data_date = parser.parse(d);
    data_utc = data_date.astimezone(pytz.utc);
    check = today < data_utc
    return check
def update_series(id):
    """Re-fetch a series from FRED and UPDATE its existing ser_data row."""
    c = conn.cursor()
    obs = get_obs(id)
    ser = get_series(id)
    date = ser["date"]
    ser = ser["series"]
    q = (id,ser,obs,date);
    c.execute("UPDATE ser_data SET series = ?, observations = ?, date = ?, updated = ? WHERE ser_id = ? ", (json.dumps(ser),json.dumps(obs),date,stamp, str(id[0])))
    conn.commit();
    print("seriess updated")
# Kick off the sync when the script is run.
check_series()
|
3,144 | b29c11b11fd357c7c4f774c3c6a857297ff0d021 | """
losettings.py
Contains a class for profiles and methods to save and load them from xml files.
Author: Stonepaw
Version 2.0
Rewrote pretty much everything. Much more modular and requires no maintence when a new attribute is added.
No longer fully supports profiles from 1.6 and earlier.
Copyright 2010-2012 Stonepaw
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import clr
import System
clr.AddReference("System.Xml")
from System import Convert
from System.IO import File, StreamReader, StreamWriter
from System.Xml import XmlDocument, XmlWriter, XmlWriterSettings
from System.Windows.Forms import MessageBox, MessageBoxIcon, MessageBoxButtons
from locommon import ExcludeRule, ExcludeGroup, Mode, PROFILEFILE, VERSION
class Profile:
"""This class contains all the variables for a profile.
Use save_to_xml to save the profile to a xml file.
Use load_from_xml to load the profile from the file.
Anytime a new variable is added it will automatically be save and loaded.
"""
def __init__(self):
self.Version = 0
self.FolderTemplate = ""
self.BaseFolder = ""
self.FileTemplate = ""
self.Name = ""
self.EmptyFolder = ""
self.EmptyData = {}
self.Postfix = {}
self.Prefix = {}
self.Seperator = {}
self.IllegalCharacters = {"?" : "", "/" : "", "\\" : "", "*" : "", ":" : " - ", "<" : "[", ">" : "]", "|" : "!", "\"" : "'"}
self.Months = {1 : "January", 2 : "February", 3 : "March", 4 : "April", 5 : "May", 6 : "June", 7 : "July", 8 :"August", 9 : "September", 10 : "October",
11 : "November", 12 : "December", 13 : "Spring", 14 : "Summer", 15 : "Fall", 16 : "Winter"}
self.TextBox = {}
self.UseFolder = True
self.UseFileName = True
self.ExcludeFolders = []
self.DontAskWhenMultiOne = True
self.ExcludeRules = []
self.ExcludeOperator = "Any"
self.RemoveEmptyFolder = True
self.ExcludedEmptyFolder = []
self.MoveFileless = False
self.FilelessFormat = ".jpg"
self.ExcludeMode = "Do not"
self.FailEmptyValues = False
self.MoveFailed = False
self.FailedFolder = ""
self.FailedFields = []
self.Mode = Mode.Move
self.CopyMode = True
self.AutoSpaceFields = True
self.ReplaceMultipleSpaces = True
self.CopyReadPercentage = True
def duplicate(self):
"""Returns a duplicate of the profile instance."""
duplicate = Profile()
for i in self.__dict__:
if type(getattr(self, i)) is dict:
setattr(duplicate, i, getattr(self, i).copy())
else:
setattr(duplicate, i, getattr(self, i))
return duplicate
def update(self):
if self.Version < 2.0:
if self.Mode is "Test":
self.Mode = "Simulate"
replacements = {"Language" : "LanguageISO", "Format" : "ShadowFormat", "Count" : "ShadowCount", "Number" : "ShadowNumber", "Series" : "ShadowSeries",
"Title" : "ShadowTitle", "Volume" : "ShadowVolume", "Year" : "ShadowYear"}
for key in self.EmptyData.keys():
if key in replacements:
self.EmptyData[replacements[key]] = self.EmptyData[key]
del(self.EmptyData[key])
insert_control_replacements = {"SeriesComplete" : "Series Complete", "Read" : "Read Percentage", "FirstLetter" : "First Letter", "AgeRating" : "Age Rating",
"AlternateSeriesMulti" : "Alternate Series Multi", "MonthNumber" : "Month Number", "AlternateNumber" : "Alternate Number",
"StartMonth" : "Start Month", "AlternateSeries" : "Alternate Series", "ScanInformation" : "Scan Information", "StartYear" : "Start Year",
"AlternateCount" : "Alternate Count"}
for key in insert_control_replacements :
if key in self.TextBox.keys():
self.TextBox[insert_control_replacements[key]] = self.TextBox[key]
del(self.TextBox[key])
if key in self.Prefix.keys():
self.Prefix[insert_control_replacements[key]] = self.Prefix[key]
del(self.Prefix[key])
if key in self.Postfix.keys():
self.Postfix[insert_control_replacements[key]] = self.Postfix[key]
del(self.Postfix[key])
if key in self.Seperator.keys():
self.Seperator[insert_control_replacements[key]] = self.Seperator[key]
del(self.Seperator[key])
self.Version = VERSION
def save_to_xml(self, xwriter):
"""
To save this profile intance to xml file using a XmlWriter.
xwriter->should be a XmlWriter instance.
"""
xwriter.WriteStartElement("Profile")
xwriter.WriteAttributeString("Name", self.Name)
xwriter.WriteStartAttribute("Version")
xwriter.WriteValue(self.Version)
xwriter.WriteEndAttribute()
for var_name in self.__dict__:
var_type = type(getattr(self, var_name))
if var_type is str and var_name != "Name":
self.write_string_to_xml(var_name, xwriter)
elif var_type is bool:
self.write_bool_to_xml(var_name, xwriter)
elif var_type is dict:
self.write_dict_to_xml(var_name, xwriter)
elif var_type is list and var_name != "ExcludeRules":
self.write_list_to_xml(var_name, xwriter)
xwriter.WriteStartElement("ExcludeRules")
xwriter.WriteAttributeString("Operator", self.ExcludeOperator)
xwriter.WriteAttributeString("ExcludeMode", self.ExcludeMode)
for rule in self.ExcludeRules:
if rule:
rule.save_xml(xwriter)
xwriter.WriteEndElement()
xwriter.WriteEndElement()
def write_dict_to_xml(self, attribute_name, xmlwriter, write_empty=False):
"""Writes a dictionary to an xml file in the form of
<attribute_name>
<Item Name="attribute_name key" Value="attribute_name value" />
<Item Name="attribute_name key" Value="attribute_name value" />
etc.
</attribute_name>
attribute_name->The name of the dictonary attribute to write.
xmlwriter->The xml writer to write with.
write_empty->A bool of whether to write empty values to the xml file. Default is don't write them.
"""
if attribute_name in ("IllegalCharacters", "Months"):
write_empty = True
dictionary = getattr(self, attribute_name)
xmlwriter.WriteStartElement(attribute_name)
for key in dictionary:
if dictionary[key] or write_empty:
xmlwriter.WriteStartElement("Item")
xmlwriter.WriteStartAttribute("Name")
xmlwriter.WriteValue(key)
xmlwriter.WriteEndAttribute()
xmlwriter.WriteStartAttribute("Value")
xmlwriter.WriteValue(dictionary[key])
xmlwriter.WriteEndAttribute()
xmlwriter.WriteEndElement()
xmlwriter.WriteEndElement()
def write_list_to_xml(self, attribute_name, xmlwriter, write_empty=False):
"""Writes a list to an xml file in the form of
<attribute_name>
<Item>value</Item>
<Item>value</Item>
etc.
</attribute_name>
attribute_name->The name of the list attribute to write.
xmlwriter->The xml writer to write with.
write_empty->A bool of whether to write empty values to the xml file. Default is don't write them.
"""
attribute_list = getattr(self, attribute_name)
xmlwriter.WriteStartElement(attribute_name)
for item in attribute_list:
if item or write_empty:
xmlwriter.WriteElementString("Item", item)
xmlwriter.WriteEndElement()
def write_string_to_xml(self, attribute_name, xmlwriter, write_empty=True):
"""Writes a string to an xml file in the form of
<attribute_name>string</attribute_name>
attribute_name->The name of the string attribute to write.
xmlwriter->The xml writer to write with.
write_empty->A bool of whether to write empty strings to the xml file. Default is write empty strings.
"""
string = getattr(self, attribute_name)
if string or write_empty:
xmlwriter.WriteElementString(attribute_name, string)
def write_bool_to_xml(self, attribute_name, xmlwriter):
"""Writes a boolean to an xml file in the form of
<attribute_name>true/false</attribute_name>
attribute_name->The name of the attribute to write.
xmlwriter->The xml writer to write with.
"""
xmlwriter.WriteStartElement(attribute_name)
xmlwriter.WriteValue(getattr(self, attribute_name))
xmlwriter.WriteEndElement()
def load_from_xml(self, Xml):
"""Loads the profile instance from the Xml.
Xml->should be a XmlNode/XmlDocument containing a profile node.
"""
try:
#Text vars
self.Name = Xml.Attributes["Name"].Value
if "Version" in Xml.Attributes:
self.Version = float(Xml.Attributes["Version"].Value)
for var_name in self.__dict__:
if type(getattr(self,var_name)) is str:
self.load_text_from_xml(Xml, var_name)
elif type(getattr(self,var_name)) is bool:
self.load_bool_from_xml(Xml, var_name)
elif type(getattr(self, var_name)) is list and var_name != "ExcludeRules":
self.load_list_from_xml(Xml, var_name)
elif type(getattr(self, var_name)) is dict:
self.load_dict_from_xml(Xml, var_name)
#Exclude Rules
exclude_rules_node = Xml.SelectSingleNode("ExcludeRules")
if exclude_rules_node is not None:
self.ExcludeOperator = exclude_rules_node.Attributes["Operator"].Value
self.ExcludeMode = exclude_rules_node.Attributes["ExcludeMode"].Value
for node in exclude_rules_node.ChildNodes:
if node.Name == "ExcludeRule":
try:
rule = ExcludeRule(node.Attributes["Field"].Value, node.Attributes["Operator"].Value, node.Attributes["Value"].Value)
except AttributeError:
rule = ExcludeRule(node.Attributes["Field"].Value, node.Attributes["Operator"].Value, node.Attributes["Text"].Value)
self.ExcludeRules.append(rule)
elif node.Name == "ExcludeGroup":
group = ExcludeGroup(node.Attributes["Operator"].Value)
group.load_from_xml(node)
self.ExcludeRules.append(group)
self.update()
except Exception, ex:
print ex
return False
def load_text_from_xml(self, xmldoc, name):
"""Loads a string with a specified node name from an XmlDocument and saves it to the attribute. The string should be saved as:
<name>string</name>
xmldoc->The XmlDocment to load from.
name->The attribute to save to and the root node name to load the string from."""
if xmldoc.SelectSingleNode(name) is not None:
setattr(self, name, xmldoc.SelectSingleNode(name).InnerText)
def load_bool_from_xml(self, xmldoc, name):
"""Loads a bool with a specified node name from an XmlDocument and saves it to the attribute. The bool should be saved as:
<name>true/false</name>
xmldoc->The XmlDocment to load from.
name->The attribute to save to and the root node name to load the bool from."""
if xmldoc.SelectSingleNode(name) is not None:
setattr(self, name, Convert.ToBoolean(xmldoc.SelectSingleNode(name).InnerText))
def load_list_from_xml(self, xmldoc, name):
"""Loads a list with a specified node name from an XmlDocument and saves it to the attribute. The list should be saved as:
<name>
<Item>list value</Item>
</name>
xmldoc->The XmlDocment to load from.
name->The attribute to save to and the root node name to load the list from."""
nodes = xmldoc.SelectNodes(name + "/Item")
if nodes.Count > 0:
setattr(self, name, [item.InnerText for item in nodes])
def load_dict_from_xml(self, xmldoc, name):
"""Loads a dict with a specified node name from an XmlDocument and saves it to the attribute. The dict should be saved as:
<name>
<Item Name="key" Value="value" />
</name>
xmldoc->The XmlDocment to load from.
name->The attribute to save to and the root node name to load the dict from."""
nodes = xmldoc.SelectNodes(name + "/Item")
if nodes.Count > 0:
dictionary = getattr(self, name)
for node in nodes:
if node.Attributes.Count == 2:
if name == "Months":
dictionary[int(node.Attributes["Name"].Value)] = node.Attributes["Value"].Value
else:
dictionary[node.Attributes["Name"].Value] = node.Attributes["Value"].Value
def load_profiles(file_path):
    """
    Load profiles from a xml file. If no profiles are found it creates a blank profile.
    file_path->The absolute path to the profile file
    Returns a dict of the found profiles and a list of the lastused profile(s)
    """
    profiles, lastused = load_profiles_from_file(file_path)
    if len(profiles) == 0:
        #Just in case: fall back to a built-in default profile.
        profiles["Default"] = Profile()
        profiles["Default"].Name = "Default"
        #Some default templates
        profiles["Default"].FileTemplate = "{<series>}{ Vol.<volume>}{ #<number2>}{ (of <count2>)}{ ({<month>, }<year>)}"
        profiles["Default"].FolderTemplate = "{<publisher>}\{<imprint>}\{<series>}{ (<startyear>{ <format>})}"
    if not lastused:
        # NOTE(review): keys()[0] only works on Python 2, where keys() is a list.
        lastused = [profiles.keys()[0]]
    return profiles, lastused
def load_profiles_from_file(file_path):
    """
    Loads profiles from a file.
    file_path->The absolute path the xml file
    Returns a dict of the profiles and the last-used profile name(s)
    (empty string if none recorded). (Python 2)
    """
    profiles = {}
    lastused = ""
    if File.Exists(file_path):
        try:
            with StreamReader(file_path) as xmlfile:
                xmldoc = XmlDocument()
                xmldoc.Load(xmlfile)
            if xmldoc.DocumentElement.Name == "Profiles":
                nodes = xmldoc.SelectNodes("Profiles/Profile")
            #Individual exported profiles are saved with the document element as Profile
            elif xmldoc.DocumentElement.Name == "Profile":
                nodes = xmldoc.SelectNodes("Profile")
            #Changed from 1.7 to 2.0 to use Profiles/Profile instead of Settings/Setting
            elif xmldoc.DocumentElement.Name == "Settings":
                nodes = xmldoc.SelectNodes("Settings/Setting")
            elif xmldoc.DocumentElement.Name == "Setting":
                nodes = xmldoc.SelectNodes("Setting")
            #No valid root elements
            else:
                MessageBox.Show(file_path + " is not a valid Library Organizer profile file.", "Not a valid profile file", MessageBoxButtons.OK, MessageBoxIcon.Error)
                return profiles, lastused
            if nodes.Count > 0:
                for node in nodes:
                    profile = Profile()
                    profile.Name = node.Attributes["Name"].Value
                    result = profile.load_from_xml(node)
                    #Error loading the profile
                    if result == False:
                        MessageBox.Show("An error occured loading the profile " + profile.Name + ". That profile has been skipped.")
                    else:
                        profiles[profile.Name] = profile
            #Load the last used profile
            rootnode = xmldoc.DocumentElement
            if rootnode.HasAttribute("LastUsed"):
                lastused = rootnode.Attributes["LastUsed"].Value.split(",")
        except Exception, ex:
            MessageBox.Show("Something seems to have gone wrong loading the xml file.\n\nThe error was:\n" + str(ex), "Error loading file", MessageBoxButtons.OK, MessageBoxIcon.Error)
    return profiles, lastused
def import_profiles(file_path):
    """
    Load profiles from a xml file. If no profiles are found it returns an empty dict.
    file_path->The absolute path to the profile file
    Returns a dict of the found profiles (the last-used info is discarded).
    """
    profiles, lastused = load_profiles_from_file(file_path)
    return profiles
def save_profiles(file_path, profiles, lastused=""):
    """
    Saves the profiles to an xml file.
    file_path: The complete file path of the file to save to.
    profiles: a dict of profile objects.
    lastused: a list/string containing the last used profile name(s). (Python 2)
    """
    try:
        xSettings = XmlWriterSettings()
        xSettings.Indent = True
        with XmlWriter.Create(file_path, xSettings) as writer:
            writer.WriteStartElement("Profiles")
            if lastused:
                writer.WriteAttributeString("LastUsed", ",".join(lastused))
            for profile in profiles:
                profiles[profile].save_to_xml(writer)
            writer.WriteEndElement()
    except Exception, ex:
        MessageBox.Show("An error occured writing the settings file. The error was:\n\n" + ex.message, "Error saving settings file", MessageBoxButtons.OK, MessageBoxIcon.Error)
def save_profile(file_path, profile):
    """
    Saves a single profile to an xml file (used for exporting one profile).
    file_path: The complete file path of the file to save to.
    profile: a Profile object. (Python 2)
    """
    try:
        xSettings = XmlWriterSettings()
        xSettings.Indent = True
        with XmlWriter.Create(file_path, xSettings) as writer:
            profile.save_to_xml(writer)
    except Exception, ex:
        MessageBox.Show("An error occured writing the settings file. The error was:\n\n" + ex.message, "Error saving settings file", MessageBoxButtons.OK, MessageBoxIcon.Error)
def save_last_used(file_path, lastused):
    """Saves the lastused profiles to the xml file.

    lastused is joined with "," into the root element's LastUsed attribute,
    so it should be an iterable of profile-name strings.
    Note: unlike the save_* functions above, errors here are NOT caught.
    """
    x = XmlDocument()
    x.Load(file_path)
    x.DocumentElement.SetAttribute("LastUsed", ",".join(lastused))
    x.Save(file_path)
3,145 | 83b65b951b06b117c2e85ba348e9b591865c1c2e | #--------------------------------------------------------------------------------
# G e n e r a l I n f o r m a t i o n
#--------------------------------------------------------------------------------
# Name: Exercise 2.6 - Planetary Orbits
#
# Usage: Calculate information for planetary orbits
#
# Description: Given basic information about an orbiting body, calculate the
# planetary orbit information for said orbiting body and a second object that
# is orbiting around the first body.
#
# Inputs: Distance to the Sun (length) and velocity at perihelion.
#
# Outputs: The second orbiting body's distance to the sun (L2) and velocity (v2)
# of the second body, the Orbital period (T) and the orbital eccentricity (e)
#
# Auxiliary Files:
#
# Special Instructions:
#
#--------------------------------------------------------------------------------
# C o d e H i s t o r y
#--------------------------------------------------------------------------------
# Version: 1.0
#
# Author(s): Kole Frazier
#
#--------------------------------------------------------------------------------
def orbit_properties(L1, v1):
    """Return (L2, v2, T, e) for a body orbiting the Sun.

    L1: perihelion distance to the Sun (m)
    v1: speed at perihelion (m/s)
    Returns:
        L2: aphelion distance (m)
        v2: speed at aphelion (m/s)
        T:  orbital period (s)
        e:  orbital eccentricity (dimensionless)
    """
    import math

    G = 6.6738e-11   # Newtonian gravitational constant (m^3 kg^-1 s^-2)
    M = 1.9891e30    # mass of the Sun (kg)

    # FIX: conservation of energy and angular momentum give a quadratic in v2
    # whose smaller (aphelion) root is v2 = 2*G*M/(v1*L1) - v1.  The original
    # computed v2 = -v1**2 + 2*G*M/L1, which is the quadratic's constant term,
    # not a speed (the author's own comment flagged it as wrong).
    v2 = 2.0 * G * M / (v1 * L1) - v1
    L2 = (L1 * v1) / v2                      # angular-momentum conservation

    a = 0.5 * (L1 + L2)                      # semi-major axis
    b = math.sqrt(L1 * L2)                   # semi-minor axis
    T = (2.0 * math.pi * a * b) / (L1 * v1)  # period: ellipse area / areal velocity
    e = (L2 - L1) / (L2 + L1)                # eccentricity
    return L2, v2, T, e


if __name__ == '__main__':
    # Get user input for Object 1, then report the derived orbit of Object 2.
    L1 = float(input('Enter distance to the sun: '))
    v1 = float(input('Enter velocity at perihelion: '))
    L2, v2, T, e = orbit_properties(L1, v1)
    print('v2: {0}\tL2: {1}'.format(v2, L2))
    print('T: {0}\te:{1}'.format(T, e))
|
3,146 | a68d682ba6d441b9d7fb69ec1ee318a0ef65ed40 | """
Read a real number. If it is positive print it's square root, if it's not print the square of it.
"""
import math
# Prompt for a real number; report its square root when positive,
# otherwise report its square.
print('Insert a number')
value = float(input())
if value > 0:
    message = f'The square root of {value} is {math.sqrt(value)}'
else:
    message = f'The square of {value} is {value**2}'
print(message)
|
3,147 | 6c65d63ef07b6cdb2029e6a6e99f6ee35b448c4b | individual = html.Div([
html.Div([ # input container
html.Div([
dcc.RadioItems(id='view-radio',
options=[
{'label': i, 'value': i} for i in ['Players',
'Teams']
],
value='Players'
)
]),
html.Div([
dcc.Dropdown(id='drop-input')
]),
], className='two columns'),
html.Div([ # visuals container
html.Div([ # pic column container
html.H6(id='name-header')
dcc.Image(), # team or player image
], className='two columns'),
html.Div([ # data container
html.Div([ # graph
dcc.Graph()
]),
html.Div([ # table
dash_table.datatable()
])
])
], className='eight columns')
])
@app.callback(
    Output('drop-input', 'options'),
    [Input('view-radio', 'value')]
)
def update_dropdown(selection):
    """Populate the dropdown with players or teams, per the radio selection."""
    sources = {'Players': active_players, 'Teams': active_teams}
    if selection in sources:
        return [{'label': item, 'value': item} for item in sources[selection]]
    # Any other value falls through, returning None (as before).
3,148 | 174c4c1ed7f2197e012644999cf23f5e82f4b7c3 | def has23(nums):
this = nums[0] == 2 or nums[0] == 3
that = nums[1] == 2 or nums[1] == 3
return this or that
|
3,149 | dd4dc1c4a0dc47711d1d0512ef3f6b7908735766 |
import numpy as np
import tensorflow as tf
class LocNet:
    """TF1 localization head: max-pool + dropout + two fully connected
    layers predicting a 4-value location (e.g. a bounding box) from a
    backbone feature map ("buttom_layer"; sic, name kept for callers).
    """

    def __init__(self, scope, buttom_layer):
        # All graph ops are created under `scope`; the ground-truth
        # placeholder is created outside it.
        self.scope = scope
        with tf.variable_scope(scope) as scope:
            self.build_graph(buttom_layer)
        self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None,4),name='gt_loc')

    def build_graph(self, buttom_layer):
        """Build pool -> dropout -> fc1(3000, relu) -> fc2(4) -> self.logit."""
        self.variables = []        # all trainable vars (weights + biases)
        self.kernel_weights = []   # weight matrices only (used for weight decay)
        pool = tf.nn.max_pool(buttom_layer,
                              ksize=[1, 2, 2, 1],
                              strides=[1, 2, 2, 1],
                              padding='SAME',
                              name='pool')
        # TF1 signature: second arg is keep_prob, so 70% of units are dropped.
        drop = tf.nn.dropout(pool, 0.3)
        with tf.name_scope('fc1') as scope:
            # Flattened feature size of everything after the batch dimension.
            shape = int(np.prod(drop.get_shape()[1:]))
            fc1w = tf.Variable(tf.truncated_normal([shape, 3000],
                                                   dtype=tf.float32,
                                                   stddev=1e-1), name='weights')
            fc1b = tf.Variable(tf.constant(1.0, shape=[3000], dtype=tf.float32),
                               trainable=True, name='biases')
            pool_flat = tf.reshape(drop, [-1, shape])
            fc1l = tf.nn.bias_add(tf.matmul(pool_flat, fc1w), fc1b)
            fc1 = tf.nn.relu(fc1l)
            self.kernel_weights += [fc1w]
            self.variables += [fc1w, fc1b]
        with tf.name_scope('fc2') as scope:
            fc2w = tf.Variable(tf.truncated_normal([3000, 4],
                                                   dtype=tf.float32,
                                                   stddev=1e-1), name='weights')
            fc2b = tf.Variable(tf.constant(1.0, shape=[4], dtype=tf.float32),
                               trainable=True, name='biases')
            # Raw (unactivated) 4-value location prediction.
            self.logit = tf.nn.bias_add(tf.matmul(fc1, fc2w), fc2b)
            self.kernel_weights += [fc2w]
            self.variables += [fc2w, fc2b]

    def loss(self):
        """Return prediction loss + L2 weight decay (beta = 0.05)."""
        with tf.name_scope(self.scope) as scope:
            beta = tf.constant(0.05, name='beta')
            # NOTE(review): despite the 'rms' name this takes the MAX squared
            # error over the batch, not the mean — possibly reduce_mean was
            # intended; confirm before changing.
            loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc, self.logit))
            loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.kernel_weights]
            loss_wd = beta * tf.add_n(loss_wd)
            total_loss = loss_rms + loss_wd
            return total_loss
|
3,150 | 8c42e06fd92f0110b3ba8c4e7cc0ac45b9e44378 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__copyright__ = """
This code is licensed under the MIT license.
Copyright University Innsbruck, Institute for General, Inorganic, and Theoretical Chemistry, Podewitz Group
See LICENSE for details
"""
from scipy.signal import argrelextrema
from typing import List, Tuple
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import os
class ButtonActions(object):
    """Plot radial distribution functions (RDFs) per element, marking local
    extrema and showing the integrated water count as a hover tooltip.
    """

    def __init__(self):
        self.axs = []        # one Axes per plotted element
        self.integrals = []  # per-subplot list of integral values at extrema
        self.scs = []        # per-subplot scatter artists marking extrema
        self.annots = []     # per-subplot hover annotation artists

    def plot_rdf(self, display):
        """Plot one RDF subplot per existing element described by *display*.

        Expects 'rdf-<name>.dat' (and 'int-rdf-<name>.dat') files in the
        current directory; missing files produce an in-plot error message.
        """
        matplotlib.rcParams.update({'font.size': 10})
        self.fig = plt.figure(figsize=(display.width, display.height))
        self.display = display
        rows, cols = self._get_rows_and_cols(display)
        count = 0 # only count existing -> not enumerate
        for existing, (symbol, name) in zip(display.existing_elements, display.rdf_names.items()):
            if existing:
                count += 1
                if os.path.exists('rdf-' + str(name) + '.dat'):
                    arr = np.loadtxt("rdf-" + str(name) + ".dat")
                else:
                    # No data file: show the error inside the subplot and move on.
                    print("ERROR: RDF analysis for " + str(name) + " was not performed in this directory!")
                    ax = self.fig.add_subplot(rows, cols, count)
                    txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)
                    txt.set_text("ERROR: RDF analysis for " + str(name) + "\nwas not performed in this directory!")
                    plt.plot()
                    continue
                x = arr[:, 0]  # distance (Angstrom)
                y = arr[:, 1]  # RDF value
                ax = self.fig.add_subplot(rows, cols, count)
                self.axs.append(ax)
                # determine integrals
                sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x, y, name)
                sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])
                self.integrals.append(integrals)
                self.scs.append(sc)
                # Hidden annotation; made visible on hover over an extremum.
                annot = ax.annotate("", xy=(0, 0), xytext=(20, 20), textcoords="offset points",
                                    bbox=dict(boxstyle="round", fc="w"), arrowprops=dict(arrowstyle="->"))
                annot.set_visible(False)
                self.annots.append(annot)
                # title and label specifications
                plt.xlabel("Distance of " + str(name) + ' to oxygen atoms in water / \u00c5')
                plt.ylabel('RDF')
                plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))
                ax.set_xlim([0, np.max(x)])
                ax.axhline(y=1, ls='--', color=display.colors['mark'])
                plt.plot(x, y, linestyle="-", color='#80b1d3')
        plt.ion() # avoids 'The event loop is already running' error message
        self.fig.canvas.mpl_connect('motion_notify_event', lambda event: self._hover(event))
        plt.show()

    def _get_rows_and_cols(self, display) -> Tuple[int, int]:
        """Choose a subplot grid for the number of existing elements."""
        true_count = sum(display.existing_elements)
        if true_count % 2 == 0:
            rows = int(round(true_count / 2))
            cols = int(round(true_count / 2))
            if true_count == 2:
                rows = 2
        else:
            rows = int(round(true_count / 2 + 0.5))
            cols = int(round(true_count / 2 + 0.5))
            if true_count == 5:
                cols = 2
        return rows, cols

    def _find_local_minima_and_maxima(self, distances: np.array, values: np.array, name: str) -> Tuple[List[float],
                                                                                                       List[float],
                                                                                                       List[float]]:
        """Locate local extrema of the RDF (window of 5 points each side) and
        return their distances, values and the RDF integral at each extremum."""
        n_local = 5
        maxima = argrelextrema(values, np.greater, order=n_local)[0]
        minima = argrelextrema(values, np.less, order=n_local)[0]
        extrema = np.asarray(list(maxima) + list(minima))
        ext_distances = [distances[x] for x in extrema]
        ext_values = [values[x] for x in extrema]
        integrals = self._get_integrals(extrema, name)
        return ext_distances, ext_values, integrals

    def _get_integrals(self, indices: np.array, name: str) -> List[float]:
        """Read 'int-rdf-<name>.dat' and return its column-1 values at *indices*."""
        arr = np.loadtxt("int-rdf-" + str(name) + ".dat")
        return [arr[:, 1][i] for i in indices]

    def _update_annot(self, ind, subplot_number: int):
        """Move the subplot's annotation to the hovered extremum and set its text."""
        index = ind['ind'][0]
        integral = self.integrals[subplot_number][index]
        text = "{0:.2f} waters".format(integral)
        annot = self.annots[subplot_number]
        annot.xy = self.scs[subplot_number].get_offsets()[index]
        annot.set_text(text)
        annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])
        annot.get_bbox_patch().set_alpha(0.4)

    def _hover(self, event):
        """Mouse-move handler: show/hide the tooltip for the hovered marker."""
        for i, a in enumerate(self.axs):
            if event.inaxes == a:
                contains, ind = self.scs[i].contains(event)
                annot = self.annots[i]
                visible = annot.get_visible()
                if contains:
                    self._update_annot(ind, i)
                    annot.set_visible(True)
                    self.fig.canvas.draw_idle()
                else:
                    if visible:
                        annot.set_visible(False)
                        self.fig.canvas.draw_idle()
|
3,151 | 850310b6c431981a246832e8a6f5417a88587b99 | import torch
class Activation(torch.nn.Module):
    """Learnable blend of five elementwise activations.

    Each branch (swish, identity, sigmoid, negation, sine) is scaled by a
    sigmoid-squashed learnable weight; only the first five of the ten
    parameters are used.
    """

    def __init__(self):
        super().__init__()
        self.swish = lambda x: x * torch.sigmoid(x)
        self.linear = lambda x: x
        self.sigmoid = lambda x: torch.sigmoid(x)
        self.neg = lambda x: -x
        self.sine = lambda x: torch.sin(x)
        self.params = torch.nn.Parameter(torch.zeros(10))

    def forward(self, x):
        weights = torch.sigmoid(self.params)
        # Same branch/weight pairing and summation order as before.
        terms = [
            self.swish(x) * weights[1],
            self.linear(x) * weights[0],
            self.sigmoid(x) * weights[2],
            self.neg(x) * weights[3],
            self.sine(x) * weights[4],
        ]
        blended = terms[0]
        for term in terms[1:]:
            blended = blended + term
        return blended
class ResizableConv2d(torch.nn.Module):
    """Two stacked 3x3 convs plus a 3x3 residual conv; both paths are
    bicubically resized back to ``state_size`` (the unpadded convs shrink
    the map), then passed through the learnable Activation blend.
    """

    def __init__(self, state_size, inchan, outchan):
        super().__init__()
        self.conv = torch.nn.Conv2d(inchan, outchan, 3)
        self.conv2 = torch.nn.Conv2d(outchan, outchan, 3)
        self.residual_conv = torch.nn.Conv2d(inchan, outchan, 3)
        self.resize = lambda t: torch.nn.functional.interpolate(
            t, size=state_size, mode='bicubic', align_corners=True)
        self.activation = Activation()

    def forward(self, x):
        main_path = self.resize(self.conv2(self.conv(x)))
        skip_path = self.resize(self.residual_conv(x))
        return self.activation(main_path + skip_path)
class ActorNet(torch.nn.Module):
    """Goal-conditioned actor: conv stack with injected Gaussian noise,
    producing one sigmoid action-probability vector per action head.
    """

    def __init__(self, state_size, action_size):
        super().__init__()
        self.conv = ResizableConv2d(state_size, 6, 3)
        self.conv_backwards = ResizableConv2d(state_size, 3, 6)
        self.conv2 = ResizableConv2d(state_size, 3, 3)
        self.conv3 = ResizableConv2d(state_size, 3, 3)
        self.conv4 = ResizableConv2d(state_size, 3, 3)
        self.conv_resize = ResizableConv2d((8, 8), 3, 3)
        # One linear head per entry of action_size, fed from the 8x8x3 map.
        self.linears = torch.nn.ModuleList([])
        for n_actions in action_size:
            self.linears.append(torch.nn.Linear(8 * 8 * 3, n_actions))
        self.optim = torch.optim.AdamW(self.parameters(), lr=1e-4)

    def forward(self, x, goal, time):
        # Same op order as before (matters for the randn_like draws).
        h = torch.cat([x, goal], dim=1) + time
        h = self.conv(h)
        widened = self.conv_backwards(h)
        h = self.conv(widened) + goal
        h = h + torch.randn_like(h)
        h = self.conv2(h) + time
        h = h + torch.randn_like(h)
        h = self.conv3(h) + goal
        h = h + torch.randn_like(h)
        h = self.conv4(h) + goal
        h = self.conv_resize(h)
        flat = torch.flatten(h, start_dim=1)
        outputs = []
        for head in self.linears:
            outputs.append(torch.sigmoid(head(flat)))
        return outputs

    def optimize(self, loss):
        """One AdamW step on *loss*; logs the scalar loss."""
        self.optim.zero_grad()
        loss.backward()
        self.optim.step()
        print("Actor Loss:", loss.item())
class GoalkeeperNet(torch.nn.Module):
    """Produces a goal feature map from a state via three conv blocks."""

    def __init__(self, state_size):
        super().__init__()
        self.conv = ResizableConv2d(state_size, 3, 3)
        self.conv2 = ResizableConv2d(state_size, 3, 3)
        self.conv3 = ResizableConv2d(state_size, 3, 3)
        # NOTE(review): conv4 and flatten are never used by forward();
        # kept so parameter/state_dict layout stays unchanged.
        self.conv4 = ResizableConv2d(state_size, 3, 3)
        self.flatten = torch.nn.Flatten()
        self.optim = torch.optim.AdamW(self.parameters(), lr=1e-4)

    def forward(self, state):
        hidden = self.conv(state)
        return self.conv3(self.conv2(hidden))

    def optimize(self, loss):
        """One AdamW step on *loss*; logs the scalar loss."""
        self.optim.zero_grad()
        loss.backward()
        self.optim.step()
        print("Goalkeeper Loss:", loss.item())
3,152 | 6f9f204cbd6817d5e40f57e71614ad03b64d9003 | # Generated by Django 3.2.6 on 2021-08-15 05:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: redefine Tasks.cleanlinessLevel as a bare IntegerField."""

    dependencies = [
        ('website', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tasks',
            name='cleanlinessLevel',
            field=models.IntegerField(),
        ),
    ]
|
3,153 | b888745b3ce815f7c9eb18f5e76bacfadfbff3f5 | from libs.storage.blocks.iterators.base import BaseBlockIterator
from libs.storage.const import SEPARATOR
class ContentsBlockIterator(BaseBlockIterator):
    """Iterator over a contents block split into SEPARATOR-delimited sections.

    Section 0 is a header; its lines (minus the first) are the titles,
    and the data sections start at offset 1.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        sections = self.block_content.split(SEPARATOR)
        self.contents = sections
        header_lines = sections[0].split('\n')
        self.titles = header_lines[1:]

    def get(self, index):
        """Return the body of data section *index* (0-based)."""
        return self.contents[1 + index]
|
3,154 | 913e1f5a0af436ef081ab567c44b4149299d0ec6 | # Generated by Django 2.2.4 on 2019-08-19 19:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: set the reverse accessor of User.visited to 'visitors'."""

    dependencies = [
        ('application', '0003_auto_20190818_1623'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='visited',
            field=models.ManyToManyField(related_name='visitors', to='application.EscapeRoom'),
        ),
    ]
|
3,155 | ac459bff6d4281ce07b70dbccde3243412ddb414 | # processing functions for diagrams
import torch
import numpy as np
def remove_filler(dgm, val=np.inf):
    """
    remove filler rows (birth == val, default inf) from diagram
    """
    keep = dgm[:, 0] != val
    return dgm[keep, :]
def remove_zero_bars(dgm):
    """
    remove zero-length bars (birth == death) from diagram
    """
    return dgm[dgm[:, 0] != dgm[:, 1], :]
def remove_infinite_bars(dgm, issub):
    """
    remove infinite bars from diagram: death == +inf for a sublevel-set
    filtration (issub True), death == -inf otherwise
    """
    bound = np.inf if issub else -np.inf
    return dgm[dgm[:, 1] != bound, :]
|
3,156 | 284e4f79748c17d44518f2ce424db5b1697373dc | __version__ = '0.90.03'
|
3,157 | 5ddde3aa6eaa30b70743272a532874663067eed6 | #!/usr/bin/env python3
import sys
import os
import math
import random
# Drop the interactive-history hook installed by site.py, if present.
try:
    del sys.__interactivehook__
except AttributeError:
    pass  # already absent (e.g. interpreter started with -S)
print('Python3 startup file loaded from ~/.config/pystartup.py')
|
3,158 | 98dd7446045f09e6d709f8e5e63b0a94341a796e | # -*- coding: utf-8 -*-
import time
import re
from config import allowed_users, master_users, chat_groups
from bs4 import BeautifulSoup
import requests
import urllib.request, urllib.error, urllib.parse
import http.cookiejar
import json
import os
import sys
#from random import randint, choice
from random import uniform, choice
from string import ascii_letters
from image import check_image_request, insert_images, insert_image_request
from telepot.exception import TelegramError
import json
def _convert_to_idn(url):
"""Convert a URL to IDN notation"""
# this function should only be called with a unicode string
# strategy: if the host cannot be encoded in ascii, then
# it'll be necessary to encode it in idn form
parts = list(urllib.parse.urlsplit(url))
try:
parts[1].encode('ascii')
except UnicodeEncodeError:
# the url needs to be converted to idn notation
host = parts[1].rsplit(':', 1)
newhost = []
port = ''
if len(host) == 2:
port = host.pop()
for h in host[0].split('.'):
newhost.append(h.encode('idna').decode('utf-8'))
parts[1] = '.'.join(newhost)
if port:
parts[1] += ':' + port
return urllib.parse.urlunsplit(parts)
else:
return url
def fetch_images_from_db(chat_id, keyword_id, keyword_n, db, shared_dict):
    """Fetch up to 10 not-yet-served image rows for a chat/keyword from the DB.

    An in-progress marker in shared_dict serializes concurrent fetches for
    the same chat/keyword.  Returns None when the cache is empty or holds
    fewer than 3 fresh rows (the cache is then purged so the caller falls
    back to scraping); otherwise returns the rows after flagging them as
    requested.
    """
    lock_key = str(chat_id) + str(keyword_id) + 'db'
    acquired = False
    if lock_key in shared_dict:
        print("%s for group %s already in progress, sleeping for a while" % (keyword_id, chat_id))
        time.sleep(uniform(1, 5))
    else:
        shared_dict[lock_key] = 1
        acquired = True
    try:
        query = "SELECT id,url,type FROM images WHERE chat_id = %s AND keyword_id = %s AND keyword_n = %s AND requested = FALSE LIMIT 10"
        data = db.fetch_data(query, (chat_id, keyword_id, keyword_n,))
        if data is None:
            return None
        if len(data) < 3:
            # Too few fresh images left: drop the cache so it gets re-scraped.
            query = "DELETE FROM images WHERE chat_id = %s AND keyword_id = %s AND keyword_n = %s"
            db.execute(query, (chat_id, keyword_id, keyword_n,))
            return None
        for row in data:
            query = "UPDATE images SET requested = TRUE WHERE chat_id = %s AND keyword_n = %s AND id = %s"
            db.execute(query, (chat_id, keyword_n, row[0]))
        return data
    finally:
        # BUG FIX: the original released the in-progress marker only on the
        # happy path; the early returns above left the keyword permanently
        # marked as "in progress".
        if acquired:
            del shared_dict[lock_key]
def fetch_images_from_google(chat_id,keyword,keyword_id,keyword_n,header,db,shared_dict,bot):
    """Scrape Google Images for *keyword*, cache usable hits in the DB and
    return fresh rows via fetch_images_from_db().

    Return contract (relied on by get_image — confirm before changing):
      1    - a scrape for this chat/keyword is already running,
      None - no results, or fewer than 3 usable (png/jpg) images,
      rows - the freshly cached image rows otherwise.
    A marker in shared_dict keyed by chat_id+keyword prevents concurrent
    scrapes of the same query.
    """
    if str(chat_id) + keyword in shared_dict:
        bot.sendMessage(chat_id,'po etomu slovu poka idet poisk - ' + keyword)
        return 1
    shared_dict[str(chat_id) + keyword] = 1
    # Build a '+'-joined query string for the image-search URL.
    query = keyword.split()
    #query = str('+'.join(query).encode('utf-8'))
    query = '+'.join(query)
    print('query - ' + query)
    url="https://www.google.co.in/search?q="+urllib.parse.quote(query)+"&source=lnms&tbm=isch"
    soup = BeautifulSoup(urllib.request.urlopen(urllib.request.Request(url,headers=header)),'html.parser')
    ActualImages=[]
    # Each result lives in a JSON blob inside a div.rg_meta element:
    # "ou" is the original image URL, "ity" its file type.
    for a in soup.find_all("div",{"class":"rg_meta"}):
        link , Type =json.loads(a.text)["ou"] ,json.loads(a.text)["ity"]
        ActualImages.append((link,Type))
    total_images = len(ActualImages)
    if total_images == 0:
        del shared_dict[str(chat_id) + keyword]
        return None
    print("there are total" , total_images,"images")
    # Keep only png/jpg hits, re-indexed densely for insert_images().
    nuran = {}
    i = 0
    for a, (img , Type) in enumerate( ActualImages):
        if Type == 'png' or Type == 'jpg':
            nuran[i] = {}
            nuran[i]['url'] = img
            nuran[i]['type'] = Type
            i += 1
    if len(nuran) < 3:
        del shared_dict[str(chat_id) + keyword]
        return None
    del shared_dict[str(chat_id) + keyword]
    #print shared_dict
    insert_images(chat_id,keyword_id,keyword_n,nuran,db)
    return fetch_images_from_db(chat_id,keyword_id,keyword_n,db,shared_dict)
def fetch_images(chat_id, keyword, keyword_n, header, db, shared_dict, bot):
    """Return image rows for *keyword*, preferring the DB cache and falling
    back to a Google scrape when the cache is empty or exhausted.
    """
    keyword_id = check_image_request(chat_id, keyword, db)
    if keyword_id is None:
        # First time we see this keyword: register it, then scrape.
        keyword_id = insert_image_request(chat_id, keyword, db)
        return fetch_images_from_google(chat_id, keyword, keyword_id, keyword_n, header, db, shared_dict, bot)
    cached = fetch_images_from_db(chat_id, keyword_id[0], keyword_n, db, shared_dict)
    if cached is not None:
        return cached
    return fetch_images_from_google(chat_id, keyword, keyword_id, keyword_n, header, db, shared_dict, bot)
def get_image(chat_id,keyword,shared_dict,db,bot,msg=True):
    """Download up to 3 images for *keyword* and send them to the chat.

    A per-chat in-flight counter is kept in shared_dict under '<chat_id>n'
    (incremented here, decremented on every exit path).  msg=False
    suppresses the user-facing status messages.
    """
    print('keyword - ' + keyword)
    header={'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36"}
    shared_dict[str(chat_id) + 'n'] += 1
    # Bucket index derived from the keyword length (matches DB keyword_n).
    keyword_n = len(keyword) % 10
    nuran = fetch_images(chat_id,keyword,keyword_n,header,db,shared_dict,bot)
    if nuran == 1:
        # A search for this keyword is already running; the user was notified.
        shared_dict[str(chat_id) + 'n'] -= 1
        return
    if nuran is None and msg is True:
        shared_dict[str(chat_id) + 'n'] -= 1
        bot.sendMessage(chat_id,'ni4ego ne naydeno(')
        return
    DIR = '/tmp'
    index = 0  # position in nuran
    num = 0    # images successfully sent
    if msg is True:
        bot.sendMessage(chat_id,'lovi fotki')
    # Walk the rows until 3 images were sent or the list is exhausted
    # (IndexError breaks the loop); other failures just skip the row.
    while 1:
        try:
            print('trying to open %s' % nuran[index][1])
            url = _convert_to_idn(urllib.parse.unquote(nuran[index][1]))
            print('unquotted url %s' % url)
            url = urllib.parse.quote(url,safe=':/')
            req = urllib.request.Request(url, headers=header)
            raw_img = urllib.request.urlopen(req,timeout=5).read()
            # Row layout is (id, url, type); type compared to True suggests a
            # boolean "is jpg" column — TODO confirm against the schema.
            # ('type' also shadows the builtin; kept for byte-compatibility.)
            type = 'jpg' if nuran[index][2] == True else 'png'
            image_name = "".join(choice(ascii_letters) for i in range(20))
            f = open(os.path.join(DIR , image_name + "."+type), 'wb')
            f.write(raw_img)
            f.close()
            print('sending %s' % os.path.join(DIR , image_name + "."+type))
            bot.sendPhoto(chat_id,open(os.path.join(DIR , image_name + "."+type), 'rb'))
            os.unlink(os.path.join(DIR , image_name + "."+type))
        except TelegramError as e:
            print("Telegram error - {}".format(e))
            index += 1
            #if e[0] == 'Bad Request: PHOTO_INVALID_DIMENSIONS':
            #    print('invalid image')
            continue
        except IndexError:
            print("index out of range, breaking")
            break
        except Exception as e:
            # Log where the failure happened, then try the next image.
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print("error - {}".format(e))
            print(exc_type, fname, exc_tb.tb_lineno)
            index += 1
            continue
        num += 1
        index += 1
        if num >= 3:
            break
    shared_dict[str(chat_id) + 'n'] -= 1
    print('chat id count for %s - %s' % (chat_id,shared_dict[str(chat_id) + 'n']))
def generate_bot_array(lst):
    """Lay out *lst* as Telegram keyboard rows of at most 3 buttons each.

    FIX: the original unconditionally appended the trailing partial row,
    which produced an empty row ([]) whenever len(lst) was a multiple of 3
    (including an empty input).
    """
    items = list(lst)  # also accepts dict views, e.g. chat_groups.keys()
    return [items[i:i + 3] for i in range(0, len(items), 3)]
def handle_msg(msg,bot,shared_dict,db):
    """Dispatch one incoming Telegram message.

    Master users get admin commands: CHAT/STOP (relay chat to a group) and
    FORWARD/STOP FORWARD; allowed users/groups can request image searches
    with 'fotki <query>' (latin or cyrillic), throttled per chat by the
    limit stored in allowed_users.
    """
    chat_id = str(msg['chat']['id'])
    # Master user currently in relay-chat mode ('<chat_id>chat' marker set).
    if msg['chat']['id'] in master_users and chat_id + 'chat' in shared_dict:
        if msg['text'].upper() == 'STOP':
            bot.sendMessage(chat_id,'Chat has been ended',reply_markup={'hide_keyboard': True})
            del shared_dict[chat_id + 'chat']
            return
        if shared_dict[chat_id + 'chat'] == 0:
            # Marker == 0 means the group hasn't been chosen yet.
            if msg['text'] not in chat_groups:
                bot.sendMessage(chat_id,'Incorrect group',reply_markup={'hide_keyboard': True})
                del shared_dict[chat_id + 'chat']
                return
            shared_dict[chat_id + 'chat'] = chat_groups[msg['text']]
            bot.sendMessage(chat_id,"You're talking with group %s" % msg['text'],reply_markup={'hide_keyboard': True})
        else:
            # Relay the master's message to the selected group.
            bot.sendMessage(shared_dict[chat_id + 'chat'],msg['text'])
    elif msg['chat']['id'] in master_users and 'forward' in shared_dict and msg['text'].upper() == 'STOP FORWARD':
        bot.sendMessage(chat_id,"OK, forwarding has been disabled, bye")
        del shared_dict['forward']
    elif msg['chat']['id'] in master_users and msg['text'].upper() == 'CHAT':
        bot.sendMessage(chat_id,'Which one?',reply_markup={'keyboard': generate_bot_array(chat_groups.keys())})
        shared_dict[chat_id + 'chat'] = 0
    elif msg['chat']['id'] in master_users and msg['text'].upper() == 'FORWARD':
        bot.sendMessage(chat_id,"OK, I'll forward all msgs to you")
        shared_dict['forward'] = msg['chat']['id']
    elif ((msg['chat']['type'] == 'private' and msg['chat']['id'] in allowed_users) or
          (msg['chat']['type'] == 'supergroup' or msg['chat']['type'] == 'group') and msg['chat']['id'] in allowed_users):
        if chat_id + 'n' not in shared_dict:
            shared_dict[chat_id + 'n'] = 0
        # Throttle: '<chat_id>n' counts in-flight searches against the
        # per-chat limit configured in allowed_users.
        if shared_dict[chat_id + 'n'] >= allowed_users[msg['chat']['id']]:
            bot.sendMessage(msg['chat']['id'],'ya poka zanat')
            return
        # 'fotki <query>' (latin or cyrillic) triggers an image search.
        if 'text' in msg and re.match(r'(FOTKI|ФОТКИ) .+',msg['text'].upper(),re.IGNORECASE):
            bot.sendMessage(chat_id,'pristupayu k poisku fotok')
            get_image(chat_id,re.match(r'^[^\s]+ (.+)$',msg['text'],re.IGNORECASE).group(1),shared_dict,db,bot)
        elif msg['chat']['type'] == 'private':
            bot.sendMessage(msg['from']['id'],'idi na huy, ya teba ne znayu')
        else:
            pass
|
3,159 | 0f0595793e98187c6aaf5b1f4b59affb06bb598e | from phylo_utils.data import fixed_equal_nucleotide_frequencies
from phylo_utils.substitution_models.tn93 import TN93
class K80(TN93):
    """K80 nucleotide substitution model.

    Special case of TN93: both transition rates equal kappa, the
    transversion rate is 1 and all base frequencies are equal.
    """
    _name = 'K80'
    _freqs = fixed_equal_nucleotide_frequencies.copy()

    def __init__(self, kappa, scale_q=True):
        # TN93(alpha1, alpha2, beta, freqs): alpha1 == alpha2 == kappa,
        # beta == 1, equal frequencies.
        super(K80, self).__init__(kappa, kappa, 1, self._freqs, scale_q=scale_q)
|
3,160 | 41698e9d8349ddf3f42aa3d4fc405c69077d1aa3 | from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import ProcessPoolExecutor
import ATLAS1
import ATLAS_v2
from atlas.config import dbConfig
import pandas as pd
import ContentCategories
import NgramMapping
import SentimentAnalysis_2
import TrigDriv_2
import TopicModeling
import logging
import traceback
from StringIO import StringIO
from atlas.models import Requests
def caller_file(full_data_dict):
    """Run the full analysis pipeline over an uploaded file.

    full_data_dict carries: 'filename_obj' (the request keyword),
    'file_data' (raw CSV contents) and the 'tag_dict' / 'senti_dict' /
    'td_dict' dictionaries for the individual analysis stages.

    Files that already contain a 'pCategory' column go through the *2
    analysis entry points; raw files use the *3 variants.  Progress is
    persisted after every stage, stage failures are logged but do not
    abort the pipeline, and the request keyword is returned.
    """
    request = full_data_dict['filename_obj']
    print("Entering File analysis", request)
    filecontents = full_data_dict['file_data']
    df = pd.read_csv(dbConfig.dict["requestUrl"], encoding='utf-8')

    def _set_status(status_text):
        # Persist progress in the DB and mirror it into the request CSV.
        # (Deduplicates the block the original repeated after every stage.)
        try:
            req_obj = Requests.objects.get(reqKw=request)
            req_obj.reqStatus = status_text
            req_obj.save()
        except:  # keep the original best-effort behaviour
            print("Couldn't save status update in DB!")
            print(traceback.print_exc())
        df.ix[(df.reqKw == request), 'reqStatus'] = status_text

    print("going to read file contents into df.")
    file_contents_df = pd.read_csv(StringIO(filecontents), encoding='utf-8')
    print("file contents read into df.")
    pre_categorised = "pCategory" in file_contents_df.columns.values.tolist()

    if pre_categorised:
        print("Calling Atlas1.main2()")
        status = ATLAS1.main2(request, filecontents, full_data_dict['tag_dict'])
    else:
        print("Calling Atlas1.main3()")
        status = ATLAS1.main3(request, filecontents, full_data_dict['tag_dict'])
    _set_status('15% complete')

    try:
        print("Now classifying content categories")
        cc_list = ContentCategories.main(request)
        _set_status('35% complete')
    except:
        print("Error while classifying content categories")
        print(traceback.print_exc())

    try:
        if pre_categorised:
            print("Now tagging the dataset")
            tagop_list = NgramMapping.main2(request, full_data_dict['tag_dict'])
        else:
            print("Now tagging the dataset with the dictionary provided")
            tagop_list = NgramMapping.main3(request, full_data_dict['file_data'], full_data_dict['tag_dict'])
        _set_status('50% complete')
    except:
        print("Error while tagging dataset with dictionary")
        print(traceback.print_exc())

    try:
        print("Calling sentiment analyses to run on uploaded file...")
        if pre_categorised:
            sent_list = SentimentAnalysis_2.senti_main2(request, filecontents, full_data_dict['senti_dict'])
        else:
            sent_list = SentimentAnalysis_2.senti_main3(request, filecontents, full_data_dict['senti_dict'])
        print("Sentiment data inserted into DB")
        _set_status('65% complete')
    except:
        print("Error while analysing sentiment")

    try:
        if pre_categorised:
            td_list = TrigDriv_2.td_main2(request, full_data_dict['td_dict'])
        else:
            td_list = TrigDriv_2.td_main3(request, full_data_dict['td_dict'])
        print("TriggerDriver data inserted into DB")
        _set_status('80% complete')
    except:
        print("Error while analysing triggers/drivers")

    print("Going to topic model")
    # Performing Topic Modeling Analysis
    num_topics = 8
    topic_status = TopicModeling.main(request, num_topics)
    _set_status('Complete')

    with open(dbConfig.dict["requestUrl"], 'w') as f:
        df.to_csv(f, index=False)
    print("Exiting return")
    return request
def caller(request, site, full_data_dict):
    """Scrape *site* for *request*, then run the full analysis pipeline.

    Progress is persisted to the DB after every stage; stage failures are
    logged but do not abort the pipeline.  Returns the request keyword.
    """
    print(full_data_dict['tag_dict'])  # default dict urls for automatic scraped-data tagging
    print("Entering", request, site)

    def _set_status(status_text):
        # Best-effort persistence of pipeline progress in the DB.
        # (Deduplicates the block the original repeated after every stage.)
        try:
            req_obj = Requests.objects.get(reqKw=request)
            req_obj.reqStatus = status_text
            req_obj.save()
        except:  # keep the original best-effort behaviour
            print("Couldn't save status update in DB!")
            print(traceback.print_exc())

    status = ATLAS_v2.main(request, site)
    print("Atlas main finish")
    _set_status('15% complete')

    try:
        print("Now classifying content categories")
        cc_list = ContentCategories.main(request)
        _set_status('35% complete')
    except:
        print("Error while classifying content categories!")
        print(traceback.print_exc())

    try:
        print("Now tagging the dataset...")
        tagop_list = NgramMapping.main(request, full_data_dict['tag_dict'])
        _set_status('50% complete')
    except:
        print("Error while tagging dataset with dictionary")
        print(traceback.print_exc())

    try:
        sent_list = SentimentAnalysis_2.senti_main(request)
        print("Sentiment data inserted into DB")
        _set_status('65% complete')
    except:
        print("Error while analysing sentiment")
        print(traceback.print_exc())

    try:
        td_list = TrigDriv_2.td_main(request)
        print("TriggerDriver data inserted into DB")
        _set_status('80% complete')
    except:
        print("Error while analysing triggers/drivers")
        print(traceback.print_exc())

    print("Going to topic model")
    # Performing Topic Modeling Analysis
    num_topics = 8
    topic_status = TopicModeling.main(request, num_topics)
    _set_status('Complete')
    print("Exiting Return")
    return request
# Shared process pool: analysis jobs run in worker processes so the caller
# (e.g. a web request handler) returns immediately.
pool = ProcessPoolExecutor()
# Fire-and-forget submission of a scrape-and-analyse job (keyword/review flow).
def pool_exe(request, site, full_data_dict): # to Rev
    future = pool.submit(caller, request, site, full_data_dict)
    print ("Exit pool exe\n")
#def pool_exe_file(request,filecontents):
#    future = pool.submit(caller_file, request, filecontents)
#    print("Exit file pool exe\n")
# Fire-and-forget submission of an uploaded-file / social-data job.
def pool_exe_file(full_data_dict): # to Upl, Soc
    future = pool.submit(caller_file, full_data_dict)
    print("Exit file pool exe\n")
|
3,161 | e235be879cf8a00eb9f39f90859689a29b26f1c6 | # -*- coding: utf-8 -*-
"""
Created on Sun Sep 10 12:18:06 2017
@author: wqmike123
"""
#%% build a simple CNN with gloVec as initial
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras import optimizers
from keras.callbacks import EarlyStopping
#%%
class cnn:
    """1-D convolutional text classifier.

    Embedding layer (optionally initialised with pre-trained vectors such
    as GloVe), one Conv1D + global-max-pooling block, a three-layer dense
    head and a softmax output, trained with SGD on binary cross-entropy.
    """

    def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=300, batch_size=30,
                 filters=1024, conv_kernel=3, hidden_dim=2048, epochs=20,
                 output_dim=2, dropout=0.1, trainable=False):
        """Build and compile the network.

        Args:
            maxlen: padded input sequence length.
            max_voc: vocabulary size for the embedding layer.
            embedweight: optional pre-trained embedding matrix; when given it
                initialises the embedding layer (frozen unless `trainable`).
            embedding_dims: embedding vector size.
            batch_size / epochs: training schedule.
            filters / conv_kernel: Conv1D configuration.
            hidden_dim: width of the first dense layer.
            output_dim: number of output classes.
            dropout: dropout rate used after each dense/embedding layer.
            trainable: whether a pre-trained embedding keeps learning.
        """
        self.epochs = epochs
        self.batch_size = batch_size

        model = Sequential()
        # Embedding maps vocab indices into embedding_dims dimensions.
        if not isinstance(embedweight, type(None)):
            model.add(Embedding(max_voc,
                                embedding_dims,
                                input_length=maxlen, weights=[embedweight], trainable=trainable))
        else:
            model.add(Embedding(max_voc,
                                embedding_dims,
                                input_length=maxlen))
        model.add(Dropout(dropout))
        # Conv1D learns `filters` word-group filters of width `conv_kernel`.
        model.add(Conv1D(filters,
                         conv_kernel,
                         padding='valid',
                         activation='relu',
                         strides=1))
        model.add(GlobalMaxPooling1D())
        # Dense head: hidden_dim -> 512 -> 128 -> output_dim.
        model.add(Dense(hidden_dim))
        model.add(Dropout(dropout))
        model.add(Activation('relu'))
        model.add(Dense(512))
        model.add(Dropout(dropout))
        model.add(Activation('relu'))
        model.add(Dense(128))
        model.add(Dropout(dropout))
        model.add(Activation('relu'))
        model.add(Dense(output_dim))
        model.add(Activation('softmax'))

        opt = optimizers.SGD(lr=0.1, decay=1e-4, momentum=0.9)  # optimizers.adam(lr=0.01, decay=1e-6)
        model.compile(loss='binary_crossentropy',
                      optimizer=opt,
                      metrics=['accuracy'])
        self.model = model

    @staticmethod
    def padding(x, maxlen):
        """Pad/truncate token-id sequences to `maxlen`."""
        return sequence.pad_sequences(x, maxlen=maxlen)

    def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None, earlyStopping=True):
        """Train the model with optional class weights and early stopping.

        Fix: the original always passed ``callbacks=[callback_]``, so with
        ``earlyStopping=False`` Keras received a ``[None]`` callback list
        and crashed; an empty list is passed instead.
        """
        callbacks = [EarlyStopping(monitor='val_loss', patience=10)] if earlyStopping else []
        self.model.fit(x_train, y_train,
                       batch_size=self.batch_size,
                       epochs=self.epochs,
                       validation_data=(x_valid, y_valid),
                       class_weight=class_weight if class_weight else None,
                       shuffle=True,
                       callbacks=callbacks)

    def load_weight(self, fadd):
        """Load previously saved weights from `fadd`."""
        self.model.load_weights(fadd)

    def save_model(self, fpath):
        """Persist the full model (architecture + weights) to `fpath`."""
        self.model.save(fpath)

    def predict(self, test_x):
        """Return softmax class probabilities for `test_x`."""
        return self.model.predict(test_x)
|
3,162 | 87f3885b4357d66a745932f3c79804e6c15a57fa | import numpy as np
from ARA import *
from State import *
def theta_given_s(theta, q):
    """Unnormalized probability of random event ``theta`` given the current
    operational condition ``q``.

    When ``q == 0`` every event is equally likely (.3333); otherwise events
    0 and 1 have weight 0.25 and everything else 0.5.
    """
    if q == 0:
        return .3333
    return {0: 0.25, 1: 0.25}.get(theta, 0.5)
def new_w(w, d):
    """Advance the multi-period commitment counters by one epoch.

    If any commitment is still active (a positive remaining-epoch count in
    ``w``), tick every active counter down by one. Otherwise start a fresh
    51-epoch commitment in the slot selected by the defender's action
    vector ``d`` (slot 0 if d[0]==1, slot 1 if d[1]==1, else slot 2).
    """
    if w.sum() > 0:
        ticked = w.copy()
        ticked[ticked > 0] -= 1
        return ticked
    slot = 0 if d[0] == 1 else (1 if d[1] == 1 else 2)
    fresh = np.zeros(3, dtype=int)
    fresh[slot] = 51
    return fresh
def attraction_h(next_r, a):
    """Attraction value h(next_r | a) of a resource level under an attack.

    Args:
        next_r: probable resource value in the next epoch (9, 14, or other).
        a: attacker action in 0..4.

    Returns:
        The attraction weight for that (resource, attack) combination.
    """
    # (value at r==9, value at r==14, value otherwise) per attacker action;
    # any action outside 0..3 uses the same row as action 4.
    rows = {
        0: (0.8, 0.1, 0.1),
        1: (0.1, 0.1, 0.8),
        2: (0.1, 0.3, 0.6),
        3: (0.1, 0.2, 0.7),
    }
    at_nine, at_fourteen, otherwise = rows.get(a, (0.1, 0.4, 0.5))
    if next_r == 9:
        return at_nine
    if next_r == 14:
        return at_fourteen
    return otherwise
def attraction_g(next_q, q, d, a):
    """Attraction value g(next_q | q, d, a) of an operational condition.

    Args:
        next_q: operational condition in the next epoch (0 or 1).
        q: current operational condition.
        d: defender action vector (indexable; d[6:] summable).
        a: attacker action in 0..4.

    Returns:
        Base attraction xi_D plus a +1 bonus when the defender's actions
        support the q transition.
    """
    # Base attraction: (value when next_q == 0, value otherwise) by action.
    if a == 1:
        xi_D = 1
    else:
        at_zero, elsewhere = {0: (8, 1), 2: (1, 3), 3: (1, 2)}.get(a, (1, 4))
        xi_D = at_zero if next_q == 0 else elsewhere
    # Transition bonus driven by the defender's decision vector.
    bonus = 0
    if next_q == 1 and q == 0:
        if d[3] == 1 or np.sum(d[6:]) == 3:
            bonus = 1
    elif next_q == 0 and q == 1:
        if d[5] == 1 or np.sum(d[6:]) == 0:
            bonus = 1
    return xi_D + bonus
def trans_prob(next_s, q, d):
    """Transition probability from the current state to ``next_s`` under
    defender decision ``d``, marginalizing over the attacker's actions.

    Args:
        next_s: next state as [next_q, next_r, next_w]; next_q and next_r
            are indexable (their first element is used).
        q: current operational condition.
        d: defender action vector.

    Returns:
        The (unnormalized) transition probability.
    """
    next_q, next_r, next_w = next_s
    total = 0
    for a in (0, 1, 2, 3, 4):
        resource_term = attraction_h(next_r[0], a)
        g_same = attraction_g(next_q[0], q, d, a)
        g_flip = attraction_g(1 - next_q[0], q, d, a)
        condition_term = g_same / (g_same + g_flip)
        total += a_given_s(a, q) * resource_term * condition_term
    return total
|
3,163 | 3f4f60ff315c8e7e4637a84629894012ed13280e | import src.integralimage as II
import src.adaboost as AB
import src.utils as UT
import numpy as np
if __name__ == "__main__":
    # Viola-Jones-style face detector training/evaluation driver.
    # Dataset layout: positive (face) and negative (non-face) images split
    # into train and test sets.
    pos_training_path = 'dataset-1/trainset/faces'
    neg_training_path = 'dataset-1/trainset/non-faces'
    pos_testing_path = 'dataset-1/testset/faces'
    neg_testing_path = 'dataset-1/testset/non-faces'
    print('Loading training faces..')
    faces_train = UT.load_images(pos_training_path)
    # Haar-feature evaluation works on integral images.
    faces_train_int = list(map(II.to_integral, faces_train))
    print('..done. ' + str(len(faces_train)) + ' faces loaded.\n\nLoading non faces..')
    non_faces_train = UT.load_images(neg_training_path)
    non_faces_train_int = list(map(II.to_integral, non_faces_train))
    print('..done. ' + str(len(non_faces_train)) + ' non faces loaded.\n')
    # number of boosting rounds: default is 5 (one weak classifier per round)
    num_classifiers = 5
    # For performance reasons restricting feature size
    min_feature_height = 6
    max_feature_height = 8
    min_feature_width = 6
    max_feature_width = 8
    # AdaBoost learning
    classifiers = AB.learn(faces_train_int, non_faces_train_int, num_classifiers, min_feature_height, max_feature_height, min_feature_width, max_feature_width)
    for n in range(len(classifiers)):
        print(classifiers[n].type, classifiers[n].top_left, classifiers[n].width, classifiers[n].height, classifiers[n].threshold)
    print('Loading test faces')
    faces_test = UT.load_images(pos_testing_path)
    faces_test_int = list(map(II.to_integral, faces_test))
    print(str(len(faces_test)) + ' faces loaded.\n\nLoading test non faces..')
    non_faces_test = UT.load_images(neg_testing_path)
    non_faces_test_int = list(map(II.to_integral, non_faces_test))
    print(str(len(non_faces_test)) + ' non faces loaded.\n')
    print('Testing selected classifiers..')
    # Fix: dropped the dead 'correct_faces = 0' / 'correct_non_faces = 0'
    # assignments that were immediately overwritten by count_rate().
    correct_faces, FN, FP, correct_non_faces = UT.count_rate(faces_test_int, non_faces_test_int, classifiers)
    print('..done.\n\nResult:\n Faces: ' + str(correct_faces) + '/' + str(len(faces_test))
          + ' (' + str((float(correct_faces) / len(faces_test)) * 100) + '%)\n non-Faces: '
          + str(correct_non_faces) + '/' + str(len(non_faces_test)) + ' ('
          + str((float(correct_non_faces) / len(non_faces_test)) * 100) + '%)')
    print('False Negative Rate: ' + str(FN) + '/' + str(len(faces_test))
          + ' (' + str((float(FN) / len(faces_test)) * 100) + '%)\n False Positive Rate: '
          + str(FP) + '/' + str(len(non_faces_test)) + ' ('
          + str((float(FP) / len(non_faces_test)) * 100) + '%)')
3,164 | 4b075d8211d7047f6f08fe6f6f55e4703bdb6f1f | from django.db import models
# Create your models here.
class Todo(models.Model):
    """A single to-do item."""
    # Short description of the task.
    title = models.CharField(max_length=200)
    # Completion flag stored as an integer (0 = open, non-zero = done).
    # NOTE(review): a BooleanField would be more idiomatic — confirm callers
    # before migrating, since existing rows/queries use integers.
    completed = models.IntegerField(default=0)
3,165 | 4d18c056845403adc9c4b5848fafa06d0fe4ff4c | class Solution(object):
def shortestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
left= 0
#for right in range(len(s)-1, -1, -1):
for right in reversed(range(len(s))):
if s[right] == s[left]:
left += 1
if left == len(s):
return s
"""s[left:] will get the right part of string, [::-1] will reverse that """
return s[left:][::-1] + self.shortestPalindrome(s[:left]) + s[left:]
# Demo: prepending "bb" turns "abb" into the palindrome "bbabb".
sol = Solution()
print(sol.shortestPalindrome("abb"))
3,166 | 1fdb9db4c1c8b83c72eeb34f10ef9d289b43b79f | from Bio import SeqIO
def flatten(l):
    """Flatten one level of nesting: [[a, b], [c]] -> [a, b, c]."""
    flat = []
    for inner in l:
        flat.extend(inner)
    return flat
def filter_sequences_by_len_from_fasta(file, max_len):
    """Return the sequences (as plain strings) from FASTA file ``file``
    whose length is at most ``max_len``."""
    with open(file) as handle:
        return [str(record.seq) for record in SeqIO.parse(handle, 'fasta') if len(record.seq) <= max_len]
3,167 | 2003060f7793de678b4a259ad9424cd5927a57f7 | """ Class implementing ReportGenerator """
from urllib.parse import urlparse
import requests
from src.classes.reporter.flag import Flag
from src.classes.reporter.line_finder import LineFinder
class ReportGenerator(object):
    """
    Class designed to generate reports after CSP audition

    The ReportGenerator class generates a report based on a list of flags
    issued by the sorter classes, then aggregates the data into an
    exploitable format.
    """
    def __init__(self):
        # List of flags issued by the sorters
        self.flags = list()
        # List of related flags derived from header/CSP analysis
        self.related_flags = list()
        # Line parser used to locate each flag inside the HTML source
        self.line_finder = LineFinder(self.flags)
        # HTML page under inspection; set by run()
        self.html = ''

    def run(self, html, url):
        """Generate the report for one page: locate the raised flags in the
        HTML, then derive header/CSP-related flags for ``url``."""
        print('[#] Running the report generator')
        # Setting the html page to inspect
        self.html = html
        # Getting the flag location
        self.getting_flags_locations()
        # Generating the related flags
        self.getting_related_flags(url)

    def getting_flags_locations(self):
        """
        Locates the flags in the resource

        Calls the LineFinder class in order
        :return: None
        """
        print(self.flags)
        self.line_finder.find_line(self.html)

    def getting_related_flags(self, url):
        """Fetch the page's response headers and derive CSP-related flags."""
        banner = self.get_headers(url)
        if banner:
            csp_dict = banner[0]
            headers = banner[1]
            frame = self.raise_frame_option(csp_dict, headers)
            protocol = self.raise_unsafe_protocol(csp_dict, headers)
            trusted = self.raise_trusted_types(csp_dict)
            print(frame)
            print(protocol)
            print(trusted)
            print(csp_dict)

    def get_headers(self, url):
        """Return (csp_dict, headers) for ``url``, or None (implicitly) when
        the response carries no Content-Security-Policy header."""
        req = requests.get(url)
        try:
            csp_header = req.headers['Content-Security-Policy']
            csp_dict = self.extracting_csp_dict(csp_header)
            return csp_dict, req.headers
        except KeyError:
            print('No CSP on this site')

    @staticmethod
    def extracting_csp_dict(header_list):
        """Parse a raw CSP header string into {directive: [sources, ...]}.

        Fix: the original looped ``for i in enumerate(header_list)``, making
        ``i`` an (index, value) tuple, so ``header_list[i]`` raised
        TypeError on every call.
        """
        res = {}
        for directive in header_list.split(';'):
            sources = directive.strip().split(' ')
            res[sources[0]] = sources[1:]
        return res

    def generating_csp_flags(self, csp_dict):
        # Placeholder kept for the public interface; not implemented yet.
        pass

    def raise_unsafe_protocol(self, csp_dict, url):
        """Flag mixed-content risks on HTTPS pages."""
        if 'block-all-mixed-content' not in csp_dict.keys() and urlparse(url).scheme == 'https':
            for directive in csp_dict:
                for source in csp_dict[directive][1:]:
                    if source == 'http':
                        return Flag('possible_mixed_content')
                    # NOTE(review): this condition does not depend on `source`;
                    # it probably belongs outside the loop — confirm intent.
                    elif not self.lower_case_in('upgrade-insecure-requests', csp_dict):
                        return Flag('no_upgrade_insecure_requests')
        return None

    def raise_frame_option(self, csp_dict, header):
        """Flag missing or overly permissive framing rules."""
        try:
            # NOTE(review): CSP spells the directive 'frame-ancestors', and its
            # value here is a list (so .lower() would raise AttributeError,
            # which is not caught) — verify against real headers.
            if csp_dict['frame-ancestor'].lower() not in ['none', 'self']:
                flag_id = 'permissive_frame_rule'
                return Flag(flag_id)
        except KeyError:
            pass
        # NOTE(review): X-Frame-Options is an HTTP header; looking it up in
        # csp_dict (rather than `header`) looks suspicious — confirm.
        if not self.lower_case_in('X-Frame-Options', csp_dict):
            flag_id = 'no_frame_rule'
        elif header['X-Frame-Options'].lower().startswith('allowall'):
            flag_id = 'permissive_frame_rule'
        elif header['X-Frame-Options'].lower().startswith('allow-from'):
            flag_id = 'permissive_frame_rule'
        else:
            flag_id = 'missing_frame_ancestors'
        return Flag(flag_id)

    def raise_trusted_types(self, csp_dict):
        """Flag the absence of a Trusted Types directive."""
        # NOTE(review): the CSP directive is spelled 'trusted-types'; the
        # underscore form here can never match — confirm before changing.
        if not self.lower_case_in('trusted_types', csp_dict):
            return Flag('no_trusted_types')
        return None

    def raise_missing_object(self, csp_dict):
        """Flag a missing object-src when default-src is not 'none'."""
        # NOTE(review): csp_dict['default-src'] is a *list* of sources, so the
        # != 'none' comparison is always True, and a missing 'default-src'
        # raises an uncaught KeyError — verify.
        if not self.lower_case_in('object-src', csp_dict) and \
                csp_dict['default-src'] != 'none':
            return Flag('missing_obj_src')
        return None

    @staticmethod
    def lower_case_in(elem, dic):
        """Case-insensitive membership test of ``elem`` among ``dic``'s keys."""
        return elem.lower() in [x.lower() for x in dic.keys()]

    def pretty_print_report(self):
        """Print a human-readable summary of every raised flag."""
        print('*******************************************')
        print('*********** REPORT FOR THE PAGE ***********')
        print('*******************************************')
        if self.flags:
            for flag in self.flags:
                print('---------------------------------------------')
                print('>>> FLAGS RAISED <<<')
                print('>>> At location : ', flag.location)
                print('>>> Type : ', flag.id)
                print('>>> Explanation : ', flag.reco_dict[flag.id]['explanation'])
                if flag.content != '':
                    print('>>> Content : ', flag.content)
                else:
                    print('>>> Content : one liner tag')
                print('---------------------------------------------')
            print('*******************************************')
        else:
            print('No flags have been raised for that specific page')
            print('*******************************************')
|
3,168 | e94d66732a172286814bc0b0051a52c1374a4de5 | import asyncio
import sys
import aioredis
import msgpack
async def main(host: str, endpoint: str, message: str):
    """Queue one outbound-transport message for ACA-Py via Redis.

    Packs the endpoint, headers and UTF-8-encoded payload with msgpack and
    pushes the blob onto the ``acapy.outbound_transport`` Redis list.

    Args:
        host: Redis connection URL.
        endpoint: delivery endpoint recorded in the message envelope.
        message: message body (sent as the payload, UTF-8 encoded).
    """
    msg = msgpack.packb(
        {
            "endpoint": endpoint,
            "headers": {"Content-Type": "text/json"},
            "payload": message.encode("utf-8"),
        },
    )
    redis = await aioredis.create_redis_pool(host)
    await redis.rpush("acapy.outbound_transport", msg)
if __name__ == "__main__":
    # CLI entry point: <redis host URL> <endpoint> <message contents>.
    args = sys.argv
    usage = (
        "Pass redis host URL as the first parameter",
        "Pass endpoint as the second parameter",
        "Pass message contents as the third parameter",
    )
    for position, reminder in enumerate(usage, start=1):
        if len(args) <= position:
            raise SystemExit(reminder)
    asyncio.get_event_loop().run_until_complete(main(args[1], args[2], args[3]))
|
3,169 | 20167058697450f342c2ac3787bd1721f860dc58 | from kraken.core.maths import Vec3, Vec3, Euler, Quat, Xfo
from kraken.core.objects.components.base_example_component import BaseExampleComponent
from kraken.core.objects.attributes.attribute_group import AttributeGroup
from kraken.core.objects.attributes.scalar_attribute import ScalarAttribute
from kraken.core.objects.attributes.bool_attribute import BoolAttribute
from kraken.core.objects.constraints.pose_constraint import PoseConstraint
from kraken.core.objects.component_group import ComponentGroup
from kraken.core.objects.hierarchy_group import HierarchyGroup
from kraken.core.objects.locator import Locator
from kraken.core.objects.joint import Joint
from kraken.core.objects.ctrlSpace import CtrlSpace
from kraken.core.objects.control import Control
from kraken.core.objects.operators.kl_operator import KLOperator
from kraken.core.profiler import Profiler
from kraken.helpers.utility_methods import logHierarchy
class SimpleControlComponent(BaseExampleComponent):
    """Simple Control Component Base

    Shared base for the guide and rig variants below. Declares the
    component's IO: a main input Xfo, an output Xfo, a drawDebug input
    attribute and a rigScale output attribute.
    """
    def __init__(self, name='SimpleControl', parent=None):
        super(SimpleControlComponent, self).__init__(name, parent)
        # ===========
        # Declare IO
        # ===========
        # Declare Inputs Xfos
        self.mainInputTgt = self.createInput('mainInput', dataType='Xfo', parent=self.inputHrcGrp).getTarget()
        # Declare Output Xfos
        self.outputTgt = self.createOutput('output', dataType='Xfo', parent=self.outputHrcGrp).getTarget()
        # Declare Input Attrs
        self.drawDebugInputAttr = self.createInput('drawDebug', dataType='Boolean', value=False, parent=self.cmpInputAttrGrp).getTarget()
        # Declare Output Attrs
        self.rigScaleOutputAttr = self.createOutput('rigScale', dataType='Float', value=1.0, parent=self.cmpOutputAttrGrp).getTarget()
class SimpleControlComponentGuide(SimpleControlComponent):
    """Simple Control Component Guide

    Interactive guide variant: exposes a ctrlSize setting and a single
    square control whose placement is persisted and handed to the rig
    variant via getRigBuildData().
    """
    def __init__(self, name='SimpleControl', parent=None):
        """Build the guide: ctrlSize attribute (with live-resize callback),
        the square guide control, and default placement data."""
        Profiler.getInstance().push("Construct Simple Control Guide Component:" + name)
        super(SimpleControlComponentGuide, self).__init__(name, parent)
        # =========
        # Attributes
        # =========
        # Add Component Params to IK control
        guideSettingsAttrGrp = AttributeGroup("GuideSettings", parent=self)
        self.ctrlSizeInputAttr = ScalarAttribute('ctrlSize', value=5.0, minValue=1.0, maxValue=50.0, parent=guideSettingsAttrGrp)
        # Resize the guide control live when the attribute changes.
        self.ctrlSizeInputAttr.setValueChangeCallback(self.resizeMainCtrl)
        # =========
        # Controls
        # =========
        # Guide Controls
        self.mainCtrl = Control('main', parent=self.ctrlCmpGrp, shape='square')
        self.mainCtrl.rotatePoints(90, 0, 0)
        data = {
            "location": 'M',
            "ctrlSize": self.ctrlSizeInputAttr.getValue(),
            "ctrlXfo": Xfo(tr=Vec3(0.0, 0.0, 0.0))
        }
        self.loadData(data)
        Profiler.getInstance().pop()

    # =============
    # Data Methods
    # =============
    def saveData(self):
        """Save the data for the component to be persisted.

        Return:
        The JSON data object
        """
        data = super(SimpleControlComponentGuide, self).saveData()
        data["ctrlSize"] = self.ctrlSizeInputAttr.getValue()
        data["ctrlXfo"] = self.mainCtrl.xfo
        return data

    def loadData(self, data):
        """Load a saved guide representation from persisted data.

        Arguments:
        data -- object, The JSON data object.

        Return:
        True if successful.
        """
        super(SimpleControlComponentGuide, self).loadData( data )
        self.ctrlSizeInputAttr.setValue(data["ctrlSize"])
        self.mainCtrl.xfo = data["ctrlXfo"]
        scaleValue = data["ctrlSize"]
        # Re-apply the shape before scaling so repeated loads don't compound.
        self.mainCtrl.setShape('square')
        self.mainCtrl.rotatePoints(90, 0, 0)
        self.mainCtrl.scalePoints(Vec3(scaleValue, scaleValue, scaleValue))
        return True

    def getRigBuildData(self):
        """Returns the Guide data used by the Rig Component to define the layout of the final rig.

        Return:
        The JSON rig data object.
        """
        data = super(SimpleControlComponentGuide, self).getRigBuildData()
        data["ctrlSize"] = self.ctrlSizeInputAttr.getValue()
        data["ctrlXfo"] = self.mainCtrl.xfo
        return data

    # ==========
    # Callbacks
    # ==========
    def resizeMainCtrl(self, newSize):
        """ctrlSize change callback: rebuild the square shape at the new size."""
        self.mainCtrl.setShape('square')
        self.mainCtrl.rotatePoints(90, 0, 0)
        self.mainCtrl.scalePoints(Vec3(newSize, newSize, newSize))

    # ==============
    # Class Methods
    # ==============
    @classmethod
    def getComponentType(cls):
        """Enables introspection of the class prior to construction to determine if it is a guide component.

        Return:
        The true if this component is a guide component.
        """
        return 'Guide'

    @classmethod
    def getRigComponentClass(cls):
        """Returns the corresponding rig component class for this guide component class

        Return:
        The rig component class.
        """
        return SimpleControlComponentRig
class SimpleControlComponentRig(SimpleControlComponent):
    """Simple Control Component Rig

    Final rig variant: one animatable control driving one joint, with the
    component IO wired through pose constraints.
    """
    def __init__(self, name='SimpleControl', parent=None):
        """Build the rig: main control (+ ctrl space), rigScale attribute,
        the deformer joint, and the input/output/deformer constraints."""
        Profiler.getInstance().push("Construct Simple Control Rig Component:" + name)
        super(SimpleControlComponentRig, self).__init__(name, parent)
        # =========
        # Controls
        # =========
        # Add Controls
        self.mainCtrl = Control('main', shape='square', parent=self.ctrlCmpGrp)
        self.mainCtrlSpace = self.mainCtrl.insertCtrlSpace()
        self.mainCtrl.lockScale(x=True, y=True, z=True)
        # Add Component Params to Main control
        mainSrtSettingsAttrGrp = AttributeGroup('DisplayInfo_MainSrtSettings', parent=self.mainCtrl)
        self.rigScaleAttr = ScalarAttribute('rigScale', value=1.0, parent=mainSrtSettingsAttrGrp, minValue=0.1, maxValue=100.0)
        self.rigScaleOutputAttr.connect(self.rigScaleAttr)
        # ==========
        # Deformers
        # ==========
        deformersLayer = self.getOrCreateLayer('deformers')
        self.defCmpGrp = ComponentGroup(self.getName(), self, parent=deformersLayer)
        self.addItem('defCmpGrp', self.defCmpGrp)
        self.mainDef = Joint('main', parent=self.defCmpGrp)
        self.mainDef.setComponent(self)
        # ==============
        # Constrain I/O
        # ==============
        # Constrain inputs
        self.mainInputConstraint = PoseConstraint('_'.join([self.mainCtrlSpace.getName(), 'To', self.mainInputTgt.getName()]))
        self.mainInputConstraint.setMaintainOffset(True)
        self.mainInputConstraint.addConstrainer(self.mainInputTgt)
        self.mainCtrlSpace.addConstraint(self.mainInputConstraint)
        # Constrain outputs
        self.mainOutputConstraint = PoseConstraint('_'.join([self.outputTgt.getName(), 'To', self.mainCtrl.getName()]))
        self.mainOutputConstraint.addConstrainer(self.mainCtrl)
        self.outputTgt.addConstraint(self.mainOutputConstraint)
        # Constrain deformers
        self.mainDefConstraint = PoseConstraint('_'.join([self.mainDef.getName(), 'To', self.mainCtrl.getName()]))
        self.mainDefConstraint.addConstrainer(self.mainCtrl)
        self.mainDef.addConstraint(self.mainDefConstraint)
        # ===============
        # Add Canvas Ops
        # ===============
        Profiler.getInstance().pop()

    def loadData(self, data=None):
        """Load a saved guide representation from persisted data.

        Arguments:
        data -- object, The JSON data object.

        Return:
        True if successful.
        """
        super(SimpleControlComponentRig, self).loadData( data )
        ctrlSize = data.get('ctrlSize', 1.0)
        ctrlXfo = data.get('ctrlXfo', Xfo())
        # ================
        # Resize Controls
        # ================
        self.mainCtrl.setShape('square')
        self.mainCtrl.rotatePoints(90, 0, 0)
        self.mainCtrl.scalePoints(Vec3(ctrlSize, ctrlSize, ctrlSize))
        # =======================
        # Set Control Transforms
        # =======================
        self.mainCtrlSpace.xfo = ctrlXfo
        self.mainCtrl.xfo = ctrlXfo
        # ============
        # Set IO Xfos
        # ============
        self.mainInputTgt.xfo = ctrlXfo
        self.mainDef.xfo = ctrlXfo
        self.outputTgt.xfo = ctrlXfo
        # ====================
        # Evaluate Constraints
        # ====================
        self.mainInputConstraint.evaluate()
        self.mainOutputConstraint.evaluate()
        self.mainDefConstraint.evaluate()
        # ====================
        # Evaluate Canvas Ops
        # ====================
# Register both component variants with the Kraken system so they are
# discoverable from the component library.
from kraken.core.kraken_system import KrakenSystem
ks = KrakenSystem.getInstance()
ks.registerComponent(SimpleControlComponentGuide)
ks.registerComponent(SimpleControlComponentRig)
|
3,170 | 856afd30a2ed01a1d44bbe91a7b69998e9a51bb7 | from __future__ import print_function
import os, sys, time
import fitz
import PySimpleGUI as sg
"""
PyMuPDF utility
----------------
For a given entry in a page's getImageList() list, function "recoverpix"
returns either the raw image data, or a modified pixmap if an /SMask entry
exists.
The item's first two entries are PDF xref numbers. The first one is the image in
question, the second one may be 0 or the object id of a soft-image mask. In this
case, we assume it being a sequence of alpha bytes belonging to our image.
We then create a new Pixmap giving it these alpha values, and return it.
If the result pixmap is CMYK, it will be converted to RGB first.
"""
print(fitz.__doc__)
# recoverpix()/getPageImageList() usage below needs PyMuPDF >= 1.13.17.
if not tuple(map(int, fitz.version[0].split("."))) >= (1, 13, 17):
    raise SystemExit("require PyMuPDF v1.13.17+")
# Filters that skip decorative or negligible images during extraction:
dimlimit = 100 # each image side must be greater than this
relsize = 0.05 # image : pixmap size ratio must be larger than this (5%)
abssize = 2048 # absolute image size limit 2 KB: ignore if smaller
imgdir = "images" # found images are stored in this subfolder
if not os.path.exists(imgdir):
    os.mkdir(imgdir)
def recoverpix(doc, item):
    """Recover the image behind one getPageImageList() entry.

    Returns the raw embedded image (the dict from extractImage) when the
    entry has no /SMask; otherwise rebuilds a Pixmap whose alpha channel is
    taken from the soft mask, converting CMYK to RGB when necessary.
    """
    x = item[0] # xref of PDF image
    s = item[1] # xref of its /SMask
    if s == 0: # no smask: use direct image output
        return doc.extractImage(x)

    def getimage(pix):
        # 4-component (CMYK) pixmaps cannot be written as PNG: convert to RGB.
        if pix.colorspace.n != 4:
            return pix
        tpix = fitz.Pixmap(fitz.csRGB, pix)
        return tpix

    # we need to reconstruct the alpha channel with the smask
    pix1 = fitz.Pixmap(doc, x)
    pix2 = fitz.Pixmap(doc, s) # create pixmap of the /SMask entry
    # sanity check: mask must match the image extent and be a 1-component,
    # alpha-less pixmap, else fall back to the plain image.
    if not (pix1.irect == pix2.irect and pix1.alpha == pix2.alpha == 0 and pix2.n == 1):
        pix2 = None
        return getimage(pix1)
    pix = fitz.Pixmap(pix1) # copy of pix1, alpha channel added
    pix.setAlpha(pix2.samples) # treat pix2.samples as alpha value
    pix1 = pix2 = None # free temp pixmaps
    # we may need to adjust something for CMYK pixmaps here:
    return getimage(pix)
# Pick the input PDF from argv, falling back to a GUI file picker.
fname = sys.argv[1] if len(sys.argv) == 2 else None
if not fname:
    fname = sg.PopupGetFile("Select file:", title="PyMuPDF PDF Image Extraction")
if not fname:
    raise SystemExit()

t0 = time.time()
doc = fitz.open(fname)
page_count = len(doc) # number of pages

xreflist = []  # xrefs already written to disk
imglist = []   # every image xref seen (for the final total)
for pno in range(page_count):
    sg.QuickMeter(
        "Extract Images", # show our progress
        pno + 1,
        page_count,
        "*** Scanning Pages ***",
    )
    il = doc.getPageImageList(pno)
    imglist.extend([x[0] for x in il])
    for img in il:
        xref = img[0]
        # The same xref can appear on several pages; extract it only once.
        if xref in xreflist:
            continue
        width = img[2]
        height = img[3]
        if min(width, height) <= dimlimit:
            continue
        pix = recoverpix(doc, img)
        if type(pix) is dict: # we got a raw image
            ext = pix["ext"]
            imgdata = pix["image"]
            n = pix["colorspace"]
            imgfile = os.path.join(imgdir, "img-%i.%s" % (xref, ext))
        else: # we got a pixmap
            imgfile = os.path.join(imgdir, "img-%i.png" % xref)
            n = pix.n
            imgdata = pix.getPNGData()
        # Skip tiny payloads and low-information-density images.
        if len(imgdata) <= abssize:
            continue
        if len(imgdata) / (width * height * n) <= relsize:
            continue
        # Fix: write through a context manager instead of bare open/close,
        # so the file is closed even if write() raises.
        with open(imgfile, "wb") as fout:
            fout.write(imgdata)
        xreflist.append(xref)

t1 = time.time()
imglist = list(set(imglist))
print(len(set(imglist)), "images in total")
print(len(xreflist), "images extracted")
print("total time %g sec" % (t1 - t0))
3,171 | a598da0a749fcc5a6719cec31ede0eb13fab228e | import pytest
import app
import urllib.parse
@pytest.fixture
def client():
    """Flask test client for the app, with TESTING mode enabled."""
    app.app.config['TESTING'] = True
    with app.app.test_client() as client:
        yield client
def test_query_missing_args(client):
    """GET without the required 'sql' parameter yields a 400 with errors."""
    response = client.get('/data/query')
    assert 'errors' in response.json and '400' in response.status
def test_query_get_json(client):
    """GET with a select query returns the two expected JSON rows."""
    response = client.get(f'/data/query?sql={urllib.parse.quote("select * from test")}')
    assert len(response.json) == 2
def test_query_post_json(client):
    """POST with a select query in the JSON body returns two rows."""
    response = client.post('/data/query', json={'sql': 'select * from test'})
    assert len(response.json) == 2
def test_query_get_csv(client):
    """GET with format=csv returns non-empty CSV containing the header."""
    response = client.get(f'/data/query?sql={urllib.parse.quote("select * from test")}&format=csv')
    text = response.data.decode()
    assert len(text) > 0 and 'col0' in text
def test_query_post_csv(client):
    """POST with format=csv returns non-empty CSV containing the header."""
    response = client.post('/data/query', json={'sql': 'select * from test', 'format': 'csv'})
    text = response.data.decode()
    assert len(text) > 0 and 'col0' in text
def test_query_bad_sql_insert(client):
    """INSERT statements are rejected with 'Illegal SQL' and a 400."""
    response = client.get(f'/data/query?sql={urllib.parse.quote("insert into test (col0) values (1)")}')
    assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
def test_query_bad_sql_delete(client):
    """DELETE statements are rejected with 'Illegal SQL' and a 400."""
    response = client.get(f'/data/query?sql={urllib.parse.quote("delete from test where col0 = 1")}')
    assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
def test_query_bad_sql_update(client):
    """UPDATE statements are rejected with 'Illegal SQL' and a 400."""
    response = client.get(f'/data/query?sql={urllib.parse.quote("update test set col0 = 1")}')
    assert 'Illegal SQL' in response.json['message'] and 400 == response.status_code
|
3,172 | cf2fcd013c3e9992da36806ca93aacb4b5399396 | from .tacotron_v2_synthesizer import Tacotron2Synthesizer
|
3,173 | 5dffda8215b8cfdb2459ec6a9e02f10a352a6fd0 | from wtforms import Form as BaseForm
from wtforms.widgets import ListWidget
class Form(BaseForm):
    """WTForms form that can render all of its fields as an HTML list."""

    def as_ul(self):
        """Render the form's fields wrapped in a <ul> via ListWidget."""
        return ListWidget()(self)
|
3,174 | a9344151a997842972aa68c417a77b3ca80e6cfa | # -*- coding:utf-8 -*-
from flask import redirect, url_for, render_template
from flask.globals import request, session
from flask_admin import BaseView, expose
from util import navigator, common
class Billings(BaseView):
    """Flask-Admin views for the billing section."""

    @expose('/')
    def index(self):
        """Landing page: redirect straight to the billing history."""
        return redirect(url_for('.billingHistory'))

    @expose('/billingHistory')
    def billingHistory(self):
        """Render the billing history list."""
        self.menuItems = session['navigator']
        self.pageAuth = common.getPageAuth()
        return render_template("views/pages/billings/billingHistory.html", admin_view=self)

    @expose('/billingDetail')
    def billingDetail(self):
        """Render the detail page for one provisional billing statement."""
        self.menuItems = session['navigator']
        self.pageAuth = common.getPageAuth()
        # Provisional statement sequence (always expected in the query string).
        self.billingSeq = request.args.get("billingSeq")
        # Optional identifiers default to an empty string when absent:
        # registered statement, approval, and approval-content sequences.
        self.regBillingSeq = request.args.get("regBillingSeq", "")
        self.apprSeq = request.args.get("apprSeq", "")
        self.contentSeq = request.args.get("contentSeq", "")
        return render_template("views/pages/billings/billingDetail.html", admin_view=self)

    @expose('/billings')
    def billings(self):
        """Render the billing statements list."""
        self.menuItems = session['navigator']
        self.pageAuth = common.getPageAuth()
        return render_template("views/pages/billings/billings.html", admin_view=self)

    @expose('/billingsDetail')
    def billingsDetail(self):
        """Render the detail page for one billing statement."""
        self.menuItems = session['navigator']
        self.pageAuth = common.getPageAuth()
        self.seq = request.args.get("seq")
        return render_template("views/pages/billings/billingsDetail.html", admin_view=self)
|
3,175 | 2eb49d08136c3540e1305310f03255e2ecbf0c40 | import tkinter as tk
from tkinter import ttk
# Demo: build a labelled entry form with loops instead of repeating
# widget-creation code by hand.
win = tk.Tk()
win.title('Loop')
############ Time consuming :
# nameLable1 = ttk.Label(win,text="Enter your name : ")
# nameLable1.grid(row=0,column=0,sticky=tk.W)
# ageLable1 = ttk.Label(win,text="Enter your age: ")
# ageLable1.grid(row=1,column=0,sticky=tk.W)
# countryLable1 = ttk.Label(win,text="Enter your country: ")
# countryLable1.grid(row=2,column=0,sticky=tk.W)
# mailLable1 = ttk.Label(win,text="Enter your mail ID : ")
# mailLable1.grid(row=3,column=0,sticky=tk.W)
############ Loop :
# One label per row in column 0.
labels = [f"name : ","age : ","mail ID : ","city : " ,"country : ","phone number : "]
for i in range(len(labels)):
    # currentLabel = f"label{i}" # without declaring also it will work fine
    currentLabel = ttk.Label(win,text=labels[i])
    currentLabel.grid(row=i,column=0,sticky=tk.W)
# One StringVar per field; the dict preserves insertion order (Python 3.7+),
# so entries line up with the labels above.
userDict = {
    'name':tk.StringVar(),
    'age':tk.StringVar(),
    'mail':tk.StringVar(),
    'city':tk.StringVar(),
    'country':tk.StringVar(),
    'phone':tk.StringVar(),
}
index = 0
for i in userDict:
    # currentEntryBox = f"entry{i}"
    currentEntryBox = ttk.Entry(win,width = 16,textvariable = userDict[i])
    currentEntryBox.grid(row = index,column = 1)
    index+=1
def submitAction():
    # Print every entered value, then terminate the process.
    # NOTE(review): exit() kills the whole interpreter, not just the window;
    # win.destroy() would be the gentler close.
    for i in userDict:
        # print(f"{userDict[i].get()}")
        print(f"{userDict.get(i).get()}") # their is get() method in dictionary too
    exit()
submitButton = tk.Button(win,text="Submit",command = submitAction)
submitButton.grid(row = index,column = 0)
submitButton.configure(foreground = "#ffffff",background = "#000000")
################################################
win.mainloop()
###########################################################################################################
########################################################################################################## |
3,176 | 967984444d9e26452226b13f33c5afbc96b5fe2b | import os
from enum import Enum
# Registration codes, overridable via environment variables.
STAFF_CODE = os.getenv('STAFF_CODE', '20190607')
ADMIN_CODE = os.getenv('ADMIN_CODE', 'nerd-bear')
# Team display names (user-facing Korean strings; whale-themed).
TEAM_NAMES = (
    '밍크고래팀',
    '혹등고래팀',
    '대왕고래팀',
    '향유고래팀',
)
# NOTE(review): TEAM_NAMES holds 4 entries but TEAM_COUNT is 3 — confirm
# which value is authoritative.
TEAM_COUNT = 3
MAX_TEAM_MEMBER_COUNT = 10
# Phase of the event timeline relative to the current moment.
TIME_CHECK = Enum(
    "TIME_CHECK",
    {"BEFORE_START": 0, "DURING_TIME": 1, "AFTER_END": 2},
)
|
3,177 | ae0ccbb9b0a2c61d9ee9615ba8d0c1a186a81c34 | # coding=utf-8
# oscm_app/cart/models
# django imports
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
# OSCM imports
from ...constants import CARTS, CART_STATUSES, DEFAULT_CART_STATUS
from ...utils import get_attr
from ..cart_manager import CartQuerySet
from .cart_item import CartItem
class Cart(models.Model):
    """
    This class is used to represent the Cart for the users.
    """
    # Owner of the cart (limited to users holding the default role).
    owner = models.ForeignKey(
        get_attr('AUTH_USER_MODEL'),
        blank=False,
        related_name='carts',
        verbose_name=_("oscm_admin_ownerOfCart"),
        help_text=_('oscm_admin_helpTextOwnerOfCart'),
        limit_choices_to={'role': get_attr('DEFAULT_ROLE')},
    )
    # Project name
    project_name = models.CharField(
        verbose_name=_('oscm_admin_projectNameOfCart'),
        help_text=_('oscm_admin_helpTextProjectNameOfCart'),
        max_length=250,
        blank=False,
        null=False
    )
    # Creation date (set once when the row is inserted).
    creation_date = models.DateTimeField(
        verbose_name=_('oscm_admin_creationDateOfCart'),
        auto_now_add=True,
    )
    # Last edit date (refreshed on every save).
    last_edit_date = models.DateTimeField(
        verbose_name=_('oscm_admin_lastEditDateOfCart'),
        auto_now=True,
    )
    # Requested due date
    requested_due_date = models.DateTimeField(
        verbose_name=_('oscm_admin_requestedDueDateOfCart'),
        help_text=_('oscm_admin_helpTextRequestedDueDateOfCart'),
    )
    # Default parameter for the status attribute
    DEFAULT_CART_STATUS = get_attr(DEFAULT_CART_STATUS)
    # Retrieved the different statuses from the settings file
    CART_STATUSES = get_attr(CART_STATUSES)
    # Status
    # Bug fix: dropped `max_length=32` — that keyword is ignored on
    # IntegerField (Django system check fields.W122) and was misleading.
    status = models.IntegerField(
        verbose_name=_('oscm_admin_statusOfCart'),
        default=DEFAULT_CART_STATUS,
        choices=CART_STATUSES,
        help_text=_('oscm_admin_helpTextStatusOfCart'),
    )
    # Short description about the cart
    description = models.TextField(
        verbose_name=_("oscm_admin_descriptionOfCart"),
        blank=True,
        help_text=_('oscm_admin_helpTextDescriptionOfCart'),
    )
    # Item count (not equal to quantity, but distinct item count)
    class Meta:
        ordering = ["status", "creation_date", ]
        db_table = '%s_%s' % (get_attr('APP_NAME'), CARTS)
        verbose_name = _('oscm_admin_headerOfCart')
        verbose_name_plural = _('oscm_admin_headerOfCarts')

    objects = CartQuerySet.as_manager()

    def __str__(self):
        """
        Displays the status, the owner, the project
        name and the number of cart items.
        """
        # NOTE(review): indexing CART_STATUSES by the raw status value assumes
        # status values are consecutive 0-based positions in the choices list;
        # self.get_status_display() would be the robust accessor — confirm the
        # settings format before changing it.
        return _(
            "cart (status: %(status)s, owner: %(owner)s, project name: "
            "%(project_name)s, number of cart items: %(nb_cart_items)d, "
            "total amount: %(total_amount)d)"
        ) % {
            'status': self.CART_STATUSES[self.status][1],
            'owner': self.owner,
            'project_name': self.project_name,
            'nb_cart_items': self.nb_cart_items,
            'total_amount': self.total_amount,
        }

    def get_cart_items(self):
        """
        Retrieves all cart items for a given cart.
        """
        return CartItem.objects.filter(cart=self)

    @property
    def nb_cart_items(self):
        """
        Retrieves the number of distinct cart items for a given cart.
        """
        return CartItem.objects.filter(cart=self).count()

    @property
    def total_amount(self):
        """
        Retrieves the total amount of cart items for a given cart.
        """
        total_amount = 0
        for cart_item in self.get_cart_items():
            total_amount += cart_item.total_price
        return total_amount

    @property
    def is_empty(self):
        """
        Test if this cart is empty.
        """
        return self.id is None or self.nb_cart_items == 0

    def get_absolute_url(self):
        return reverse(
            'oscm:cart',
            kwargs={'pk': self.pk})

    def get_delete_url(self):
        return reverse(
            'oscm:delete_cart',
            kwargs={'pk': self.pk})
3,178 | c9e0586942430fcd5b81c5716a06a4eef2c2f203 | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 21 15:09:26 2017
@author: Jieun
"""
from scipy.stats import invgauss
from scipy.stats import norm
# rv = invgauss.ppf(0.95,mu)
# a = 8/(2*rv)
# print a
# norm.ppf uses mean = 0 and stddev = 1, which is the "standard" normal distribution
# can use a different mean and standard deivation by specifiying the loc and scale arguments
# norm.ppf(0.95, loc = 10, scale = 2)
#n = norm.ppf(0.95)
#n1 = norm.ppf(0.95)
#print n + n1
#check = norm.cdf(norm.ppf(0.95))
#print check
#print ''
# rv = invgauss.cdf(0.95, 8)
# rv1 = invgauss.cdf(0.95, 8)
# print rv
# print rv1
# print rv + rv1
# print 8/2.4
# print ''
# For inverse cdf
# 1 - alpha = 0.981
# detection probability is not given...
## measured values
# For Design A
# Design A: the separation of 8 units must cover both tail critical values,
# so the allowable process sigma is 8 / (z_{1-alpha} + z_{1-beta}).
inverse_1_alpha = norm.ppf(1 - 0.050)  # upper-tail z for alpha = 0.05
inverse_1_beta = norm.ppf(0.90)        # z for detection power 1 - beta = 0.90
# Renamed from `sum`, which shadowed the builtin of the same name.
crit_sum = inverse_1_alpha + inverse_1_beta
sigma = 8 / crit_sum
# Converted the Python-2-only `print x` statements to the print() function
# (valid on both Python 2 and 3 for a single argument).
print(sigma)
print('')
print('')
# sq = 8
# sigma = sq/sum
# print sigma
# print ''
#print check
#cal= 1- check
# For Design B
#inverse_1_alpha_Design_B = norm.ppf(0.981)
#inverse_1_beta_Design_B = - inverse_1_alpha_Design_B + 0.940/1.189
#check2 = norm.cdf(inverse_1_beta_Design_B)
#print check2
#cal1 = 1- check2
# print cal
#print cal1
## inspection values
# inverse_1_alpha_expected = norm.ppf(0.981)
# inverse_1_beta_expected = inverse_1_alpha_expected - 0.90358/0.76235
# check3 = norm.cdf(inverse_1_beta_expected)
# `print check3
|
3,179 | 68a1d5a77abd19aece04bd560df121ceddccea42 | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 25 13:34:46 2017
@author: Sven Geboers
"""
from math import pi,e
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
def LevelToIntensity(NoiseLevelIndB):
    """Convert a sound level in dB (re the hearing threshold) to an intensity in W/m^2."""
    reference = 10.**(-12)  # hearing-threshold intensity, i.e. 0 dB
    return reference * 10 ** (float(NoiseLevelIndB) / 10)
def IntensityToLevel(Intensity):
    """Convert an intensity in W/m^2 to a sound level in dB re the 1e-12 W/m^2 hearing threshold."""
    I0 = 10.**(-12)  # hearing-threshold intensity, matching 0 dB
    # (Removed the original no-op self-assignment `Intensity = Intensity`.)
    NoiseLevelIndB = 10*np.log10(Intensity/I0)
    return NoiseLevelIndB
# Define the hyperbolic cotangent coth(x) = cosh(x)/sinh(x).
# Bug fix: the original numerator used a minus sign, which made the
# expression identically equal to 1 for every x (the commented hint
# `np.cosh(x)/np.sinh(x)` on the original line was the intended formula).
coth = lambda x: (e**(x) + e**(-x)) / (e**(x) - e**(-x))
#Closes all previous plots so that we don't have to click them away manually
plt.close('all')
#Defining some constants:
SLHighway10 = 53.5 #dB, this is the sound level of a highway at 10 m distance
d1 = 10. #m, distance between the highway and the sound barrier
#Creating data mesh
# b = turbine spacing, d = distance from the turbine row; both 0.1..150 m.
b = np.arange(0.1, 150, 0.5)
d = np.arange(0.1, 150, 0.5)
b, d = np.meshgrid(b, d)
#Calculating maximum velocity and individual sound power
Vmax = 9.25 #m/s
# Empirical fit for the intensity of a single 40 cm turbine vs wind speed V.
IntensityTurbine40cm = lambda V: 4*10**(-6)*e**(0.2216*V)
IntensityIndividualTurbine = IntensityTurbine40cm(Vmax)
PowerIndividual = IntensityIndividualTurbine*pi*0.16 * 4
# Highway treated as a point source: intensity at 10 m times sphere area.
SoundPowerHighway = LevelToIntensity(SLHighway10)*pi*d1**2 * 4
#Calculating intensity and sound level
# Row-of-sources term (uses coth defined above) plus the highway contribution
# at distance d + d1.
Intensity = PowerIndividual/(4*b*d)*coth(d/b*pi)+SoundPowerHighway/(4*pi*(d+d1)**2)
SL = IntensityToLevel(Intensity)
#Plots contour curve
levels = [41.,47.] #Contour levels that will be shown
fig = plt.figure()
CS = plt.contourf(d, b, SL, levels,cmap=cm.Greys)
cbar=plt.colorbar()
cbar.set_label('Sound level in dB', rotation=270)
plt.xlabel('Distance (m)')
plt.ylabel('Spacing (m)')
plt.title('Sound level in function of distance and spacing \n with a velocity of 9.25 m/s for WM6',fontweight='bold')
plt.minorticks_on()
# NOTE(review): the `b=` keyword of plt.grid was renamed `visible` in
# matplotlib 3.5 and removed later — confirm the pinned matplotlib version.
plt.grid(b=True, which='major',linewidth=2)
plt.grid(b=True, which='minor')
plt.show()
|
3,180 | c4b4585501319fd8a8106c91751bb1408912827a | # from django.shortcuts import render
# from django.http import HttpResponse
import json

from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.http import Http404, JsonResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse_lazy
from django.utils import timezone
from django.views import generic
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST

from . import models
from questions.forms import UserRegistrationForm, UserLoginForm, UserSettingsForm, AskForm, AnswerForm, UserForm
# from .models import Post
# Create your views here.
def index(request):
    """Front page: every question, with hot-tags and top-users sidebars."""
    # NOTE(review): paginate() runs twice over the same queryset for
    # 'questions' and 'page_objects' — duplicated work on every request.
    return render(request, 'new_questions.html', {
        'title': 'Вопросы',
        'questions': paginate(request, models.Question.objects.all()),
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'page_objects' : paginate(request, models.Question.objects.all()),
    })
def top(request):
    """Top-rated questions listing."""
    return render(request, 'new_questions.html', {
        'title': 'Топ вопросов',
        'questions': paginate(request, models.Question.objects.get_hot()),
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'page_objects' : paginate(request, models.Question.objects.get_hot()),
    })
def new(request):
    """Newest questions listing."""
    return render(request, 'new_questions.html', {
        'title': 'Новые',
        'questions': paginate(request, models.Question.objects.get_new()),
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'page_objects' : paginate(request, models.Question.objects.get_new()),
    })
def hot(request, id=1):
    """Hot questions page (the `id` parameter is currently unused)."""
    return render(request, "hot.html", {
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        "questions" : paginate(request, objects_list = models.Question.objects.get_hot()),
        "page_objects" : paginate(request, objects_list = models.Question.objects.get_hot()),
    })
def profile(request, id):
    """User profile/settings page for the user with the given primary key."""
    return render(request, "user_settings.html", {
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        "profile": get_object_or_404(models.CustomUser, pk=id),
    })
def user_questions(request, id):
    """All questions asked by one user."""
    # TODO: rework this page's layout — it looks bad. (translated from Russian)
    return render(request, "user_question.html", {
        'questions': paginate(request, models.Question.objects.get_by_user(user_id=id)),
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'page_objects' : paginate(request, models.Question.objects.get_by_user(user_id=id)),
    })
def question_page(request, id):
    """Single question with its answers, hottest first."""
    return render(request, "questions.html", {
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        "question": get_object_or_404(models.Question, pk=id) ,
        "answers": paginate(request, objects_list = models.Answer.objects.get_hot_for_answer(id)),
        "page_objects": paginate(request, objects_list = models.Answer.objects.get_hot_for_answer(id)),
    })
def tag(request, id):
    """All questions carrying the given tag."""
    return render(request, 'tag_find.html', {
        'users' : paginate(request, models.CustomUser.objects.by_rating())[0:10],
        'tags' : paginate(request, models.Tag.objects.hottest())[0:10],
        'tag' : get_object_or_404(models.Tag, pk=id) ,
        'questions': paginate(request, models.Question.objects.get_by_tag(tag_id=id)),
        "page_objects": paginate(request, objects_list = models.Question.objects.get_by_tag(tag_id=id)),
    })
def edit(request):
    """Edit the current user's profile settings (GET shows form, POST saves)."""
    user = get_object_or_404(models.CustomUser, username=request.user)
    if request.method == 'POST':
        form = UserSettingsForm(instance=user,
                                data=request.POST,
                                files=request.FILES
                                )
        if form.is_valid():
            form.save()
            # NOTE(review): renders the profile view directly instead of
            # redirecting — a browser refresh will re-submit the POST.
            return profile(request, user.id)
    else:
        form = UserSettingsForm(instance=user)
    # Invalid POSTs fall through here with the bound form, re-showing errors.
    return render(request, 'edit.html', {
        'form': form,
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
    })
@login_required(login_url='/log_in/')
def new_answer(request, id):
    """Post a new answer to question `id`; raises Http404 for unknown questions."""
    if models.Question.objects.filter(id=id).exists():
        if request.method == 'POST':
            form = AnswerForm(request.POST)
            if form.is_valid():
                #answeredQuestion = Question.objects.get_by_id(id)[0]
                answeredQuestion = get_object_or_404(models.Question, pk=id)
                answer = models.Answer.objects.create(author=request.user,
                                                      create_date=timezone.now(),
                                                      text=form.cleaned_data['text'],
                                                      question_id=answeredQuestion.id)
                answer.save()
                # Redirect so a refresh does not duplicate the answer.
                return redirect('/question/{}/add_answer/'.format(id))
        else:
            form = AnswerForm()
        #return render(request, 'question/new_answer.html', {'form': form})
        return render(request, 'questions.html', {
            'form': form,
            'question': get_object_or_404(models.Question, pk=id),
            'answers' : paginate(request, models.Answer.objects.get_hot_for_answer(id)),
            'tags' : paginate(request, models.Tag.objects.hottest())[:10],
            'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
            'page_objects' : paginate(request, models.Answer.objects.get_hot_for_answer(id)),
        })
    else:
        raise Http404
@login_required(login_url='/log_in/')
def ask(request):
    """Create a new question with whitespace-separated tags."""
    # NOTE(review): `error` is True by default and set False on an invalid
    # form — the flag semantics look inverted; confirm against the template.
    error = True
    if request.method == 'POST':
        firstly = False
        form = AskForm(request.POST)
        if form.is_valid():
            ques = models.Question.objects.create(author=request.user,
                                                  create_date=timezone.now(),
                                                  is_active=True,
                                                  title=form.cleaned_data['title'],
                                                  text=form.cleaned_data['text'])
            ques.save()
            # Tags field is a whitespace-separated list; create missing tags.
            for tagTitle in form.cleaned_data['tags'].split():
                tag = models.Tag.objects.get_or_create(title=tagTitle)[0]
                ques.tags.add(tag)
            ques.save()
            #return question(request, ques.id)
            return redirect('/question/{}/'.format(ques.id))
        else:
            error = False
    else:
        form = AskForm()
        firstly = True
    return render(request, 'new_ask.html', {
        'firstly': firstly,
        'error': error,
        'form': form,
        'tags' : paginate(request, models.Tag.objects.hottest())[:10],
        'users' : paginate(request, models.CustomUser.objects.by_rating())[:10],
    })
def signin(request):
    """Log a user in and send them back to where they came from."""
    # NOTE(review): raises KeyError when the ?next= parameter is absent —
    # request.GET.get('next', '/') would be safer.
    last_page = request.GET['next']
    if last_page == '/logout' or last_page == '/login':
        last_page = '/'
    error = False
    if request.method == 'POST':
        user = authenticate(username=request.POST['nickname'], password=request.POST['password'])
        if user is not None:
            login(request, user)  # authorize the user (translated comment)
            return redirect(last_page)
        else:
            error = True
    return render(request, 'login.html',
                  {'error': error,
                   'last_page': last_page,
                   'tags' : paginate(request, models.Tag.objects.hottest()),
                   'users' : paginate(request, models.CustomUser.objects.by_rating()),
                   })
def registration(request):
    """Register a new user, log them in, and redirect to ?next= (or /)."""
    if request.method == 'POST':
        user_form = UserRegistrationForm(request.POST, request.FILES)
        print(user_form)
        if user_form.is_valid():
            user = user_form.save()
            # Hash the raw password before persisting.
            user.set_password(user.password)
            user.save()
            login(request, user)
            # NOTE(review): when 'next' is absent GET.get returns None, which
            # is != '' and gets passed to redirect() — confirm intended.
            return redirect(request.GET.get('next') if request.GET.get('next') != '' else '/')
        else:
            print(user_form.errors)
    else:
        user_form = UserRegistrationForm()
    return render(request,'registration.html',
                  {'form':user_form,})
def signout(request):
    """Log the current user out; 404 for anonymous visitors."""
    if not request.user.is_authenticated:
        raise Http404
    logout(request)
    #return redirect(request.GET['from'])
    return redirect('/')
def paginate(request, objects_list):
    """Return one 30-item page of `objects_list` selected by the ?page= parameter.

    Falls back to the first page when the parameter is missing or not an
    integer, and to the last page when it is out of range.
    """
    paginator = Paginator(objects_list, 30)
    page = request.GET.get('page')
    try:
        objects = paginator.page(page)
    except PageNotAnInteger:
        objects = paginator.page(1)
    except EmptyPage:
        objects = paginator.page(paginator.num_pages)
    return objects
@require_POST
def like_question(request):
    """AJAX vote endpoint: adjust a question's rating by +/-1 and return JSON status."""
    question_id = request.POST.get('question_id', '')
    like_type = request.POST.get('like_type', '')
    # Bug fix: the original referenced bare `Question` (and `JsonResponse`),
    # neither of which was imported — this module only does
    # `from . import models` — so every request raised NameError.
    # The dead `if not question` branch is gone too: get_object_or_404
    # raises Http404 instead of returning a falsy value.
    question = get_object_or_404(models.Question, pk=question_id)
    if like_type == 'like':
        question.rating += 1
    elif like_type == 'dislike':
        question.rating -= 1
    question.save()
    return JsonResponse({"status": "ok"})
@require_POST
def like_answer(request):
    """AJAX vote endpoint: adjust an answer's rating by +/-1 and return JSON status."""
    answer_id = request.POST.get('answer_id', '')
    like_type = request.POST.get('like_type', '')
    # Bug fix: was bare `Answer`, which is not in scope (only
    # `from . import models` is imported) — NameError on every call.
    # get_object_or_404 raises on a miss, so the dead falsy check is removed.
    answer = get_object_or_404(models.Answer, pk=answer_id)
    if like_type == 'like':
        answer.rating += 1
    elif like_type == 'dislike':
        answer.rating -= 1
    answer.save()
    return JsonResponse({"status": "ok"})
@require_POST
def approve_answer(request):
    """Toggle the `approved` flag on an answer and return JSON status."""
    answer_id = request.POST.get('answer_id', '')
    # Bug fix: was bare `Answer` (not imported) — NameError on every call;
    # the dead falsy check after get_object_or_404 is removed as well.
    answer = get_object_or_404(models.Answer, pk=answer_id)
    answer.approved = not answer.approved
    answer.save()
    return JsonResponse({"status": "ok"})
3,181 | f615e7bbfa9179d0bfb321242cd8df4ae7b48993 | import org.cogroo.gc.cmdline
import typing
class __module_protocol__(typing.Protocol):
    """Static-typing stub for the ``org.cogroo.gc`` Java package as exposed by JPype."""
    # A module protocol which reflects the result of ``jp.JPackage("org.cogroo.gc")``.
    # Nested ``cmdline`` sub-package, typed with its own generated protocol.
    cmdline: org.cogroo.gc.cmdline.__module_protocol__
|
3,182 | 32ca107fde4c98b61d85f6648f30c7601b31c7f3 | """
Django settings for geobombay project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DATA_DIR = os.path.join(BASE_DIR, 'data')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECRET_KEY bootstrap: reuse an existing definition, else read secret.txt,
# else generate a key and persist it for future runs.
try:
    SECRET_KEY
except NameError:
    SECRET_FILE = os.path.join(BASE_DIR, 'secret.txt')
    try:
        with open(SECRET_FILE) as secret:
            SECRET_KEY = secret.read().strip()
    except IOError:
        try:
            from random import choice
            # NOTE(review): `random` is not a CSPRNG; on Python 3 the
            # `secrets` module would be the right generator here.
            SECRET_KEY = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
            # Bug fix: `file()` is a Python-2-only builtin; use open() in a
            # context manager so the handle is closed deterministically.
            with open(SECRET_FILE, 'w') as secret:
                secret.write(SECRET_KEY)
        except IOError:
            # Bug fix: the original constructed this Exception but never
            # raised it, so an unwritable secret file was silently ignored.
            raise Exception('Please create a %s file with random characters to generate your secret key!' % SECRET_FILE)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
#'suit', #Django Suit, skin for the admin
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'leaflet',
'cts',
'wards',
'bmc',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'geobombay.urls'
WSGI_APPLICATION = 'geobombay.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'geobombay'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'assets', 'collected-static')
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'assets', 'static'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = TCP + (
'django.core.context_processors.request',
)
#Global map / leaflet settings (for django-leaflet plugin we use for admin)
LEAFLET_CONFIG = {
'DEFAULT_CENTER': (19, 72.85521,),
'DEFAULT_ZOOM': 11,
}
# Allow machine-specific overrides; only a *missing* module is tolerated.
try:
    from local_settings import *
except ImportError:
    # Bug fix: the bare `except:` also swallowed genuine errors (syntax
    # errors, bad names) inside local_settings, hiding misconfiguration.
    pass
|
3,183 | d6791c8122129a46631582e7d9339ea08bd2e92b | # Default imports
from sklearn.feature_selection import SelectFromModel
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import numpy as np
data = pd.read_csv('data/house_prices_multivariate.csv')
# Your solution code here
def select_from_model(dataframe):
    """Return the feature-column names kept by SelectFromModel over a RandomForestClassifier.

    The last column of `dataframe` is treated as the target; all preceding
    columns are candidate features.
    """
    features = dataframe.iloc[:, :-1]
    target = dataframe.iloc[:, -1]
    np.random.seed(9)  # reproducible forest
    selector = SelectFromModel(RandomForestClassifier())
    selector.fit(features, target)
    kept_mask = selector.get_support()
    return list(features.columns[kept_mask])
|
3,184 | 58d137d614a0d5c11bf4325c1ade13f4f4f89f52 | print("2 + 3 * 4 =")
print(2 + 3 * 4)  # multiplication binds tighter than addition: 2 + 12 = 14
print("2 + (3 * 4) = ")
print(2 + (3 * 4))  # explicit parentheses — same grouping, same result: 14
|
3,185 | 006f499eed7cd5d73bb0cb9b242c90726fff35c1 | from odoo import models,fields, api
class director(models.Model):
    """Odoo model for a film director; one director is linked to many films."""
    # Classic inheritance: extends the shared base-entity model. (translated)
    _inherit = 'base.entidad'
    _name = 'cinemateca.director'
    # `help` texts are user-facing runtime strings and deliberately stay in Spanish.
    name = fields.Char(string="name", required=True, help="Nombre del director")
    apellidos = fields.Char(string="apellidos", required=True, help="Apellidos del director")
    # Inverse side of cinemateca.pelicula.director_id.
    pelicula_ids = fields.One2many("cinemateca.pelicula", "director_id", string="sesion")
3,186 | cce85d8a34fd20c699b7a87d402b34231b0d5dbb | from ..models import Empleado, Puesto, Tareas
from django.contrib.auth import login, logout
from django.contrib.auth.models import User, Group
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import EmpleadoSerializer, PuestoSerializer, TareasSerializer, UserSerializer, GroupSerializer
from rest_framework import viewsets
from . import permissions, authenticators
class EmpleadoViewSet(viewsets.ModelViewSet):
    """CRUD API over Empleado records, restricted to the owning user."""
    #queryset = Empleado.objects.all()
    # NOTE(review): `model =` and the `pre_save` hook are DRF 2.x-era APIs;
    # modern DRF expects `queryset`/`get_queryset` and `perform_create` —
    # confirm the pinned djangorestframework version.
    model = Empleado
    serializer_class = EmpleadoSerializer
    permission_classes = (permissions.IsOwner,)
    def pre_save(self, obj):
        #add user to object if user is logged in
        if isinstance(self.request.user, User):
            obj.user = self.request.user
class PuestoViewSet(viewsets.ModelViewSet):
    """CRUD API over Puesto (position) records; owner-restricted."""
    queryset = Puesto.objects.all()
    #model = Puesto
    serializer_class = PuestoSerializer
    permission_classes = (permissions.IsOwner,)
class TareasViewSet(viewsets.ModelViewSet):
    """CRUD API over Tareas (tasks); default permissions."""
    queryset = Tareas.objects.all()
    serializer_class = TareasSerializer
class UserViewSet(viewsets.ModelViewSet):
    """User management endpoint: open registration, staff-or-self otherwise."""
    queryset = User.objects.all()
    model = User
    serializer_class = UserSerializer
    def get_permissions(self):
        #Allow non-authenticated user to create
        return (AllowAny() if self.request.method == 'POST'
                else permissions.IsStaffOrTargetUser()),
class GroupViewSet(viewsets.ModelViewSet):
    """Read/write API over Django auth groups."""
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
class AuthView(APIView):
    """Session login (POST) / logout (DELETE) endpoint over quiet HTTP Basic auth."""
    authentication_classes = (authenticators.QuietBasicAuthentication,)
    def post(self, request, *args, **kwargs):
        # Establish a session for the basic-auth-verified user.
        login(request, request.user)
        # Bug fix: this module imports `UserSerializer` directly (see the
        # import list) — there is no `serializers` name in scope, so the
        # original `serializers.UserSerializer(...)` raised NameError.
        return Response(UserSerializer(request.user).data)
    def delete(self, request, *args, **kwargs):
        logout(request)
        return Response()
|
3,187 | e71a23ef7a065bc4210e55552e19c83c428bc194 | """This module contains an algorithm to find the different
components in a graph represented as an adjacency matrix.
"""
def find_components(adjacency_matrix):
    """Return the connected components of a graph given as an adjacency matrix.

    Each component is a list of node indices in depth-first preorder
    (lower-numbered neighbors first); components are ordered by their
    smallest node. Returns [] for an empty matrix.
    """
    visited = set()
    components = []
    for node in range(len(adjacency_matrix)):
        if node not in visited:
            component = []
            build_component(adjacency_matrix, visited, node, component)
            components.append(component)
    return components
def build_component(adjacency_matrix, visited, node, component):
    """Collect into `component` every node reachable from `node` (DFS preorder).

    Bug fix / robustness: the original recursion raised RecursionError on
    components deeper than Python's recursion limit (~1000). This version
    uses an explicit stack of neighbor iterators, which reproduces the
    recursive visit order exactly while supporting arbitrarily large graphs.
    """
    visited.add(node)
    component.append(node)
    # Each stack entry is the in-progress neighbor scan of one visited node.
    stack = [enumerate(adjacency_matrix[node])]
    while stack:
        descended = False
        for neighbor, value in stack[-1]:
            if value == 1 and neighbor not in visited:
                visited.add(neighbor)
                component.append(neighbor)
                # Descend into the neighbor; resume this row's scan later.
                stack.append(enumerate(adjacency_matrix[neighbor]))
                descended = True
                break
        if not descended:
            stack.pop()  # row exhausted — backtrack
|
3,188 | 0f3430cbfc928d26dc443fde518881923861f2e3 | from django.urls import path
from .views import PasswordList
urlpatterns = [
    # App root: class-based list view of stored passwords.
    path('', PasswordList.as_view()),
]
|
3,189 | 5669476cc735f569263417b907e8f4a9802cd325 | import socket
import sys
TCP_IP = '192.168.149.129'
TCP_PORT = 5005
BUFFER_SIZE = 2000
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((TCP_IP, TCP_PORT))
while 1:
print 'user data:'
content = sys.stdin.readline();
s.send(content)
data = s.recv(BUFFER_SIZE)
print "received data:", data
s.close()
|
3,190 | a917dd6171a78142fefa8c8bfad0110729fc1bb0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-15 18:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 1.10.6); applied migrations
    # should not be edited by hand.
    dependencies = [
        ('aposta', '0003_aposta_nome'),
    ]
    operations = [
        migrations.CreateModel(
            name='Aposta2',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('aposta_identificacao', models.CharField(max_length=200)),
                ('valor', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Concurso2',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('concurso_edicao', models.CharField(max_length=20)),
                ('pub_data', models.DateTimeField(verbose_name='data de publicacao')),
            ],
        ),
        migrations.AlterField(
            model_name='aposta',
            name='dataAposta',
            field=models.DateField(),
        ),
        migrations.AddField(
            model_name='aposta2',
            # NOTE(review): field name looks misspelled ('identificao' vs
            # 'identificacao'); fixing it requires a new rename migration,
            # not an edit to this applied one.
            name='Concurso2_identificao',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aposta.Concurso2'),
        ),
    ]
|
3,191 | aee8fa7bc1426945d61421fc72732e43ddadafa1 | # -*- coding: utf-8 -*-
"""
Created on Sat Sep 29 19:10:06 2018
@author: labuser
"""
# 2018-09-29
import os
import numpy as np
from scipy.stats import cauchy
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
import pandas as pd
def limit_scan(fname, ax):
    """Load one limit-scan TSV ('#' comments) and plot background-subtracted signal vs frequency on `ax`."""
    data = pd.read_csv(fname, sep='\t', comment="#", index_col=False)
    data['sig'] = data['s'] - data['sb']  # signal minus background column
    data.sort_values(by='f', inplace=True)
    data.plot(x='f', y='sig', ax=ax)
    return
def limit():
    """Using the HP 214B, see what the DIL is.

    Overlays the two 2018-09-29 limit scans on a single axes.
    """
    fig, ax = plt.subplots()
    fname = "1_lim_dye.txt"
    folder = os.path.join("..", "2018-09-29")
    fname = os.path.join(folder, fname)
    limit_scan(fname, ax)
    fname = "2_lim_dye.txt"
    folder = os.path.join("..", "2018-09-29")
    fname = os.path.join(folder, fname)
    limit_scan(fname, ax)
    return
def cauchy_model(x, a, loc, scale, y0):
    """Scaled Cauchy (Lorentzian) pdf of amplitude `a` on a constant background `y0`."""
    return a*cauchy.pdf(x, loc, scale) + y0
def cauchy_fit(x, y, d):
    """Fit cauchy_model to (x, y) and return the optimal parameters.

    d: expected feature direction — -1 for a dip, +1 for a peak; any other
    value falls back to generic initial guesses. Prints the initial guess
    plus the fitted center frequency, FWHM and Q.
    """
    # Bug fix: the original tested `d is -1` / `d is 1`. Identity comparison
    # with int literals relies on CPython's small-int cache (and is a
    # SyntaxWarning on Python >= 3.8); equality is the correct test.
    if d == -1:
        # Dip: negative amplitude, centered on the minimum.
        a0 = -(max(y) - min(y))*(max(x) - min(x))/10
        loc0 = x[np.argmin(y)]
        scale0 = (max(x) - min(x))/10
        y00 = max(y)
    elif d == 1:
        # Peak: positive amplitude, centered on the maximum.
        a0 = (max(y) - min(y))*(max(x) - min(x))/10
        loc0 = x[np.argmax(y)]
        scale0 = (max(x) - min(x))/10
        y00 = min(y)
    else:
        a0 = 1
        loc0 = np.mean(x)
        scale0 = (max(x) - min(x))/10
        y00 = 1
    p0 = [a0, loc0, scale0, y00]
    print(p0)
    popt, pcov = curve_fit(cauchy_model, x, y, p0)
    print("Center Frequency is : ", popt[1]*1e-6, " MHz")
    print("FWHM is : ", 2*popt[2]*1e-6, " MHz")
    print("Q is : ", popt[1]/(2*popt[2]))
    return popt
def mw_fscan(fname, d, ax, plotting=True):
    """Load an MW frequency scan, normalize signal by reference, fit a Cauchy line shape.

    fname: header-less TSV with columns f, b(ackground), s(ignal), r(eference).
    d: feature direction passed through to cauchy_fit (-1 dip, +1 peak).
    Returns the processed DataFrame; optionally plots data, fit and residuals.
    """
    data = pd.read_csv(fname, sep="\t", comment="#", index_col=False,
                       header=None, names=['f', 'b', 's', 'r'])
    data.sort_values(by='f', inplace=True)
    data['sig'] = data['s'] - data['b']
    data['ref'] = data['r'] - data['b']
    data['nrm'] = data['sig'] / data['ref']  # norm by signal / reference
    # NOTE(review): the next line is a no-op self-assignment.
    data['nrm'] = data['nrm']
    popt = cauchy_fit(data['f'].values, data['nrm'].values, d)
    # print(popt)
    if plotting is True:
        data.plot(x='f', y='nrm', ax=ax)
        ax.plot(data['f'].values, cauchy_model(data['f'].values, *popt))
        # Fit residuals on the same axes.
        ax.plot(data['f'].values,
                data['nrm'].values - cauchy_model(data['f'].values, *popt))
    return data
def cavity_resonances():
    """Using the dye laser at -180 GHz, the MW f is scanned over the
    cavity resonances, finding center, FWHM, and Q values."""
    fig, axes = plt.subplots()
    folder = os.path.join("..", "2018-09-29")
    fname = "3_fscan.txt"
    fname = os.path.join(folder, fname)
    mw_fscan(fname, -1, axes)
    axes.axhline(0.9, c='k')  # reference level marker
    fig.tight_layout()
    return
def mwion_scan():
    """Take ratios of MW on / MW off to get ionization rate at different values
    of the Variable Attenuator"""
    fig, ax = plt.subplots()
    # Data from 2018-09-27, using the SFIP
    fname = "4_mwion_blnk.txt"  # -180 GHz
    folder = os.path.join("..", "2018-09-29")
    fname = os.path.join(folder, fname)
    data = pd.read_csv(fname, sep="\t", comment="#")
    data['r'] = data['s1']/data['s2']  # MW-on / MW-off ratio
    data['f'] = np.power(10, data['d']/20) # field equivalent
    data.sort_values(by='f', inplace=True)
    data.plot(x='f', y='r', marker='v', ax=ax, label="-180 GHz")
    return
if __name__ == "__main__":
    # limit()
    # cavity_resonances()
    mwion_scan()
|
3,192 | 309807e04bfbf6c32b7105fe87d6ad1247ae411a | #
# PySNMP MIB module ADTRAN-ATLAS-HSSI-V35-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-ATLAS-HSSI-V35-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 16:59:09 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
adATLASModuleInfoFPStatus, = mibBuilder.importSymbols("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus")
adATLASUnitSlotAddress, adATLASUnitFPStatus, adATLASUnitPortAddress = mibBuilder.importSymbols("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress", "adATLASUnitFPStatus", "adATLASUnitPortAddress")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, Integer32, Counter64, IpAddress, ModuleIdentity, ObjectIdentity, iso, Unsigned32, Counter32, MibIdentifier, NotificationType, NotificationType, enterprises, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "Integer32", "Counter64", "IpAddress", "ModuleIdentity", "ObjectIdentity", "iso", "Unsigned32", "Counter32", "MibIdentifier", "NotificationType", "NotificationType", "enterprises", "TimeTicks")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
adtran = MibIdentifier((1, 3, 6, 1, 4, 1, 664))
adMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2))
adATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154))
adGenATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1))
adATLASHSSIV35mg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1, 11))
adATLASHSSIV35IfceDeact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401100)).setObjects(("IF-MIB", "ifIndex"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitPortAddress"), ("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitFPStatus"))
adATLASHSSIV35IfceReact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401101)).setObjects(("IF-MIB", "ifIndex"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitPortAddress"), ("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitFPStatus"))
mibBuilder.exportSymbols("ADTRAN-ATLAS-HSSI-V35-MIB", adtran=adtran, adMgmt=adMgmt, adATLASHSSIV35IfceReact=adATLASHSSIV35IfceReact, adGenATLASmg=adGenATLASmg, adATLASmg=adATLASmg, adATLASHSSIV35IfceDeact=adATLASHSSIV35IfceDeact, adATLASHSSIV35mg=adATLASHSSIV35mg)
|
3,193 | 51b32972c97df50a45eb2b9ca58cdec0394e63ee | from fractions import Fraction as f
print f(49,98) * f(19, 95) * f(16, 64) * f(26, 65)
|
3,194 | 1630a3d0becac195feee95a1c3b23568612a48d2 | import preprocessing
import tokenization
import vectorspacemodel
import pickle
import collections
import os
import math
import operator
from itertools import islice
def take(n, iterable):
    """Return the first *n* items of *iterable* as a list.

    Works on any iterable (including plain iterators) and returns fewer
    than *n* items when the iterable is exhausted first.
    """
    # zip stops at the shorter argument, so this consumes at most n items.
    return [item for _, item in zip(range(n), iterable)]
# Query-time driver for a small vector-space search engine:
# loads the pickled inverted index, document vectors and link table,
# builds a normalized tf-idf vector for the user's query, ranks documents
# by cosine similarity and prints the URLs of the top 7 matches.
directory = os.getcwd()
links_path = os.path.join(directory, 'links')
# Getting Index from pickle dump
with open("D_INDEXED_FILE/index", 'rb') as f:
    while True:
        try:
            index = pickle.load(f)
        except EOFError:
            break
inv_index = index
# Getting Document vectors from pickle dump
dv = {}
vec_files = [file for file in os.listdir("D_INDEXED_FILE/vectors/.") if file.endswith("vector")]
# x = index, y = filename
for x, y in enumerate(vec_files):
    # Open all of the token lists
    with open("D_INDEXED_FILE/vectors/" + y, 'rb') as ff:
        while True:
            try:
                vector = pickle.load(ff)
            except EOFError:
                break
    # Only the last successfully loaded vector per file is kept.
    dv[y] = vector
# By here you will get all document vectors in dv variable
#print("Document vectors are: ", dv)
query = input("Enter the query: ")
query_vector = []
idf,terms = vectorspacemodel.get_idf(inv_index)
# Terms are sorted so that term position == vector dimension index.
od = collections.OrderedDict(sorted(idf.items()))
#print("idf is: ", idf)
#print("terms are: ", terms)
processed_query = preprocessing.parse_query(query.lower())
#print("processed query is: ", processed_query)
tokenized_query = tokenization.query_tokenization(processed_query)
#print("tokenized query is: ", tokenized_query)
# This code makes the query vector and normalizes it
# Each entry of query_vector is a [weight, term_index] pair; repeated query
# terms double the weight for that dimension.
for x,y in enumerate((od.items())):
    for i in tokenized_query.split():
        if i == y[0]:
            #print(y[1])
            if [y[1],x] in query_vector:
                query_vector.remove([y[1], x])
                query_vector.append([y[1]+y[1],x])
            else:
                query_vector.append([y[1],x])
#print("Unnormalized query vector is: ", query_vector)
# Normalizing here
# NOTE(review): if no query term matches the vocabulary, weight stays 0.0
# and the division below raises ZeroDivisionError -- confirm handling.
weight = 0.0
for i in range(len(query_vector)):
    weight = weight + (query_vector[i][0] ** 2)
weight = math.sqrt(weight)
# print("weight is: ", weight)
for i in range(len(query_vector)):
    query_vector[i][0] = query_vector[i][0] / weight
#print("the Normalized query vector is: ", query_vector)
# Calculate Similarity between query vector and all document vectors
# The early `elif di < dj: break` assumes each document vector is sorted by
# term index -- presumably guaranteed by the indexer; verify.
similarity = {}
for k in dv.keys():
    sim = float(0)
    for i in range(len(query_vector)):
        di = query_vector[i][1]
        #import pdb; pdb.set_trace()
        for j in range(len(dv[k])):
            dj = dv[k][j][1]
            if di == dj:
                mul = query_vector[i][0] * dv[k][j][0]
                sim += mul
                #print (mul)
                break
            elif di < dj:
                break
    similarity[k] = sim
    #print("document vector is: ", dv[k])
    #print("query vector is: ", v1)
    #print ("similarity is: ", sim)
    #print(sim)
#print("cosine similarity is: ", similarity)
sorted_x = sorted(similarity.items(), key=operator.itemgetter(1), reverse=True)
#print("Sorted Cosine Similarity",sorted_x)
top_7 = take(7, sorted_x)
#print("Top 7 documents are: ", top_7)
# Getting the links file to match file with link
with open(links_path, 'rb') as f:
    while True:
        try:
            web_links = pickle.load(f)
        except EOFError:
            break
#print("All the web links are: ", web_links)
#print("Top 10 documents are:\n ", ("\n".join(str(x[0][0:-7]) for x in top_5)).strip())
print("Our Search Results are: ")
# Match each ranked vector file (name minus its "vector" suffix) against the
# link-table keys (minus their 5-char suffix) and print the stored URL.
for x in top_7:
    #print("".join(str(x[0][0:-7])))
    if x[1] == float(0):
        print("No relevant documents found!")
        break
    else:
        for j in web_links.keys():
            if "".join(str(x[0][0:-7])) == j[0:-5]:
                print(repr(web_links[j]).strip('\''))
# print("Total document vectors are: ", len(dv))
# print("Total unique terms for index are: ", len(inv_index))
# print("Total unique terms from terms are: ", len(terms))
# print("Toal unique terms from idf are: ", len(idf)) |
3,195 | ca3cdbd5d5d30be4f40925366994c3ea9d9b9614 | from django.db import models
from datetime import datetime
class Folder(models.Model):
    """A category grouping for bookmarks."""

    # Display name of the folder; "misc" is the catch-all default.
    folder = models.CharField(max_length=200, default = "misc")
    # Denormalized bookmark count. NOTE(review): nothing in this file
    # updates it -- presumably maintained by the views; confirm.
    num_of_entries = models.IntegerField(default=0)

    def __str__(self):
        return self.folder

    class Meta:
        verbose_name_plural = "Folders/Categories"
class Bookmark(models.Model):
    """A saved URL with free-form notes, grouped under a Folder."""

    name = models.CharField(max_length=200)
    url = models.CharField(max_length=400)
    # Deleting a Folder deletes its bookmarks.
    folder = models.ForeignKey(Folder, on_delete=models.CASCADE)
    # BUGFIX: was default=datetime.now() -- the call was evaluated once at
    # import time, stamping every row with server-start time. Passing the
    # callable defers evaluation to row creation.
    date_of_creation = models.DateTimeField(default=datetime.now)
    notes = models.TextField()

    def __str__(self):
        return self.name
|
3,196 | 8f554166c28fe4c9a093568a97d39b6ba515241b | # This implementation of EPG takes data as XML and produces corresponding pseudonymized data
from lxml import etree
from utils import generalize_or_supress
from hashlib import sha256
from count import getLast, saveCount
import pickle
from hmac import new
from random import random
from json import loads
from bigchain import putonBlockChain, findRecord
def EPGAinit(IDPath):
    """Derive a pseudonym pair (Hi, GUi) from the identity XML at IDPath.

    Reads the quasi-identifier attributes off the XML root element,
    generalizes/suppresses each one, hashes their concatenation into a
    group id Gi, advances a persisted per-group counter, maps the counter
    through a quadratic-residue transform mod a fixed prime to get GUi,
    and finally HMACs (Gi || GUi) with the generalized attributes as key.
    """
    # NOTE(review): the file handle is never closed; consider `with open(...)`.
    idt = open(IDPath,'rt').read()
    # Root element; .keys()/.values() yield its XML attribute names/values.
    Qti = etree.fromstring(idt)
    print('Loading Identifiers')
    print('Quasi Specifiers..')
    print(', '.join(Qti.keys()))
    print('Applying EPGAD_Init on Qti')
    # Generalize or suppress each (value, name) quasi-identifier pair.
    gQti = [generalize_or_supress(i[1],i[0]) for i in zip(Qti.keys(),Qti.values())]
    hmacKey = ""
    for i in gQti:
        hmacKey+=i
    # Group identifier: digest over all generalized quasi-identifiers.
    Gi = sha256(hmacKey.encode()).hexdigest()
    countObj = getLast(Gi)
    GiObj = pickle.loads(countObj.GiObj)
    if GiObj['cQueue'].empty():
        if 'count' not in GiObj.keys():
            GiObj['count'] = 0
            count = 0
        else:
            GiObj['count']+=1
            count = GiObj['count']
    # NOTE(review): when cQueue is NOT empty, `count` is never assigned and
    # the comparison below raises NameError -- confirm intended behavior.
    countObj.GiObj = pickle.dumps(GiObj)
    saveCount(countObj)
    prime = 179426549
    if count >= prime:
        raise Exception('Prime Exceeded')
    else:
        # Quadratic-residue mapping folds the raw counter into [0, prime)
        # without exposing the monotonic sequence directly.
        res = count**2%prime
        if count <= prime/2:
            GUi = res
        else:
            GUi = prime - res
    Hi = new(Gi.encode() + str(GUi).encode() , hmacKey.encode() , sha256).hexdigest()
    return Hi, GUi
def EPGAD(ReportPath, Hi=None, GUi=None):
    """Pseudonymize the JSON report at ReportPath and store it.

    Suppresses subject.display, replaces subject.reference with the
    pseudonym Hi (a random one is generated when Hi is None), then puts
    the record on the blockchain and reads it back as a sanity check.
    """
    # BUGFIX: was `if Hi == None:` -- identity comparison is the correct
    # idiom for the None sentinel.
    if Hi is None:
        Hi = sha256(str(random()).encode()).hexdigest()
    # BUGFIX: the original open() leaked the file handle.
    with open(ReportPath, 'rt') as report_file:
        jsn = report_file.read()
    jsnld = loads(jsn)
    print('Report Loaded')
    print('Finding Subject Information')
    if 'subject' in jsnld.keys():
        print('Subject Information Found')
        if 'display' in jsnld['subject'].keys():
            jsnld['subject']['display'] = ""
            print('Subject Display Found and Suppressed')
        if 'reference' in jsnld['subject'].keys():
            jsnld['subject']['reference'] = Hi
            print('Replacing Identifier with ', Hi)
    print('Placing Record Asset on BlockChain')
    print()
    txid = putonBlockChain(jsnld,Hi, GUi)
    print('Status OK. Retrieving Transaction')
    findRecord(txid)
if __name__ == "__main__":
    # Demo run: derive a pseudonym pair from the sample identity file,
    # then pseudonymize and store the sample report under it.
    Hi, GUi = EPGAinit('sampleIdentity.xml')
    EPGAD('sampleReport.json', Hi, GUi)
|
3,197 | ba7f66a0f9cf1028add778315033d596e10d6f16 | import numpy as np
import tensorflow as tf
# 100 random samples of the ground-truth line y = 10x + 5.
x_data = np.random.rand(100)
y_data = x_data * 10 + 5
# Build the linear model y = k*x + b with trainable slope and intercept.
b = tf.Variable(0.)
k = tf.Variable(0.)
y=k*x_data+b
# Quadratic cost (mean squared error); tf.square squares elementwise.
loss= tf.reduce_mean(tf.square(y_data-y))
# Gradient-descent optimizer for training, learning rate 0.2.
optimizer=tf.train.GradientDescentOptimizer(.2)
train=optimizer.minimize(loss)
init=tf.global_variables_initializer()
# TF1-style session loop: 201 steps, logging [k, b] every 10 steps;
# k should converge toward 10 and b toward 5.
with tf.Session() as ss:
    ss.run(init)
    for step in range(201):
        ss.run(train)
        if step %10==0:
            print(step,ss.run([k,b]))
|
3,198 | ffd034eb5f0482c027dcc344bddb01b90249511c | import os
import io
import time
import multiprocessing as mp
from queue import Empty
import picamera
from PIL import Image
from http import server
import socketserver
import numpy as np
import cv2
class QueueOutputMJPEG(object):
    """File-like sink for picamera's MJPEG splitter port.

    Each time a new JPEG frame begins (SOI marker 0xFFD8), the previously
    buffered frame is offered to ``queue`` -- but only when the queue is
    currently empty, so a slow consumer skips frames instead of lagging.
    """

    def __init__(self, queue, finished):
        self.queue = queue
        self.finished = finished
        self.stream = io.BytesIO()

    def write(self, buf):
        if not buf.startswith(b'\xff\xd8'):
            # Continuation of the current frame: just keep buffering.
            self.stream.write(buf)
            return
        # SOI marker: whatever is buffered is a complete previous frame.
        pending = self.stream.tell()
        if pending:
            self.stream.seek(0)
            if self.queue.empty():
                self.queue.put(self.stream.read(pending))
            self.stream.seek(0)
        self.stream.write(buf)

    def flush(self):
        # Called when recording stops: release the queue and signal the end.
        self.queue.close()
        self.queue.join_thread()
        self.finished.set()
class QueueOutputH264(object):
    """File-like sink for picamera's H.264 encoder output.

    Unlike the MJPEG variant there is no frame marker to key on, so every
    write() offers the previously buffered chunk to ``queue`` (dropped
    when the consumer has not yet taken the last one).
    """

    def __init__(self, queue, finished):
        self.queue = queue
        self.finished = finished
        self.stream = io.BytesIO()

    def write(self, buf):
        # (The original wrapped this body in a pointless `if True:` guard.)
        size = self.stream.tell()
        if size:
            self.stream.seek(0)
            # Only hand over the chunk when the consumer has drained the
            # previous one; otherwise it is silently dropped.
            if self.queue.empty():
                self.queue.put(self.stream.read(size))
            self.stream.seek(0)
        self.stream.write(buf)

    def flush(self):
        # Called when recording stops: release the queue and signal the end.
        self.queue.close()
        self.queue.join_thread()
        self.finished.set()
def do_capture(queueH264, queueMJPEG, stopCap):
    """Capture process: record the Pi camera simultaneously as H.264
    (main port, downscaled to 420x234) and MJPEG (splitter port 2,
    672x384), pushing encoded chunks into the two queues until stopCap
    is set.
    """
    print('Capture started')
    with picamera.PiCamera(sensor_mode=2) as camera:
        # Fixed 720p/15fps pipeline with stabilization, denoise and
        # exposure tuning; vflip because the camera is mounted upside down
        # -- presumably; confirm against the installation.
        camera.resolution=(1280, 720)
        camera.framerate=15
        camera.video_stabilization = True
        camera.video_denoise = True
        camera.vflip = True
        camera.sharpness = 20
        camera.meter_mode = 'matrix'
        camera.awb_mode = 'auto'
        camera.saturation = 2
        camera.contrast = 10
        camera.drc_strength = 'high'
        camera.exposure_mode = 'antishake'
        camera.exposure_compensation = 3
        # Both sinks share stopCap as their "finished" event.
        outputH264 = QueueOutputH264(queueH264, stopCap)
        outputMJPEG = QueueOutputMJPEG(queueMJPEG, stopCap)
        camera.start_recording(outputH264, format='h264', profile='high', intra_period=30, sps_timing=True, bitrate=4000000, quality=25, resize=(420,234))
        camera.start_recording(outputMJPEG, splitter_port=2, format='mjpeg', resize=(672,384))
        # Spin until another process requests shutdown.
        while not stopCap.wait(0): #camera.wait_recording(100)
            pass
        camera.stop_recording(splitter_port=2)
        camera.stop_recording()
        # Give the encoder callbacks a moment to drain before closing.
        time.sleep(0.2)
        camera.close()
def do_detection(ImageQueue, RectQueue, finished):
    """Detection process: pull MJPEG frames from ImageQueue, run the
    OpenVINO pedestrian-detection model asynchronously on a Myriad stick,
    and publish detections with confidence > 0.6 to RectQueue until
    ``finished`` is set.
    """
    net = cv2.dnn.readNet('pedestrian-detection-adas-002.xml', 'pedestrian-detection-adas-002.bin')
    net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD)
    st = time.monotonic()
    cnt = 1
    fps = 0
    FutureOuts = []
    ospid = os.getpid()
    while not finished.wait(0):
        stream = None
        try:
            # Non-blocking: skip this iteration if no frame is available.
            # NOTE(review): bare except also hides real errors.
            stream = io.BytesIO(ImageQueue.get(False))
        except:
            pass
        # Cap the async pipeline depth at 3 in-flight inferences.
        if len(FutureOuts) == 3:
            stream = None
        if not stream is None:
            stream.seek(0)
            # NOTE(review): if this decode fails, `image` keeps its previous
            # value (or is unbound on the first pass) -- confirm intended.
            try:
                image = Image.open(stream).convert('RGB')
            except:
                pass
            # PIL gives RGB; reverse the channel axis to BGR for OpenCV.
            cv_img = np.array(image)
            cv_img = cv_img[:, :, ::-1].copy()
            blob = cv2.dnn.blobFromImage(cv_img, 1.0, size=(672,384),\
                mean=(127.5, 127.5, 127.5), swapRB=False, crop=False)
            net.setInput(blob)
            FutureOuts.append(net.forwardAsync())
        # Harvest every inference that has already completed.
        while FutureOuts and FutureOuts[0].wait_for(0):
            out1 = FutureOuts[0].get()
            if cnt >= 20:
                fps = cnt/(time.monotonic() - st)
                st = time.monotonic()
                cnt = 1
                # NOTE(review): '%FPS' formats fps via the '%F' float
                # specifier; '%dFPS' was probably intended.
                print('%d: Detecting at %FPS' % (ospid, fps))
            else:
                cnt += 1
            props = []
            # Detections arrive as rows of [_, class, conf, xmin, ymin, xmax, ymax].
            for detection in out1.reshape(-1,7):
                inf = []
                obj_type = int(detection[1]-1)
                conf = float(detection[2])
                xmin = float(detection[3])
                ymin = float(detection[4])
                xmax = float(detection[5])
                ymax = float(detection[6])
                if conf > 0.6:
                    prop = {'coord': (xmin, ymin, xmax, ymax), 'type': obj_type, 'conf': conf}
                    props.append(prop)
            # Publish only when the consumer drained the last batch.
            if RectQueue.empty():
                RectQueue.put(props)
            del FutureOuts[0]
class StreamingHandler(server.BaseHTTPRequestHandler):
    """HTTP handler serving three endpoints from the queues attached to
    the server instance by server_start():

      /data.html   -- latest pedestrian-detection results as text
      /stream.mjpg -- multipart MJPEG preview stream
      anything else -- raw H.264 elementary stream (no HTTP framing)
    """

    def do_GET(self):
        if '/data.html' in self.path:
            # "ffffd9" / "ffaaee" markers delimit the payload for the client.
            strprops = "ffffd9"
            if not self.server.DetectQueue.empty():
                props = self.server.DetectQueue.get(False)
                pcnt = 0
                for prop in props:
                    strprops += 'Coord = ({0:4f}, {1:4f}, {2:4f}, {3:4f}. ID = {4:d}\n'.format(
                        prop['coord'][0], prop['coord'][1], prop['coord'][2], prop['coord'][3], pcnt)
                    pcnt += 1
            strprops += "ffaaee"
            content = strprops.encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.send_header('Content-Length', len(content))
            self.end_headers()
            self.wfile.write(content)
        elif '/stream.mjpg' in self.path:
            self.send_response(200)
            self.send_header('Age', 0)
            self.send_header('Cache-Control', 'no-cache, private')
            self.send_header('Pragma', 'no-cache')
            self.send_header('Content-Type', 'multipart/x-mixed-replace; boundary=FRAME')
            self.end_headers()
            # Busy-wait until the capture process has produced one frame.
            while self.server.MJPEGQueue.empty():
                pass
            buf = io.BytesIO(self.server.MJPEGQueue.get())
            try:
                st = time.monotonic()
                cnt = 1
                fps = 0
                ospid = os.getpid()
                while True:
                    # Refresh the frame when a new one is available;
                    # otherwise re-send the previous frame.
                    if not self.server.MJPEGQueue.empty():
                        buf = io.BytesIO(self.server.MJPEGQueue.get(False))
                        if cnt >= 20:
                            fps = cnt / (time.monotonic() - st)
                            st = time.monotonic()
                            cnt = 1
                            print('%d: Streaming MJPEG at %dFPS' % (ospid, fps))
                        else:
                            cnt += 1
                    self.wfile.write(b'--FRAME\r\n')
                    self.send_header('Content-Type', 'image/jpeg')
                    self.send_header('Content-Length', len(buf.getvalue()))
                    self.end_headers()
                    self.wfile.write(buf.getvalue())
                    # BUGFIX: was b'\r\r' -- multipart parts must end CRLF.
                    self.wfile.write(b'\r\n')
            except Exception as e:
                # Client disconnected (broken pipe) or similar.
                # BUGFIX: logging-style '%s' placeholders were passed to
                # print() and never substituted.
                print('Removed streaming clients from MJPEG %s: %s'
                      % (self.client_address, str(e)))
        else:
            # Raw H.264 elementary stream: bytes only. The HTTP response
            # line/headers were deliberately disabled in the original.
            try:
                st2 = time.monotonic()
                cnt2 = 1
                fps2 = 0
                ospid2 = os.getpid()
                # BUGFIX: start from an empty buffer so the first pass
                # cannot hit an UnboundLocalError when the queue is empty.
                buf = io.BytesIO()
                # NOTE(review): when no new chunk is available this loop
                # re-sends the previous chunk, exactly as the original did;
                # confirm the consumer tolerates duplicated NAL units.
                while True:
                    if not self.server.H264Queue.empty():
                        frame = io.BytesIO(self.server.H264Queue.get(False))
                        buf = frame
                        if cnt2 >= 20:
                            fps2 = cnt2 / (time.monotonic() - st2)
                            st2 = time.monotonic()
                            cnt2 = 1
                            print('%d: Streaming H264 at %dFPS' % (ospid2, fps2))
                        else:
                            cnt2 += 1
                    self.wfile.write(buf.getvalue())
            except Exception as e:
                print('Removed streaming clients from H264 %s: %s'
                      % (self.client_address, str(e)))
class StreamingServer(socketserver.ThreadingMixIn, server.HTTPServer):
    """HTTP server that handles each client in its own thread.

    server_start() attaches MJPEGQueue, H264Queue and DetectQueue
    attributes to instances of this class for the handler to read.
    """
    # Allow quick restarts on the same port.
    allow_reuse_address = True
    # Worker threads die with the main process.
    daemon_threads = True
def server_start(MJPEGQueue, H264Queue, DetectQueue, port, servstop):
    """Run the streaming HTTP server on ``port`` until it dies, then set
    ``servstop`` so the main process can tear everything down.

    The three queues are attached to the server instance, where
    StreamingHandler reads them.
    """
    try:
        address = ('', port)
        # Renamed from `server`, which shadowed the imported http.server
        # module inside this function.
        httpd = StreamingServer(address, StreamingHandler)
        httpd.MJPEGQueue = MJPEGQueue
        httpd.DetectQueue = DetectQueue
        httpd.H264Queue = H264Queue
        print('Started server')
        httpd.serve_forever()
    finally:
        servstop.set()
if __name__ == '__main__':
    # Inter-process channels, bounded to a single element so producers
    # drop frames instead of building a backlog.
    queueH264 = mp.Queue(1)
    queueMJPEG = mp.Queue(1)
    queueDetectRect = mp.Queue(1)
    # One shared shutdown event for capture, server and detection.
    # (Removed: queueProcessedLow/High and ServerStop were created but
    # never used anywhere.)
    stopCapture = mp.Event()
    capture_proc = mp.Process(target=do_capture, args=(queueH264, queueMJPEG, stopCapture), daemon=True)
    server_proc = mp.Process(target=server_start, args=(queueMJPEG, queueH264, queueDetectRect, 8000, stopCapture), daemon=True)
    detect_proc = mp.Process(target=do_detection, args=(queueMJPEG, queueDetectRect, stopCapture), daemon=True)
    capture_proc.start()
    detect_proc.start()
    server_proc.start()
    # Poll for the shutdown event, then tear down every worker.
    while True:
        if stopCapture.is_set():
            # (Removed a redundant stopCapture.set() -- the event is
            # already set when this branch runs.)
            time.sleep(0.1)
            capture_proc.terminate()
            server_proc.terminate()
            detect_proc.terminate()
            # BUGFIX: the original also called
            # proccessing_proc_lores.terminate(), but that name is never
            # defined anywhere and raised NameError during shutdown.
            break
        time.sleep(1)
3,199 | 1cf5ce11b965d65426ed421ef369954c59d7eba9 | # Generated by Django 3.2.4 on 2021-06-29 13:20
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename BlogList.about to BlogList.intro."""

    # Must run after the migration that introduced BlogDetail.
    dependencies = [
        ('blog', '0002_blogdetail'),
    ]

    operations = [
        migrations.RenameField(
            model_name='bloglist',
            old_name='about',
            new_name='intro',
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.