text stringlengths 38 1.54M |
|---|
from django.shortcuts import redirect
from django.urls import reverse
class UpdateProfileMiddleware:
    """Force authenticated users to complete their profile.

    A logged-in user without a profile is sent to the login page; one whose
    profile is missing a picture or biography is redirected to the profile
    page (unless they are already on the profile or logout page).
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        user = request.user
        # Anonymous visitors pass straight through.
        if user.is_anonymous:
            return self.get_response(request)
        profile = user.profile
        if not profile:
            return redirect('user_login')
        if not profile.picture or not profile.biography:
            exempt_paths = (reverse('user_profile'), reverse('user_logout'))
            if request.path not in exempt_paths:
                return redirect('user_profile')
        return self.get_response(request)
|
from model import *

# Training hyper-parameters.
epochs = 2
batch_size = 128

# Fix: fit() previously hard-coded epochs=1, silently ignoring the `epochs`
# variable defined above; pass the variable so the two stay in sync.
history = model.fit(x_train, y_train, epochs=epochs, batch_size=batch_size, validation_split=0.1)
model.save('AttentionX.h5')
# -*- coding:utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
from torchvision.transforms.functional import normalize
import os
from .resnet_market1501 import resnet50
import sys
# ReID Loss
class ReIDLoss(nn.Module):
    """Perceptual loss built on a pretrained Market-1501 person-ReID backbone.

    Generated and reference images are passed through a (by default frozen)
    resnet50 ReID network; their L2-normalized features are compared with an
    L1 loss, plus an intra-identity pairwise-distance term (uvmap_l2_loss).
    """

    def __init__(self, model_path, num_classes=1501, size=(384, 128), gpu_ids=None, margin=0.3,is_trainable=False):
        # model_path: checkpoint of the ReID backbone (must contain 'state_dict').
        # size: (H, W) inputs are resized to before feature extraction.
        # gpu_ids: None -> stay on CPU; any other value -> move model/buffers to CUDA.
        # margin: margin for the ranking loss used by the triplet helpers.
        # is_trainable: whether backbone parameters receive gradients (False = frozen).
        super(ReIDLoss, self).__init__()
        self.size = size
        self.gpu_ids = gpu_ids
        model_structure = resnet50(num_features=256, dropout=0.5, num_classes=num_classes, cut_at_pooling=False,
                                   FCN=True)
        # load checkpoint (map tensors to CPU storage when no GPU was requested)
        if self.gpu_ids is None:
            checkpoint = torch.load(model_path, map_location=lambda storage, loc: storage)
        else:
            checkpoint = torch.load(model_path)
        self.margin = margin
        if self.margin is not None:
            self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        else:
            raise ValueError('self.margin is None!')
        # Keep only checkpoint tensors whose names exist in the freshly built model.
        model_dict = model_structure.state_dict()
        checkpoint_load = {k: v for k, v in (checkpoint['state_dict']).items() if k in model_dict}
        model_dict.update(checkpoint_load)
        model_structure.load_state_dict(model_dict)
        self.model = model_structure
        #self.model.eval()
        if gpu_ids is not None:
            self.model.cuda()
        self.is_trainable = is_trainable
        for param in self.model.parameters():
            param.requires_grad = self.is_trainable
        self.triple_feature_loss = nn.L1Loss()
        self.softmax_feature_loss = nn.BCELoss()  # NOTE(review): defined but never used in forward()
        # ImageNet mean/std expanded to channel-first (3, 384, 128) buffers.
        self.normalize_mean = torch.Tensor([0.485, 0.456, 0.406])
        self.normalize_mean = self.normalize_mean.expand(384, 128, 3).permute(2, 0, 1)  # rearrange to channel-first
        self.normalize_std = torch.Tensor([0.229, 0.224, 0.225])
        self.normalize_std = self.normalize_std.expand(384, 128, 3).permute(2, 0, 1)  # rearrange to channel-first
        if gpu_ids is not None:
            self.normalize_std = self.normalize_std.cuda()
            self.normalize_mean = self.normalize_mean.cuda()

    def extract_feature(self, inputs):
        """Concatenate the L2-normalized, flattened backbone outputs 0 and 2."""
        # 2048*6+256*6
        out = self.model(inputs)
        o1 = out[0].view(out[0].size(0), -1)
        o1 = o1 / o1.norm(2, 1, keepdim=True).expand_as(o1)
        o2 = out[2].view(out[2].size(0), -1)
        o2 = o2 / o2.norm(2, 1, keepdim=True).expand_as(o2)
        #feature_tri = torch.cat((o1,o2),dim=1)
        #feature_tri = feature_tri / feature_tri.norm(2, 1, keepdim=True).expand_as(feature_tri)
        feature_tri = torch.cat((o1,o2),dim=1)
        return feature_tri

    def preprocess(self, data):
        """
        The input image is normalized in [-1, 1] and in BGR format; convert it
        to the format accepted by the ReID model (RGB, resized to self.size,
        ImageNet-normalized).
        :param data: tensor in [-1, 1], BGR channel order
        :return: preprocessed RGB tensor
        """
        data_unnorm = data / 2.0 + 0.5
        permute = [2, 1, 0]  # BGR -> RGB channel reordering
        data_rgb_unnorm = data_unnorm[:, permute]
        # NOTE: F.upsample is deprecated in newer torch (use F.interpolate).
        data_rgb_unnorm = F.upsample(data_rgb_unnorm, size=self.size, mode='bilinear')
        data_rgb = (data_rgb_unnorm - self.normalize_mean) / self.normalize_std
        return data_rgb

    # label is the original image
    # data is the generated image
    # targets are the person ids (pids)
    def forward(self, data, label, targets):
        assert label.requires_grad is False
        data = self.preprocess(data)
        label = self.preprocess(label)
        feature_tri_data = self.extract_feature(data)
        feature_tri_label = self.extract_feature(label)
        # avoid bugs: never backprop through the reference-image features
        feature_tri_label.detach_()
        feature_tri_label.requires_grad = False
        # Returns a 6-tuple; slots 2-5 are zero placeholders, presumably kept
        # for a fixed caller-side unpacking contract — TODO confirm with callers.
        return self.triple_feature_loss(feature_tri_data, feature_tri_label),\
               torch.Tensor([0]).cuda(),\
               torch.Tensor([0]).cuda(),\
               torch.Tensor([0]).cuda(),\
               torch.Tensor([0]).cuda(),\
               self.uvmap_l2_loss(feature_tri_data,targets)

    def uvmap_l2_loss(self,feature_tri_data,targets):
        """Sum of pairwise feature distances between samples sharing a pid."""
        dist_mat = self.euclidean_dist(feature_tri_data, feature_tri_data)
        N = dist_mat.size(0)
        # is_pos[i, j] == 1 when samples i and j have the same target id.
        is_pos = targets.expand(N, N).eq(targets.expand(N, N).t())
        is_pos = is_pos.type(torch.FloatTensor)
        # NOTE(review): unconditionally moves to CUDA even when gpu_ids is
        # None — confirm this path is only reached on GPU runs.
        is_pos = is_pos.cuda()
        dist_mat = dist_mat.cuda()
        return torch.sum(dist_mat * is_pos)

    def euclidean_dist(self,x, y):
        # Euclidean distance computed directly with matrix operations.
        """
        Args:
          x: pytorch Variable, with shape [m, d]
          y: pytorch Variable, with shape [n, d]
        Returns:
          dist: pytorch Variable, with shape [m, n]
        """
        m, n = x.size(0), y.size(0)
        xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)
        yy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t()
        dist = xx + yy
        # dist = ||x||^2 + ||y||^2 - 2*x.y^T; this positional addmm_ signature
        # is the legacy (pre-1.5 torch) argument order.
        dist.addmm_(1, -2, x, y.t())
        dist = dist.clamp(min=1e-12).sqrt()  # for numerical stability
        return dist

    def hard_example_mining(self,dist_mat, labels, return_inds=False):
        """For each anchor, find the hardest positive and negative sample.
        Args:
          dist_mat: pytorch Variable, pair wise distance between samples, shape [N, N]
          labels: pytorch LongTensor, with shape [N]
          return_inds: whether to return the indices. Save time if `False`(?)
        Returns:
          dist_ap: pytorch Variable, distance(anchor, positive); shape [N]
          dist_an: pytorch Variable, distance(anchor, negative); shape [N]
          p_inds: pytorch LongTensor, with shape [N];
            indices of selected hard positive samples; 0 <= p_inds[i] <= N - 1
          n_inds: pytorch LongTensor, with shape [N];
            indices of selected hard negative samples; 0 <= n_inds[i] <= N - 1
        NOTE: Only consider the case in which all labels have same num of samples,
          thus we can cope with all anchors in parallel.
        """
        assert len(dist_mat.size()) == 2
        assert dist_mat.size(0) == dist_mat.size(1)
        N = dist_mat.size(0)
        # shape [N, N]
        is_pos = labels.expand(N, N).eq(labels.expand(N, N).t())
        is_neg = labels.expand(N, N).ne(labels.expand(N, N).t())
        # `dist_ap` means distance(anchor, positive)
        # both `dist_ap` and `relative_p_inds` with shape [N, 1]
        dist_ap, relative_p_inds = torch.max(
            dist_mat[is_pos].contiguous().view(N, -1), 1, keepdim=True)
        # `dist_an` means distance(anchor, negative)
        # both `dist_an` and `relative_n_inds` with shape [N, 1]
        dist_an, relative_n_inds = torch.min(
            dist_mat[is_neg].contiguous().view(N, -1), 1, keepdim=True)
        # shape [N]
        dist_ap = dist_ap.squeeze(1)
        dist_an = dist_an.squeeze(1)
        if return_inds:
            # shape [N, N]
            ind = (labels.new().resize_as_(labels)
                   .copy_(torch.arange(0, N).long())
                   .unsqueeze(0).expand(N, N))
            # shape [N, 1]
            p_inds = torch.gather(
                ind[is_pos].contiguous().view(N, -1), 1, relative_p_inds.data)
            n_inds = torch.gather(
                ind[is_neg].contiguous().view(N, -1), 1, relative_n_inds.data)
            # shape [N]
            p_inds = p_inds.squeeze(1)
            n_inds = n_inds.squeeze(1)
            return dist_ap, dist_an, p_inds, n_inds
        return dist_ap, dist_an

    def triplet_hard_Loss(self,global_feat,feature_tri_label,labels):
        """Modified from Tong Xiao's open-reid (https://github.com/Cysu/open-reid).
        Related Triplet Loss theory can be found in paper 'In Defense of the Triplet
        Loss for Person Re-Identification'."""
        # no normalize
        dist_mat = self.euclidean_dist(global_feat, feature_tri_label)
        dist_ap, dist_an = self.hard_example_mining(
            dist_mat, labels)
        # y = 1 tells MarginRankingLoss that dist_an should exceed dist_ap.
        y = dist_an.new().resize_as_(dist_an).fill_(1)
        loss = self.ranking_loss(dist_an, dist_ap, y)
        return loss

    def triplet_Loss(self,global_feat,feature_tri_label,labels):
        """Modified from Tong Xiao's open-reid (https://github.com/Cysu/open-reid).
        Related Triplet Loss theory can be found in paper 'In Defense of the Triplet
        Loss for Person Re-Identification'."""
        # no normalize
        dist_mat = self.euclidean_dist(global_feat, feature_tri_label)
        # Positive distance: each generated-image feature paired with its own
        # original-image feature (the diagonal of the distance matrix).
        dist_ap = torch.diagonal(dist_mat)
        _, dist_an = self.hard_example_mining(
            dist_mat, labels)
        y = dist_an.new().resize_as_(dist_an).fill_(1)
        loss = self.ranking_loss(dist_an, dist_ap, y)
        return loss
if __name__ == '__main__':
    # Smoke test: compare an input image against its rendered counterpart.
    import cv2
    from torchvision import transforms as T
    # NOTE(review): `trans` is built but never applied below.
    trans = T.Compose([
        # T.ColorJitter(brightness=0.1, contrast=0.1, saturation=0.1, hue=0),
        T.Resize((384, 128)),
        T.ToTensor(),
        T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
    img1 = cv2.imread('/home/wangjian02/Projects/TextureGAN/tmp/test_img/in/0112_c1s1_019001_00.jpg')
    img1 = (img1 / 255. - 0.5) * 2.0  # scale pixels to [-1, 1] (BGR, HWC)
    img1 = torch.from_numpy(img1).permute(2, 0, 1).float()  # HWC -> CHW
    img1 = img1.unsqueeze(0)  # add batch dimension
    img1.requires_grad = True
    img2 = cv2.imread('/home/wangjian02/Projects/TextureGAN/tmp/test_img/out_render_prw/0112_c1s1_019001_00.jpg')
    img2 = (img2 / 255. - 0.5) * 2.0
    img2 = torch.from_numpy(img2).permute(2, 0, 1).float()
    img2 = img2.unsqueeze(0)
    loss = ReIDLoss(model_path='/home/wangjian02/Projects/pcb_market1501_best/checkpoint_120.pth.tar')
    # NOTE(review): forward() is declared as forward(data, label, targets) and
    # returns a 6-tuple, so this 2-argument call and the .backward() on the
    # tuple would fail as written — confirm the intended invocation.
    l = loss(img1, img2)
    l.backward()
    print(l)
|
from graphics import *
## creates a polygon
## list of points = points=[point( ), point( )]
## witch = Polygon(points)
## accumulation and know pattern and loop and get list bigger
##nums = [] is a list
## for i in range(4):
## val=eval(input("num? "))
## nums.append(val)?
## points[]= empty list
## for i in range(10):
## clickPt=win.getMouse()
## points.append(clickPt)
## witch=Polygon(points)
## witch.draw(win)
def polygon():
    """Collect five mouse clicks, echo each point, then draw the polygon."""
    width = 400
    height = 500
    win = GraphWin("Points", width, height)
    clicks = []
    for _ in range(5):
        pt = win.getMouse()
        pt.draw(win)
        clicks.append(pt)
        print("Point " + "(" + str(pt.getX()) + "," + str(pt.getY()) + ")")
    shape = Polygon(clicks)
    shape.draw(win)
    # Wait for one more click before closing the window.
    win.getMouse()
    win.close()
def face():
    """Open a face window; the eye-drawing code is still commented-out notes."""
    # Fix: was GraphWIn (capital I), which raises NameError at call time.
    win = GraphWin("face", 200, 200)
    winWidth = win.getWidth()
    winHeight = win.getHeight()
    ## alias = another way to refer to the same object in memory
    ## leftEye=Circle(Point(70,70),15)
    ## rightEye=leftEye : wrong
    ## whatever happens to the alias happens to the original
    ## rightEye=leftEye.clone(): correct
    # clone creates a replica in new memory, so e.g. the left eye is unaffected
    # clones are not drawn automatically; you have to draw them yourself
    # rightEye.move(60,0)
    ## leftEye.draw(win)
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Demo script exercising common OpenCV operations on a single image:
# loading, resizing, blurring, sharpening, contrast enhancement, color
# masking, binarization, edge/corner detection, and plotting.
url1='/home/subhankar/subhankar_110118084.jpeg'
url='/home/subhankar/plane.jpeg'
image=cv2.imread(url,cv2.IMREAD_GRAYSCALE)
#plt.imshow(image)
#plt.show()
image_BGR=cv2.imread(url,cv2.IMREAD_COLOR)
image_RGB=cv2.cvtColor(image_BGR,cv2.COLOR_BGR2RGB)
#Resize image
image_size=cv2.resize(image_RGB,(100,100))
#Bluring Image
image_blur=cv2.blur(image_RGB,(100,100))
#Sharpening kernel for the image
kernel=np.array([[0,-1,0],
                 [-1,5,-1],
                 [0,-1,0]])
#print(kernel)
print("Hello")
#Enhance contrast: sharpen grayscale, then equalize the luma (Y) channel
image_kernel=cv2.filter2D(image,-1,kernel)
image_yuv=cv2.cvtColor(image_BGR,cv2.COLOR_BGR2YUV)
image_yuv[:,:,0] = cv2.equalizeHist(image_yuv[:,:,0])
image_enhance_contrast=cv2.cvtColor(image_yuv,cv2.COLOR_YUV2BGR)
#Differentiating the colors in image: keep only HSV pixels in the blue band
image_HSV=cv2.cvtColor(image_BGR,cv2.COLOR_BGR2HSV)
lower_blue=np.array([50,100,50])
upper_blue=np.array([130,255,255])
mask=cv2.inRange(image_HSV,lower_blue,upper_blue)
image_bgr_mask=cv2.bitwise_and(image_BGR,image_BGR,mask=mask)
image_rbg_mask=cv2.cvtColor(image_bgr_mask,cv2.COLOR_BGR2RGB)
#Image binarization (adaptive Gaussian threshold)
max_value=255
max_neighbor=99
subtract_from_mean=10
image_binarization=cv2.adaptiveThreshold(image,max_value,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,max_neighbor,subtract_from_mean)
#Remove Background
#rectangle=(0,56,256,150)
#mask=np.zeros(image_RGB.shape[:2],np.uint8)
#bgd_model=np.zeros((1,65),np.float64)
#fgd_model=np.zeros((1,65),np.uint8)
##cv2.grabCut(image_RGB,mask,rectangle,bgd_model,fgd_model,5,cv2.GC_INIT_WITH_RECT)
#mask_2=np.where((mask==2) | (mask==0),0,1).astype('uint8')
##image_rgb_nobg=img_RGB * mask_2[:,:,np.newaxis]
#Detecting Edges: auto-Canny thresholds around the median intensity
median_intensity=np.median(image)
lower_threshold=int(max(0,(1.0-0.33)*median_intensity))
# Fix: was max(255, ...), which forced the upper threshold to be >= 255 and
# suppressed nearly all edges; auto-Canny clamps DOWN to 255 with min().
upper_threshold=int(min(255,(1.0+0.33)*median_intensity))
image_Canny=cv2.Canny(image,lower_threshold,upper_threshold)
#Detecting Corners (Harris)
image=np.float32(image)  # cornerHarris requires float32 input
block_size=2
aperture=29
free_parameter=0.04
detector_response=cv2.cornerHarris(image,block_size,aperture,free_parameter)
detector_response=cv2.dilate(detector_response,None)
threshold=0.02
# Paint strong Harris responses white in the color image.
image_BGR[detector_response > detector_response.max()*threshold]=[255,255,255]
image_gray=cv2.cvtColor(image_BGR,cv2.COLOR_BGR2GRAY)
#Good features to track (Shi-Tomasi corners)
corners_to_detect=10
min_quality_score=0.05
min_distance=25
corners=cv2.goodFeaturesToTrack(image,corners_to_detect,min_quality_score,min_distance)
corners=np.float32(corners)
for corner in corners:
    x,y=corner[0]
    # Fix: cv2.circle requires integer center coordinates; the float32
    # values from goodFeaturesToTrack raise an error in OpenCV >= 4.
    cv2.circle(image_BGR,(int(x),int(y)),25,(255,255,255),-1)
image_gray=cv2.cvtColor(image_BGR,cv2.COLOR_BGR2GRAY)
#Plotting the final grayscale image without axis ticks
plt.imshow(image_gray,cmap="gray"),plt.xticks([]),plt.yticks([])
plt.show()
|
import sys
#from pathlib import Path
import lib.Assets as Assets
import lib.Character as Character
import lib.PurchaseClones as PurchaseClones
import lib.Challenge as Challenge
# Purchase a clone using Ovid's SVO (subject-verb-object) interface.
objs_returned = PurchaseClones.PurchaseClones().purchase_clone(5)
# Same purchase flow via Python's chaining methods, with an invalid count.
objs_returned = PurchaseClones.PurchaseClones().purchase_clone2(-1)
# Accept an arm-wrestling challenge using Ovid's SVO interface.
objs_returned = Challenge.Challenge().arm_wrestling_challenge(Character.Character, -1)
|
from google.appengine.api import users
from google.appengine.ext import db
from models.city import City
from models.company import Company
from models.show import Show
from models.venue import Venue
from models.performance import Performance
import unittest, datetime, random
from resources.webtest import TestApp
from handlers.public import main as public_main
from handlers.admin import main as admin_main
from resources.stubout import StubOutForTesting
from resources.mox import Mox
class ExtendedTestCase(unittest.TestCase):
    """Base test case: wires up the public/admin WSGI test apps, login stubs,
    and a small fixture data set; wipes the datastore after each test."""
    # Apps under test, wrapped for simulated HTTP requests.
    public_app = TestApp(public_main.createApp())
    admin_app = TestApp(admin_main.createApp())
    # Separate stub sets so login stubs can be unset independently of others.
    _login_stubs = StubOutForTesting()
    stubs = StubOutForTesting()

    def random(self):
        """Return a pseudo-random md5 hex string for unique test values."""
        # NOTE(review): time.clock() was removed in Python 3.8 — this is
        # Python 2 / GAE-era code; also this method shadows the `random`
        # module name on instances.
        import hashlib, time
        return hashlib.md5((time.clock()*random.random()).__str__()).hexdigest()

    def setUp(self):
        # Fresh fixtures, a logged-in admin user, and a new Mox per test.
        self.make_test_data()
        self.login()
        self.mox = Mox()

    def tearDown(self):
        # Log out, drop stubs, and delete every fixture entity.
        self.logout()
        self.stubs.UnsetAll()
        for model in [City, Company, Performance, Show, Venue]:
            for datum in model.all():
                datum.delete()

    def login(self, user="sudhir.j@gmail.com", admin=True):
        """Stub the GAE users API so `user` appears logged in (admin by default)."""
        self._login_stubs.Set(users, 'get_current_user', lambda user = user : users.User(user))
        self._login_stubs.Set(users, 'is_current_user_admin', lambda admin = admin : admin)

    def logout(self):
        """Remove the login stubs installed by login()."""
        self._login_stubs.UnsetAll()

    def make_test_data(self):
        """Create the shared fixtures: company, show, two cities, a venue,
        plus `now` (minute precision) and one/two/three-day offsets."""
        now = datetime.datetime.now()
        # Truncate to minute precision so datetime comparisons are stable.
        self.now = datetime.datetime(year = now.date().year, month = now.date().month, day = now.date().day, hour = now.time().hour, minute = now.time().minute)
        self.one_day_later = self.now + datetime.timedelta(days=1)
        self.two_days_later = self.now + datetime.timedelta(days=2)
        self.three_days_later = self.now + datetime.timedelta(days=3)
        self.evam = Company(name='Evam Theatre Company', url='evam')
        self.evam.put()
        self.hamlet = Show(name='Hamlet', url='hamlet', company=self.evam)
        self.hamlet.put()
        self.chennai = City(name='Chennai', url='chennai')
        self.chennai.put()
        self.bangalore = City(name='Bangalore', url='bangalore')
        self.bangalore.put()
        self.lady_andal = Venue(name='Lady Andal', url='lady_andal', city=self.chennai)
        self.lady_andal.put()

    def make_performance(self, show, venue, dt):
        """Persist and return a Performance linking `show` and `venue` at `dt`."""
        perf = Performance(show=show, venue=venue, utc_date_time=dt)
        perf.put()
        return perf
|
## Ch04 SC25
# Print a yearly compound-interest table for a fixed-rate savings balance.

RATE = 5.0              # annual interest rate, in percent
INITIAL_BALANCE = 10000.0


def compound_balances(initial, rate, num_years):
    """Yield (year, balance) pairs after applying `rate`% interest each year.

    initial: starting balance; rate: percent per year; num_years: number of
    yearly compounding steps (0 yields nothing).
    """
    balance = initial
    for year in range(1, num_years + 1):
        balance = balance + balance * rate / 100
        yield year, balance


# Guarding the interactive part keeps the module importable (and testable)
# without prompting; run as a script it behaves exactly as before.
if __name__ == '__main__':
    numYears = int(input("Enter number of years: "))
    for year, balance in compound_balances(INITIAL_BALANCE, RATE, numYears):
        print("%4d %10.2f" % (year, balance))
from typing import TYPE_CHECKING, Optional
from PyQt5.QtWidgets import QLabel, QVBoxLayout, QGridLayout, QPushButton, QComboBox, QLineEdit, QSpacerItem, QWidget, QHBoxLayout
from electrum.i18n import _
from electrum.transaction import PartialTxOutput, PartialTransaction
from electrum.lnutil import MIN_FUNDING_SAT
from electrum.lnworker import hardcoded_trampoline_nodes
from electrum import ecc
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates
from electrum.gui import messages
from . import util
from .util import (WindowModalDialog, Buttons, OkButton, CancelButton,
EnterButton, ColorScheme, WWLabel, read_QIcon, IconLabel,
char_width_in_lineedit)
from .amountedit import BTCAmountEdit
from .my_treeview import create_toolbar_with_menu
if TYPE_CHECKING:
from .main_window import ElectrumWindow
class NewChannelDialog(WindowModalDialog):
    """Modal dialog for opening a new Lightning channel.

    The remote node is chosen either free-form (node ID / connection string,
    when a channel DB is available) or from the hardcoded trampoline list;
    the funding amount has Min/Max/Clear helpers.
    """

    def __init__(self, window: 'ElectrumWindow', amount_sat: Optional[int] = None, min_amount_sat: Optional[int] = None):
        WindowModalDialog.__init__(self, window, _('Open Channel'))
        self.window = window
        self.network = window.network
        self.config = window.config
        self.lnworker = self.window.wallet.lnworker
        self.trampolines = hardcoded_trampoline_nodes()
        self.trampoline_names = list(self.trampolines.keys())
        # Fall back to the protocol-level minimum when no floor is supplied.
        self.min_amount_sat = min_amount_sat or MIN_FUNDING_SAT
        vbox = QVBoxLayout(self)
        toolbar, menu = create_toolbar_with_menu(self.config, '')
        recov_tooltip = messages.to_rtf(messages.MSG_RECOVERABLE_CHANNELS)
        menu.addConfig(
            _("Create recoverable channels"), self.config.cv.LIGHTNING_USE_RECOVERABLE_CHANNELS,
            tooltip=recov_tooltip,
        ).setEnabled(self.lnworker.can_have_recoverable_channels())
        vbox.addLayout(toolbar)
        msg = _('Choose a remote node and an amount to fund the channel.')
        msg += '\n' + _('You need to put at least') + ': ' + self.window.format_amount_and_units(self.min_amount_sat)
        vbox.addWidget(WWLabel(msg))
        if self.network.channel_db:
            # Gossip/channel DB available: free-form peer entry + suggestion button.
            vbox.addWidget(QLabel(_('Enter Remote Node ID or connection string or invoice')))
            self.remote_nodeid = QLineEdit()
            self.remote_nodeid.setMinimumWidth(700)
            self.suggest_button = QPushButton(self, text=_('Suggest Peer'))
            self.suggest_button.clicked.connect(self.on_suggest)
        else:
            # Trampoline mode: restrict the peer to the hardcoded list.
            self.trampoline_combo = QComboBox()
            self.trampoline_combo.addItems(self.trampoline_names)
            self.trampoline_combo.setCurrentIndex(1)
        self.amount_e = BTCAmountEdit(self.window.get_decimal_point)
        self.amount_e.setAmount(amount_sat)
        btn_width = 10 * char_width_in_lineedit()
        self.min_button = EnterButton(_("Min"), self.spend_min)
        self.min_button.setEnabled(bool(self.min_amount_sat))
        self.min_button.setFixedWidth(btn_width)
        self.max_button = EnterButton(_("Max"), self.spend_max)
        self.max_button.setFixedWidth(btn_width)
        self.max_button.setCheckable(True)
        self.clear_button = QPushButton(self, text=_('Clear'))
        self.clear_button.clicked.connect(self.on_clear)
        self.clear_button.setFixedWidth(btn_width)
        h = QGridLayout()
        if self.network.channel_db:
            h.addWidget(QLabel(_('Remote Node ID')), 0, 0)
            h.addWidget(self.remote_nodeid, 0, 1, 1, 4)
            h.addWidget(self.suggest_button, 0, 5)
        else:
            h.addWidget(QLabel(_('Remote Node')), 0, 0)
            h.addWidget(self.trampoline_combo, 0, 1, 1, 4)
        h.addWidget(QLabel('Amount'), 2, 0)
        amt_hbox = QHBoxLayout()
        amt_hbox.setContentsMargins(0, 0, 0, 0)
        amt_hbox.addWidget(self.amount_e)
        amt_hbox.addWidget(self.min_button)
        amt_hbox.addWidget(self.max_button)
        amt_hbox.addWidget(self.clear_button)
        amt_hbox.addStretch()
        h.addLayout(amt_hbox, 2, 1, 1, 4)
        vbox.addLayout(h)
        vbox.addStretch()
        ok_button = OkButton(self)
        ok_button.setDefault(True)
        vbox.addLayout(Buttons(CancelButton(self), ok_button))

    def on_suggest(self):
        """Fill the node-ID field with a peer suggested by the LN worker."""
        self.network.start_gossip()
        nodeid = (self.lnworker.suggest_peer() or b"").hex()
        if not nodeid:
            # No suggestion yet: likely the gossip graph is still syncing.
            self.remote_nodeid.setText("")
            self.remote_nodeid.setPlaceholderText(
                "Please wait until the graph is synchronized to 30%, and then try again.")
        else:
            self.remote_nodeid.setText(nodeid)
        self.remote_nodeid.repaint()  # macOS hack for #6269

    def on_clear(self):
        """Reset the amount, the node-ID field, and the Max toggle."""
        self.amount_e.setText('')
        self.amount_e.setFrozen(False)
        self.amount_e.repaint()  # macOS hack for #6269
        if self.network.channel_db:
            self.remote_nodeid.setText('')
            self.remote_nodeid.repaint()  # macOS hack for #6269
        self.max_button.setChecked(False)
        self.max_button.repaint()  # macOS hack for #6269

    def spend_min(self):
        """Set the amount to the minimum allowed funding."""
        self.max_button.setChecked(False)
        self.amount_e.setAmount(self.min_amount_sat)

    def spend_max(self):
        """Set the amount to everything spendable (capped by the config max)."""
        self.amount_e.setFrozen(self.max_button.isChecked())
        if not self.max_button.isChecked():
            return
        # Build a dry-run funding tx to a dummy node to learn the spendable max.
        dummy_nodeid = ecc.GENERATOR.get_public_key_bytes(compressed=True)
        make_tx = self.window.mktx_for_open_channel(funding_sat='!', node_id=dummy_nodeid)
        try:
            tx = make_tx(None)
        except (NotEnoughFunds, NoDynamicFeeEstimates) as e:
            self.max_button.setChecked(False)
            self.amount_e.setFrozen(False)
            self.window.show_error(str(e))
            return
        amount = tx.output_value()
        amount = min(amount, self.config.LIGHTNING_MAX_FUNDING_SAT)
        self.amount_e.setAmount(amount)

    def run(self):
        """Show the dialog; on OK, validate input and initiate the channel open.

        Returns True when a channel open was initiated, otherwise None.
        """
        if not self.exec_():
            return
        if self.max_button.isChecked() and self.amount_e.get_amount() < self.config.LIGHTNING_MAX_FUNDING_SAT:
            # if 'max' enabled and amount is strictly less than max allowed,
            # that means we have fewer coins than max allowed, and hence we can
            # spend all coins
            funding_sat = '!'
        else:
            funding_sat = self.amount_e.get_amount()
        if not funding_sat:
            return
        if funding_sat != '!':
            if self.min_amount_sat and funding_sat < self.min_amount_sat:
                self.window.show_error(_('Amount too low'))
                return
        if self.network.channel_db:
            connect_str = str(self.remote_nodeid.text()).strip()
        else:
            name = self.trampoline_names[self.trampoline_combo.currentIndex()]
            connect_str = str(self.trampolines[name])
        if not connect_str:
            return
        self.window.open_channel(connect_str, funding_sat, 0)
        return True
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'suivitvictimesinistre.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims for PyQt4 API v1 vs v2 (auto-generated file).
try:
    # API v1: QString is available, decode UTF-8 through it.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2: strings are already unicode; pass through unchanged.
    def _fromUtf8(s):
        return s
try:
    # Older Qt exposes an explicit UTF-8 encoding flag for translate().
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer Qt dropped the encoding argument from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_VictimesSinistre(object):
def setupUi(self, VictimesSinistre):
VictimesSinistre.setObjectName(_fromUtf8("VictimesSinistre"))
VictimesSinistre.setWindowModality(QtCore.Qt.ApplicationModal)
VictimesSinistre.resize(1088, 736)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial Narrow"))
VictimesSinistre.setFont(font)
VictimesSinistre.setUnifiedTitleAndToolBarOnMac(False)
self.centralwidget = QtGui.QWidget(VictimesSinistre)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout_4 = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.labelVictimesEnregistrees = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.labelVictimesEnregistrees.setFont(font)
self.labelVictimesEnregistrees.setAlignment(QtCore.Qt.AlignCenter)
self.labelVictimesEnregistrees.setObjectName(_fromUtf8("labelVictimesEnregistrees"))
self.verticalLayout.addWidget(self.labelVictimesEnregistrees)
self.victimesEnregistreesListView = QtGui.QListView(self.centralwidget)
self.victimesEnregistreesListView.setMinimumSize(QtCore.QSize(200, 550))
self.victimesEnregistreesListView.setMaximumSize(QtCore.QSize(250, 600))
self.victimesEnregistreesListView.setAlternatingRowColors(True)
self.victimesEnregistreesListView.setObjectName(_fromUtf8("victimesEnregistreesListView"))
self.verticalLayout.addWidget(self.victimesEnregistreesListView)
self.horizontalLayout_4.addLayout(self.verticalLayout)
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab_id_victime = QtGui.QWidget()
self.tab_id_victime.setObjectName(_fromUtf8("tab_id_victime"))
self.gridLayout_3 = QtGui.QGridLayout(self.tab_id_victime)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.frame_8 = QtGui.QFrame(self.tab_id_victime)
self.frame_8.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_8.setFrameShadow(QtGui.QFrame.Raised)
self.frame_8.setObjectName(_fromUtf8("frame_8"))
self.gridLayout_5 = QtGui.QGridLayout(self.frame_8)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.labelNumId = QtGui.QLabel(self.frame_8)
self.labelNumId.setObjectName(_fromUtf8("labelNumId"))
self.gridLayout_5.addWidget(self.labelNumId, 0, 0, 1, 1)
self.numIdComboBox = QtGui.QComboBox(self.frame_8)
self.numIdComboBox.setObjectName(_fromUtf8("numIdComboBox"))
self.gridLayout_5.addWidget(self.numIdComboBox, 0, 1, 1, 1)
self.labelPrenom = QtGui.QLabel(self.frame_8)
self.labelPrenom.setObjectName(_fromUtf8("labelPrenom"))
self.gridLayout_5.addWidget(self.labelPrenom, 1, 0, 1, 1)
self.prenomComboBox = QtGui.QComboBox(self.frame_8)
self.prenomComboBox.setObjectName(_fromUtf8("prenomComboBox"))
self.gridLayout_5.addWidget(self.prenomComboBox, 1, 1, 1, 1)
self.labelNom = QtGui.QLabel(self.frame_8)
self.labelNom.setObjectName(_fromUtf8("labelNom"))
self.gridLayout_5.addWidget(self.labelNom, 2, 0, 1, 1)
self.nomComboBox = QtGui.QComboBox(self.frame_8)
self.nomComboBox.setObjectName(_fromUtf8("nomComboBox"))
self.gridLayout_5.addWidget(self.nomComboBox, 2, 1, 1, 1)
self.labelProfession = QtGui.QLabel(self.frame_8)
self.labelProfession.setObjectName(_fromUtf8("labelProfession"))
self.gridLayout_5.addWidget(self.labelProfession, 3, 0, 1, 1)
self.professionComboBox = QtGui.QComboBox(self.frame_8)
self.professionComboBox.setObjectName(_fromUtf8("professionComboBox"))
self.gridLayout_5.addWidget(self.professionComboBox, 3, 1, 1, 1)
self.labelCivilite = QtGui.QLabel(self.frame_8)
self.labelCivilite.setObjectName(_fromUtf8("labelCivilite"))
self.gridLayout_5.addWidget(self.labelCivilite, 4, 0, 1, 1)
self.civiliteComboBox = QtGui.QComboBox(self.frame_8)
self.civiliteComboBox.setObjectName(_fromUtf8("civiliteComboBox"))
self.gridLayout_5.addWidget(self.civiliteComboBox, 4, 1, 1, 1)
self.labelLocalisation = QtGui.QLabel(self.frame_8)
self.labelLocalisation.setObjectName(_fromUtf8("labelLocalisation"))
self.gridLayout_5.addWidget(self.labelLocalisation, 5, 0, 1, 1)
self.localisationComboBox = QtGui.QComboBox(self.frame_8)
self.localisationComboBox.setObjectName(_fromUtf8("localisationComboBox"))
self.gridLayout_5.addWidget(self.localisationComboBox, 5, 1, 1, 1)
self.labelBP = QtGui.QLabel(self.frame_8)
self.labelBP.setObjectName(_fromUtf8("labelBP"))
self.gridLayout_5.addWidget(self.labelBP, 6, 0, 1, 1)
self.bpLineEdit = QtGui.QLineEdit(self.frame_8)
self.bpLineEdit.setObjectName(_fromUtf8("bpLineEdit"))
self.gridLayout_5.addWidget(self.bpLineEdit, 6, 1, 1, 1)
self.labelVille = QtGui.QLabel(self.frame_8)
self.labelVille.setObjectName(_fromUtf8("labelVille"))
self.gridLayout_5.addWidget(self.labelVille, 7, 0, 1, 1)
self.villeComboBox = QtGui.QComboBox(self.frame_8)
self.villeComboBox.setObjectName(_fromUtf8("villeComboBox"))
self.gridLayout_5.addWidget(self.villeComboBox, 7, 1, 1, 1)
self.labelAdresse = QtGui.QLabel(self.frame_8)
self.labelAdresse.setObjectName(_fromUtf8("labelAdresse"))
self.gridLayout_5.addWidget(self.labelAdresse, 8, 0, 1, 1)
self.adressLineEdit = QtGui.QLineEdit(self.frame_8)
self.adressLineEdit.setObjectName(_fromUtf8("adressLineEdit"))
self.gridLayout_5.addWidget(self.adressLineEdit, 8, 1, 1, 1)
self.labelSociete = QtGui.QLabel(self.frame_8)
self.labelSociete.setObjectName(_fromUtf8("labelSociete"))
self.gridLayout_5.addWidget(self.labelSociete, 9, 0, 1, 1)
self.steLineEdit = QtGui.QLineEdit(self.frame_8)
self.steLineEdit.setObjectName(_fromUtf8("steLineEdit"))
self.gridLayout_5.addWidget(self.steLineEdit, 9, 1, 1, 1)
self.labelPoste = QtGui.QLabel(self.frame_8)
self.labelPoste.setObjectName(_fromUtf8("labelPoste"))
self.gridLayout_5.addWidget(self.labelPoste, 10, 0, 1, 1)
self.posteLineEdit = QtGui.QLineEdit(self.frame_8)
self.posteLineEdit.setObjectName(_fromUtf8("posteLineEdit"))
self.gridLayout_5.addWidget(self.posteLineEdit, 10, 1, 1, 1)
self.labelDepartement = QtGui.QLabel(self.frame_8)
self.labelDepartement.setObjectName(_fromUtf8("labelDepartement"))
self.gridLayout_5.addWidget(self.labelDepartement, 11, 0, 1, 1)
self.deptComboBox = QtGui.QComboBox(self.frame_8)
self.deptComboBox.setObjectName(_fromUtf8("deptComboBox"))
self.gridLayout_5.addWidget(self.deptComboBox, 11, 1, 1, 1)
self.gridLayout_3.addWidget(self.frame_8, 0, 0, 2, 1)
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.frame_7 = QtGui.QFrame(self.tab_id_victime)
self.frame_7.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_7.setFrameShadow(QtGui.QFrame.Raised)
self.frame_7.setObjectName(_fromUtf8("frame_7"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.frame_7)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.gridLayout_9 = QtGui.QGridLayout()
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.labelPhoneBureau = QtGui.QLabel(self.frame_7)
self.labelPhoneBureau.setObjectName(_fromUtf8("labelPhoneBureau"))
self.gridLayout_9.addWidget(self.labelPhoneBureau, 0, 0, 1, 1)
self.PhoneWLineEdit = QtGui.QLineEdit(self.frame_7)
self.PhoneWLineEdit.setObjectName(_fromUtf8("PhoneWLineEdit"))
self.gridLayout_9.addWidget(self.PhoneWLineEdit, 0, 1, 1, 1)
self.labelPhonePortable = QtGui.QLabel(self.frame_7)
self.labelPhonePortable.setObjectName(_fromUtf8("labelPhonePortable"))
self.gridLayout_9.addWidget(self.labelPhonePortable, 1, 0, 1, 1)
self.PhoneHLineEdit = QtGui.QLineEdit(self.frame_7)
self.PhoneHLineEdit.setObjectName(_fromUtf8("PhoneHLineEdit"))
self.gridLayout_9.addWidget(self.PhoneHLineEdit, 1, 1, 1, 1)
self.horizontalLayout.addLayout(self.gridLayout_9)
self.gridLayout_10 = QtGui.QGridLayout()
self.gridLayout_10.setObjectName(_fromUtf8("gridLayout_10"))
self.labelFax = QtGui.QLabel(self.frame_7)
self.labelFax.setObjectName(_fromUtf8("labelFax"))
self.gridLayout_10.addWidget(self.labelFax, 0, 0, 1, 1)
self.faxLineEdit = QtGui.QLineEdit(self.frame_7)
self.faxLineEdit.setObjectName(_fromUtf8("faxLineEdit"))
self.gridLayout_10.addWidget(self.faxLineEdit, 0, 1, 1, 1)
self.labelMail = QtGui.QLabel(self.frame_7)
self.labelMail.setObjectName(_fromUtf8("labelMail"))
self.gridLayout_10.addWidget(self.labelMail, 1, 0, 1, 1)
self.mailLineEdit = QtGui.QLineEdit(self.frame_7)
self.mailLineEdit.setObjectName(_fromUtf8("mailLineEdit"))
self.gridLayout_10.addWidget(self.mailLineEdit, 1, 1, 1, 1)
self.horizontalLayout.addLayout(self.gridLayout_10)
self.horizontalLayout_3.addLayout(self.horizontalLayout)
self.verticalLayout_5.addWidget(self.frame_7)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_5.addItem(spacerItem)
self.frame = QtGui.QFrame(self.tab_id_victime)
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.gridLayout_2 = QtGui.QGridLayout(self.frame)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.labelEtatCivilVictime = QtGui.QLabel(self.frame)
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setUnderline(True)
font.setWeight(75)
self.labelEtatCivilVictime.setFont(font)
self.labelEtatCivilVictime.setAlignment(QtCore.Qt.AlignCenter)
self.labelEtatCivilVictime.setObjectName(_fromUtf8("labelEtatCivilVictime"))
self.gridLayout_2.addWidget(self.labelEtatCivilVictime, 0, 0, 1, 2)
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.labelPieceIdOfficielle = QtGui.QLabel(self.frame)
self.labelPieceIdOfficielle.setAlignment(QtCore.Qt.AlignCenter)
self.labelPieceIdOfficielle.setObjectName(_fromUtf8("labelPieceIdOfficielle"))
self.gridLayout.addWidget(self.labelPieceIdOfficielle, 0, 0, 1, 2)
self.gridLayout_6 = QtGui.QGridLayout()
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.labelNumCni = QtGui.QLabel(self.frame)
self.labelNumCni.setObjectName(_fromUtf8("labelNumCni"))
self.gridLayout_6.addWidget(self.labelNumCni, 0, 0, 1, 1)
self.numCniLineEdit = QtGui.QLineEdit(self.frame)
self.numCniLineEdit.setObjectName(_fromUtf8("numCniLineEdit"))
self.gridLayout_6.addWidget(self.numCniLineEdit, 0, 1, 1, 1)
self.labelValidite = QtGui.QLabel(self.frame)
self.labelValidite.setObjectName(_fromUtf8("labelValidite"))
self.gridLayout_6.addWidget(self.labelValidite, 1, 0, 1, 1)
self.validiteDateEdit = QtGui.QDateEdit(self.frame)
self.validiteDateEdit.setObjectName(_fromUtf8("validiteDateEdit"))
self.gridLayout_6.addWidget(self.validiteDateEdit, 1, 1, 1, 1)
self.gridLayout.addLayout(self.gridLayout_6, 1, 0, 1, 1)
self.gridLayout_7 = QtGui.QGridLayout()
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.labelDateDelivrance = QtGui.QLabel(self.frame)
self.labelDateDelivrance.setObjectName(_fromUtf8("labelDateDelivrance"))
self.gridLayout_7.addWidget(self.labelDateDelivrance, 0, 0, 1, 1)
self.dateDelivranceDateEdit = QtGui.QDateEdit(self.frame)
self.dateDelivranceDateEdit.setObjectName(_fromUtf8("dateDelivranceDateEdit"))
self.gridLayout_7.addWidget(self.dateDelivranceDateEdit, 0, 1, 1, 1)
self.labelLieuDelivrance = QtGui.QLabel(self.frame)
self.labelLieuDelivrance.setObjectName(_fromUtf8("labelLieuDelivrance"))
self.gridLayout_7.addWidget(self.labelLieuDelivrance, 1, 0, 1, 1)
self.lieuDelivranceLineEdit = QtGui.QLineEdit(self.frame)
self.lieuDelivranceLineEdit.setObjectName(_fromUtf8("lieuDelivranceLineEdit"))
self.gridLayout_7.addWidget(self.lieuDelivranceLineEdit, 1, 1, 1, 1)
self.gridLayout.addLayout(self.gridLayout_7, 1, 1, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 1, 1, 1, 1)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.gridLayout_11 = QtGui.QGridLayout()
self.gridLayout_11.setObjectName(_fromUtf8("gridLayout_11"))
self.statutMatComboBox = QtGui.QComboBox(self.frame)
self.statutMatComboBox.setObjectName(_fromUtf8("statutMatComboBox"))
self.gridLayout_11.addWidget(self.statutMatComboBox, 0, 1, 1, 1)
self.labelStatutMat = QtGui.QLabel(self.frame)
self.labelStatutMat.setObjectName(_fromUtf8("labelStatutMat"))
self.gridLayout_11.addWidget(self.labelStatutMat, 0, 0, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout_11)
self.gridLayout_8 = QtGui.QGridLayout()
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.labelLieuNaiss = QtGui.QLabel(self.frame)
self.labelLieuNaiss.setObjectName(_fromUtf8("labelLieuNaiss"))
self.gridLayout_8.addWidget(self.labelLieuNaiss, 0, 0, 1, 1)
self.lieuNaissLineEdit = QtGui.QLineEdit(self.frame)
self.lieuNaissLineEdit.setObjectName(_fromUtf8("lieuNaissLineEdit"))
self.gridLayout_8.addWidget(self.lieuNaissLineEdit, 0, 1, 1, 1)
self.labelDateNaiss = QtGui.QLabel(self.frame)
self.labelDateNaiss.setObjectName(_fromUtf8("labelDateNaiss"))
self.gridLayout_8.addWidget(self.labelDateNaiss, 1, 0, 1, 1)
self.dateNaissDateEdit = QtGui.QDateEdit(self.frame)
self.dateNaissDateEdit.setObjectName(_fromUtf8("dateNaissDateEdit"))
self.gridLayout_8.addWidget(self.dateNaissDateEdit, 1, 1, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout_8)
self.gridLayout_2.addLayout(self.verticalLayout_2, 1, 0, 1, 1)
self.verticalLayout_5.addWidget(self.frame)
self.gridLayout_3.addLayout(self.verticalLayout_5, 0, 1, 1, 1)
spacerItem1 = QtGui.QSpacerItem(20, 50, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridLayout_3.addItem(spacerItem1, 1, 1, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.frame_2 = QtGui.QFrame(self.tab_id_victime)
self.frame_2.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtGui.QFrame.Raised)
self.frame_2.setObjectName(_fromUtf8("frame_2"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.frame_2)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.labelListeSinistresVictime = QtGui.QLabel(self.frame_2)
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.labelListeSinistresVictime.setFont(font)
self.labelListeSinistresVictime.setAlignment(QtCore.Qt.AlignCenter)
self.labelListeSinistresVictime.setObjectName(_fromUtf8("labelListeSinistresVictime"))
self.verticalLayout_4.addWidget(self.labelListeSinistresVictime)
self.listeSinistresVictimeTableView = QtGui.QTableView(self.frame_2)
self.listeSinistresVictimeTableView.setAlternatingRowColors(True)
self.listeSinistresVictimeTableView.setObjectName(_fromUtf8("listeSinistresVictimeTableView"))
self.verticalLayout_4.addWidget(self.listeSinistresVictimeTableView)
self.horizontalLayout_2.addWidget(self.frame_2)
self.frame_3 = QtGui.QFrame(self.tab_id_victime)
self.frame_3.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_3.setFrameShadow(QtGui.QFrame.Raised)
self.frame_3.setObjectName(_fromUtf8("frame_3"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.frame_3)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.labelAssureurVictimesSinistre = QtGui.QLabel(self.frame_3)
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.labelAssureurVictimesSinistre.setFont(font)
self.labelAssureurVictimesSinistre.setAlignment(QtCore.Qt.AlignCenter)
self.labelAssureurVictimesSinistre.setObjectName(_fromUtf8("labelAssureurVictimesSinistre"))
self.verticalLayout_3.addWidget(self.labelAssureurVictimesSinistre)
self.assureurVictimeSinistreTableView = QtGui.QTableView(self.frame_3)
self.assureurVictimeSinistreTableView.setAlternatingRowColors(True)
self.assureurVictimeSinistreTableView.setObjectName(_fromUtf8("assureurVictimeSinistreTableView"))
self.verticalLayout_3.addWidget(self.assureurVictimeSinistreTableView)
self.horizontalLayout_2.addWidget(self.frame_3)
self.gridLayout_3.addLayout(self.horizontalLayout_2, 2, 0, 1, 2)
self.tabWidget.addTab(self.tab_id_victime, _fromUtf8(""))
self.tab_traitementdoc = QtGui.QWidget()
self.tab_traitementdoc.setObjectName(_fromUtf8("tab_traitementdoc"))
self.gridLayout_4 = QtGui.QGridLayout(self.tab_traitementdoc)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.frame_5 = QtGui.QFrame(self.tab_traitementdoc)
self.frame_5.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_5.setFrameShadow(QtGui.QFrame.Raised)
self.frame_5.setObjectName(_fromUtf8("frame_5"))
self.gridLayout_13 = QtGui.QGridLayout(self.frame_5)
self.gridLayout_13.setObjectName(_fromUtf8("gridLayout_13"))
self.labelEtatActuelVictime = QtGui.QLabel(self.frame_5)
self.labelEtatActuelVictime.setObjectName(_fromUtf8("labelEtatActuelVictime"))
self.gridLayout_13.addWidget(self.labelEtatActuelVictime, 0, 0, 1, 1)
self.etatActuelVictimComboBox = QtGui.QComboBox(self.frame_5)
self.etatActuelVictimComboBox.setObjectName(_fromUtf8("etatActuelVictimComboBox"))
self.gridLayout_13.addWidget(self.etatActuelVictimComboBox, 0, 1, 1, 1)
self.labelProfessionVictime = QtGui.QLabel(self.frame_5)
self.labelProfessionVictime.setObjectName(_fromUtf8("labelProfessionVictime"))
self.gridLayout_13.addWidget(self.labelProfessionVictime, 1, 0, 1, 1)
self.professionVictimComboBox = QtGui.QComboBox(self.frame_5)
self.professionVictimComboBox.setObjectName(_fromUtf8("professionVictimComboBox"))
self.gridLayout_13.addWidget(self.professionVictimComboBox, 1, 1, 1, 1)
self.labelDescDommages = QtGui.QLabel(self.frame_5)
self.labelDescDommages.setObjectName(_fromUtf8("labelDescDommages"))
self.gridLayout_13.addWidget(self.labelDescDommages, 2, 0, 1, 1)
self.descDommageComboBox = QtGui.QComboBox(self.frame_5)
self.descDommageComboBox.setObjectName(_fromUtf8("descDommageComboBox"))
self.gridLayout_13.addWidget(self.descDommageComboBox, 2, 1, 1, 1)
self.labelNatDommages = QtGui.QLabel(self.frame_5)
self.labelNatDommages.setObjectName(_fromUtf8("labelNatDommages"))
self.gridLayout_13.addWidget(self.labelNatDommages, 3, 0, 1, 1)
self.natDommageComboBox = QtGui.QComboBox(self.frame_5)
self.natDommageComboBox.setObjectName(_fromUtf8("natDommageComboBox"))
self.gridLayout_13.addWidget(self.natDommageComboBox, 3, 1, 1, 1)
self.labelDescPrejudices = QtGui.QLabel(self.frame_5)
self.labelDescPrejudices.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.labelDescPrejudices.setObjectName(_fromUtf8("labelDescPrejudices"))
self.gridLayout_13.addWidget(self.labelDescPrejudices, 4, 0, 1, 1)
self.DescPrejudiceVictimTextEdit = QtGui.QTextEdit(self.frame_5)
self.DescPrejudiceVictimTextEdit.setMinimumSize(QtCore.QSize(0, 40))
self.DescPrejudiceVictimTextEdit.setMaximumSize(QtCore.QSize(16777215, 50))
self.DescPrejudiceVictimTextEdit.setObjectName(_fromUtf8("DescPrejudiceVictimTextEdit"))
self.gridLayout_13.addWidget(self.DescPrejudiceVictimTextEdit, 4, 1, 1, 1)
self.labelNatLesions = QtGui.QLabel(self.frame_5)
self.labelNatLesions.setObjectName(_fromUtf8("labelNatLesions"))
self.gridLayout_13.addWidget(self.labelNatLesions, 5, 0, 1, 1)
self.natLesionComboBox = QtGui.QComboBox(self.frame_5)
self.natLesionComboBox.setObjectName(_fromUtf8("natLesionComboBox"))
self.gridLayout_13.addWidget(self.natLesionComboBox, 5, 1, 1, 1)
self.labelAgeVictime = QtGui.QLabel(self.frame_5)
self.labelAgeVictime.setObjectName(_fromUtf8("labelAgeVictime"))
self.gridLayout_13.addWidget(self.labelAgeVictime, 6, 0, 1, 1)
self.ageVictimSpinBox = QtGui.QSpinBox(self.frame_5)
self.ageVictimSpinBox.setObjectName(_fromUtf8("ageVictimSpinBox"))
self.gridLayout_13.addWidget(self.ageVictimSpinBox, 6, 1, 1, 1)
self.labelDureeProbHosp = QtGui.QLabel(self.frame_5)
self.labelDureeProbHosp.setObjectName(_fromUtf8("labelDureeProbHosp"))
self.gridLayout_13.addWidget(self.labelDureeProbHosp, 7, 0, 1, 1)
self.dureeProbHospSpinBox = QtGui.QSpinBox(self.frame_5)
self.dureeProbHospSpinBox.setObjectName(_fromUtf8("dureeProbHospSpinBox"))
self.gridLayout_13.addWidget(self.dureeProbHospSpinBox, 7, 1, 1, 1)
self.gridLayout_4.addWidget(self.frame_5, 0, 0, 1, 1)
self.frame_6 = QtGui.QFrame(self.tab_traitementdoc)
self.frame_6.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_6.setFrameShadow(QtGui.QFrame.Raised)
self.frame_6.setObjectName(_fromUtf8("frame_6"))
self.gridLayout_12 = QtGui.QGridLayout(self.frame_6)
self.gridLayout_12.setObjectName(_fromUtf8("gridLayout_12"))
self.labelStatutMatVictime = QtGui.QLabel(self.frame_6)
self.labelStatutMatVictime.setObjectName(_fromUtf8("labelStatutMatVictime"))
self.gridLayout_12.addWidget(self.labelStatutMatVictime, 0, 0, 1, 1)
self.statutMatVictimComboBox = QtGui.QComboBox(self.frame_6)
self.statutMatVictimComboBox.setObjectName(_fromUtf8("statutMatVictimComboBox"))
self.gridLayout_12.addWidget(self.statutMatVictimComboBox, 0, 1, 1, 1)
self.labelParenteAvecAssure = QtGui.QLabel(self.frame_6)
self.labelParenteAvecAssure.setObjectName(_fromUtf8("labelParenteAvecAssure"))
self.gridLayout_12.addWidget(self.labelParenteAvecAssure, 1, 0, 1, 1)
self.parenteAvecAssureComboBox = QtGui.QComboBox(self.frame_6)
self.parenteAvecAssureComboBox.setObjectName(_fromUtf8("parenteAvecAssureComboBox"))
self.gridLayout_12.addWidget(self.parenteAvecAssureComboBox, 1, 1, 1, 1)
self.labelParenteAvecConducteur = QtGui.QLabel(self.frame_6)
self.labelParenteAvecConducteur.setObjectName(_fromUtf8("labelParenteAvecConducteur"))
self.gridLayout_12.addWidget(self.labelParenteAvecConducteur, 2, 0, 1, 1)
self.parenteAvecConducteurComboBox = QtGui.QComboBox(self.frame_6)
self.parenteAvecConducteurComboBox.setObjectName(_fromUtf8("parenteAvecConducteurComboBox"))
self.gridLayout_12.addWidget(self.parenteAvecConducteurComboBox, 2, 1, 1, 1)
self.labelEmplacementVictimeAuSinistre = QtGui.QLabel(self.frame_6)
self.labelEmplacementVictimeAuSinistre.setObjectName(_fromUtf8("labelEmplacementVictimeAuSinistre"))
self.gridLayout_12.addWidget(self.labelEmplacementVictimeAuSinistre, 3, 0, 1, 1)
self.emplacementVictimAuSinistrComboBox = QtGui.QComboBox(self.frame_6)
self.emplacementVictimAuSinistrComboBox.setObjectName(_fromUtf8("emplacementVictimAuSinistrComboBox"))
self.gridLayout_12.addWidget(self.emplacementVictimAuSinistrComboBox, 3, 1, 1, 1)
self.labelDestinationVictimeApresSinistre = QtGui.QLabel(self.frame_6)
self.labelDestinationVictimeApresSinistre.setObjectName(_fromUtf8("labelDestinationVictimeApresSinistre"))
self.gridLayout_12.addWidget(self.labelDestinationVictimeApresSinistre, 4, 0, 1, 1)
self.destVictimApresSinistrComboBox = QtGui.QComboBox(self.frame_6)
self.destVictimApresSinistrComboBox.setObjectName(_fromUtf8("destVictimApresSinistrComboBox"))
self.gridLayout_12.addWidget(self.destVictimApresSinistrComboBox, 4, 1, 1, 1)
self.labelConclusionsCertificatMedical = QtGui.QLabel(self.frame_6)
self.labelConclusionsCertificatMedical.setObjectName(_fromUtf8("labelConclusionsCertificatMedical"))
self.gridLayout_12.addWidget(self.labelConclusionsCertificatMedical, 5, 0, 1, 1)
self.conclusionCertificatMedicalComboBox = QtGui.QComboBox(self.frame_6)
self.conclusionCertificatMedicalComboBox.setObjectName(_fromUtf8("conclusionCertificatMedicalComboBox"))
self.gridLayout_12.addWidget(self.conclusionCertificatMedicalComboBox, 5, 1, 1, 1)
self.labelObsSurVictime = QtGui.QLabel(self.frame_6)
self.labelObsSurVictime.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.labelObsSurVictime.setObjectName(_fromUtf8("labelObsSurVictime"))
self.gridLayout_12.addWidget(self.labelObsSurVictime, 6, 0, 1, 1)
self.ObsSurVictimTextEdit = QtGui.QTextEdit(self.frame_6)
self.ObsSurVictimTextEdit.setMinimumSize(QtCore.QSize(0, 40))
self.ObsSurVictimTextEdit.setMaximumSize(QtCore.QSize(16777215, 50))
self.ObsSurVictimTextEdit.setObjectName(_fromUtf8("ObsSurVictimTextEdit"))
self.gridLayout_12.addWidget(self.ObsSurVictimTextEdit, 6, 1, 1, 1)
self.labelMontantEstime = QtGui.QLabel(self.frame_6)
self.labelMontantEstime.setObjectName(_fromUtf8("labelMontantEstime"))
self.gridLayout_12.addWidget(self.labelMontantEstime, 7, 0, 1, 1)
self.montantEstimeLineEdit = QtGui.QLineEdit(self.frame_6)
self.montantEstimeLineEdit.setObjectName(_fromUtf8("montantEstimeLineEdit"))
self.gridLayout_12.addWidget(self.montantEstimeLineEdit, 7, 1, 1, 1)
self.gridLayout_4.addWidget(self.frame_6, 0, 1, 1, 1)
self.frame_4 = QtGui.QFrame(self.tab_traitementdoc)
self.frame_4.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtGui.QFrame.Raised)
self.frame_4.setObjectName(_fromUtf8("frame_4"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.frame_4)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.labelAnalysePrejudices = QtGui.QLabel(self.frame_4)
font = QtGui.QFont()
font.setPointSize(25)
font.setBold(True)
font.setWeight(75)
self.labelAnalysePrejudices.setFont(font)
self.labelAnalysePrejudices.setAlignment(QtCore.Qt.AlignCenter)
self.labelAnalysePrejudices.setObjectName(_fromUtf8("labelAnalysePrejudices"))
self.verticalLayout_6.addWidget(self.labelAnalysePrejudices)
self.analysePrejudicesVictimeTableView = QtGui.QTableView(self.frame_4)
self.analysePrejudicesVictimeTableView.setMinimumSize(QtCore.QSize(0, 200))
self.analysePrejudicesVictimeTableView.setMaximumSize(QtCore.QSize(16777215, 250))
self.analysePrejudicesVictimeTableView.setAlternatingRowColors(True)
self.analysePrejudicesVictimeTableView.setObjectName(_fromUtf8("analysePrejudicesVictimeTableView"))
self.verticalLayout_6.addWidget(self.analysePrejudicesVictimeTableView)
self.gridLayout_4.addWidget(self.frame_4, 1, 0, 1, 2)
self.tabWidget.addTab(self.tab_traitementdoc, _fromUtf8(""))
self.horizontalLayout_4.addWidget(self.tabWidget)
VictimesSinistre.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(VictimesSinistre)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1088, 22))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menu_Fichier = QtGui.QMenu(self.menubar)
self.menu_Fichier.setObjectName(_fromUtf8("menu_Fichier"))
self.menu_Edition = QtGui.QMenu(self.menubar)
self.menu_Edition.setObjectName(_fromUtf8("menu_Edition"))
VictimesSinistre.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(VictimesSinistre)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
VictimesSinistre.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(VictimesSinistre)
self.toolBar.setObjectName(_fromUtf8("toolBar"))
VictimesSinistre.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.fichierNouveau = QtGui.QAction(VictimesSinistre)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/document-new.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.fichierNouveau.setIcon(icon)
self.fichierNouveau.setObjectName(_fromUtf8("fichierNouveau"))
self.fichierEnregistrer = QtGui.QAction(VictimesSinistre)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/document-save.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.fichierEnregistrer.setIcon(icon1)
self.fichierEnregistrer.setObjectName(_fromUtf8("fichierEnregistrer"))
self.fichierImprimer = QtGui.QAction(VictimesSinistre)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/Imprimer.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.fichierImprimer.setIcon(icon2)
self.fichierImprimer.setObjectName(_fromUtf8("fichierImprimer"))
self.fichierFermer = QtGui.QAction(VictimesSinistre)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/system-log-out.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.fichierFermer.setIcon(icon3)
self.fichierFermer.setObjectName(_fromUtf8("fichierFermer"))
self.editionAnnuler = QtGui.QAction(VictimesSinistre)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/edit-undo.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionAnnuler.setIcon(icon4)
self.editionAnnuler.setObjectName(_fromUtf8("editionAnnuler"))
self.editionRefaire = QtGui.QAction(VictimesSinistre)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/edit-redo.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionRefaire.setIcon(icon5)
self.editionRefaire.setObjectName(_fromUtf8("editionRefaire"))
self.editionPremier = QtGui.QAction(VictimesSinistre)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/go-first.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionPremier.setIcon(icon6)
self.editionPremier.setObjectName(_fromUtf8("editionPremier"))
self.editionPrecedent = QtGui.QAction(VictimesSinistre)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/go-previous.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionPrecedent.setIcon(icon7)
self.editionPrecedent.setObjectName(_fromUtf8("editionPrecedent"))
self.editionSuivant = QtGui.QAction(VictimesSinistre)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/go-next.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionSuivant.setIcon(icon8)
self.editionSuivant.setObjectName(_fromUtf8("editionSuivant"))
self.editionDernier = QtGui.QAction(VictimesSinistre)
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(_fromUtf8(":/icones/go-last.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.editionDernier.setIcon(icon9)
self.editionDernier.setObjectName(_fromUtf8("editionDernier"))
self.menu_Fichier.addAction(self.fichierNouveau)
self.menu_Fichier.addAction(self.fichierEnregistrer)
self.menu_Fichier.addSeparator()
self.menu_Fichier.addAction(self.fichierImprimer)
self.menu_Fichier.addSeparator()
self.menu_Fichier.addAction(self.fichierFermer)
self.menu_Edition.addAction(self.editionAnnuler)
self.menu_Edition.addAction(self.editionRefaire)
self.menu_Edition.addSeparator()
self.menu_Edition.addAction(self.editionPremier)
self.menu_Edition.addAction(self.editionPrecedent)
self.menu_Edition.addAction(self.editionSuivant)
self.menu_Edition.addAction(self.editionDernier)
self.menubar.addAction(self.menu_Fichier.menuAction())
self.menubar.addAction(self.menu_Edition.menuAction())
self.toolBar.addAction(self.fichierNouveau)
self.toolBar.addSeparator()
self.toolBar.addAction(self.editionPremier)
self.toolBar.addAction(self.editionPrecedent)
self.toolBar.addAction(self.editionSuivant)
self.toolBar.addAction(self.editionDernier)
self.toolBar.addSeparator()
self.toolBar.addAction(self.editionRefaire)
self.toolBar.addAction(self.editionAnnuler)
self.toolBar.addAction(self.fichierEnregistrer)
self.toolBar.addSeparator()
self.toolBar.addAction(self.fichierImprimer)
self.toolBar.addSeparator()
self.toolBar.addAction(self.fichierFermer)
self.retranslateUi(VictimesSinistre)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(VictimesSinistre)
VictimesSinistre.setTabOrder(self.numIdComboBox, self.prenomComboBox)
VictimesSinistre.setTabOrder(self.prenomComboBox, self.nomComboBox)
VictimesSinistre.setTabOrder(self.nomComboBox, self.professionComboBox)
VictimesSinistre.setTabOrder(self.professionComboBox, self.civiliteComboBox)
VictimesSinistre.setTabOrder(self.civiliteComboBox, self.localisationComboBox)
VictimesSinistre.setTabOrder(self.localisationComboBox, self.bpLineEdit)
VictimesSinistre.setTabOrder(self.bpLineEdit, self.villeComboBox)
VictimesSinistre.setTabOrder(self.villeComboBox, self.adressLineEdit)
VictimesSinistre.setTabOrder(self.adressLineEdit, self.steLineEdit)
VictimesSinistre.setTabOrder(self.steLineEdit, self.posteLineEdit)
VictimesSinistre.setTabOrder(self.posteLineEdit, self.deptComboBox)
VictimesSinistre.setTabOrder(self.deptComboBox, self.PhoneWLineEdit)
VictimesSinistre.setTabOrder(self.PhoneWLineEdit, self.PhoneHLineEdit)
VictimesSinistre.setTabOrder(self.PhoneHLineEdit, self.faxLineEdit)
VictimesSinistre.setTabOrder(self.faxLineEdit, self.mailLineEdit)
VictimesSinistre.setTabOrder(self.mailLineEdit, self.statutMatComboBox)
VictimesSinistre.setTabOrder(self.statutMatComboBox, self.lieuNaissLineEdit)
VictimesSinistre.setTabOrder(self.lieuNaissLineEdit, self.numCniLineEdit)
VictimesSinistre.setTabOrder(self.numCniLineEdit, self.dateDelivranceDateEdit)
VictimesSinistre.setTabOrder(self.dateDelivranceDateEdit, self.dateNaissDateEdit)
VictimesSinistre.setTabOrder(self.dateNaissDateEdit, self.validiteDateEdit)
VictimesSinistre.setTabOrder(self.validiteDateEdit, self.lieuDelivranceLineEdit)
VictimesSinistre.setTabOrder(self.lieuDelivranceLineEdit, self.victimesEnregistreesListView)
VictimesSinistre.setTabOrder(self.victimesEnregistreesListView, self.listeSinistresVictimeTableView)
VictimesSinistre.setTabOrder(self.listeSinistresVictimeTableView, self.assureurVictimeSinistreTableView)
VictimesSinistre.setTabOrder(self.assureurVictimeSinistreTableView, self.tabWidget)
VictimesSinistre.setTabOrder(self.tabWidget, self.etatActuelVictimComboBox)
VictimesSinistre.setTabOrder(self.etatActuelVictimComboBox, self.professionVictimComboBox)
VictimesSinistre.setTabOrder(self.professionVictimComboBox, self.descDommageComboBox)
VictimesSinistre.setTabOrder(self.descDommageComboBox, self.natDommageComboBox)
VictimesSinistre.setTabOrder(self.natDommageComboBox, self.DescPrejudiceVictimTextEdit)
VictimesSinistre.setTabOrder(self.DescPrejudiceVictimTextEdit, self.natLesionComboBox)
VictimesSinistre.setTabOrder(self.natLesionComboBox, self.ageVictimSpinBox)
VictimesSinistre.setTabOrder(self.ageVictimSpinBox, self.dureeProbHospSpinBox)
VictimesSinistre.setTabOrder(self.dureeProbHospSpinBox, self.statutMatVictimComboBox)
VictimesSinistre.setTabOrder(self.statutMatVictimComboBox, self.parenteAvecAssureComboBox)
VictimesSinistre.setTabOrder(self.parenteAvecAssureComboBox, self.parenteAvecConducteurComboBox)
VictimesSinistre.setTabOrder(self.parenteAvecConducteurComboBox, self.emplacementVictimAuSinistrComboBox)
VictimesSinistre.setTabOrder(self.emplacementVictimAuSinistrComboBox, self.destVictimApresSinistrComboBox)
VictimesSinistre.setTabOrder(self.destVictimApresSinistrComboBox, self.conclusionCertificatMedicalComboBox)
VictimesSinistre.setTabOrder(self.conclusionCertificatMedicalComboBox, self.ObsSurVictimTextEdit)
VictimesSinistre.setTabOrder(self.ObsSurVictimTextEdit, self.montantEstimeLineEdit)
VictimesSinistre.setTabOrder(self.montantEstimeLineEdit, self.analysePrejudicesVictimeTableView)
def retranslateUi(self, VictimesSinistre):
    """Set all translatable UI strings (pyuic-generated; do not edit by hand).

    Installs window title, labels, tooltips, "What's This" texts, tab
    titles, menu titles and action captions for the VictimesSinistre
    window via the PyQt4-style `_translate(context, text, disambig)` helper.
    """
    VictimesSinistre.setWindowTitle(_translate("VictimesSinistre", "Victimes Sinistre", None))
    # --- Registered-victims list ---
    self.labelVictimesEnregistrees.setText(_translate("VictimesSinistre", "Victimes Enregistrees", None))
    self.victimesEnregistreesListView.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Liste des Victimes Enregistrees.</p></body></html>", None))
    self.victimesEnregistreesListView.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Liste des Victimes Enregistrees.</p></body></html>", None))
    # --- Victim identification fields ---
    self.labelNumId.setText(_translate("VictimesSinistre", "Numero Identifiant", None))
    self.numIdComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Numero Identifiant.</p></body></html>", None))
    self.numIdComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Numero Identifiant.</p></body></html>", None))
    self.labelPrenom.setText(_translate("VictimesSinistre", "Prenom", None))
    self.prenomComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Prenom.</p></body></html>", None))
    self.prenomComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Prenom.</p></body></html>", None))
    self.labelNom.setText(_translate("VictimesSinistre", "Nom", None))
    self.nomComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Nom.</p></body></html>", None))
    self.nomComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Nom.</p></body></html>", None))
    self.labelProfession.setText(_translate("VictimesSinistre", "Profession", None))
    self.professionComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Profession.</p></body></html>", None))
    self.professionComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Profession.</p></body></html>", None))
    self.labelCivilite.setText(_translate("VictimesSinistre", "Civilite", None))
    self.civiliteComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Civilite.</p></body></html>", None))
    self.civiliteComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Civilite.</p></body></html>", None))
    # --- Address / contact fields ---
    self.labelLocalisation.setText(_translate("VictimesSinistre", "Localisation", None))
    self.localisationComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Localisation.</p></body></html>", None))
    self.localisationComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Localisation.</p></body></html>", None))
    self.labelBP.setText(_translate("VictimesSinistre", "Boite Postale", None))
    self.bpLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Boite Postale.</p></body></html>", None))
    self.bpLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Boite Postale.</p></body></html>", None))
    self.labelVille.setText(_translate("VictimesSinistre", "Ville", None))
    self.villeComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Ville.</p></body></html>", None))
    self.villeComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Ville.</p></body></html>", None))
    self.labelAdresse.setText(_translate("VictimesSinistre", "Adresse", None))
    self.adressLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Adresse.</p></body></html>", None))
    self.adressLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Adresse.</p></body></html>", None))
    self.labelSociete.setText(_translate("VictimesSinistre", "Societe", None))
    self.steLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Societe.</p></body></html>", None))
    self.steLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Societe.</p></body></html>", None))
    self.labelPoste.setText(_translate("VictimesSinistre", "Poste occupe", None))
    self.posteLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Poste Occupe.</p></body></html>", None))
    self.posteLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Poste Occupe.</p></body></html>", None))
    self.labelDepartement.setText(_translate("VictimesSinistre", "Departement", None))
    self.deptComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Departement.</p></body></html>", None))
    self.deptComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Departement.</p></body></html>", None))
    self.labelPhoneBureau.setText(_translate("VictimesSinistre", "Telephone Bureau", None))
    self.PhoneWLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Numero du Telephone du Bureau.</p></body></html>", None))
    self.PhoneWLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Numero du Telephone du Bureau.</p></body></html>", None))
    self.labelPhonePortable.setText(_translate("VictimesSinistre", "Telephone Portable", None))
    self.PhoneHLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Numero du Telephone Portable.</p></body></html>", None))
    self.PhoneHLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Numero du Telephone Portable.</p></body></html>", None))
    self.labelFax.setText(_translate("VictimesSinistre", "Numero Fax", None))
    self.faxLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Numero de Fax.</p></body></html>", None))
    self.faxLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Numero de Fax.</p></body></html>", None))
    self.labelMail.setText(_translate("VictimesSinistre", "Adresse E-mail", None))
    self.mailLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Adresse E-mail.</p></body></html>", None))
    self.mailLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Adresse E-mail.</p></body></html>", None))
    # --- Civil status and official ID (CNI) fields ---
    self.labelEtatCivilVictime.setText(_translate("VictimesSinistre", "Etat-civil de la Victime", None))
    self.labelPieceIdOfficielle.setText(_translate("VictimesSinistre", "Piece d\'identite Officielle", None))
    self.labelNumCni.setText(_translate("VictimesSinistre", "Numero", None))
    self.numCniLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Numero de la CNI de la Victime.</p></body></html>", None))
    self.numCniLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Numero de la CNI de la Victime.</p></body></html>", None))
    self.labelValidite.setText(_translate("VictimesSinistre", "Validite", None))
    self.validiteDateEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Validite de la CNI de la Victime.</p></body></html>", None))
    self.validiteDateEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Validite de la CNI de la Victime.</p></body></html>", None))
    self.labelDateDelivrance.setText(_translate("VictimesSinistre", "Date delivrance", None))
    self.dateDelivranceDateEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Date Delivrance de la CNI de la Victime.</p></body></html>", None))
    self.dateDelivranceDateEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Date Delivrance de la CNI de la Victime.</p></body></html>", None))
    self.labelLieuDelivrance.setText(_translate("VictimesSinistre", "Lieu delivrance", None))
    self.lieuDelivranceLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Lieu de Delivrance de la CNI de la Victime.</p></body></html>", None))
    self.lieuDelivranceLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Lieu de Delivrance de la CNI de la Victime.</p></body></html>", None))
    self.statutMatComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Statut Matrimonial de la Victime.</p></body></html>", None))
    self.statutMatComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Statut Matrimonial de la Victime.</p></body></html>", None))
    self.labelStatutMat.setText(_translate("VictimesSinistre", "Statut matrimonial", None))
    self.labelLieuNaiss.setText(_translate("VictimesSinistre", "Lieu de Naissance", None))
    self.lieuNaissLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Lieu de Naissance de la Victime.</p></body></html>", None))
    self.lieuNaissLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Lieu de Naissance de la Victime.</p></body></html>", None))
    self.labelDateNaiss.setText(_translate("VictimesSinistre", "Date de Naissance", None))
    self.dateNaissDateEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Date de Naissance de la Victime.</p></body></html>", None))
    self.dateNaissDateEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Date de Naissance de la Victime.</p></body></html>", None))
    # --- Claim-history and insurer tables, first tab title ---
    self.labelListeSinistresVictime.setText(_translate("VictimesSinistre", "Liste de tous les Sinistres de la Victime", None))
    self.listeSinistresVictimeTableView.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Liste de tous les Sinistres de la Victime.</p></body></html>", None))
    self.listeSinistresVictimeTableView.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Liste de tous les Sinistres de la Victime.</p></body></html>", None))
    self.labelAssureurVictimesSinistre.setText(_translate("VictimesSinistre", "Assureur Victimes Sinistre", None))
    self.assureurVictimeSinistreTableView.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Assureur de la Victime du Sinistre.</p></body></html>", None))
    self.assureurVictimeSinistreTableView.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Assureur de la Victime du Sinistre.</p></body></html>", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_id_victime), _translate("VictimesSinistre", "Identification de la Victime", None))
    # --- Case-processing tab: medical / damage-assessment fields ---
    self.labelEtatActuelVictime.setText(_translate("VictimesSinistre", "Etat actuel Victime", None))
    self.etatActuelVictimComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Etat actuel de la Victime.</p></body></html>", None))
    self.etatActuelVictimComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Etat actuel de la Victime.</p></body></html>", None))
    self.labelProfessionVictime.setText(_translate("VictimesSinistre", "Profession Victime", None))
    self.professionVictimComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Profession de la Victime.</p></body></html>", None))
    self.professionVictimComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Profession de la Victime.</p></body></html>", None))
    self.labelDescDommages.setText(_translate("VictimesSinistre", "Description des dommages", None))
    self.descDommageComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Description des dommages de la Victime.</p></body></html>", None))
    self.descDommageComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Description des dommages de la Victime.</p></body></html>", None))
    self.labelNatDommages.setText(_translate("VictimesSinistre", "Nature des dommages", None))
    self.natDommageComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Natures des dommages de la Victime.</p></body></html>", None))
    self.natDommageComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Natures des dommages de la Victime.</p></body></html>", None))
    self.labelDescPrejudices.setText(_translate("VictimesSinistre", "Description Prejudices Victime", None))
    self.DescPrejudiceVictimTextEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Description des Prejudices de la Victime.</p></body></html>", None))
    self.DescPrejudiceVictimTextEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Description des Prejudices de la Victime.</p></body></html>", None))
    self.labelNatLesions.setText(_translate("VictimesSinistre", "Nature des Lesions", None))
    self.natLesionComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Nature des Lesions de la Victime.</p></body></html>", None))
    self.natLesionComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Nature des Lesions de la Victime.</p></body></html>", None))
    self.labelAgeVictime.setText(_translate("VictimesSinistre", "Age Victime", None))
    self.ageVictimSpinBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Age de la Victime.</p></body></html>", None))
    self.ageVictimSpinBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Age de la Victime.</p></body></html>", None))
    self.labelDureeProbHosp.setText(_translate("VictimesSinistre", "Duree probable hospitalisation", None))
    self.dureeProbHospSpinBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Duree Probable d\'hopitalisation de la Victime.</p></body></html>", None))
    self.dureeProbHospSpinBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Duree Probable d\'hopitalisation de la Victime.</p></body></html>", None))
    self.labelStatutMatVictime.setText(_translate("VictimesSinistre", "Statut matrimonial Victime", None))
    self.statutMatVictimComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Statut Matrimonial de la Victime.</p></body></html>", None))
    self.statutMatVictimComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Statut Matrimonial de la Victime.</p></body></html>", None))
    self.labelParenteAvecAssure.setText(_translate("VictimesSinistre", "Parente avec Assure", None))
    self.parenteAvecAssureComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Parente avec l\'Assure.</p></body></html>", None))
    self.parenteAvecAssureComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Parente avec l\'Assure.</p></body></html>", None))
    self.labelParenteAvecConducteur.setText(_translate("VictimesSinistre", "Parente avec Conducteur", None))
    self.parenteAvecConducteurComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Parente avec le Conducteur.</p></body></html>", None))
    self.parenteAvecConducteurComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Parente avec le Conducteur.</p></body></html>", None))
    self.labelEmplacementVictimeAuSinistre.setText(_translate("VictimesSinistre", "Emplacement Victime au Sinistre", None))
    self.emplacementVictimAuSinistrComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Emplacement de la Victime au Sinistre.</p></body></html>", None))
    self.emplacementVictimAuSinistrComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Emplacement de la Victime au Sinistre.</p></body></html>", None))
    self.labelDestinationVictimeApresSinistre.setText(_translate("VictimesSinistre", "Destination Victime apres Sinistre", None))
    self.destVictimApresSinistrComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Destination de la Victime apres le Sinistre.</p></body></html>", None))
    self.destVictimApresSinistrComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Destination de la Victime apres le Sinistre.</p></body></html>", None))
    self.labelConclusionsCertificatMedical.setText(_translate("VictimesSinistre", "Conclusions Certificat Medical", None))
    self.conclusionCertificatMedicalComboBox.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Conclusions du Certificat Medical.</p></body></html>", None))
    self.conclusionCertificatMedicalComboBox.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Conclusions du Certificat Medical.</p></body></html>", None))
    self.labelObsSurVictime.setText(_translate("VictimesSinistre", "Observation sur la Victime", None))
    self.ObsSurVictimTextEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Observations faites sur la Victime du Sinistre.</p></body></html>", None))
    self.ObsSurVictimTextEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Observations faites sur la Victime du Sinistre.</p></body></html>", None))
    self.labelMontantEstime.setText(_translate("VictimesSinistre", "Montant estime", None))
    self.montantEstimeLineEdit.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Montant Estime.</p></body></html>", None))
    self.montantEstimeLineEdit.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Montant Estime.</p></body></html>", None))
    self.labelAnalysePrejudices.setText(_translate("VictimesSinistre", "Analyse des prejudices subis par la Victime", None))
    self.analysePrejudicesVictimeTableView.setToolTip(_translate("VictimesSinistre", "<html><head/><body><p>Analyse des Prejudices subis par la Victime du Sinistre.</p></body></html>", None))
    self.analysePrejudicesVictimeTableView.setWhatsThis(_translate("VictimesSinistre", "<html><head/><body><p>Analyse des Prejudices subis par la Victime du Sinistre.</p></body></html>", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_traitementdoc), _translate("VictimesSinistre", "Traitement dossier de la Victime", None))
    # --- Menus, toolbar and actions ---
    self.menu_Fichier.setTitle(_translate("VictimesSinistre", "&Fichier", None))
    self.menu_Edition.setTitle(_translate("VictimesSinistre", "&Edition", None))
    self.toolBar.setWindowTitle(_translate("VictimesSinistre", "toolBar", None))
    self.fichierNouveau.setText(_translate("VictimesSinistre", " Nouveau", None))
    self.fichierEnregistrer.setText(_translate("VictimesSinistre", "Enregistrer", None))
    self.fichierImprimer.setText(_translate("VictimesSinistre", "Imprimer Fiche de Suivit Victime", None))
    self.fichierFermer.setText(_translate("VictimesSinistre", "Fermer", None))
    self.editionAnnuler.setText(_translate("VictimesSinistre", "Annuler", None))
    self.editionRefaire.setText(_translate("VictimesSinistre", "Refaire", None))
    self.editionPremier.setText(_translate("VictimesSinistre", "Premier", None))
    self.editionPrecedent.setText(_translate("VictimesSinistre", "Precedent", None))
    self.editionSuivant.setText(_translate("VictimesSinistre", "Suivant", None))
    self.editionDernier.setText(_translate("VictimesSinistre", "Dernier", None))
import ressources_rc
|
##
## Visualize samples produced by MISO.
##
## TODO: In future interface with spliceplot to produce densities along a gene model
##
from scipy import *
from numpy import *
import matplotlib
#from plotting import colors, show_spines, axes_square
import matplotlib.pyplot as plt
from matplotlib import rc
#rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
import time
import misopy
from misopy.parse_csv import csv2array
from misopy.sashimi_plot.plot_utils.plotting import *
import misopy.Gene as Gene
import misopy.hypothesis_test as ht
class SamplesPlotter:
"""
Visualize a set of samples from a run of MISO.
"""
def __init__(self, samples, params, log_scores=None,
percent_acceptance=None,
true_psi=None):
"""
Given a sampler instance, store its properties.
"""
# sampler parameters
self.samples = samples
self.params = params
self.log_scores = log_scores
self.percent_acceptance = percent_acceptance
self.true_psi = true_psi
assert(len(samples) > 1)
def plot(self, fig=None, output_dir=None, num_rows=1, num_cols=1, subplot_start=1,
title=None, plot_intervals=None, value_to_label=None, label=None, bins=10,
bbox_coords=None, vanilla=False,
plot_mean=False, fig_dims=(5, 5)):
"""
Plot a set of samples.
- credible_intervals: if set to true, plot Bayesian confidence intervals
"""
plot_handle = None
num_samples, num_isoforms = shape(self.samples)
if num_isoforms == 2:
plot_handle = self.plot_two_iso_samples(fig=fig, plots_dir=output_dir, num_cols=num_cols,
num_rows=num_rows, subplot_start=subplot_start,
plot_intervals=plot_intervals,
value_to_label=value_to_label,
label=label, bbox_coords=bbox_coords,
title=title, vanilla=vanilla,
plot_mean=plot_mean, fig_dims=fig_dims)
elif num_isoforms > 2:
num_isoforms = self.samples.shape[1]
num_rows = 1
num_cols = num_isoforms
for c in range(num_cols):
plot_handle = self.plot_two_iso_samples(fig, isoform_index=c,
subplot_start=c + 1, num_cols=num_cols,
plot_intervals=plot_intervals,
title=title, bins=bins, vanilla=vanilla,
plot_mean=plot_mean, fig_dims=fig_dims)
plt.ylabel('Frequency (Isoform %d)' %(c + 1))
plt.subplots_adjust(wspace=0.5)
else:
raise Exception, "Invalid number of isoforms %d" %(num_isoforms)
return plot_handle
def plot_two_iso_samples(self, fig=None, isoform_index=0, num_rows=1, num_cols=1, subplot_start=1,
plots_dir=None, map_estimate=None, simulation_num=1,
plot_intervals=False, value_to_label=None, label=None, plot_filename=None,
bins=None, bbox_coords=None, with_legend=True, title=None, vanilla=False,
plot_mean=False, normed=False, fig_dims=(5, 5)):
"""
Plot a set of samples for Psi of a two isoform gene.
"""
if not fig:
sampled_psi_fig = plt.figure(figsize=fig_dims, dpi=300)
else:
sampled_psi_fig = fig
ax = sampled_psi_fig.add_subplot(num_rows, num_cols, subplot_start)
num_iters = int(self.params['iters'])
burn_in = int(self.params['burn_in'])
lag = int(self.params['lag'])
percent_acceptance = float(self.params['percent_accept'])
proposal_type = self.params['proposal_type']
plt.rcParams['font.size'] = 10
show_spines(ax, ['left', 'bottom'])
bins = bins
assert((value_to_label == None and label == None) or \
(value_to_label != None and label != None))
# retrieve samples
samples_to_plot = self.samples[:, isoform_index]
# picasso blue #0276FD
if not vanilla:
if bins != None:
plt.hist(samples_to_plot, align='mid', lw=0.5, facecolor='#0276FD',
edgecolor='#ffffff')
else:
plt.hist(samples_to_plot, align='mid', lw=0.5, facecolor='#0276FD',
edgecolor='#ffffff')
else:
plt.hist(samples_to_plot, align='mid', facecolor='#0276FD', edgecolor='#0276FD')
plt.xlabel(r'${\hat{\Psi}}_{\mathregular{MISO}}$')
plt.ylabel('Frequency')
plt.xlim([0, 1])
# Normalize samples
if normed:
yticks = list(plt.gca().get_yticks())
print "yticks: ", yticks
ytick_labels = ["%.2f" %(float(ytick) / float(normed)) for ytick in yticks]
ax.set_yticklabels(ytick_labels)
# samples_to_plot = samples_to_plot / float(len(samples_to_plot))
# curr_tick_labels = [label.get_label() for label in ax.get_yticklabels()]
# print "Current tick labels: ", curr_tick_labels
# new_tick_labels = []
# for label in curr_tick_labels:
# if len(label) > 0:
# new_label = "%.1f" %(float(label) / normed)
# else:
# new_label = ""
# new_tick_labels.append(new_label)
# #ax.set_yticklabels(new_tick_labels)
curr_axes = plt.gca()
# Plot MAP estimate for same data
if map_estimate:
l = plt.axvline(x=map_estimate, color='b', linewidth=1.2, ls='-', label=r'${\hat{\Psi}}_{MAP}\ =\ %.2f$' %(map_estimate))
# Plot true Psi
if self.true_psi:
plot_id = "%dsimul_%diters_%dburnin_%dlag_%s_truepsi_%.2f.pdf" \
%(simulation_num, num_iters, burn_in, lag, proposal_type, self.true_psi)
l = plt.axvline(x=self.true_psi, color='r', linewidth=1.2, ls='-', label=r'True $\Psi$')
else:
# Unknown true Psi
plot_id = "%dsimul_%diters_%dburnin_%dlag_%s_%s_truepsi.pdf" \
%(simulation_num, num_iters, burn_in, lag, proposal_type, 'unknown')
if value_to_label:
l = plt.axvline(x=value_to_label, color='r', linewidth=1.2, ls='-', label=label)
# plot credible intervals if given
if plot_intervals:
# print "Plotting %.2f confidence intervals" %(plot_intervals * 100)
interval_c1, interval_c2 = ht.compute_credible_intervals(samples_to_plot, plot_intervals)
plt.axvline(x=interval_c1, color='#999999', linewidth=0.7, ls='--',
label=r'%d' %(plot_intervals*100) + '% CI')
plt.axvline(x=interval_c2, color='#999999', linewidth=0.7, ls='--')
if plot_mean:
sample_mean = mean(samples_to_plot)
plt.axvline(x=sample_mean, color='r', linewidth=0.8, label='Mean')
if with_legend and (plot_intervals or self.true_psi):
if not bbox_coords:
lg = plt.legend(handletextpad=0.172, borderpad=0.01, labelspacing=.008,
handlelength=1.4, loc='best', numpoints=1)
else:
lg = plt.legend(handletextpad=0.172, borderpad=0.01, labelspacing=.008,
handlelength=1.4, loc='best', numpoints=1,
bbox_to_anchor=bbox_coords)
lg.get_frame().set_linewidth(0)
for t in lg.get_texts():
t.set_fontsize(8)
if title:
plt.title(title)
if plots_dir:
if not plot_filename:
plt.savefig(plots_dir + "sampled_psi_hist_%s" %(plot_id))
else:
plt.savefig(plots_dir + plot_filename + '.pdf')
plt.savefig(plots_dir + plot_filename + '.png')
return curr_axes
# Plot joint scores as function of number of samples
#log_joint_fig = plt.figure(figsize=(7,4.5), dpi=300)
#skip = 15
#print "Skip of %d when plotting log joint scores" %(skip)
#plt.plot(arange(0, len(total_log_scores), skip),
# total_log_scores[arange(0, len(total_log_scores), skip)])
#print "Total log scores plotted: ", len(total_log_scores)
#plt.xlabel('Number of iterations (lag not shown)')
#plt.ylabel('Log joint score')
#plt.savefig(plots_dir + "log_joint_scores_skip%d_%s" %(skip, plot_id))
# def load_samples(samples_filename):
# """
# Load a set of samples from a file and build an associated gene.
# """
# samples_data, h = csv2array(samples_filename, skiprows=1,
# raw_header=True)
# samples = []
# log_scores = []
# for line in samples_data:
# psi_vals = [float(v) for v in line['sampled_psi'].split(',')]
# samples.append(psi_vals)
# log_scores.append(float(line['log_score']))
# params, gene = parse_sampler_params(h[0])
# return (array(samples), array(log_scores), params, gene)
|
from wagtail.admin.edit_handlers import PublishingPanel, PrivacyModalPanel
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.images.widgets import AdminImageChooser
class CustomImageChooserPanel(ImageChooserPanel):
    """Image chooser panel whose admin widget hides the "edit" link."""

    def widget_overrides(self):
        # Swap in an AdminImageChooser configured without the edit link.
        chooser_widget = AdminImageChooser(show_edit_link=False)
        return {self.field_name: chooser_widget}
# Module-level list of Wagtail settings-tab panels with Finnish help texts:
# a publishing (go-live / expiry dates) panel and a privacy/visibility panel.
CUSTOM_SETTINGS_PANELS = [
    PublishingPanel(help_text='Sekä julkaisu- että poistopäivämäärä ovat vapaaehtoisia kenttiä ja '
                    'ne voidaan jättää tyhjäksi. Voit myös halutessasi täyttää vain toisen kentistä.'),
    PrivacyModalPanel(help_text='Tällä voit määritellä sivun ja sen alasivujen näkyvyyden.'),
]
|
from tkinter import *

# Root window setup.
a = Tk()
a.title("my first window")
a.geometry("500x500+0+0")

# BUG FIX: the original assigned the result of `.pack()` to the variables,
# but pack() always returns None, so every widget reference was lost.
# Create the widget first (keeping the reference), then pack it.
l1 = Label(text='Label1', fg='red', bg='green', font='25')
l1.pack()
button2 = Button(text='submit', fg='black', bg='white', font='38')
button2.pack()
l2 = Label(text='label2', fg='blue', bg='yellow', font='48')
l2.pack()
button1 = Button(text='submit', fg='blue', bg='red', font='38')
button1.pack()

# Enter the Tk event loop (blocks until the window is closed).
a.mainloop()
# import modules
from time import sleep
from ina219 import INA219
import time

# I2C addresses of the four INA219 current/voltage sensors on the bus.
SENSOR_ADDRESSES = (0x40, 0x41, 0x44, 0x45)


def _make_sensor(address):
    """Create and configure one INA219 sensor at the given I2C address."""
    ina = INA219(shunt_ohms=0.1,
                 max_expected_amps=2.0,
                 address=address)
    ina.configure(voltage_range=ina.RANGE_32V,
                  gain=ina.GAIN_AUTO,
                  bus_adc=ina.ADC_128SAMP,
                  shunt_adc=ina.ADC_128SAMP)
    return ina


# PERF FIX: sensor construction/configuration is loop-invariant — the
# original re-created and re-configured all four sensors on every cycle.
# Do it once up front.
sensors = [_make_sensor(addr) for addr in SENSOR_ADDRESSES]

t = 0
# Measurement loop: read every sensor, print the values, then wait 15 s.
while True:
    t = t + 1
    print(t)
    for num, ina in enumerate(sensors, start=1):
        v = ina.voltage()           # bus voltage
        i = ina.current() / 1000    # current, converted from mA
        p = ina.power()             # power as reported by the library
        print("Voltage Sensor " + str(num) + "= " + str(v))
        print("Current Sensor " + str(num) + "= " + str(i))
        print("Power Sensor " + str(num) + "= " + str(p))
    time.sleep(15)
# Generated by Django 3.1.5 on 2021-01-14 19:47
from django.db import migrations
import multiselectfield.db.fields
class Migration(migrations.Migration):
    """Auto-generated migration: add multi-select 'choreography' and
    'learning' fields (same dance-style choices, default 'None') to the
    Matching model. Generated code — do not edit by hand."""

    dependencies = [
        ('matching', '0004_matching_skills'),
    ]

    operations = [
        migrations.AddField(
            model_name='matching',
            name='choreography',
            field=multiselectfield.db.fields.MultiSelectField(choices=[('Contemporary', 'Contemporary'), ('Folkloric', 'Folkloric'), ('Hip-hop', 'Hip-hop'), ('None', 'None')], default='None', max_length=35),
        ),
        migrations.AddField(
            model_name='matching',
            name='learning',
            field=multiselectfield.db.fields.MultiSelectField(choices=[('Contemporary', 'Contemporary'), ('Folkloric', 'Folkloric'), ('Hip-hop', 'Hip-hop'), ('None', 'None')], default='None', max_length=35),
        ),
    ]
|
# -*- coding: utf-8 -*-
import requests

# Target endpoint of the mall-channel admin service.
endpoint = "http://192.168.2.237:8118/gs_mall_channel_mk_admin/admin/channelCustomCategory/insertCategory"

# NOTE(review): the query string sends channelId 125 while the JSON body
# below sends channelId 135 — confirm which value the service honours.
params = {"channelId": "125"}

# Raw JSON body, kept byte-for-byte as the service currently receives it.
body = "{\r\n\"channelId\":\"135\",\r\n "\
       "\"name\":\"test2\",\r\n " \
       "\"channelName\":\"ceshi\",\r\n " \
       "\"pid\":\"85\",\r\n " \
       "\"type\":\"2\",\r\n " \
       "\"display\":\"1\",\r\n " \
       "\"imgUrl\":\"http://www.baidu.com/\",\r\n " \
       "\"keywords\":\"test2\",\r\n " \
       "\"externalLinks\":\"www.bjson.com\",\r\n " \
       "\"suppilerCategorys\":{\r\n " \
       "\"suppilerCategoryLinks\":[{\r\n " \
       "\"suppilerCategoryLink\":[\r\n " \
       "{\"id\":2,\"code\":\"0002\",\"name\":\"shijian\"},\r\n"\
       "{\"id\":3,\"code\":\"0003\",\"name\":\"shijian2\"}\r\n " \
       "]\r\n }]\r\n }\r\n}"

request_headers = {
    'Content-Type': "application/json",
    'cache-control': "no-cache",
}

# NOTE(review): an "insert" endpoint is called with HTTP GET plus a request
# body — verify against the API; such endpoints commonly expect POST.
resp = requests.request("GET", endpoint, data=body, headers=request_headers, params=params)
print(resp.json())
from mock import patch, Mock
from tornado import testing, concurrent
from zoonado.protocol.acl import ACL
from zoonado import client, protocol, exc, WatchEvent
class ClientTests(testing.AsyncTestCase):
def future_value(self, value):
    """Return a Future already resolved with *value*."""
    resolved = concurrent.Future()
    resolved.set_result(value)
    return resolved
def future_error(self, exception):
    """Return a Future already failed with *exception*."""
    failed = concurrent.Future()
    failed.set_exception(exception)
    return failed
def test_default_acl_is_unrestricted(self):
    """A fresh client gets a single world:anyone ACL with all permissions."""
    zk = client.Zoonado("host,host,host")

    open_acl = ACL.make(
        scheme="world", id="anyone",
        read=True, write=True, create=True, delete=True, admin=True
    )

    self.assertEqual(len(zk.default_acl), 1)
    self.assertEqual(zk.default_acl[0], open_acl)
def test_normalize_path_with_leading_slash(self):
    """An already-absolute path is returned unchanged."""
    zk = client.Zoonado("host1,host2,host3")
    self.assertEqual(zk.normalize_path("/foo/bar"), "/foo/bar")
def test_normalize_path_with_no_slash(self):
    """A relative path gains a leading slash."""
    zk = client.Zoonado("host1,host2,host3")
    self.assertEqual(zk.normalize_path("foo/bar"), "/foo/bar")
def test_normalize_path_with_extra_slashes(self):
    """Duplicate slashes are collapsed to one."""
    zk = client.Zoonado("host1,host2,host3")
    self.assertEqual(zk.normalize_path("foo//bar"), "/foo/bar")
def test_normalize_path_with_chroot(self):
    """With a chroot configured, the chroot prefixes the normalized path."""
    zk = client.Zoonado("host1,host2,host3", chroot="/bazz")
    self.assertEqual(zk.normalize_path("foo//bar"), "/bazz/foo/bar")
def test_normalize_path_with_chroot_missing_leading_slash(self):
    """A chroot given without a leading slash is still applied absolutely."""
    zk = client.Zoonado("host1,host2,host3", chroot="bazz")
    self.assertEqual(zk.normalize_path("foo//bar"), "/bazz/foo/bar")
def test_denormalize_path_without_chroot(self):
    """With no chroot, denormalizing is a no-op."""
    zk = client.Zoonado("host1,host2,host3")
    self.assertEqual(zk.denormalize_path("/foo/bar"), "/foo/bar")
def test_denormalize_path_with_chroot(self):
    """The chroot prefix is stripped from a chrooted path."""
    zk = client.Zoonado("host1,host2,host3", chroot="/bazz")
    self.assertEqual(zk.denormalize_path("/bazz/foo/bar"), "/foo/bar")
def test_denormalize_path_with_chroot_mismatch(self):
    """A path outside the chroot is returned untouched."""
    zk = client.Zoonado("host1,host2,host3", chroot="/bazz")
    self.assertEqual(zk.denormalize_path("/foo/bar"), "/foo/bar")
@patch.object(client, "Session")
@testing.gen_test
def test_start_calls_session_start(self, Session):
    """start() delegates to the underlying Session's start()."""
    Session.return_value.start.return_value = self.future_value(None)

    zk = client.Zoonado("host1,host2,host3")
    self.assertEqual(zk.session, Session.return_value)

    yield zk.start()

    zk.session.start.assert_called_once_with()
@patch.object(client.Zoonado, "ensure_path")
@patch.object(client, "Session")
@testing.gen_test
def test_start_ensures_chroot_path(self, Session, ensure_path):
    """With a chroot configured, start() ensures the chroot root exists."""
    Session.return_value.start.return_value = self.future_value(None)
    ensure_path.return_value = self.future_value(None)

    zk = client.Zoonado("host1,host2,host3", chroot="/foo/bar")
    yield zk.start()

    zk.ensure_path.assert_called_once_with("/")
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    def test_features_property(self, Session, Features):
        """The features property is built from the connection's version tuple."""
        Session.return_value.conn.version_info = (3, 6, 0)
        c = client.Zoonado("host1,host2,host3")
        self.assertEqual(c.features, Features.return_value)
        Features.assert_called_once_with((3, 6, 0))
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    def test_features_when_no_connection(self, Session, Features):
        """With no connection, features falls back to version (0, 0, 0)."""
        Session.return_value.conn = None
        c = client.Zoonado("host1,host2,host3")
        self.assertEqual(c.features, Features.return_value)
        Features.assert_called_once_with((0, 0, 0))
    @patch.object(client, "Session")
    @testing.gen_test
    def test_send_passes_to_session_send(self, Session):
        """send() forwards the request to the session and returns its response."""
        request = Mock()
        response = Mock()
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        actual = yield c.send(request)
        c.session.send.assert_called_once_with(request)
        self.assertTrue(response is actual)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_send_caches_stats_if_present_on_response(self, Session):
        """A response's stat is cached under the denormalized (chroot-stripped) path."""
        request = Mock(path="/bazz/foo/bar")
        stat = Mock()
        response = Mock(stat=stat)
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3", chroot="/bazz")
        yield c.send(request)
        self.assertEqual(c.stat_cache["/foo/bar"], stat)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_close_calls_session_close(self, Session):
        """close() delegates to the session's close()."""
        Session.return_value.close.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.close()
        c.session.close.assert_called_once_with()
    @patch.object(client, "Session")
    @testing.gen_test
    def test_exists_request(self, Session):
        """exists() sends an ExistsRequest and resolves True when no error is raised."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        result = yield c.exists("/foo/bar")
        self.assertTrue(result)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.ExistsRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, False)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_exists_with_chroot(self, Session):
        """exists() prefixes the chroot onto the request path."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3", chroot="bazz")
        result = yield c.exists("/foo/bar")
        self.assertTrue(result)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.ExistsRequest)
        self.assertEqual(request.path, "/bazz/foo/bar")
        self.assertEqual(request.watch, False)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_exists_request_with_watch(self, Session):
        """watch=True is carried onto the ExistsRequest."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.exists("/foo/bar", watch=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.ExistsRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, True)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_exists_no_node_error(self, Session):
        """A NoNode error resolves exists() to False instead of raising."""
        Session.return_value.send.return_value = self.future_error(
            exc.NoNode()
        )
        c = client.Zoonado("host1,host2,host3")
        result = yield c.exists("/foo/bar", watch=True)
        self.assertFalse(result)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_delete(self, Session):
        """delete() sends a DeleteRequest with the wildcard version -1."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.delete("/foo/bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.DeleteRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_delete_with_chroot(self, Session):
        """delete() prefixes the chroot onto the request path."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3", chroot="/bar")
        yield c.delete("/foo/bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.DeleteRequest)
        self.assertEqual(request.path, "/bar/foo/bar")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_delete_with_stat_cache_unforced(self, Session):
        """A cached stat's version is used for optimistic-concurrency deletes."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.delete("/foo/bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.DeleteRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.version, 33)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_delete_with_stat_cache_forced(self, Session):
        """force=True ignores the cached version and sends -1."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.delete("/foo/bar", force=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.DeleteRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_data(self, Session):
        """get_data() sends a GetDataRequest; UTF-8-decodable bytes come back as text."""
        response = Mock(data=b"wooo")
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        result = yield c.get_data("/foo/bar")
        self.assertEqual(result, u"wooo")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, False)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_data_with_raw_bytes(self, Session):
        """Non-UTF-8 payloads are returned as raw bytes, not decoded."""
        response = Mock(data=b"\xc0=\xc00")
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        result = yield c.get_data("/foo/bar")
        self.assertEqual(result, b"\xc0=\xc00")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, False)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_data_with_watch(self, Session):
        """watch=True is carried onto the GetDataRequest."""
        response = Mock(data=b"wooo")
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        yield c.get_data("/foo/bar", watch=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, True)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_data_with_chroot(self, Session):
        """get_data() prefixes the chroot onto the request path."""
        response = Mock(data=b"wooo")
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3", chroot="bwee")
        yield c.get_data("/foo/bar", watch=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetDataRequest)
        self.assertEqual(request.path, "/bwee/foo/bar")
        self.assertEqual(request.watch, True)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data(self, Session):
        """set_data() encodes str payloads to bytes and uses version -1 by default."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.set_data("/foo/bar", data="some data")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.data, b"some data")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_invalid_data(self, Session):
        """Non-str/bytes payloads (int, arbitrary object) raise."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        with self.assertRaises(Exception):
            yield c.set_data("/foo/bar", data=555)
        with self.assertRaises(Exception):
            yield c.set_data("/foo/bar", data=object())
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_string(self, Session):
        """A plain string payload is accepted without error."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.set_data("/foo/bar", data="foo_bar")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_binary_data(self, Session):
        """A raw bytes payload is accepted without error."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.set_data("/foo/bar", data=b"\xc0=\x00")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_chroot(self, Session):
        """set_data() prefixes the chroot onto the request path."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3", chroot="/bar")
        yield c.set_data("/foo/bar", data="{json}")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetDataRequest)
        self.assertEqual(request.path, "/bar/foo/bar")
        self.assertEqual(request.data, b"{json}")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_stat_cache_unforced(self, Session):
        """A cached stat's version is used for optimistic-concurrency writes."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.set_data("/foo/bar", data="{json}")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.data, b"{json}")
        self.assertEqual(request.version, 33)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_data_with_stat_cache_forced(self, Session):
        """force=True ignores the cached version and sends -1."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.set_data("/foo/bar", data="{blarg}", force=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetDataRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.data, b"{blarg}")
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_children(self, Session):
        """get_children() sends a GetChildren2Request and returns the child names."""
        response = Mock(children=["bwee", "bwoo"])
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        result = yield c.get_children("/foo/bar")
        self.assertEqual(result, ["bwee", "bwoo"])
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetChildren2Request)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, False)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_children_with_watch(self, Session):
        """watch=True is carried onto the GetChildren2Request."""
        response = Mock(children=["bwee", "bwoo"])
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3")
        yield c.get_children("/foo/bar", watch=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetChildren2Request)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.watch, True)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_children_with_chroot(self, Session):
        """get_children() prefixes the chroot onto the request path."""
        response = Mock(children=["bwee", "bwoo"])
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3", chroot="bwee")
        yield c.get_children("/foo/bar", watch=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetChildren2Request)
        self.assertEqual(request.path, "/bwee/foo/bar")
        self.assertEqual(request.watch, True)
@patch.object(client, "Session")
@testing.gen_test
def test_get_acl(self, Session):
response = Mock(children=["bwee", "bwoo"])
Session.return_value.send.return_value = self.future_value(response)
c = client.Zoonado("host1,host2,host3", chroot="bwee")
yield c.get_acl("/foo/bar")
args, kwargs = c.session.send.call_args
request, = args
self.assertIsInstance(request, protocol.GetACLRequest)
self.assertEqual(request.path, "/bwee/foo/bar")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_get_acl_with_chroot(self, Session):
        """get_acl() prefixes the chroot onto the request path."""
        response = Mock(children=["bwee", "bwoo"])
        Session.return_value.send.return_value = self.future_value(response)
        c = client.Zoonado("host1,host2,host3", chroot="bwee")
        yield c.get_acl("/foo/bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.GetACLRequest)
        self.assertEqual(request.path, "/bwee/foo/bar")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_acl(self, Session):
        """set_acl() sends a SetACLRequest with the given ACL list and version -1."""
        Session.return_value.send.return_value = self.future_value(None)
        mock_acl = Mock()
        c = client.Zoonado("host1,host2,host3")
        yield c.set_acl("/foo/bar", acl=[mock_acl])
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetACLRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.acl, [mock_acl])
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_acl_with_chroot(self, Session):
        """set_acl() prefixes the chroot onto the request path."""
        Session.return_value.send.return_value = self.future_value(None)
        mock_acl = Mock()
        c = client.Zoonado("host1,host2,host3", chroot="/bar")
        yield c.set_acl("/foo/bar", acl=[mock_acl])
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetACLRequest)
        self.assertEqual(request.path, "/bar/foo/bar")
        self.assertEqual(request.acl, [mock_acl])
        self.assertEqual(request.version, -1)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_acl_with_stat_cache_unforced(self, Session):
        """A cached stat's version is used for optimistic-concurrency ACL writes."""
        Session.return_value.send.return_value = self.future_value(None)
        mock_acl = Mock()
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.set_acl("/foo/bar", acl=[mock_acl])
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetACLRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.acl, [mock_acl])
        self.assertEqual(request.version, 33)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_set_acl_with_stat_cache_forced(self, Session):
        """force=True ignores the cached version and sends -1."""
        Session.return_value.send.return_value = self.future_value(None)
        mock_acl = Mock()
        c = client.Zoonado("host1,host2,host3")
        c.stat_cache["/foo/bar"] = Mock(version=33)
        yield c.set_acl("/foo/bar", acl=[mock_acl], force=True)
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertIsInstance(request, protocol.SetACLRequest)
        self.assertEqual(request.path, "/foo/bar")
        self.assertEqual(request.acl, [mock_acl])
        self.assertEqual(request.version, -1)
@patch.object(client, "Transaction")
def test_begin_transation_returns_transaction_object(self, Transaction):
c = client.Zoonado("host1,host2,host3")
txn = c.begin_transaction()
self.assertEqual(txn, Transaction.return_value)
Transaction.assert_called_once_with(c)
    @patch.object(client, "Features")
    @testing.gen_test
    def test_create_container_requires_feature_present(self, Features):
        """create(container=True) raises ValueError when the server lacks container support."""
        Features.return_value.containers = False
        c = client.Zoonado("host1,host2,host3")
        with self.assertRaises(ValueError):
            yield c.create("/foo", container=True)
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_create_with_stat(self, Session, Features):
        """With create_with_stat support, create() uses Create2Request."""
        response = Mock(path="/foo")
        Session.return_value.send.return_value = self.future_value(response)
        Features.return_value.create_with_stat = True
        c = client.Zoonado("host1,host2,host3")
        result = yield c.create("/foo", data="bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertEqual(result, "/foo")
        self.assertIsInstance(request, protocol.Create2Request)
        self.assertEqual(request.data, b"bar")
        self.assertEqual(request.flags, 0)
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_create_without_stat(self, Session, Features):
        """Without create_with_stat support, create() falls back to CreateRequest."""
        response = Mock(path="/foo")
        Session.return_value.send.return_value = self.future_value(response)
        Features.return_value.create_with_stat = False
        c = client.Zoonado("host1,host2,host3")
        result = yield c.create("/foo", data="bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertEqual(result, "/foo")
        self.assertIsInstance(request, protocol.CreateRequest)
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_create_with_chroot(self, Session, Features):
        """create() strips the chroot prefix from the returned path."""
        response = Mock(path="/root/foo")
        Session.return_value.send.return_value = self.future_value(response)
        Features.return_value.create_with_stat = True
        c = client.Zoonado("host1,host2,host3", chroot="/root/")
        result = yield c.create("/foo", data="bar")
        args, kwargs = c.session.send.call_args
        request, = args
        self.assertEqual(result, "/foo")
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_ensure_path(self, Session, Features):
        """ensure_path() creates every intermediate node, root-first."""
        Session.return_value.send.return_value = self.future_value(None)
        Features.return_value.create_with_stat = True
        c = client.Zoonado("host1,host2,host3")
        yield c.ensure_path("/foo/bar/bazz")
        requests = [arg[0] for arg, _ in c.session.send.call_args_list]
        self.assertIsInstance(requests[0], protocol.Create2Request)
        self.assertEqual(requests[0].path, "/foo")
        self.assertEqual(requests[1].path, "/foo/bar")
        self.assertEqual(requests[2].path, "/foo/bar/bazz")
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_ensure_path_is_normalized(self, Session, Features):
        """ensure_path() normalizes relative paths under the chroot before creating."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3", chroot="/blee")
        yield c.ensure_path("bar/bazz")
        requests = [arg[0] for arg, _ in c.session.send.call_args_list]
        self.assertEqual(requests[0].path, "/blee")
        self.assertEqual(requests[1].path, "/blee/bar")
        self.assertEqual(requests[2].path, "/blee/bar/bazz")
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_ensure_path_existing_nodes(self, Session, Features):
        """NodeExists errors on intermediate nodes are tolerated, not raised."""
        # first two creates "fail" with NodeExists, the leaf create succeeds
        send_results = [exc.NodeExists(), exc.NodeExists(), None]
        def get_send_result(*args):
            result = send_results.pop(0)
            if isinstance(result, Exception):
                return self.future_error(result)
            else:
                return self.future_value(result)
        Session.return_value.send.side_effect = get_send_result
        c = client.Zoonado("host1,host2,host3")
        yield c.ensure_path("/foo/bar/bazz")
        requests = [arg[0] for arg, _ in c.session.send.call_args_list]
        self.assertEqual(requests[0].path, "/foo")
        self.assertEqual(requests[1].path, "/foo/bar")
        self.assertEqual(requests[2].path, "/foo/bar/bazz")
    @patch.object(client, "Features")
    @patch.object(client, "Session")
    @testing.gen_test
    def test_ensure_path_without_stats(self, Session, Features):
        """Without create_with_stat, ensure_path() falls back to plain CreateRequests."""
        Session.return_value.send.return_value = self.future_value(None)
        c = client.Zoonado("host1,host2,host3")
        yield c.ensure_path("/foo/bar/bazz")
        requests = [arg[0] for arg, _ in c.session.send.call_args_list]
        self.assertIsInstance(requests[0], protocol.CreateRequest)
        self.assertIsInstance(requests[1], protocol.CreateRequest)
        self.assertIsInstance(requests[2], protocol.CreateRequest)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_wait_for_event_uses_session_callback_api(self, Session):
        """wait_for_event() registers a watch callback and removes it after it fires."""
        session = Session.return_value
        c = client.Zoonado("host1,host2,host3")
        wait = c.wait_for_event(WatchEvent.CREATED, "/foo/bar")
        self.assertFalse(wait.done())
        args, _ = session.add_watch_callback.call_args
        added_event_type, added_path, added_callback = args
        self.assertEqual(added_event_type, WatchEvent.CREATED)
        self.assertEqual(added_path, "/foo/bar")
        self.assertFalse(session.remove_watch_callback.called)
        # firing the callback resolves the future and deregisters the watch
        added_callback(wait)
        self.assertTrue(wait.done())
        self.assertEqual(wait.result(), None)
        args, _ = session.remove_watch_callback.call_args
        removed_event_type, removed_path, removed_callback = args
        self.assertEqual(added_event_type, removed_event_type)
        self.assertEqual(added_path, removed_path)
        self.assertEqual(added_callback, removed_callback)
    @patch.object(client, "Session")
    @testing.gen_test
    def test_wait_for_event_handles_multiple_calls(self, Session):
        """Invoking the watch callback twice must not error on the already-done future."""
        session = Session.return_value
        c = client.Zoonado("host1,host2,host3")
        wait = c.wait_for_event(WatchEvent.CREATED, "/foo/bar")
        self.assertFalse(wait.done())
        args, _ = session.add_watch_callback.call_args
        added_event_type, added_path, added_callback = args
        self.assertEqual(added_event_type, WatchEvent.CREATED)
        self.assertEqual(added_path, "/foo/bar")
        self.assertFalse(session.remove_watch_callback.called)
        added_callback(wait)
        added_callback(wait)
        self.assertTrue(wait.done())
        self.assertEqual(wait.result(), None)
|
# coding:UTF-8
"""
简易http服务器封装模块
@author: yubang
"""
from werkzeug.serving import run_simple
class HttpServer(object):
    """Thin wrapper around werkzeug's development server."""

    def start_server(self, wsgi_app, host='127.0.0.1', port=8080, debug=True, use_reload=True):
        """
        Start a simple development server.

        :param wsgi_app: the WSGI application callable
        :param host: hostname/interface to listen on
        :param port: port to listen on
        :param debug: whether to enable the interactive debugger
        :param use_reload: whether to restart automatically on code changes
        :return:
        """
        # run_simple's positional order is (hostname, port, application,
        # use_reloader, use_debugger), so use_reload and debug map correctly.
        run_simple(host, port, wsgi_app, use_reload, debug)
|
from urllib.request import urlopen
from inscriptis import get_text
from bs4 import BeautifulSoup
from flask import Flask, request
from flask_restful import Resource, Api, reqparse
# from .storage import models
app = Flask(__name__)
api = Api(app)
class Storage(Resource):
    """REST resource that queues a parse request (url + text/image flags)."""

    # get request for pending
    def get(self):
        """Validate the url/text/image query arguments and acknowledge the request.

        Fix: ``RequestParser.add_argument`` returns the parser itself, not
        the argument value, so the original ``url``/``text``/``image``
        locals were parser objects and never usable; the parsed values
        live in ``args`` after ``parse_args()``.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('url', required=True)
        parser.add_argument('text', required=True)
        parser.add_argument('image', required=True)
        args = parser.parse_args()
        # here should be insert into db pending requests
        # should be error handling
        # models.parse_request.insert().values(url=args['url'], text=args['text'], image=args['image'])
        return {'message': 'inserted',
                'data': args}, 200
def load_page(url):
    """Fetch *url* over HTTP and return the response body decoded as UTF-8.

    Fix: the original never closed the HTTP response; the context manager
    guarantees the connection is released even if read()/decode() raises.
    """
    with urlopen(url) as response:
        return response.read().decode('utf-8')
class TextParser(Resource):
    """Extract a page's visible text and persist it under /tmp."""

    def post(self):
        """Fetch the URL from the posted JSON, strip its markup, and write the text to /tmp/<host>.txt."""
        payload = request.get_json()
        target_url = payload['url']
        extracted_text = get_text(load_page(target_url))
        host_part = target_url.split('/')[2]
        scraped_text_path = '/tmp/' + host_part + '.txt'
        # models.parse_request.insert().values(parse_request_id=parse_request_id, content=content)
        with open(scraped_text_path, 'w') as out_file:
            out_file.write(extracted_text)
        # should be error handling
        return {'success': f'Text parsed into {scraped_text_path}'}, 201
class ImageParser(Resource):
    """Download every <img> referenced by the submitted page into /tmp."""

    def post(self):
        """Fetch the page named in the posted JSON ({'url': ...}) and save its images.

        Fixes vs. original:
        - slash resolution now uses if/elif/else: a site-relative src
          ('/img.png') previously fell through to the trailing ``else`` and
          was kept as a bare relative path, which always failed to download;
          protocol-relative ('//host/img.png') sources are checked first.
        - the fallback counter is initialised once before the loop, so
          unnamed images are numbered 1, 2, 3, ... instead of always 1.
        - the computed filename is actually used in the output path (the
          original wrote every image to one hard-coded literal file name).
        - a missing alt attribute (None) no longer crashes the concatenation.
        """
        url = request.get_json()
        host = url['url'].split('/')[2]
        soup_data = BeautifulSoup(load_page(url['url']), "html.parser")
        images = soup_data.findAll('img')
        # should be Celery for asynchronous tasks
        file_number = 1
        for img in images:
            temp = img.get('src')
            if temp.startswith('//'):
                # protocol-relative URL
                image = 'https:' + temp
            elif temp.startswith('/'):
                # site-relative URL: prefix the page's host
                image = 'https://' + host + temp
            else:
                image = temp
            print(image)
            alt_text = img.get('alt') or ''
            if alt_text:
                filename = f"{host}_{alt_text}"
            else:
                # no alt text: fall back to a running number
                filename = str(file_number)
                file_number += 1
            try:
                with open(f'/tmp/{filename}.jpg', 'wb') as file:
                    file.write(urlopen(image).read())
            except Exception as e:
                print(e, image)
                # continue parsing website if img url failed
                continue
        return {'success': 'Images parsed into tmp catalogue'}, 201
api.add_resource(TextParser, '/api/persist_text')
api.add_resource(ImageParser, '/api/persist_image')
api.add_resource(Storage, '/api/queue_request')
|
# 给定一个 m x n 的矩阵,如果一个元素为 0,则将其所在行和列的所有元素都设为 0。请使用原地算法。
# 示例 1:
# 输入:
# [
# [1,1,1],
# [1,0,1],
# [1,1,1]
# ]
# 输出:
# [
# [1,0,1],
# [0,0,0],
# [1,0,1]
# ]
class Solution:
    def setZeroes(self, matrix: list[list[int]]) -> None:
        """Zero out the entire row and column of every 0 cell, in place.

        Do not return anything; *matrix* is modified in-place.

        Two passes: first record the rows/columns containing a zero in
        sets (O(m + n) extra space), then rewrite the marked cells.

        Fix: the annotation used ``typing.List`` without importing it,
        which raises NameError when the class body is executed; builtin
        generics (``list[...]``, Python 3.9+) need no import.
        """
        row_num, col_num = len(matrix), len(matrix[0])
        # sets of row/column indices that must be zeroed
        row_set, col_set = set(), set()
        for row in range(row_num):
            for col in range(col_num):
                if matrix[row][col] == 0:
                    row_set.add(row)
                    col_set.add(col)
        # second pass: blank out every recorded row and column
        for row in range(row_num):
            for col in range(col_num):
                if row in row_set or col in col_set:
                    matrix[row][col] = 0
|
# CLI driver: invoke an AWS Lambda repeatedly through Celery workers,
# optionally asynchronously on the Celery side, the Lambda side, or both.
import time, argparse
from src.celery_lambda.CeleryLambda import CeleryLambda

parser = argparse.ArgumentParser()
parser.add_argument('lambda_name', type=str,
                    help="The name of Lambda function")
parser.add_argument('-c', '--celery_async', action="store_true",
                    help="turn on to make Celery workers invoke asynchronously")
parser.add_argument('-l', '--lambda_async', action="store_true",
                    help="turn on to make Lambda is invoked asynchronously")
parser.add_argument('invoke_time', type=int,
                    help="Integer value of total number of invocations")
parser.add_argument('batch_number', type=int,
                    help="Number of batch invocations")
args = parser.parse_args()

# echo the effective configuration before starting
print("=====Arguments======")
# fix: "fucntion" typo in the user-facing output
print("The name of Lambda function = %s" %args.lambda_name)
print("Celery Async = %s" %args.celery_async)
print("Lambda Async = %s" %args.lambda_async)
print("Number of Invocation = %d" %args.invoke_time)
print("Number of Batch = %d" %args.batch_number)
print("====================")

# print("Warm up call to Lambda container")
# response = invoke_sync("warm-up-call")
# print (response)

# 'RequestResponse' blocks for the Lambda result; 'Event' is fire-and-forget
sync_payload = {'messageType' : 'refreshConfig', 'invokeType' : 'RequestResponse'}
async_payload = {'messageType' : 'refreshConfig', 'invokeType' : 'Event'}
invocation = CeleryLambda(lambda_name = args.lambda_name,
                          celery_async = args.celery_async,
                          lambda_async = args.lambda_async,
                          invoke_time = args.invoke_time,
                          batch_number = args.batch_number,
                          sync_payload= sync_payload,
                          async_payload = async_payload,
                          decoder = 'utf-8')
invocation.run()
# invocation.sqs_trigger()
import coverage
import os
import unittest
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from project import app, db
# select the config class named by APP_MODE (e.g. "project.config.DevConfig")
app.config.from_object(os.environ['APP_MODE'])
# wire up Alembic migrations and the Flask-Script command registry
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def cov():
    '''
    Run the test suite under coverage and emit console + HTML reports.
    :return:
    '''
    # renamed local so it no longer shadows this command's own name
    coverage_inst = coverage.coverage(branch=True, include='project/*', omit='*/__init__.py')
    coverage_inst.start()
    suite = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(suite)
    coverage_inst.stop()
    coverage_inst.save()
    print('Coverage Summary:')
    coverage_inst.report()
    report_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'coverage')
    coverage_inst.html_report(directory=report_dir)
    coverage_inst.erase()
@manager.command
def test():
    '''
    Run the test suite once, without collecting coverage.
    :return:
    '''
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(unittest.TestLoader().discover('tests'))
if __name__ == '__main__':
    # expose the Flask-Script CLI (db migrations, cov, test)
    manager.run()
|
"""
This part of code is adopted from https://github.com/Hanjun-Dai/graph_adversarial_attack (Copyright (c) 2018 Dai, Hanjun and Li, Hui and Tian, Tian and Huang, Xin and Wang, Lin and Zhu, Jun and Song, Le)
but modified to be integrated into the repository.
"""
import os
import sys
import numpy as np
import torch
import networkx as nx
import random
from torch.nn.parameter import Parameter
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from tqdm import tqdm
from copy import deepcopy
import pickle as cp
from deeprobust.graph.utils import *
import scipy.sparse as sp
from scipy.sparse.linalg.eigen.arpack import eigsh
from deeprobust.graph import utils
from deeprobust.graph.rl.env import *
class NodeInjectionEnv(NodeAttackEnv):
    """Node attack environment. It executes an action and then change the
    environment status (modify the graph).
    """

    def __init__(self, features, labels, idx_train, idx_val, dict_of_lists, classifier, ratio=0.01, parallel_size=1, reward_type='binary'):
        """number of injected nodes: ratio*|V|
        number of modifications: ratio*|V|*|D_avg|
        """
        # super(NodeInjectionEnv, self).__init__(features, labels, all_targets, list_action_space, classifier, num_mod, reward_type)
        super(NodeInjectionEnv, self).__init__(features, labels, idx_val, dict_of_lists, classifier)
        self.parallel_size = parallel_size

        # injected nodes are assumed to start isolated (degree 0), so N
        # counts only the original, connected nodes
        degrees = np.array([len(d) for n, d in dict_of_lists.items()])
        N = len(degrees[degrees > 0])
        avg_degree = degrees.sum() / N
        self.n_injected = len(degrees) - N
        assert self.n_injected == int(ratio * N)

        self.ori_adj_size = N
        # budget: one edge per unit of average degree per injected node
        self.n_perturbations = int(self.n_injected * avg_degree)
        print("number of perturbations: {}".format(self.n_perturbations))
        self.all_nodes = np.arange(N)
        # NOTE(review): all_nodes only spans the N original indices, so this
        # slice yields N-n_injected..N-1 rather than the injected indices
        # N..N+n_injected-1 that step()'s "- self.ori_adj_size" offset
        # seems to expect -- confirm against the upstream repo.
        self.injected_nodes = self.all_nodes[-self.n_injected: ]
        self.previous_acc = [1] * parallel_size

        self.idx_train = np.hstack((idx_train, self.injected_nodes))
        self.idx_val = idx_val

        # one mutable label vector (for the injected nodes) per parallel copy
        self.modified_label_list = []
        for i in range(self.parallel_size):
            self.modified_label_list.append(labels[-self.n_injected: ].clone())

    def init_overall_steps(self):
        """Reset the episode-wide step counter and graph modifications."""
        self.overall_steps = 0
        self.modified_list = []
        for i in range(self.parallel_size):
            self.modified_list.append(ModifiedGraph())

    def setup(self):
        """Reset per-perturbation state (one perturbation = 3 actions)."""
        self.n_steps = 0
        self.first_nodes = None
        self.second_nodes = None
        self.rewards = None
        self.binary_rewards = None
        self.list_acc_of_all = []

    def step(self, actions, inference=False):
        '''
        run actions and get reward

        Actions arrive in triples: (1) pick an injected node, (2) pick a
        second node and add the edge, (3) assign a label to the injected
        node.  After each complete triple (unless inference), the victim
        classifier is retrained and the reward is +1 if validation
        accuracy dropped, else -1.
        '''
        if self.first_nodes is None: # pick the first node of edge
            assert (self.n_steps + 1) % 3 == 1
            self.first_nodes = actions[:]

        if (self.n_steps + 1) % 3 == 2:
            self.second_nodes = actions[:]
            for i in range(self.parallel_size):
                # add an edge from the graph
                self.modified_list[i].add_edge(self.first_nodes[i], actions[i], 1.0)

        if (self.n_steps + 1) % 3 == 0:
            for i in range(self.parallel_size):
                # change label
                self.modified_label_list[i][self.first_nodes[i] - self.ori_adj_size] = actions[i]
            # triple complete: clear the node selections for the next one
            self.first_nodes = None
            self.second_nodes = None

        self.n_steps += 1
        self.overall_steps += 1

        if not inference:
            if self.isActionFinished() :
                rewards = []
                for i in (range(self.parallel_size)):
                    device = self.labels.device
                    # rebuild the perturbed adjacency and retrain the victim
                    extra_adj = self.modified_list[i].get_extra_adj(device=device)
                    adj = self.classifier.norm_tool.norm_extra(extra_adj)
                    labels = torch.cat((self.labels, self.modified_label_list[i]))
                    # self.classifier.fit(self.features, adj, labels, self.idx_train, self.idx_val, normalize=False)
                    self.classifier.fit(self.features, adj, labels, self.idx_train, self.idx_val, normalize=False, patience=30)
                    output = self.classifier(self.features, adj)
                    loss, correct = loss_acc(output, self.labels, self.idx_val, avg_loss=False)
                    acc = correct.sum()
                    # binary reward: +1 only if validation accuracy dropped
                    # r = 1 if self.previous_acc[i] - acc > 0.01 else -1
                    r = 1 if self.previous_acc[i] - acc > 0 else -1
                    self.previous_acc[i] = acc
                    rewards.append(r)
                self.rewards = np.array(rewards).astype(np.float32)

    def sample_pos_rewards(self, num_samples):
        """Sample up to *num_samples* (state, target) pairs whose attack succeeded (acc < 0.9)."""
        assert self.list_acc_of_all is not None
        cands = []
        for i in range(len(self.list_acc_of_all)):
            succ = np.where( self.list_acc_of_all[i] < 0.9 )[0]
            for j in range(len(succ)):
                cands.append((i, self.all_targets[succ[j]]))
        if num_samples > len(cands):
            return cands
        random.shuffle(cands)
        return cands[0:num_samples]

    def uniformRandActions(self):
        """Return one uniformly random action per parallel copy, matching the current phase of the triple."""
        act_list = []
        for i in range(self.parallel_size):
            if self.first_nodes is None:
                # a1: choose a node from injected nodes
                cur_action = np.random.choice(self.injected_nodes)

            if self.first_nodes is not None and self.second_nodes is None:
                # a2: choose a node from all nodes
                cur_action = np.random.randint(len(self.list_action_space))
                # resample until the edge is not already in the modified set
                while (self.first_nodes[i], cur_action) in self.modified_list[i].edge_set:
                    cur_action = np.random.randint(len(self.list_action_space))

            if self.first_nodes is not None and self.second_nodes is not None:
                # a3: choose label
                cur_action = np.random.randint(self.labels.cpu().max() + 1)
            act_list.append(cur_action)
        return act_list

    def isActionFinished(self):
        """True once a full (node, edge, label) triple has been applied."""
        if (self.n_steps) % 3 == 0 and self.n_steps != 0:
            return True
        return False

    def isTerminal(self):
        """True when the edge/label budget (3 steps per perturbation) is exhausted."""
        if self.overall_steps == 3 * self.n_perturbations:
            return True
        return False

    def getStateRef(self):
        """Return (graph-modification, label) pairs by reference (no copy)."""
        return list(zip(self.modified_list, self.modified_label_list))

    def cloneState(self):
        """Return a deep copy of the (graph-modification, label) pairs."""
        return list(zip(deepcopy(self.modified_list), deepcopy(self.modified_label_list)))
|
#Embedded file name: traceback2\__init__.py
from brennivin.traceback2 import *
from traceback import format_exception_only
|
# Fetch google.com and dump a few interesting parts of the response.
import requests

response = requests.get('http://google.com')
print(response.status_code)
print(response.content[:200])
print(response.headers)
print(response.cookies.items())
|
from django.contrib import admin
from bonds.models import Bond, User
@admin.register(Bond, User)
class BondAdmin(admin.ModelAdmin):
    """Stock admin for both Bond and User models; no customization."""
    pass
|
# Master File
#
# A simulation of Warbling Babbler movement with different phenotypes between several populations
# in different landscapes aver the course of several weeks. This uses a randomized dispersal matrix
# to determine a rate of migration between populations.
#
#
# !! Only Functional with Two Populations !!
import random
import matplotlib.pyplot as plt
class individual:
    """One Warbling Babbler, identified by an id and a plumage phenotype."""

    def __init__(self, id=1, phen="Fluffy"):
        """Store the bird's id and phenotype (default phenotype: Fluffy)."""
        self.id = id
        self.phen = phen
class population:
    """A group of individuals at one location.

    Tracks its members, the in/out queues used while a migration step is
    being resolved, and the [stay, leave] dispersal weights.
    """
    def __init__(self, popSize=0, phenotype="Fluffy", weights=None):
        """Create ``popSize`` individuals of ``phenotype``.

        weights -- [stay, leave] probabilities; defaults to [0.9, 0.1].
        BUG FIX: the old default ``weights=[0.9,0.1]`` was a shared mutable
        default argument; use a None sentinel instead.
        """
        self.popSize = popSize
        self.phenotype = phenotype
        self.inQ = []   # arrivals buffered during a migration step
        self.outQ = []  # departures buffered during a migration step
        self.weights = [0.9, 0.1] if weights is None else weights
        # The original duplicated identical branches for "Fluffy"/"Fuzzy";
        # building members with the given phenotype covers both (and
        # generalizes to any phenotype string).
        self.indv = [individual(id=i + 1, phen=phenotype) for i in range(popSize)]
    def prob(self, popnum):
        """Print the percentage of each phenotype in population ``popnum``."""
        fluffles = sum(1 for ind in self.indv if ind.phen == "Fluffy")
        fuzzles = len(self.indv) - fluffles
        total = fluffles + fuzzles
        if total == 0:
            # guard: an empty population previously raised ZeroDivisionError
            print("Population", popnum, "is empty\n")
            return
        fluffPercent = round(fluffles / total * 100, 2)
        fuzzPercent = round(fuzzles / total * 100, 2)
        print("Population", popnum, "is", fluffPercent, "% Fluffy")
        print("Population", popnum, "is", fuzzPercent, "% Fuzzy\n")
class landscape:
    """Two populations plus the migration step that moves birds between them.

    Note: the simulation remains limited to exactly two populations.
    """
    def __init__(self, popSize1=0, popSize2=0, weight1=None, weight2=None):
        """Build two populations of the given sizes and dispersal weights.

        BUG FIX: list defaults were shared mutable default arguments; use
        None sentinels (same effective defaults: [0.9,0.1] / [0.8,0.2]).
        """
        weight1 = [0.9, 0.1] if weight1 is None else weight1
        weight2 = [0.8, 0.2] if weight2 is None else weight2
        self.popSize1 = popSize1  # size of population 1
        self.popSize2 = popSize2  # size of population 2
        self.weight1 = weight1
        self.weight2 = weight2
        # landscapes - only 2 allowed
        self.lands = [population(popSize1, phenotype="Fluffy", weights=weight1),
                      population(popSize2, phenotype="Fuzzy", weights=weight2)]
    def move(self):
        """One migration step: each bird stays or leaves according to its
        population's [stay, leave] weights, then the queues are applied."""
        for popl in self.lands:
            others = [p for p in self.lands if p is not popl]
            for indv in popl.indv:
                # BUG FIX: weights are documented as [stay, leave], but the
                # original drew from self.lands in fixed order, so for the
                # second population weights[0] meant "leave". Ordering the
                # destinations as [this population, the other] restores the
                # documented meaning for both populations.
                dest = random.choices([popl] + others, popl.weights)[0]
                if dest is not popl:
                    popl.outQ.append(indv)
                    dest.inQ.append(indv)
        # apply buffered arrivals and departures atomically per population
        for popl in self.lands:
            popl.indv.extend(popl.inQ)
            popl.inQ = []
            for indv in popl.outQ:
                popl.indv.remove(indv)
            popl.outQ = []
"""!!! Changeable Variables !!!"""
Pop1Size = 20 # Fluffy Population
Pop2Size = 15 # Fuzzy Population
Weeks = 100 # Number of Weeks
Weight1 = [0.5,0.5] # Pop1 Dispersal Values [stay value,leave value]
Weight2 = [0.5,0.5] # Pop2 Dispersal Values [stay value,leave value]
if ((Weight1[0]+Weight1[1]) != 1) & ((Weight2[0]+Weight2[1]) == 1): # Error Function
exit("Weights must add up to one")
landscape = landscape(Pop1Size,Pop2Size,Weight1,Weight2)
pop1tot = [] # Contains Fluffy and Fuzzy count for Pop2
pop2tot = [] # Contains Fluffy and Fuzzy count for Pop2
size1tot = [] # List that contains total Pop1 Size
size2tot = [] # List that contains total Pop2 Size
weeknum = [] # List that contains total week number
for time in range(Weeks):
"""Iterates per week"""
weeknum.append(time+1)
fluff = 0
fuzz = 0
pop = []
for i in landscape.lands[0].indv: # Iterates over Pop1
if i.phen == "Fluffy": # Counts Fluffy and Fuzzy for Pop1
fluff+=1
else:
fuzz+=1
size1tot.append(fluff+fuzz)
pop.append(fluff)
pop.append(fuzz)
pop1tot.append(pop)
fluff = 0 # Resets Fluff, Fuzz, and pop counts
fuzz = 0
pop = []
for i in landscape.lands[1].indv: # Iterates over Pop2
if i.phen == "Fluffy": # Counts Fluffy and Fuzzy for Pop1
fluff+=1
else:
fuzz+=1
size2tot.append(fluff+fuzz)
pop.append(fluff)
pop.append(fuzz)
pop2tot.append(pop)
landscape.move() # Movement function for individuals
if time == (Weeks-1): # Prints the Final Population count
fluff=0
fuzz=0
for i in landscape.lands[0].indv: # how to call landscape
if i.phen == "Fluffy": # Counts fluff and fuzz
fluff+=1
else:
fuzz+=1
print("\nPopulation 1 has %d Fluffy and %d Fuzzy individuals after %d weeks" % (fluff,fuzz,Weeks))
fluff=0
fuzz=0
for i in landscape.lands[1].indv: # how to call landscape
if i.phen == "Fluffy": # Count fluss and fuzz
fluff+=1
else:
fuzz+=1
print("Population 2 has %d Fluffy and %d Fuzzy individuals after %d weeks" % (fluff,fuzz,Weeks))
print("~~~~~~~~~~~~~")
sizetot=[] # Total Size for each Pop in a list
sizetot.append(size1tot)
sizetot.append(size2tot)
"""Plots for Pop Contents vs Time and Pop Size vs Time"""
plt.figure(figsize=(10,4))
plt.subplot(121)
plt.axis([1,(Weeks+1),0,(Pop1Size+Pop2Size)])
plt.ylabel("Pop1 Phenotype:\n Fluff (Blue) and Fuzz (Orange)")
plt.xlabel("Week Number")
plt.plot(weeknum,pop1tot)
plt.subplot(122)
plt.axis([1,(Weeks+1),0,(Pop1Size+Pop2Size)])
plt.ylabel("Pop2 Phenotype:\n Fluff (Blue) and Fuzz (Orange)")
plt.xlabel("Week Number")
plt.plot(weeknum,pop2tot)
plt.figure(figsize=(10,4))
plt.subplot(212)
plt.axis([1,(Weeks+1),0,(Pop1Size+Pop2Size)])
plt.ylabel("Population Size:\n Pop1 (Blue) and Pop2 (Orange)")
plt.xlabel("Week Number")
plt.plot(weeknum,size1tot)
plt.plot(weeknum,size2tot)
popnum=0
for i in landscape.lands:
"""Prints out the percentage of each Pop"""
popnum += 1
i.prob(popnum)
|
#!/usr/bin/python
#ubuntu 11.10
#*apt-get install mysql-server
#*apt-get install python-mysqldb
#more information:http://mysql-python.sourceforge.net/MySQLdb.html
import MySQLdb
###connect to databases
# NOTE(review): credentials are hard-coded; move them to environment
# variables or a config file before sharing this script.
conn = MySQLdb.connect(host='localhost', user='root', passwd='12345678')
###create database
cursor = conn.cursor()
command = 'create database quietheart;'
cursor.execute(command)
cursor.close()
###select database and create tables(two ways)
# way 1: switch the default schema through the connection object
cursor = conn.cursor()
conn.select_db('quietheart')
command = 'create table test1(id int, info varchar(50));'
cursor.execute(command)
cursor.close()
# way 2: issue a plain "use" statement through a cursor
cursor = conn.cursor()
command = 'use quietheart;'
cursor.execute(command)
command = 'create table test2(id int, info varchar(50));'
cursor.execute(command)
cursor.close()
###insert values
# parameterized inserts: the driver escapes the values itself, which also
# protects against SQL injection (unlike building the string by hand)
cursor = conn.cursor()
paramValue = [1,"infor1"]
command = 'insert into test1 values(%s,%s)' #with sequence '%s' must be used.
cursor.execute(command,paramValue)
cursor = conn.cursor()
paramValue = {"id":"2","info":"info2"}
command = 'insert into test1 values(%(id)s,%(info)s)' #with dict '%(key)s' must be used.
cursor.execute(command,paramValue)
cursor = conn.cursor()
paramValues = []
for i in range(20):
    paramValues.append((i,'record' + str(i)))
command = 'insert into test2 values(%s,%s)' #with multi executes
cursor.executemany("""insert into test2 values(%s,%s) """,paramValues);
cursor.close();
# NOTE(review): MySQLdb runs with autocommit off by default; without an
# explicit conn.commit() the inserts above may not persist — confirm.
###select values
cursor = conn.cursor()
command = 'select * from test2'
count = cursor.execute(command)
print "return %d records." % count
#fetch all record
results = cursor.fetchall() #start from first no matter fetch before.
for r in results:
    print r
#fetch five records.
cursor.scroll(0,mode='absolute') #scroll to offset 0. mode='abaolute'|'relative',default 'relative'.
results = cursor.fetchmany(5)
for r in results:
    print r
#fetch each record.
result = cursor.fetchone(); #cursor start from 5th, because of the previous fetch.
while result != None:
    print result
    print 'id:%s, info:%s' % result
    result = cursor.fetchone();
cursor.close();
#select and return dict
dictcursor = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)
command = 'select * from test2'
dictcursor.execute(command)
result = dictcursor.fetchone();
print result
dictcursor.close();
#update
cursor = conn.cursor()
command = 'update test1 set info="hi" where id=1;'
count = cursor.execute(command)
cursor.close();
###drop tables and database
cursor = conn.cursor()
command = 'drop table test1;'
cursor.execute(command)
cursor.close()
cursor = conn.cursor()
command = 'drop table test2;'
cursor.execute(command)
cursor.close()
cursor = conn.cursor()
command = 'drop database quietheart;'
cursor.execute(command)
cursor.close()
# NOTE(review): conn is never closed; add conn.close() at the end.
|
import operator
import sys
import ctypes
import time
import pyautogui
import pyjokes
import pyttsx3
import pywhatkit
import speech_recognition as sr
import requests
import os
import datetime
import cv2
from pywikihow import search_wikihow
from requests import get
import wikipedia
import smtplib
import geocoder
from geopy.geocoders import Nominatim
import speedtest
from PyQt5.QtGui import QMovie
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from percyUi import Ui_Percy
from PyQt5 import QtCore, QtGui, QtWidgets
from PySide2.QtCore import *
import datetime
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
from chat import *
# Initialise the Windows SAPI5 text-to-speech engine, select the second
# installed voice (index 1) and a speaking rate of 170 words per minute.
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice',voices[1].id)
engine.setProperty('rate',170)
print(voices[1].id)
def speak(audio):
    """Speak ``audio`` aloud via the module-level TTS engine and echo it
    to the console; blocks until speech finishes (runAndWait)."""
    engine.say(audio)
    print(audio)
    engine.runAndWait()
def takeRes(): #voice to text
    """Capture one utterance from the microphone and return the text.

    Uses Google speech recognition with the en-in locale; on any failure
    it apologises and retries.
    NOTE(review): the retry is recursive and unbounded — repeated failures
    will grow the call stack.
    """
    r=sr.Recognizer()
    with sr.Microphone() as source :
        print("detecting")
        r.pause_threshold=1
        audio=r.listen(source,timeout=2,phrase_time_limit=5)
    try:
        print("Recognizing..")
        question =r.recognize_google(audio,language='en-in')
        print(f"user said: {question}")
    except Exception as e:
        speak("I am sorry, Can you repeat?")
        return takeRes()
    return question
def greet():
    """Speak a greeting, then add a time-of-day line based on the hour.

    NOTE(review): hours 21-2 and 5 match no branch, so those hours get
    only the generic greeting — confirm that is intended.
    """
    speak("Hey I am percy, how can I help you")
    hour =int(datetime.datetime.now().hour)
    if hour>=6 and hour<=12:
        speak("Good Morning Aryan")
    elif hour>12 and hour<=17:
        speak("Good afternoon Aryan")
    elif hour>17 and hour<=20:
        speak("Good evening Aryan")
    elif hour>2 and hour<=4:
        speak("Aryan you should consider sleeping")
# async def getWeather():
# client = python_weather.Client(format=python_weather.IMPERIAL)
# g = geocoder.ip('me')
# loc = g.latlng
# geoLoc = Nominatim(user_agent="GetLoc")
# name = geoLoc.reverse(loc)
# weather = await client.find(f"{name.address}")
# return weather
# await client.close()
def sendEmail(to, content):
    """Send ``content`` to address ``to`` through Gmail's SMTP server.

    Credentials come from the ``email_id`` / ``password_id`` environment
    variables, so nothing secret is hard-coded here.
    """
    emailAdd = os.environ.get('email_id')
    passAdd = os.environ.get('password_id')
    server = smtplib.SMTP("smtp.gmail.com", 587)
    try:
        server.ehlo()
        server.starttls()
        server.login(emailAdd, passAdd)
        # BUG FIX: the sender was passed as the literal string 'emailAdd'
        # instead of the address held in the variable.
        server.sendmail(emailAdd, to, content)
    finally:
        # always drop the connection, even when login/send raises
        server.close()
#if __name__=="__main__":
class mainT(QThread):
    """Worker QThread running Percy's listen -> recognize -> act loop off
    the UI thread so the window stays responsive."""
    def __init__(self):
        super(mainT,self).__init__()
    def run(self):
        """QThread entry point: delegate to the command loop."""
        self.execute()
    def takeRes(self): #voice to text
        """Capture one utterance from the microphone, store it on
        ``self.question`` and return it; retries recursively on failure.
        NOTE(review): the retry recursion is unbounded."""
        r=sr.Recognizer()
        with sr.Microphone() as source :
            print("detecting")
            #r.adjust_for_ambient_noise(source)
            r.pause_threshold=2
            audio=r.listen(source,timeout=2,phrase_time_limit=5)
            try:
                print("Recognizing..")
                self.question =r.recognize_google(audio,language='en-in')
                print(f"user said: {self.question}")
            except Exception as e:
                speak("I am sorry, Can you repeat?")
                return self.takeRes()
            return self.question
    def execute(self):
        """Main command loop: greet once, then repeatedly listen and
        dispatch on keywords found in the recognized question until the
        user disables or terminates the assistant."""
        greet()
        while True:
            self.question= self.takeRes().lower()
            if "open notepad" in self.question:
                notePath="C:\\Windows\\system32\\notepad.exe"
                speak("opening notepad")
                os.startfile(notePath)
            elif "open word" in self.question:
                wordPath="C:\\Program Files\\Microsoft Office\\root\\Office16\\WINWORD.exe"
                speak("opening word")
                os.startfile(wordPath)
            elif "open command prompt" in self.question:
                speak("opening command prompt")
                os.system("start cmd")
            # elif "open camera" in self.question:
            #     cap=cv2.VideoCapture(0)
            #     speak("opening camera")
            #     while True:
            #         ret,img=cap.read()
            #         cv2.imshow('webcam',img)
            #         k=cv2.waitKey(20)
            #         if k==10:
            #             break;
            #     cap.release()
            #     cv2.destroyAllWindows()
            elif "ip address" in self.question:
                speak("gathering information, hold on")
                ip= get('http://api.ipify.org').text
                speak(f"Your Ip address is {ip}")
            elif "wikipedia" in self.question: #"search for{xyz} on wikipedia"
                speak("Looking it up..")
                try:
                    self.question=self.question.replace("wikipedia","")
                    results = wikipedia.summary(self.question,sentences=2)
                    speak("According to wikipedia")
                    speak(results)
                except Exception as e:
                    speak("I am sorry, I could not find this")
            elif "youtube" in self.question:
                #webbrowser.open("youtube.com")
                speak("Which video do you want me to play ?")
                com= self.takeRes().lower()
                speak("loading...")
                pywhatkit.playonyt(f"{com}")
            elif "open google" in self.question:
                #webbrowser.Mozilla
                speak("What would you want me to search for?")
                com= self.takeRes().lower()
                #webbrowser.open(f"{com}")
                speak("Searching..")
                pywhatkit.search(f"{com}")
            elif "email" in self.question or "send an email" in self.question:
                speak("Who do you wish to send an email to?\n")
                # NOTE(review): input(speak) passes the function object as the
                # prompt; the address is typed, not spoken — confirm intended.
                email=input(speak)
                try:
                    speak("What should be the subject?")
                    subject= self.takeRes().lower()
                    speak("What should be the content?")
                    body= self.takeRes().lower()
                    content=f'subject:{subject}\n\n{body}'
                    to=email
                    sendEmail(to,content)
                    speak(f"The email has been sent to {email}")
                except Exception as e:
                    print(e)
                    speak("I am sorry Aryan, there was an error while sending the email")
            elif "close notepad" in self.question:
                speak("closing notepad..")
                # NOTE(review): "TASKKILL/F" lacks a space before the switch —
                # verify cmd still resolves the taskkill executable this way.
                os.system("TASKKILL/F /IM notepad.exe")
            elif "close browser" in self.question:
                speak("Are you sure?")
                resp= self.takeRes().lower()
                if "yes" in resp:
                    os.system("TASKKILL/F /IM msedge.exe")
                elif "no" in resp:
                    speak("Okay")
            elif "dismiss word" in self.question or "close word" in self.question:
                speak("closing word")
                os.system("TASKKILL/F /IM WINWORD.exe")
            elif "joke" in self.question or "tell me a joke" in self.question:
                joke=pyjokes.get_joke(language='en',category='neutral')
                speak(joke)
            elif "turn off pc" in self.question or "shutdown the pc" in self.question:
                # NOTE(review): "/t1" likely needs a space ("/t 1") — verify.
                sd='shutdown /s /t1'
                os.system(sd)
            elif "restart pc" in self.question or "restart the pc" in self.question:
                r="shutdown /r /t 1"
                os.system(r)
            elif "lock screen" in self.question or "lock the screen" in self.question:
                ctypes.windll.user32.LockWorkStation()
            elif "tell me the news" in self.question or "news" in self.question:
                # NOTE(review): API key is hard-coded in the URL; move it to an
                # environment variable.
                url='https://newsapi.org/v2/top-headlines?sources=google-news-in&apiKey=de2d387b607b43bbb059f58b15020bae'
                speak("Here are the top 3 headlines for today")
                page=requests.get(url).json()
                articles=page["articles"]
                top3=["first","second","third"]
                header=[]
                for ar in articles:
                    header.append(ar["title"])
                for i in range(len(top3)):
                    speak(f"Today's {top3[i]} headline is :{header[i]}")
                speak("Do you want me to open the detailed news on the browser?")
                ans= self.takeRes().lower()
                if "yes" in ans:
                    speak("opening google news..")
                    pywhatkit.search("today's news")
                if "no" in ans:
                    speak("Okay")
            elif "what is my location" in self.question or "where are we" in self.question:
                g= geocoder.ip('me')
                loc=g.latlng
                geoLoc = Nominatim(user_agent="GetLoc")
                name=geoLoc.reverse(loc)
                speak(name.address)
            elif "take a screenshot" in self.question or "screenshot" in self.question:
                speak("What should I name the file sir?")
                fileName= self.takeRes().lower()
                time.sleep(3)
                image=pyautogui.screenshot()
                image.save(f"{fileName}.png")
                speak("Done")
            elif "calculate" in self.question:
                r=sr.Recognizer()
                with sr.Microphone() as source:
                    try:
                        speak("I am listening, tell me the numbers")
                        print("listening..")
                        r.adjust_for_ambient_noise(source)
                        audio=r.listen(source)
                        stringTerm=r.recognize_google(audio)
                    except Exception as e:
                        speak("Try that again, I wasn't able to recognize that")
                def operatorF(op):
                    """Map a spoken operator symbol to its function."""
                    return {
                        '+' :operator.add, # say plus
                        '-' :operator.sub, # say minus
                        'X' :operator.mul, # say multiplied by
                        'x':operator.mul, #say multiplied by
                        '/' :operator.__truediv__, #say divided by
                    }[op]
                def binaryC(op1,oper,op2):
                    """Apply ``oper`` to the two operands as floats."""
                    op1,op2= float(op1), float(op2)
                    return operatorF(oper)(op1,op2)
                speak("The answer is ")
                speak(binaryC(*(stringTerm.split())))
            elif "weather" in self.question or "temperature" in self.question:
                #myApi = os.environ.get('weather_api')
                # NOTE(review): the OpenWeatherMap key and city are hard-coded.
                ApiLink="http://api.openweathermap.org/data/2.5/weather?q=Mumbai&appid=a9ecec8771619ca5613a738ea7767ec3&units=metric"
                api_link=requests.get(ApiLink)
                api_data=api_link.json()
                if api_data['cod']=='404':
                    speak("The location seems to be wrong Aryan")
                else:
                    temp=api_data['main']['temp']
                    feelsLike=api_data['main']['feels_like']
                    weatherDes=api_data['weather'][0]['description']
                    humidity=api_data['main']['humidity']
                    windSpeed=api_data['wind']['speed']
                    # clouds=api_data['clouds']['all']
                    speak(f"The weather description is {weatherDes}")
                    speak(f"The temperature right now is {temp} degree celsius")
                    speak(f"But it feels like {feelsLike} degree celsius")
                    speak(f"The humidity right now is at {humidity} %")
                    speak(f"The wind speed is {windSpeed} kmph")
                    # speak(f"the clouds are {clouds}")
            elif "activate instructions mode" in self.question or "activate how to do mode" in self.question:
                speak("How to do mode is activated")
                while True:
                    speak("Ask me the question")
                    how=takeRes()
                    try:
                        if "disable" in how or "deactivate" in how:
                            speak("Disabled how to do mode sir")
                            break
                        else:
                            maxRes=1
                            howTo= search_wikihow(how,maxRes)
                            assert len(howTo)==1
                            howTo[0].print()
                            speak(howTo[0].summary)
                    except Exception as e:
                        speak("I am unable to find the answer to this one Aryan")
            elif "internet speed" in self.question or "what is our network speed" in self.question:
                speak("Calculating, hold on")
                sp=speedtest.Speedtest()
                download=int(sp.download())
                upload=int(sp.upload())
                speak(f"Our uploading speed is {upload} bits per second and downloading speed is {download} bits per second")
            elif "increase the volume" in self.question or "turn up the volume" in self.question:
                pyautogui.press("volumeup")
            elif "decrease the volume" in self.question or "turn down the volume" in self.question:
                pyautogui.press("volumedown")
            elif "mute" in self.question or "disable sound" in self.question:
                pyautogui.press("volumemute") #say it twice to unmute
            elif "play music" in self.question or "music" in self.question or "songs" in self.question:
                speak("opening spotify")
                os.startfile("spotify.exe")
            elif "disable" in self.question or "stop listening" in self.question:
                speak("Alright aryan, call me whenever you need me!")
                break;
            elif "terminate" in self.question or "shutdown" in self.question:
                speak("Goodbye!")
                sys.exit()
            # elif "chat" in self.question:
            #     # speak("Let's vibe!")
            #     speak("say the first sentence to initialize the bot")
            #     while True:
            #         micIn=takeRes()
            #         if "stop" in micIn:
            #             speak("Alright, looking forward to chat with you soon")
            #             break;
            #         else:
            #             step = 0
            #             tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
            #             model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
            #             new_user_input_ids = tokenizer.encode(micIn + tokenizer.eos_token, return_tensors='pt')
            #             bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids],
            #                                       dim=-1) if step > 0 else new_user_input_ids
            #             chat_history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
            #             speak("{}".format(
            #                 tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)))
            else:
                # fall back to the small-talk chatbot for anything unmatched
                reply=talk(self.question)
                speak(reply)
            #speak("Is there anything else I can assist you with?")
startProgram=mainT()  # single assistant worker thread shared by the UI
class Main(QMainWindow):
    """Main window: wires the mic button to the assistant worker thread."""
    def __init__(self):
        super().__init__()
        self.ui = Ui_Percy()
        self.ui.setupUi(self)
        button = self.ui.pushButton
        # BUG FIX: the property was written "background - image" (with
        # spaces), which Qt's stylesheet parser silently ignores; the
        # correct property name is "background-image".
        button.setStyleSheet("background-image: url(mic2.png);")
        self.ui.pushButton.clicked.connect(self.startTask)
    def startTask(self):
        """Start the mic animation and launch the assistant thread."""
        self.ui.movie = QtGui.QMovie("C:\\Users\\aryan\\Downloads\\percy4.gif")
        self.ui.label.setMovie(self.ui.movie)
        self.ui.movie.start()
        startProgram.start()
# Build the Qt application, show the window, and run the event loop until
# the window closes.
app=QApplication(sys.argv)
Percy=Main()
Percy.show()
exit(app.exec_())
|
from django.db import models
from django.core.validators import EmailValidator
from django.utils import timezone
from django.core.validators import RegexValidator
# Create your models here.
class Proveedor(models.Model):
    """Supplier record with contact details and Mexican tax (RFC) data."""
    class Meta:
        ordering = ['nombre']
    # NOTE(review): nombre is nullable but __str__ returns it directly, so a
    # NULL name would break admin listings — confirm data always has one.
    nombre = models.CharField(max_length=100, null=True, blank=False)
    # optional leading "+" then 9-15 digits
    telefono_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$', message="El formato del número no es válido.")
    telefono = models.CharField(validators=[telefono_regex], max_length=17, blank=False, null=True)
    direccion = models.CharField(max_length=500, null=True, blank=False)
    # RFC: Mexican taxpayer id, up to 13 characters
    rfc = models.CharField(max_length=13, null=True, blank=False)
    razon_social = models.CharField(max_length=100, null=True, blank=False)
    # NOTE(review): consider EmailField; EmailValidator's "whitelist" argument
    # was removed in Django 4.1, and "code" is conventionally a string.
    email = models.CharField(max_length=100, null=True, blank=False,
                             validators=[EmailValidator(message='Correo inválido',
                                                        code=1, whitelist=None)])
    created_at = models.DateTimeField(default=timezone.now)
    # NOTE(review): default=timezone.now only sets this at creation; use
    # auto_now=True if it should track every save.
    updated_at = models.DateTimeField(default=timezone.now)
    deleted_at = models.DateTimeField(blank = True, null = True)
    # 1 = active; other values presumably mark soft-deleted rows — confirm
    status = models.IntegerField(default=1)
    def __str__(self):
        return self.nombre
|
from pyqtgraph.Qt import QtCore, QtGui, QtWidgets
import pyqtgraph.opengl as gl
import pyqtgraph as pg
import numpy as np
import sys
# -*- coding: utf-8 -*-
import os
import PyQt5
import sys
from PyQt5 import QtGui, QtWidgets, QtCore
import math
import sys
def get_rot_mat(_axis, _angle_rad):
    """Return the 3x3 rotation matrix for ``_angle_rad`` radians about
    ``_axis`` ('x', 'y' or 'z').

    Raises ValueError for an unknown axis. (The original printed a message
    and called exit(-9), killing the whole process; raising lets callers
    handle the error.)
    """
    c, s = math.cos(_angle_rad), math.sin(_angle_rad)
    if _axis == 'x':
        return np.array([[1, 0, 0],
                         [0, c, s],
                         [0, -s, c]])
    if _axis == 'y':
        return np.array([[c, 0, s],
                         [0, 1, 0],
                         [-s, 0, c]])
    if _axis == 'z':
        return np.array([[c, s, 0],
                         [-s, c, 0],
                         [0, 0, 1]])
    raise ValueError("get_rot_mat: unknown axis %r" % (_axis,))
# Build the 3D scene: a GL view with three reference grids, an elliptical
# orbit sampled once per degree, and a translucent sphere placed on it.
QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_UseDesktopOpenGL)
app = QtWidgets.QApplication(sys.argv)
w = gl.GLViewWidget() # create the GL view widget
w.opts['distance'] = 40 # camera distance from the scene
w.resize(1000, 1000)
w.show()
w.setWindowTitle('Pyqtgraph-3D example: GLLinePlotItem')
gx = gl.GLGridItem() # reference grid
gx.rotate(90, 0, 1, 0) # rotate 90 degrees about the y axis
gx.translate(-10, 0, 0) # shift -10 along x
w.addItem(gx) # add the grid to the scene
gy = gl.GLGridItem()
gy.rotate(90, 1, 0, 0) # same for the other axes
gy.translate(0, -10, 0)
w.addItem(gy)
gz = gl.GLGridItem()
gz.translate(0, 0, -10)
w.addItem(gz)
a, e = 5, 0.9 # orbit semi-major axis and eccentricity
psi = math.radians(10)
theta = math.radians(30)
phi = math.radians(50)
coordinates_orig = []
# parametric ellipse in the z=0 plane, one sample per degree
for i in range(361):
    angle = math.radians(i)
    x = a * math.cos(angle)
    y = a * math.sqrt(1 - e ** 2) * math.sin(angle)
    z = 0
    coordinates_orig.append([x, y, z])
coordinates_orig = np.array(coordinates_orig)
plt = gl.GLLinePlotItem(pos=coordinates_orig, color=pg.glColor((360, 10 * 1.3)),
                        width=2, antialias=True)
w.addItem(plt) # add the orbit line to the scene
md = pg.opengl.MeshData.sphere(rows=10, cols=10) # create and configure the sphere
m3 = pg.opengl.GLMeshItem(meshdata=md, smooth=False, shader='balloon', color=(0, 1, 0, 0.2))
w.addItem(m3)
m3.translate(coordinates_orig[0][0], coordinates_orig[0][1], coordinates_orig[0][2])
time_global_i_max = 360
time_global_i = 0    # index of the ball's current sample on the ellipse
psi_c, theta_c, phi_c = 0, 0, 0    # Euler angles applied so far
rotation = 0
coordinates = coordinates_orig
def update():
    """Timer callback: advance the ball along the orbit while gradually
    rotating the ellipse by the Euler angles psi (z), then theta (x),
    then phi (z), one degree per tick.

    Once an angle sweep passes its target, the rotated ellipse is frozen
    as the new reference (``coordinates_orig``) before the next sweep.
    """
    global coordinates_orig, phi, theta, psi, i, psi_c, theta_c, phi_c, rotation, time_global_i_max, time_global_i,\
        coordinates
    # remember the ball's current position (negated) so the translate at the
    # end moves it by the delta to the new position
    x_curr = -coordinates[time_global_i][0]
    y_curr = -coordinates[time_global_i][1]
    z_curr = -coordinates[time_global_i][2]
    time_global_i = (time_global_i + 1) % time_global_i_max
    if psi_c <= psi:
        # first sweep: rotate about z until psi is reached
        rotation = (rotation + 1) % time_global_i_max
        time = math.radians(rotation)
        coordinates = np.dot(coordinates_orig, get_rot_mat('z', time))
        psi_c = time
        if psi_c > psi:
            coordinates_orig = coordinates
    elif theta_c <= theta:
        # second sweep: rotate about x until theta is reached
        if theta_c == 0:
            rotation = 0
        rotation = (rotation + 1) % time_global_i_max
        time = math.radians(rotation)
        coordinates = np.dot(coordinates_orig, get_rot_mat('x', time))
        theta_c = time
        if theta_c > theta:
            coordinates_orig = coordinates
    elif phi_c <= phi:
        # third sweep: rotate about z again until phi is reached
        if phi_c == 0:
            rotation = 0
        rotation = (rotation + 1) % time_global_i_max
        time = math.radians(rotation)
        coordinates = np.dot(coordinates_orig, get_rot_mat('z', time))
        phi_c = time
        if phi_c > phi:
            coordinates_orig = coordinates
    # update the ellipse geometry shown on screen
    coordinates = np.array(coordinates)
    plt.setData(pos=coordinates, color=(0.255, (time_global_i/2)/10, 0.04, 1), width=3, antialias=True)
    # compute the ball's new coordinates and move it by the delta
    x_curr = x_curr + coordinates[time_global_i][0]
    y_curr = y_curr + coordinates[time_global_i][1]
    z_curr = z_curr + coordinates[time_global_i][2]
    m3.translate(x_curr, y_curr, z_curr)
timer = QtCore.QTimer()
timer.timeout.connect(update) # run update() on every timer tick
timer.start(50) # tick every 50 ms
sys.exit(app.exec_()) # start the Qt event loop
|
class Move:
    """Cursor-style navigation over a list of customer records.

    ``infoCount`` is the current index into ``infoList``; the ``is*``
    predicates return 1/0 flags for whether a current/next/previous
    record exists.
    """

    def nextInfo(self, infoCount, infoList):
        """Advance the cursor by one position."""
        return infoCount + 1

    def preInfo(self, infoCount, infoList):
        """Move the cursor back by one position."""
        return infoCount - 1

    def isnow(self, infoCount, infoList):
        """1 if the cursor points at a record, 0 before the first one."""
        return 0 if infoCount < 0 else 1

    def isnext(self, infoCount, infoList):
        """1 if a record exists after the cursor, else 0."""
        return 0 if len(infoList) == infoCount + 1 else 1

    def ispre(self, infoCount, infoList):
        """1 if a record exists before the cursor, else 0."""
        return 0 if infoCount < 1 else 1
def solution(s):
    """Return True if the parenthesis string ``s`` is balanced.

    Any non-"(" character is treated as a closer, matching the original.
    BUG FIX: the original compared with ``is``/``is not`` against the
    literals "(" and 0 — identity comparison, a SyntaxWarning on CPython
    3.8+ that only worked thanks to small-object caching; use ==/!=.
    """
    depth = 0
    for ch in s:
        if ch == "(":
            depth += 1
        else:
            depth -= 1
            if depth < 0:
                # a closer with no matching opener — fail fast
                return False
    return depth == 0
s=["()()","(())()",")()(","(()("]
for i in s:
print(solution(i)) |
from django.conf import settings
from django.db.models import Q
from django.http import Http404
from django.shortcuts import redirect, get_object_or_404, render
from authn.decorators.auth import require_auth
from authn.helpers import check_user_permissions
from badges.models import UserBadge
from comments.models import Comment
from common.pagination import paginate
from authn.decorators.api import api
from posts.models.post import Post
from search.models import SearchIndex
from users.forms.profile import ExpertiseForm
from users.models.achievements import UserAchievement
from users.models.expertise import UserExpertise
from users.models.friends import Friend
from users.models.mute import Muted
from tags.models import Tag, UserTag
from users.models.notes import UserNote
from users.models.user import User
from users.utils import calculate_similarity
def profile(request, user_slug):
    """Render a member's profile page.

    Handles the "me" alias, the post-login ``goto`` redirect, moderation
    gating for unapproved users, private profiles, tag similarity with the
    viewer, and gathers the user's posts/comments/badges/notes.
    """
    if user_slug == "me":
        return redirect("profile", request.me.slug, permanent=False)
    user = get_object_or_404(User, slug=user_slug)
    if request.me and user.id == request.me.id:
        # handle auth redirect
        goto = request.GET.get("goto")
        if goto and goto.startswith(settings.APP_HOST):
            return redirect(goto)
        # moderation status check for new-joiners
        access_denied = check_user_permissions(request)
        if access_denied:
            return access_denied
    if not user.can_view(request.me):
        return render(request, "auth/private_profile.html")
    # NOTE(review): request.me may be None for anonymous visitors here —
    # confirm anonymous requests cannot reach this attribute access.
    if user.moderation_status != User.MODERATION_STATUS_APPROVED and not request.me.is_moderator:
        # hide unverified users
        raise Http404()
    # select user tags and calculate similarity with me
    tags = Tag.objects.filter(is_visible=True).exclude(group=Tag.GROUP_COLLECTIBLE).all()
    user_tags = UserTag.objects.filter(user=user).select_related("tag").all()
    active_tags = {t.tag_id for t in user_tags if t.tag.group != Tag.GROUP_COLLECTIBLE}
    collectible_tags = [t.tag for t in user_tags if t.tag.group == Tag.GROUP_COLLECTIBLE]
    similarity = {}
    if request.me and user.id != request.me.id:
        my_tags = {t.tag_id for t in UserTag.objects.filter(user=request.me).all()}
        similarity = calculate_similarity(my_tags, active_tags, tags)
    # select other stuff from this user
    intro = Post.get_user_intro(user)
    projects = Post.objects.filter(author=user, type=Post.TYPE_PROJECT, is_visible=True).all()
    badges = UserBadge.user_badges_grouped(user=user)
    achievements = UserAchievement.objects.filter(user=user).select_related("achievement")
    expertises = UserExpertise.objects.filter(user=user).all()
    posts = Post.objects_for_user(request.me).filter(is_visible=True)\
        .filter(Q(author=user) | Q(coauthors__contains=[user.slug]))\
        .exclude(type__in=[Post.TYPE_INTRO, Post.TYPE_PROJECT, Post.TYPE_WEEKLY_DIGEST])\
        .order_by("-published_at")
    if request.me:
        # viewer-specific extras: recent comments and relationship state
        comments = Comment.visible_objects()\
            .filter(author=user, post__is_visible=True)\
            .order_by("-created_at")\
            .select_related("post")
        friend = Friend.objects.filter(user_from=request.me, user_to=user).first()
        muted = Muted.objects.filter(user_from=request.me, user_to=user).first()
        note = UserNote.objects.filter(user_from=request.me, user_to=user).first()
    else:
        comments = None
        friend = None
        muted = None
        note = None
    # moderators additionally see other moderators' notes about this user
    moderator_notes = []
    if request.me and request.me.is_moderator:
        moderator_notes = UserNote.objects.filter(user_to=user)\
            .exclude(user_from=request.me)\
            .select_related("user_from")\
            .all()
    return render(request, "users/profile.html", {
        "user": user,
        "intro": intro,
        "projects": projects,
        "badges": badges,
        "tags": tags,
        "active_tags": active_tags,
        "collectible_tags": collectible_tags,
        "achievements": [ua.achievement for ua in achievements],
        "expertises": expertises,
        "comments": comments[:3] if comments else [],
        "comments_total": comments.count() if comments else 0,
        "posts": posts[:15],
        "posts_total": posts.count() if posts else 0,
        "similarity": similarity,
        "friend": friend,
        "muted": muted,
        "note": note,
        "moderator_notes": moderator_notes,
    })
@require_auth
def profile_comments(request, user_slug):
    """Paginated list of a user's comments on visible posts."""
    if user_slug == "me":
        return redirect("profile_comments", request.me.slug, permanent=False)
    target = get_object_or_404(User, slug=user_slug)
    user_comments = (
        Comment.visible_objects()
        .filter(author=target, post__is_visible=True)
        .select_related("post")
        .order_by("-created_at")
    )
    context = {
        "user": target,
        "comments": paginate(request, user_comments, settings.PROFILE_COMMENTS_PAGE_SIZE),
    }
    return render(request, "users/profile/comments.html", context)
def profile_posts(request, user_slug):
    """Paginated list of a user's (and co-authored) visible posts."""
    if user_slug == "me":
        return redirect("profile_posts", request.me.slug, permanent=False)
    target = get_object_or_404(User, slug=user_slug)
    if not target.can_view(request.me):
        return render(request, "auth/private_profile.html")
    authored = Q(author=target) | Q(coauthors__contains=[target.slug])
    hidden_types = [Post.TYPE_INTRO, Post.TYPE_PROJECT, Post.TYPE_WEEKLY_DIGEST]
    user_posts = (
        Post.objects_for_user(request.me)
        .filter(is_visible=True)
        .filter(authored)
        .exclude(type__in=hidden_types)
        .order_by("-published_at")
    )
    return render(request, "users/profile/posts.html", {
        "user": target,
        "posts": paginate(request, user_posts, settings.PROFILE_POSTS_PAGE_SIZE),
    })
def profile_badges(request, user_slug):
    """Paginated list of the badges awarded to a user."""
    if user_slug == "me":
        return redirect("profile_badges", request.me.slug, permanent=False)
    target = get_object_or_404(User, slug=user_slug)
    if not target.can_view(request.me):
        return render(request, "auth/private_profile.html")
    context = {
        "user": target,
        "badges": paginate(request, UserBadge.user_badges(target), settings.PROFILE_BADGES_PAGE_SIZE),
    }
    return render(request, "users/profile/badges.html", context)
@api(require_auth=True)
def toggle_tag(request, tag_code):
    """POST-only API: add the tag to the current user, or remove it when
    already set, then refresh the user's search index."""
    if request.method != "POST":
        raise Http404()
    tag = get_object_or_404(Tag, code=tag_code)
    user_tag, created = UserTag.objects.get_or_create(
        user=request.me,
        tag=tag,
        defaults=dict(name=tag.name),
    )
    if not created:
        user_tag.delete()
    SearchIndex.update_user_tags(request.me)
    status = "created" if created else "deleted"
    return {
        "status": status,
        "tag": {"code": tag.code, "name": tag.name, "color": tag.color},
    }
@api(require_auth=True)
def add_expertise(request):
    """Create or replace the current user's expertise entry from the posted form."""
    if request.method != "POST":
        return {"status": "ok"}

    form = ExpertiseForm(request.POST)
    if not form.is_valid():
        return {"status": "ok"}

    entry = form.save(commit=False)
    entry.user = request.me

    # Drop any existing record for the same expertise before saving the new one.
    UserExpertise.objects.filter(
        user=request.me, expertise=entry.expertise
    ).delete()
    entry.save()

    return {
        "status": "created",
        "expertise": {
            "name": entry.name,
            "expertise": entry.expertise,
            "value": entry.value,
        },
    }
@api(require_auth=True)
def delete_expertise(request, expertise):
    """Remove the current user's entry for the given expertise code."""
    if request.method != "POST":
        return {"status": "ok"}

    UserExpertise.objects.filter(user=request.me, expertise=expertise).delete()
    return {
        "status": "deleted",
        "expertise": {
            "expertise": expertise,
        },
    }
|
# Generated by Django 3.0.2 on 2020-01-28 23:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give Category Spanish admin labels and add a Post<->Category M2M."""

    dependencies = [
        ('my_app', '0007_category'),
    ]

    operations = [
        # Show Category as "Categoria"/"Categorias" in the Django admin.
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name': 'Categoria', 'verbose_name_plural': 'Categorias'},
        ),
        # Each post may now belong to any number of categories.
        migrations.AddField(
            model_name='post',
            name='categories',
            field=models.ManyToManyField(to='my_app.Category'),
        ),
    ]
|
import sys,datetime,os
from PyQt5.QtWidgets import QApplication,QDialog,QMessageBox, QTableWidgetItem
from PyQt5 import uic
from form_cuotas_vencidas_30dias import Ui_form_cuotas_vencidas_30dias
from N_cliente import N_datos_personales_cliente, N_party_address, N_party_otros, N_datos_laborales, N_party_garante,N_party_cliente, N_party_contacto
from N_creditos import N_creditos
from N_cuotas import N_cuotas
from PyQt5.QtCore import pyqtRemoveInputHook
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet,ParagraphStyle
from reportlab.platypus import Spacer, SimpleDocTemplate, Table, TableStyle
from reportlab.platypus import Paragraph, Image
from reportlab.lib import colors
from PyQt5.QtWidgets import QFileDialog
from E_configuracion import configuracion
import subprocess
class Cuotas_vencidas_30dias(QDialog):
    """Dialog that generates PDF reports of installments overdue by up to
    30, 60 or 90 days.

    The three generar_* methods previously duplicated ~70 lines each; the
    shared report layout now lives in _generar_listado and each public
    method only supplies its own data source and layout parameters.
    """

    # NOTE(review): class-level attributes kept for backward compatibility;
    # the generar_* methods rebind instance attributes of the same names.
    obj_form = Ui_form_cuotas_vencidas_30dias()
    listado_cuotas_30_dias = list()
    listado_cuotas_60_dias = list()
    listado_cuotas_90_dias = list()

    def __init__(self):
        QDialog.__init__(self)
        self.obj_form = Ui_form_cuotas_vencidas_30dias()
        self.obj_form.setupUi(self)
        # One button per overdue bucket.
        self.obj_form.boton_generar.clicked.connect(self.generar_30dias)
        self.obj_form.boton_generar_60_dias.clicked.connect(self.generar_60dias)
        self.obj_form.boton_generar_90_dias.clicked.connect(self.generar_90dias)

    def generar_30dias(self):
        """Report installments overdue by up to 30 days."""
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_30_dias = obj_N_cuotas.lista_cuotas_venc_30_dias()
        self._generar_listado(
            listado=self.listado_cuotas_30_dias,
            titulo="Cuotas vencidas hasta 30 dias",
            espacio_banner=-17,
            redondear=False,  # the 30-day report printed the raw float
            anchos=(150, 135, 100, 55, 55, 55),
            carpeta="list_morosos_30dias",
            nombre_pdf="listado_de_morosos_30dias.pdf",
            etiqueta="ticket",
        )

    def generar_60dias(self):
        """Report installments overdue by up to 60 days."""
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_60_dias = obj_N_cuotas.lista_cuotas_venc_60_dias("slam")
        self._generar_listado(
            listado=self.listado_cuotas_60_dias,
            titulo="Cuotas vencidas hasta 60 dias",
            espacio_banner=10,
            redondear=True,
            anchos=(150, 135, 100, 55, 55, 55),
            carpeta="list_morosos_60dias",
            nombre_pdf="listado_de_morosos_60dias.pdf",
            etiqueta="Listado",
        )

    def generar_90dias(self):
        """Report installments overdue by up to 90 days."""
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_90_dias = obj_N_cuotas.lista_cuotas_venc_90_dias("slam")
        self._generar_listado(
            listado=self.listado_cuotas_90_dias,
            titulo="Cuotas vencidas hasta 90 dias",
            espacio_banner=-17,
            redondear=True,
            anchos=(150, 155, 100, 55, 55, 55),
            carpeta="listado_de_morosos_90dias",
            nombre_pdf="listado_de_morosos_90dias.pdf",
            etiqueta="Listado",
        )

    def _generar_listado(self, listado, titulo, espacio_banner, redondear,
                         anchos, carpeta, nombre_pdf, etiqueta):
        """Build, save and open one overdue-installments PDF report.

        Args:
            listado: installment objects (with importe_primer_venc, punitorios,
                nro_credito, nro_cuota).
            titulo: header shown in the grey title bar.
            espacio_banner: vertical spacer after the banner (layout quirk
                differs between the reports).
            redondear: round the owed amount to 2 decimals when True.
            anchos: reportlab column widths for the data table.
            carpeta: per-report output directory name fragment.
            nombre_pdf: output PDF file name.
            etiqueta: word used in the confirmation message ("ticket"/"Listado").
        """
        styleSheet = getSampleStyleSheet()
        img = Image("cabezal.png", 250, 75)
        img.hAlign = "LEFT"
        otro_estilo = ParagraphStyle('', fontSize=20, textColor='#000',
                                     leftIndent=200, rightIndent=50)
        style_barra = ParagraphStyle('', fontSize=13, textColor='#000',
                                     backColor='#f5f5f5', borderColor='#a3a3a3',
                                     borderWidth=1, borderPadding=(1, 2, 5))
        estilo_texto = ParagraphStyle('', fontSize=22, alignment=0,
                                      spaceBefore=0, spaceAfter=0,
                                      textColor='#999', leftIndent=10)
        h = Paragraph("", estilo_texto)

        story = []
        ban = Table([[img, h]], colWidths=300, rowHeights=10)
        ban.setStyle([('ALIGN', (0, 0), (0, 0), 'LEFT'),
                      ('ALIGN', (0, 0), (1, 0), 'LEFT'),
                      ('VALIGN', (0, 0), (1, 0), 'TOP'),
                      ('TEXTCOLOR', (0, 1), (0, -1), colors.blue)])
        story.append(ban)
        story.append(Spacer(0, espacio_banner))
        story.append(Paragraph("<b>Reportes</b> ", otro_estilo))
        story.append(Spacer(0, 25))
        story.append(Paragraph("<b>" + titulo + "</b> " + str(datetime.datetime.now()),
                               style_barra))
        story.append(Spacer(0, 25))

        # Header rows: a spacer paragraph row, then the column titles.
        integrantes = [[Paragraph('''<font size=12> <b> </b></font>''', styleSheet["BodyText"])],
                       ['Apellido', 'Nombre', 'D.N.I:', 'Nro Crédito:', 'Nro Cuota', 'Monto']]
        for item in listado:
            # Amount owed = first-due amount plus accrued penalties.
            monto_adeudado = float(item.importe_primer_venc) + float(item.punitorios)
            monto_txt = str(round(monto_adeudado, 2)) if redondear else str(monto_adeudado)
            obj_credito = N_creditos(1).buscar_credito_por_nro_credito(item.nro_credito)
            obj_party = N_datos_personales_cliente().buscar_party_party_por_id(obj_credito.id_party)
            integrantes.append([str(obj_party.apellido), str(obj_party.nombre),
                                str(obj_party.nro_doc), str(item.nro_credito),
                                str(item.nro_cuota), monto_txt])

        t = Table(integrantes, anchos)
        t.setStyle(TableStyle([
            ('INNERGRID', (0, 1), (-1, -1), 0.25, colors.black),
            ('BOX', (0, 1), (-1, -1), 0.25, colors.black),
            ('BACKGROUND', (0, 1), (-1, 1), colors.lightgrey)
        ]))
        story.append(t)
        story.append(Spacer(0, 15))

        # Output goes under <config root>/pdf/listados/<carpeta><year>_<month>/.
        cadena = configuracion().ruta()
        hoy = datetime.date.today()
        file_path = cadena + "/pdf/listados/" + carpeta + str(hoy.year) + "_" + str(hoy.month)
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        pdf_path = file_path + "/" + nombre_pdf
        SimpleDocTemplate(pdf_path).build(story)

        msgBox = QMessageBox()
        msgBox.setWindowTitle("Estado de Listado")
        msgBox.setText("El Listado se ha generado correctamente : " + etiqueta + " " + nombre_pdf)
        msgBox.exec_()

        # Open the generated PDF with the platform's default viewer.
        if sys.platform == 'linux':
            subprocess.call(["xdg-open", pdf_path])
        else:
            os.startfile(pdf_path)
#app = QApplication(sys.argv)
#dialogo= Cuotas_vencidas_30dias()
#dialogo.show()
#app.exec_()
|
from numpy import array, zeros, shape, sqrt, ceil
def pixelise(image, pixel_width):
    """Downsample image into pixel_width-sized blocks, blending each block."""
    out_shape = pixelised_shape(shape(image), pixel_width)
    blended = zeros(out_shape)
    for row in range(out_shape[0]):
        for col in range(out_shape[1]):
            # Gather the source pixels covered by this output cell.
            group = access_pixel_group(image, row * pixel_width,
                                       col * pixel_width, pixel_width)
            blended[row, col] = pixel_blend(group)
    return blended.astype(int)
def pixel_blend(pixels):
    """Root-mean-square blend of a group of pixel values.

    NOTE: raises on an empty group (division by its zero length).
    """
    mean_square = sum(pixels ** 2) / len(pixels)
    return sqrt(mean_square)
def pixelised_shape(image_shape, pixel_width):
    """Return the shape of the pixelised output for a source of image_shape.

    Width and height are divided by pixel_width (rounded up); a colour
    dimension, when present, is carried through unchanged. As in the
    original 4-component branch, a trailing alpha component is dropped.

    Raises:
        ValueError: if image_shape has fewer than 2 or more than 4 entries
            (the original silently returned None in that case).
    """
    if not 2 <= len(image_shape) <= 4:
        raise ValueError("unsupported image shape: %r" % (image_shape,))
    width, height = image_shape[0], image_shape[1]
    blocks = (int(ceil(width / pixel_width)), int(ceil(height / pixel_width)))
    if len(image_shape) == 2:
        return blocks
    # 3- and 4-component shapes both keep only the colour dimension.
    return blocks + (image_shape[2],)
def access_pixel_group(original_image, top_left_x, top_left_y, pixel_width):
    """Collect the pixels of the pixel_width x pixel_width tile whose
    top-left corner is (top_left_x, top_left_y).

    Tiles that overhang the image edge (resolution not divisible by
    pixel_width) are clipped: out-of-range positions are skipped, so the
    returned array may hold fewer than pixel_width**2 values.
    """
    pixel_values = []
    for i in range(pixel_width):
        for j in range(pixel_width):
            try:
                pixel_values.append(original_image[top_left_x + i,
                                                   top_left_y + j])
            except IndexError:
                # Was a bare "except:"; only out-of-bounds access is expected
                # here, and a bare clause would also hide real errors.
                pass
    return array(pixel_values)
from pydantic import BaseModel, Field, EmailStr
class UserLoginSchema(BaseModel):
    """Request body for user login."""

    email: EmailStr = Field(...)
    password: str = Field(...)

    class Config:
        # pydantic/OpenAPI looks for the key "example"; the previous key
        # "exemplo" (typo) never reached the generated API docs.
        schema_extra = {
            'example': {
                'email': 'teste@teste.com',
                'password': 'umasenha'
            }
        }
|
from vpython import *
#sphere()
# Bouncing-ball demo: a ball inside a box of five walls (the front face is
# left open so the camera can see in), with an arrow tracking its velocity.
ball = sphere(pos=vector(-5, 0, 0), radius=0.5, color=color.yellow, make_trail=True)
wallR = box(pos=vector(6, 0, 0), size=vector(0.2, 12, 12), color=color.purple)
wallL = box(pos=vector(-6, 0, 0), size=vector(0.2, 12, 12), color=color.purple)
wallT = box(pos=vector(0, 6, 0), size=vector(12, 0.2, 12), color=color.purple)
wallB = box(pos=vector(0, -6, 0), size=vector(12, 0.2, 12), color=color.purple)
wallZ = box(pos=vector(0, 0, -6), size=vector(12, 12, 0.2), color=color.purple)
ball.velocity = vector(25,10,10)
vscale = 0.1  # shrink factor so the velocity arrow stays readable
varr = arrow(pos=ball.pos, axis=ball.velocity, color=color.yellow)
deltat = 0.005  # simulation time step (seconds per iteration)
t = 0
scene.autoscale = False
# Animation loop: negate a velocity component whenever the ball crosses the
# corresponding wall plane. NOTE(review): the component is flipped on every
# frame the ball is beyond a plane, so a sufficiently fast ball could flip
# sign on consecutive frames and "stick" -- confirm speeds/step are safe.
while True:
    rate(50)
    if ball.pos.x > wallR.pos.x:
        ball.velocity.x = -ball.velocity.x
    if ball.pos.x < wallL.pos.x:
        ball.velocity.x = -ball.velocity.x
    if ball.pos.y > wallT.pos.y:
        ball.velocity.y = -ball.velocity.y
    if ball.pos.y < wallB.pos.y:
        ball.velocity.y = -ball.velocity.y
    if ball.pos.z < wallZ.pos.z:
        ball.velocity.z = -ball.velocity.z
    if ball.pos.z > -wallZ.pos.z:
        ball.velocity.z = -ball.velocity.z
    # Euler integration step, then keep the arrow glued to the ball.
    ball.pos = ball.pos + ball.velocity*deltat
    t = t + deltat
    varr.pos = ball.pos
    varr.axis = ball.velocity*vscale
|
from api.models import Wallet
from ariadne import convert_kwargs_to_snake_case
def fetch_wallets(obj, info):
    """GraphQL resolver: every wallet as a dict, or an error payload.

    Resolvers here never raise -- failures are reported in the payload.
    """
    try:
        all_wallets = [w.to_dict() for w in Wallet.query.all()]
    except Exception as error:
        return {
            "success": False,
            "errors": [str(error)]
        }
    return {
        "success": True,
        "wallets": all_wallets
    }
@convert_kwargs_to_snake_case
def fetch_one_wallet(obj, info, wallet_id):
    """GraphQL resolver: one wallet by id, or a not-found error payload."""
    try:
        record = Wallet.query.get(wallet_id)
        # A miss returns None; .to_dict() then raises AttributeError,
        # which is translated into the not-found payload below.
        return {
            "success": True,
            "wallet": record.to_dict()
        }
    except AttributeError:
        return {
            "success": False,
            "errors": [f"Wallet item matching id {wallet_id} not found"]
        }
|
from djangobench.utils import run_benchmark
def setup():
    """Import the Book model lazily so Django is configured first."""
    global Book
    from model_save_new.models import Book


def benchmark():
    """Insert 30 new Book rows, one save() call each."""
    for pk in range(30):
        Book(id=pk, title='Foo').save()


run_benchmark(
    benchmark,
    setup=setup,
    meta={
        'description': 'A simple Model.save() call, instance not in DB.',
    },
)
|
## Represents one runnable script entry: display name, owning tool, file path.
#
class Script(object):

    ## Build a script descriptor.
    #  @param nom_script      display name of the script
    #  @param tool_script     tool the script belongs to
    #  @param chemin_fichier  path of the script file on disk
    def __init__(self, nom_script, tool_script, chemin_fichier):
        super(Script, self).__init__()
        self.intitule = nom_script    # display name
        self.tool = tool_script       # owning tool
        self.path = chemin_fichier    # file location

    ## The script's display name.
    def __str__(self):
        return self.intitule

    ## Full path of the backing file.
    def get_file_with_path(self):
        return self.path
|
# add imports
import unittest
import sysconfig as sys


# Unit tests for the Python environment itself.
class TddWithPython(unittest.TestCase):
    def test_python_env(self):
        """The suite is expected to run on a POSIX platform."""
        # Local import: the previous code reached os through sysconfig's
        # internal "import os" (via the "sys" alias), which is an
        # implementation detail and can break across Python versions.
        import os
        # assertEquals is a deprecated alias; assertEqual is the supported name.
        self.assertEqual('posix', os.name)


# this line is required for running the python code
if __name__ == '__main__':
    unittest.main()
from math import sqrt

# Read the coefficients of the quadratic a*x^2 + b*x + c from the user.
# (Spelling fixed in the prompts/messages: "współczynik" -> "współczynnik",
# "pierwaistków" -> "pierwiastków".)
a = float(input('Proszę wprowadzić współczynnik a: '))
b = float(input('Proszę wprowadzić współczynnik b: '))
c = float(input('Proszę wprowadzić współczynnik c: '))

if a != 0:
    # The discriminant decides how many real roots exist.
    delta = b**2 - (4*a*c)
    if delta > 0:
        x1 = (-1*b - sqrt(delta)) / (2*a)
        x2 = (-1*b + sqrt(delta)) / (2*a)
        print("x1 =", x1)
        print("x2 =", x2)
    elif delta == 0:
        x = (-1*b) / (2*a)
        print('x =', x)
    else:
        print('Podana funkcja kwadratowa nie posiada pierwiastków')
else:
    print('Podana funkcja nie jest funkcją kwadratową')
from projectq import MainEngine
from projectq.ops import *
from projectq.meta import Dagger
import numpy as np
# Per-qubit gate sequences applied before a Z-basis measurement --
# presumably rotating X/Y/Z measurement bases onto Z (H for X; H,S for Y;
# nothing for Z). TODO(review): confirm intended ordering of H and S.
BaseX = [H]
BaseY = [H, S]
BaseZ = []
# NOTE(review): the string below sits after the statements above, so it is
# NOT the module docstring -- just a no-op string literal.
"""
ProjectQ library build for the experiment data analysis
"""
def io_circuit(eng, input_gate, output_gate):
    '''
    Build (but do not measure) the experiment circuit: prepare each qubit
    with its input gates, entangle nearest neighbours with a CZ chain, then
    apply the daggered output gates as the pre-measurement basis change.

    Args:
        eng (MainEngine): engine used to allocate the qubits
        input_gate (iterable of iterables of Gate): per-qubit preparation gates
        output_gate (iterable of iterables of Gate): per-qubit basis gates,
            applied inside a Dagger block; must have the same length as
            input_gate
    Return:
        qureg: the allocated, still-unmeasured qubit register
    '''
    assert len(output_gate) == len(input_gate)
    n = len(output_gate)
    state = eng.allocate_qureg(n)
    # State preparation, qubit by qubit.
    for i in range(n):
        for gate in input_gate[i]:
            gate | state[i]
    # Linear chain of CZ entanglers between adjacent qubits.
    for i in range(n-1):
        CZ | (state[i], state[i+1])
    with Dagger(eng):  # not sure wether this would work, apply dagger on qubits
        for i in range(n):
            # output_gate.reverse()
            for gate in output_gate[i]:
                gate | state[i]
    return state
# depreciated, equals to eng.backend.cheat
def circuit_tomography(eng, init, base, reverse_order=False):
    '''
    Run io_circuit and read the full amplitude vector off the simulator.

    Args:
        eng (MainEngine): simulator-backed engine (needs get_amplitude)
        init (iterable): per-qubit preparation gates
        base (iterable): per-qubit basis-change gates
        reverse_order (bool): if True, query amplitudes with the bitstring
            reversed -- compensates for a backend endianness difference
            (see the HiQ note below). TODO(review): confirm which ordering
            matches the experiment data.
    Return:
        np.ndarray: amplitudes indexed by bitstring value (0 .. 2**n - 1)
    '''
    state = io_circuit(eng, init, base)
    n = len(state)
    eng.flush()
    if reverse_order:
        # result=[eng.backend.get_amplitude(bin(i)[2:].replace('0','a').replace("1","0").replace("a","1").zfill(n),state) for i in range(2**n)]
        result = [eng.backend.get_amplitude(
            bin(i)[2:].zfill(n)[::-1], state) for i in range(2**n)]
    else:
        result = [eng.backend.get_amplitude(
            bin(i)[2:].zfill(n), state) for i in range(2**n)]
    # different vector order compared with the original order in HiQ
    # result=eng.backend.cheat()
    # Measure to deallocate cleanly; amplitudes were already captured above.
    All(Measure) | state
    return np.array(result)
|
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from PyQt4 import uic
from GA144_fonctions_py2 import *
import sys
# Load the Designer .ui file at import time (PyQt4 legacy API).
UiMaFenetre, Klass = uic.loadUiType('ArrayForthWindow.ui')


## Main window of the GA144 ArrayForth front-end (PyQt4, old-style signals).
class MaFenetre(QMainWindow, UiMaFenetre):
    def __init__(self, conteneur=None):
        # NOTE(review): when an explicit "conteneur" is passed, __init__ and
        # setupUi run on it rather than on self -- confirm this embedding
        # behaviour is intentional.
        if conteneur is None : conteneur = self
        QMainWindow.__init__(conteneur)
        self.setupUi(conteneur)
        self.createConnexions()

    ## Wire each menu action / button to its handler (old-style connect).
    def createConnexions(self):
        self.connect(self.actionInfo, SIGNAL("triggered()"), self.info)
        self.connect(self.commandLinkButton_ArrayForthRun, SIGNAL("clicked()"), ArrayForth)
        self.connect(self.commandLinkButton_EditRun, SIGNAL("clicked()"), Editeur)
        self.connect(self.pushButton_Save, SIGNAL("clicked()"), InitArrayForth)
        self.connect(self.commandLinkButton_cf_to_f, SIGNAL("clicked()"), ConversionCF_toForth)
        self.connect(self.commandLinkButton_f_to_cf, SIGNAL("clicked()"), ConversionForth_toCF)
        self.connect(self.pushButton_Maj, SIGNAL("clicked()"), MajArrayForth)
        self.connect(self.pushButton_Send, SIGNAL("clicked()"), Commande)

    # NOTE(review): the quadruple-quoted block below is dead example code
    # kept by the author as a no-op string literal, not a docstring.
    """"
    self.connect(self.actionCalculer, SIGNAL("triggered()"), self.calcul)
    def calcul(self):
        n1 = self.spinBoxNombre1.value()
        n2 = self.spinBoxNombre2.value()
        op = self.comboBoxOperation.currentText()
        ch = str(n1)+str(op)+ str(n2)
        try : res = eval(ch)
        except ZeroDivisionError : res = "#div0"
        self.labelResultat.setText (str(res))
    """

    ## Show the About box.
    def info(self):
        QMessageBox.information(self,"info : "," GA144 , Auteur Emmanuel SAID "," version : "+version)


if __name__ == "__main__":
    a = QApplication(sys.argv)
    f = MaFenetre()
    f.show()
    r = a.exec_()
|
from django.contrib import admin
from .models import Income,spending
# Register your models here.
for model in (Income, spending):
    admin.site.register(model)
|
def get_common_elements(seq1, seq2, seq3):
    """Return a tuple of the elements common to all three sequences.

    Element order in the result is unspecified (set semantics).
    """
    return tuple(set(seq1) & set(seq2) & set(seq3))

print(get_common_elements("abcd",['a','b', 'd'],('b','c', 'd')))
# , {"a","b","c","d", "e"}
def get_common_elements_multi(*sequences):
    """Return a tuple of the elements common to every passed sequence.

    With no arguments the result is the empty tuple. Result order is
    unspecified (set semantics).
    """
    if not sequences:
        return ()
    common = set(sequences[0])
    for seq in sequences[1:]:
        common &= set(seq)
    return tuple(common)

print(get_common_elements_multi("abcd",['a','b', 'd'],('b','c', 'd'), {"a","b","c","d", "e"}, {"a","c","d", "e"}))
print(get_common_elements_multi("Valdis", "Voldemārs", "Voldemorts", "Volodja"))
print(get_common_elements_multi())
from nltk.stem.snowball import SnowballStemmer
import string
import nltk
from nltk.corpus import stopwords
# Shared text-normalisation constants.
PUNCTUATION = string.punctuation
SNOWBALLSTEMMER = SnowballStemmer("english")

# nltk raises LookupError when a corpus is missing locally; the previous
# bare "except:" also swallowed KeyboardInterrupt/SystemExit. Download the
# stopword corpus on first use, then retry.
try:
    STOPWORDS = stopwords.words("english")
except LookupError:
    nltk.download("stopwords")
    STOPWORDS = stopwords.words("english")

# Data-file locations, relative to this module's directory.
RATING_CSV = "../data/ratings.csv"
META_DATA_PARQUET = "../data/processed_meta_data.parquet"
TRAIN_SET_PARQUET = "../data/rating_train.parquet"
VALIDATION_SET_PARQUET = "../data/rating_valid.parquet"
TEST_SET_PARQUET = "../data/rating_test.parquet"
JACCARD_SIM = "../data/JACCARD_SIM.npz"
|
#!/usr/bin/env python3
import os
import sys
import query_db
if __name__ == '__main__':
    # Usage: load_db_tool.py <type flag> <filename>
    if len(sys.argv) != 3:
        print("Usage: load_db_tool.py [-D, -d, -a, -l] [filename]")
        print("-D: Destination File\n -d: Dining File\n -a: Attractions File\n -l: Lodging File")
    else:
        fileType = sys.argv[1]
        fileName = sys.argv[2]
        queryHandler = query_db.QueryHandler()
        # Fixes: "else if" is a SyntaxError in Python (must be "elif"), and
        # the "-l" branch was missing its colon. "with" also guarantees the
        # file is closed even if a query raises.
        with open(fileName, 'r') as file:
            for line in file:
                dataFields = line.split()
                if fileType == "-a":
                    queryHandler.addAttraction(dataFields[0], dataFields[1], dataFields[2], dataFields[3], dataFields[4])
                elif fileType == "-d":
                    queryHandler.addDining(dataFields[0], dataFields[1], dataFields[2], dataFields[3], dataFields[4], dataFields[5])
                elif fileType == "-D":
                    queryHandler.addDestination(dataFields[0], dataFields[1], dataFields[2])
                elif fileType == "-l":
                    queryHandler.addLodging(dataFields[0], dataFields[1], dataFields[2], dataFields[3], dataFields[4])
        queryHandler.close_conn()
import csv
import UDLevel
import copy
import Game_elements
def convertBoardToInstances(a, dimension):
    """Replace each recognised board key in the 2-D grid with its
    Game_elements instance; unknown keys are left untouched.
    """
    # Simple cells: factory called as cls((row, col), dimension).
    factories = {
        "_": Game_elements.Floor,
        "P": Game_elements.Player,
        "W": Game_elements.Wall,
        "R": Game_elements.Rock,
        "K": Game_elements.Key,
        "KW": Game_elements.Keywall,
        "L": Game_elements.Ladder,
        "#": Game_elements.Portal,
    }
    # Cells carrying a state flag -- presumably locked/unlocked for doors
    # and active/inactive for switches (TODO confirm flag semantics).
    flagged = {
        "D(L)": (Game_elements.Door, True),
        "D(U)": (Game_elements.Door, False),
        "S(A)": (Game_elements.Switch, True),
        "S(D)": (Game_elements.Switch, False),
    }
    for row in range(len(a)):
        for col in range(len(a[0])):
            key = a[row][col]
            if key in factories:
                a[row][col] = factories[key]((row, col), dimension)
            elif key in flagged:
                cls, flag = flagged[key]
                a[row][col] = cls((row, col), dimension, flag)
    return a
def rowIsEmpty(a):
    """True when every cell in the row is the empty string."""
    return all(cell == "" for cell in a)
def cleanRows(a):
    """Return only the rows of a that contain at least one non-empty cell."""
    return [row for row in a if not rowIsEmpty(row)]
def cleanCols(a):
    """Drop every column whose cells are all empty strings.

    Always returns one (possibly empty) list per input row, preserving the
    row count even when every column is removed.
    """
    n_rows = len(a)
    n_cols = len(a[0])
    kept = [[] for _ in range(n_rows)]
    for col in range(n_cols):
        column = [a[row][col] for row in range(n_rows)]
        if any(cell != "" for cell in column):
            for row, cell in enumerate(column):
                kept[row].append(cell)
    return kept
def clean2dList(a):
    """Strip all-empty rows, then all-empty columns, from the 2-D list."""
    return cleanCols(cleanRows(a))
def getListFromFile(path):
    """Read a CSV level file and return its cleaned 2-D cell list."""
    with open(path, 'r') as csvfile:
        rows = list(csv.reader(csvfile))
    return clean2dList(rows)
def getLevel(level):
    """Load level <level> from ../Levels/level<level>/: its three boards
    (o=overworld, u=upside-down, p=persistent) plus the player's start
    location, wrapped in a UDLevel.Level.
    """
    overworldPath = '../Levels/level%d/%s_level%s.csv' % (level, "o", level)
    upsideDownPath = '../Levels/level%d/%s_level%s.csv' % (level, "u", level)
    persistentPath = '../Levels/level%d/%s_level%s.csv' % (level, "p", level)
    playerLocationPath = '../Levels/level%d/playerStartLoc.txt' % (level)
    # First line of the file is "row,col".
    playerLocation = [int(i) for i in open(playerLocationPath).readline().split(",")]
    overworld = convertBoardToInstances(getListFromFile(overworldPath), "overworld")
    persistent = convertBoardToInstances(getListFromFile(persistentPath), "persistent")
    upsideDown = convertBoardToInstances(getListFromFile(upsideDownPath), "upsideDown")
    player = Game_elements.Player(playerLocation, "player")
    # overworld = stripByType(getListFromFile(overworldPath), False)
    # persistent = stripByType(getListFromFile(overworldPath), True)
    # upsideDown = stripByType(getListFromFile(upsideDownPath), False)
    return UDLevel.Level(overworld, upsideDown, persistent, player)
|
"""
File that contains all the non-visible routes
that are used for communicating with the app.
This particular file contains routines that
are used for vault specific tasks.
"""
#External dependency imports
import tempfile,os
from werkzeug import Headers
#Flask imports
from flask import request, redirect, url_for, abort, Response, stream_with_context
#WebGlacier imports
import WebGlacier as WG
from WebGlacier.models import Vault, Archive, Job
from WebGlacier.lib.app import get_valid_clients, get_handler
from WebGlacier.lib.glacier import process_job, download_archive, upload_archive, process_archive, upload_archive_queue
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/submit",methods=['POST'])
def multi_dispatch(vault_name):
  """
  Handle the main form submit from a vault page.

  Dispatches on which button was pressed: vault switch, upload via a
  client, download via a client, or upload via the web server.
  NOTE(review): Python 2 syntax (print statements).
  """
  # Only remember the selected client if it is one of the known-good ones.
  clients = get_valid_clients()
  client = request.form.get("client_select")
  if client in clients:
    WG.app.config['current_client'] = client
  if WG.app.config.get("VERBOSE",False):
    print "Handling multi-dispatch. Client is %s"%str(client)
  # Vault switch needs no client validation: just redirect.
  if 'vault_select_pressed' in request.form:
    if WG.app.config.get("VERBOSE",False):
      print "Changing vault"
    return redirect(url_for("vault_view",vault_name=request.form['vault_select']))
  # Client-side actions require a valid client; otherwise fall through to
  # the server-side upload path.
  if client in clients:
    if 'upload_pressed' in request.form:
      # The placeholder text in an untouched field means "no description".
      description=request.form.get('upload_description','')
      if description=="Description of file.":
        description=''
      if WG.app.config.get("VERBOSE",False):
        print "Doing upload via client from vault %s with path %s"%(vault_name,request.form['upload_path'])
      upload_archive_queue(vault_name,request.form['upload_path'],client,description)
    elif 'download' in request.form:
      if WG.app.config.get("VERBOSE",False):
        print "Doing download via client from vault %s with id %s"%(vault_name,request.form['download'])
      download_archive(vault_name,request.form['download'],client)
  else:
    if 'add_archive_via_server' in request.form:
      if WG.app.config.get("VERBOSE",False):
        print "Doing upload via server with request.form %s and request.files %s"%(str(request.form),str(request.files))
      # Delegate to the server-side upload handler.
      return upload_file(vault_name)
    if WG.app.config.get("VERBOSE",False):
      print "Doing nothing"
  if WG.app.config.get("VERBOSE",False):
    print "Form that did nothing much was %s"%str(request.form)
  return redirect(request.referrer)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/addfile",methods=["POST"])
def upload_file(vault_name):
  """
  Accept a file POSTed to the web server and push it into the vault.

  The payload is spooled to a temp file first (hash computation needs a
  real file on disc). Aborts 401 when the vault is unknown or locked.
  NOTE(review): Python 2 syntax (print statements).
  """
  handler = get_handler()
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name,region=region).first()
  if vault is None:
    abort(401)
  if vault.lock:
    abort(401)
  file = request.files['file']
  if file:
    if WG.app.config.get("VERBOSE",False):
      print "starting to upload file to web-server"
    # Spool to a temporary file; delete=True cleans it up on close.
    tmp=tempfile.NamedTemporaryFile(dir=WG.app.config["TEMP_FOLDER"],delete=True)
    file.save(tmp)
    # Force the bytes to disc before hashing/uploading.
    tmp.file.flush()
    os.fsync(tmp.file.fileno())
    if WG.app.config.get("VERBOSE",False):
      print "Server has accepted payload"
    # The placeholder text in an untouched field means "no description".
    description=request.form.get('upload_description','')
    if description=="Description of file.":
      description=''
    upload_archive(tmp.name,vault,file.filename,description=description)
    tmp.close()
  return redirect(request.referrer)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/download",methods=["GET"])
def download_file(vault_name):
  """
  Stream a previously-retrieved archive to the browser.

  Requires a known vault, a known archive_id, and a completed, live,
  succeeded download job to stream from; aborts 401 otherwise.

  Fixes: the fallback filename branch read "app.config" -- a NameError at
  runtime, since this module only imports WG -- and the disabled cache
  branch ("if False and cache==2") referenced an undefined name; both now
  use WG.app.config / a plain assignment.
  """
  handler = get_handler()
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name,region=region).first()
  if vault is None:
    abort(401)
  if 'archive_id' not in request.args:
    abort(401)
  archive = Archive.query.filter_by(archive_id=request.args['archive_id']).first()
  if archive is None:
    abort(401)
  if archive.filename!="NOT_GIVEN":
    fname=archive.filename
  else:
    fname=WG.app.config["UNKNOWN_FILENAME"]
  # Need a finished, successful download job to stream from.
  job=archive.jobs.filter_by(action='download',completed=True,live=True,status_message="Succeeded").first()
  if job is None:
    abort(401)
  # Local-disc caching is currently disabled. When re-enabled, open a file
  # under WG.app.config["LOCAL_CACHE"]/<region>/<vault>/<archive_id> here
  # and pass it as file_handler so the download is teed into the cache.
  f = None
  h=Headers()
  h.add("Content-Disposition",'attachment;filename="'+fname+'"')
  return Response(stream_with_context(job.stream_output(file_handler=f)),headers=h)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/checkjobstatus")
def check_job_status(vault_name):
  """
  Sync the local job table with Amazon's live job list for one vault.

  Every job Amazon reports is added/updated locally; any local job marked
  live that Amazon no longer lists is flagged dead.
  """
  handler = get_handler()
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name,region=region).first()
  if vault is None:
    abort(401)
  # Amazon is the source of truth for live jobs.
  live_jobs = handler.list_jobs(vault.name)
  for job in live_jobs['JobList']:
    process_job(job,vault)
  # Anything we still think is live but Amazon doesn't list is dead.
  jobs = vault.jobs.filter_by(live=True).all()
  live_ids = [x["JobId"] for x in live_jobs['JobList']]
  for job in jobs:
    if job.job_id not in live_ids:
      job.live=False
      WG.db.session.add(job)
  WG.db.session.commit()
  return redirect(request.referrer)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/runjobs",methods=["GET"])
def run_jobs(vault_name):
    """
    Execute a completed job. If not completed, updates its status.
    """
    handler = get_handler()
    # Need to get the vault as always...
    region = handler.region.name
    vault = Vault.query.filter_by(name=vault_name,region=region).first()
    if vault is None:
        abort(401)
    # Get the job from our local db
    job=Job.query.filter_by(job_id=(request.args['job_id'])).first()
    # If we don't have the job, or our records show it's incomplete, ask Amazon
    if job is None or not job.completed:
        if vault.lock:
            abort(401)
        job=process_job(handler.describe_job(vault.name,request.args['job_id']),vault)
    # If it's still none, something went wrong...
    # NOTE(review): this tests job.status_code while download_file filters on
    # status_message=="Succeeded" -- confirm both attributes exist and agree.
    if job is None or not job.completed or not job.live or not job.status_code=="Succeeded":
        abort(401)
    # Now we have the job, get its output
    if job.action=='list':
        # Inventory listing: import every archive it reports, then unlock
        # the vault so new inventory jobs may be submitted.
        dat=handler.get_job_output(vault.name,job.job_id)
        for archive in dat["ArchiveList"]:
            process_archive(archive,vault)
        vault.lock=False
        WG.db.session.add(vault)
        WG.db.session.commit()
    elif job.action=='download':
        # Downloads are streamed by download_file; nothing to execute here.
        pass
    #return redirect(request.referrer)
    return redirect(url_for("vault_view",vault_name=vault.name))
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/getinventory",methods=["GET"])
def get_inventory(vault_name):
    """
    Kick off an inventory-retrieval job for the vault, locking the vault
    so duplicate inventory jobs cannot be submitted meanwhile.
    """
    handler = get_handler()
    vault = Vault.query.filter_by(name=vault_name, region=handler.region.name).first()
    if vault is None:
        abort(401)
    if vault.lock:
        # An inventory request is already outstanding.
        abort(401)
    job_options = {"Description": "Auto-made inventory job.",
                   "Type": "inventory-retrieval", "Format": "JSON"}
    submitted = handler.initiate_job(vault.name, job_options)
    # Lock the vault until the inventory comes back.
    vault.lock = True
    WG.db.session.add(vault)
    WG.db.session.commit()
    # Record the freshly submitted job locally.
    process_job(handler.describe_job(vault.name, submitted["JobId"]), vault)
    return redirect(request.referrer)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/getarchive",methods=["GET"])
def request_archive(vault_name):
    """
    Submit an archive-retrieval job; the archive becomes downloadable
    once Glacier finishes preparing it.
    """
    handler = get_handler()
    vault = Vault.query.filter_by(name=vault_name, region=handler.region.name).first()
    if vault is None:
        abort(401)
    archive_id = request.args.get('archive_id')
    if archive_id is None:
        abort(401)
    archive = Archive.query.filter_by(archive_id=archive_id).first()
    if archive is None:
        abort(401)
    # Everything exists -- ask Glacier to fetch the data.
    retrieval_opts = {"Description": "Fetch archive.",
                      "Type": "archive-retrieval",
                      "ArchiveId": archive.archive_id}
    submitted = handler.initiate_job(vault.name, retrieval_opts)
    process_job(handler.describe_job(vault.name, submitted["JobId"]), vault)
    return redirect(request.referrer)
@WG.app.route(WG.app.config.get("URL_PREFIX","")+"/<vault_name>/action/deletearchive",methods=["GET"])
def delete_archive(vault_name):
    """
    Delete an archive from Glacier and purge it (plus its jobs) from the
    local database.
    """
    handler = get_handler()
    vault = Vault.query.filter_by(name=vault_name, region=handler.region.name).first()
    if vault is None or vault.lock:
        abort(401)
    archive_id = request.args.get('archive_id')
    if archive_id is None:
        abort(401)
    archive = Archive.query.filter_by(archive_id=archive_id).first()
    if archive is None:
        abort(401)
    # Remove remotely first, then cascade locally: jobs, then the archive row.
    handler.delete_archive(vault.name, archive.archive_id)
    for job in archive.jobs:
        WG.db.session.delete(job)
    WG.db.session.delete(archive)
    WG.db.session.commit()
    return redirect(request.referrer)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PyQt5 import QtNetwork
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest
from PyQt5.QtCore import QCoreApplication, QUrl, QByteArray
import sys
class Example:
    """Posts a form-encoded `student_info` query and prints the reply body."""

    def __init__(self):
        # Fire the network request immediately on construction.
        self.doRequest()

    def doRequest(self):
        """Send the POST; the reply is delivered asynchronously to handleResponse."""
        mobile = "13593871052"
        url = "http://azxfsite.test/extra_query.php"
        request = QNetworkRequest(QUrl(url))
        request.setHeader(QNetworkRequest.ContentTypeHeader, "application/x-www-form-urlencoded")
        data = QByteArray()
        # NOTE(review): QByteArray.append() is called with Python str here --
        # confirm the PyQt5 build in use accepts str (some require bytes).
        data.append("action=student_info")
        data.append("&mobile={0}".format(mobile))
        # Keep the manager as an attribute so it is not garbage-collected
        # before the asynchronous reply arrives.
        self.manager = QNetworkAccessManager()
        self.manager.finished.connect(self.handleResponse)
        self.manager.post(request, data)

    def handleResponse(self, reply):
        """Print the response on success or the error details, then quit the loop."""
        er = reply.error()
        if er == QtNetwork.QNetworkReply.NoError:
            bytes_string = reply.readAll()
            print(str(bytes_string, 'utf-8'))
        else:
            print("Error occured: ", er)
            print(reply.errorString())
        QCoreApplication.quit()
def main():
    """Create the Qt event loop, start the request, and run until quit()."""
    app = QCoreApplication([])
    requester = Example()  # held in a local so the object outlives exec_()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
from collections import Counter
class Solution(object):
    def combinationSum2(self, candidates, target):
        """
        Return every unique combination of candidates summing to target,
        where each list element may be used at most as many times as it
        appears in `candidates`.

        :type candidates: List[int]
        :type target: int
        :rtype: List[List[int]]
        """
        # Collapse duplicates into (value, multiplicity) pairs, ascending.
        uniques = sorted(Counter(candidates).items())
        results = []

        def backtrack(idx, chosen, remaining):
            # Found an exact sum: record a copy of the current combination.
            if remaining == 0:
                results.append(list(chosen))
                return
            # Overshot, or no values left to try.
            if remaining < 0 or idx >= len(uniques):
                return
            value, available = uniques[idx]
            # Values are sorted, so once one is too large the rest are too.
            if value > remaining:
                return
            # Try taking 0..available copies of this value, then move on.
            for copies in range(available + 1):
                backtrack(idx + 1, chosen + [value] * copies,
                          remaining - copies * value)

        backtrack(0, [], target)
        return results
def test():
    """Exercise combinationSum2 against known inputs and expected outputs."""
    def check(candidates, target, expected):
        got = Solution().combinationSum2(candidates, target)
        assert sorted(got) == sorted(expected)

    check([10, 1, 2, 7, 6, 1, 5], 8,
          [[1, 7], [1, 2, 5], [2, 6], [1, 1, 6]])
    check([1, 1, 2, 2, 3, 4], 13, [[1, 1, 2, 2, 3, 4]])
    check([1, 1, 2, 2, 3, 4], 14, [])
    check([1, 1, 2, 2, 3, 4], 6,
          [[1, 1, 2, 2], [1, 2, 3], [1, 1, 4], [2, 4]])
|
"""
OOrtiz
N2O Thermodynamic Data Gathering
v:0.0
"""
from CoolProp.CoolProp import PropsSI
import CoolProp.CoolProp as CP
from scipy import *
class N20ThermoCompile():
    """
    Compile N2O thermodynamic property tables from CoolProp.

    For every temperature in [TrangeMin, TrangeMax] (Celsius, stepped by
    Tincrement) a tab-separated text file is written containing the
    liquid/vapor saturation properties followed by P/rho/s/h/u over the
    pressure range [PrangeMin, PrangeMax] (Pa, stepped by Pincrement).

    Note: the class name keeps the original "N20" (digit zero) spelling
    so existing callers continue to work.
    """

    # Output directory for all generated tables.
    _OUTPUT_DIR = "C:/Users/m_i_d/Desktop/N2O Thermodynamic Property Tables/"
    # Saturation properties queried per temperature, in output-column order.
    _SAT_PROPS = ('P', 'D', 'S', 'H', 'U')

    def __init__(self, TrangeMin, TrangeMax, Tincrement, PrangeMin, PrangeMax, Pincrement):
        self.TrangeMin = TrangeMin
        self.TrangeMax = TrangeMax
        self.Tincrement = Tincrement
        self.PrangeMin = PrangeMin
        self.PrangeMax = PrangeMax
        self.Pincrement = Pincrement
        self.conversions()  # convert Celsius inputs to Kelvin at start
        self.ReadMe()

    def conversions(self):
        """Convert the stored temperature bounds from Celsius to Kelvin."""
        # 273 (not 273.15) keeps the bounds integral so range() works in
        # dataGather(); this leaves a 0.15 K offset -- TODO confirm acceptable.
        C2K = 273
        self.TrangeMin += C2K
        self.TrangeMax += C2K

    def dataGather(self):
        """Query CoolProp and write one table file per temperature step."""
        print("Gathering Data...")
        for temp in range(self.TrangeMin, self.TrangeMax + self.Tincrement, self.Tincrement):
            # Saturation values interleaved liquid (Q=0) / vapor (Q=1) for
            # each of P, rho, s, h, u -- matching the column headings.
            # (Replaces the previous sv0..sv9 variables read back via eval().)
            saturation = []
            for prop in self._SAT_PROPS:
                for quality in (0, 1):
                    saturation.append(PropsSI(prop, 'T', temp, 'Q', quality, 'N2O'))
            # Single-phase properties over the requested pressure range.
            rows = []
            for pressure in range(self.PrangeMin, self.PrangeMax + self.Pincrement, self.Pincrement):
                rows.append((pressure,
                             PropsSI('D', 'T', temp, 'P', pressure, 'N2O'),   # density
                             PropsSI('S', 'T', temp, 'P', pressure, 'N2O'),   # entropy
                             PropsSI('H', 'T', temp, 'P', pressure, 'N2O'),   # enthalpy
                             PropsSI('U', 'T', temp, 'P', pressure, 'N2O')))  # internal energy
            # `with` guarantees the file is closed even if a write fails.
            with open(self._OUTPUT_DIR + str(temp) + "K Table.txt", "w") as f:
                f.write("Temperature\n" + str(temp) + "\n")
                # Single-line heading (the old string line-continuation could
                # leak source indentation into the output).
                f.write("P Sat l\tP Sat v\tRho Sat l\tRho Sat v\ts Sat l\ts Sat v\t"
                        "h Sat l\th Sat v\tu Sat l\tu Sat v\n")
                f.write("\t".join("{:.5f}".format(v) for v in saturation) + "\n")
                f.write("Pressure\tDensity\tEntropy\tEnthalpy\tInternal Energy\n")
                for pressure, rho, s, h, u in rows:
                    f.write("{:.2f}\t{:.5f}\t{:.5f}\t{:.5f}\t{:.5f}\n".format(pressure, rho, s, h, u))
        print("Data Gathering Completed...")

    def ReadMe(self):
        """Write an attribution read-me into the output directory."""
        with open(self._OUTPUT_DIR + "@@Read Me@@.txt", "w") as rm:
            rm.write("All thermodynamic information in the following tables has been \n"
                     " gathered from CoolProp @ https://CoolProp.org or"
                     " https://github.com/CoolProp/CoolProp.")
# (TrangeMin, TrangeMax, Tincrement, PrangeMin, PrangeMax, Pincrement)
# Build the full table set: -50..36 C in 1 C steps, 0.1..9 MPa in 25 kPa steps.
NTC = N20ThermoCompile(-50, 36, 1, 100000, 9000000, 25000) #Temp in C, P in Pa
NTC.dataGather()
|
import numpy as np
def is_in_range(x, limit):
    """Return True when index x lies in the half-open interval [0, limit)."""
    return 0 <= x < limit
def is_not_visited(visited_sites, i, j):
    """Return True when cell (i, j) has not yet been marked visited."""
    return not bool(visited_sites[i][j])
def is_correct_height(grid, i, j, height):
    """Return True when cell (i, j) holds exactly the requested height."""
    return height == grid[i][j]
def is_valid_to_visit(grid, i, j, height, visited_sites, row, col):
    """True when (i, j) is inside the row x col grid, unvisited, and at `height`.

    Bounds are checked first so the grid is never indexed out of range.
    """
    if not (is_in_range(i, row) and is_in_range(j, col)):
        return False
    return is_not_visited(visited_sites, i, j) and is_correct_height(grid, i, j, height)
def depth_first_search(grid, i, j, visited_sites, height, row, col, island_size):
    """Flood-fill from (i, j) across 8-connected cells of the given height.

    Marks every reached cell visited and returns the accumulated island size.
    """
    # All eight neighbour offsets (diagonals included).
    offsets = ((-1, -1), (-1, 0), (-1, 1),
               (0, -1),           (0, 1),
               (1, -1),  (1, 0),  (1, 1))
    visited_sites[i][j] = True
    for di, dj in offsets:
        ni, nj = i + di, j + dj
        if is_valid_to_visit(grid, ni, nj, height, visited_sites, row, col):
            island_size = depth_first_search(grid, ni, nj, visited_sites,
                                             height, row, col, island_size + 1)
    return island_size
def count_islands(grid, row, col, height):
    """Count 8-connected islands whose cells equal `height`.

    Returns (count, sizes) where sizes[k] is the cell count of island k.
    """
    visited_sites = np.zeros((row, col))
    sizes = []
    for i in range(row):
        for j in range(col):
            # Each unvisited matching cell seeds a brand-new island.
            if grid[i][j] == height and not visited_sites[i][j]:
                sizes.append(depth_first_search(grid, i, j, visited_sites,
                                                height, row, col, 1))
    return len(sizes), sizes
def get_island_count_and_average_size(grid):
    """Count islands at every height level of `grid` and average their sizes.

    Islands are counted separately per height value 1..max(grid).
    Returns (total_island_count, average_island_size); the average is 0.0
    for a grid with no positive-height cells (previously this raised
    ZeroDivisionError).
    """
    row, col = grid.shape
    max_height = np.max(grid)
    island_count = 0
    island_sizes = []
    for height in range(1, int(max_height + 1)):
        count, sizes = count_islands(grid, row, col, height)
        island_count += count
        island_sizes.extend(sizes)
    # Guard against an island-free grid (e.g. all zeros).
    average = sum(island_sizes) / island_count if island_count else 0.0
    return island_count, average
def get_grid_info(grid):
    """Collect summary statistics for a height grid.

    Returns (island_count, avg_island_size, max_height, min_height,
    height_difference, adatom_count).
    """
    island_count, avg_size = get_island_count_and_average_size(grid)
    highest = np.max(grid)
    lowest = np.min(grid)
    # Total adatom count is simply the sum over all cell heights.
    return (island_count, avg_size, highest, lowest,
            highest - lowest, np.sum(grid))
def print_grid_info(grid):
    """Print a human-readable summary of the grid's island statistics."""
    count, size, max_h, min_h, diff, adatoms = get_grid_info(grid)
    print(f'{count} islands, average island size = {size}')
    print(f'number of adatoms = {adatoms}, min height = {min_h}')
    print(f'max height = {max_h}, max height difference {diff}')
|
#Embedded file name: ACEStream\Core\TS\Service.pyo
import sys
import time
import hashlib
import random
from base64 import b64encode, b64decode
import urllib
import os
import binascii
from urllib2 import HTTPError, URLError
from traceback import print_exc
from xml.dom.minidom import parseString, Document
from xml.dom import expatbuilder
from cStringIO import StringIO
from ACEStream.version import VERSION
from ACEStream.Core.simpledefs import *
from ACEStream.Core.TorrentDef import *
from ACEStream.Core.Utilities.timeouturlopen import urlOpenTimeout
from ACEStream.Core.Utilities.logger import log, log_exc
from ACEStream.Core.TS.domutils import domutils
# Server roles; get_servers()/send_request() use these to pick a host pool.
SERVER_TYPE_PROXY = 1
SERVER_TYPE_SERVICE = 2
SERVER_TYPE_AD = 3
SERVER_TYPE_TRACKER = 4
SERVER_TYPE_PREMIUM_SERVICE = 5
SERVER_TYPE_PREMIUM_STATISTICS = 6
SERVER_TYPE_AUTH = 7
# Module-wide debug logging switch.
DEBUG = False
class BadResponseException(Exception):
    """Raised when a service response is malformed or fails the
    signature/random verification in TSService.check_response."""
    pass
class TSService():
    """
    Client for the TorrentStream/ACEStream web services (Python 2 code).

    Requests are signed with sha1 over the '#'-joined payload plus
    REQUEST_SECRET; responses are XML whose 'sig' attribute is verified
    against RESPONSE_SECRET.  Each request is retried across a shuffled
    pool of servers chosen by server type.
    """
    # Shared secrets for request signing / response verification.
    REQUEST_SECRET = 'q\\\'X!;UL0J_<R*z#GBTL(9mCeRJbm/;L.oi9.`\\"iETli9GD]`t&xlT(]MhJ{NVN,Q.)r~(6+9Bt(G,O%2c/g@sPi]<c[i\\\\ga]fkbHgwH:->ok4w8><y]^:Lw465+W4a(:'
    RESPONSE_SECRET = 'hXD.VAgz=QegM4Hq>P~b7t9LA:eB|}t3z~Rt`FV/-P<va|g,i/M~5/>A-.G70H-p!k|s{wL!Tn\\"=%/L\\\\&@C-Bkz`(w\\\'(KF4fU3(KPKC@.L3.zL4-y%gI8/?RVRx?d+a)'
    # Host pools, one per SERVER_TYPE_* constant (see get_servers()).
    SERVICE_SERVERS = ['http://s1.torrentstream.net',
     'http://s1.torrentstream.org',
     'http://s1.torrentstream.info',
     'http://s2.torrentstream.net',
     'http://s2.torrentstream.org',
     'http://s2.torrentstream.info',
     'http://s3.torrentstream.net',
     'http://s3.torrentstream.org',
     'http://s3.torrentstream.info']
    PREMIUM_SERVICE_SERVERS = ['https://p1.acestream.net',
     'https://p2.acestream.net',
     'https://p1.acestream.org',
     'https://p2.acestream.org']
    PREMIUM_STATISTICS_SERVERS = ['http://ps1.acestream.net',
     'http://ps2.acestream.net',
     'http://ps1.acestream.org',
     'http://ps2.acestream.org']
    AUTH_SERVERS = ['https://auth1.acestream.net',
     'https://auth2.acestream.net',
     'https://auth1.acestream.org',
     'https://auth2.acestream.org']

    def __init__(self, baseapp):
        # Owning application object; only stored, never used in this class.
        self.baseapp = baseapp

    def get_user_level(self, login, password, action, device_id, hardware_key):
        """Query the auth servers for the account level.

        `action` is used as the request parameter name carrying the
        sha1 device key.  Returns an int level, or None on failure.
        """
        if hardware_key is None:
            hardware_key = ''
        device_key = hashlib.sha1(device_id + hardware_key).hexdigest()
        params = {'l': login,
         'p': hashlib.sha1(password).hexdigest(),
         'h': b64encode(hardware_key),
         action: device_key}
        response = self.send_request('getuserlevel', params, use_random=True, server_type=SERVER_TYPE_AUTH)
        if response is None:
            if DEBUG:
                log('tsservice::get_user_level: request failed')
            return
        user_level = domutils.get_tag_value(response, 'level')
        if user_level is None:
            return
        try:
            user_level = int(user_level)
        except:
            if DEBUG:
                log('tsservice::get_user_level: bad user_level:', user_level)
            return
        return user_level

    def check_premium_status(self, provider_key, content_id, infohash):
        """Ask the premium servers for the status of a piece of content.

        Returns the int status, or None on request/parse failure.
        """
        params = {'p': provider_key,
         'c': content_id,
         'i': binascii.hexlify(infohash)}
        response = self.send_request('checkpremiumstatus', params, use_random=True, server_type=SERVER_TYPE_PREMIUM_SERVICE)
        if response is None:
            if DEBUG:
                log('tsservice::check_premium_status: request failed')
            return
        status = domutils.get_tag_value(response, 'status')
        if status is None:
            return
        try:
            status = int(status)
        except:
            if DEBUG:
                log('tsservice::check_premium_status: bad status:', status)
            return
        return status

    def report_premium_download(self, watch_id, provider_key, content_id, user_login):
        """Fire-and-forget statistics ping for a premium download.

        The response is intentionally not parsed (parse_response=False).
        """
        if content_id is None:
            content_id = ''
        params = {'w': watch_id,
         'p': provider_key,
         'c': content_id,
         'u': b64encode(user_login)}
        self.send_request('watch', params, use_random=True, use_timestamp=True, server_type=SERVER_TYPE_PREMIUM_STATISTICS, parse_response=False)

    def get_infohash_from_url(self, url):
        """Resolve a content URL to its torrent infohash (bytes), or None."""
        params = {'url': b64encode(url)}
        response = self.send_request('getu2t', params, use_random=True)
        if response is None:
            if DEBUG:
                log('tsservice::get_infohash_from_url: request failed: url', url)
            return
        infohash = domutils.get_tag_value(response, 'infohash')
        if infohash is None:
            return
        infohash = b64decode(infohash)
        if DEBUG:
            log('tsservice::get_infohash_from_url: got data: infohash', binascii.hexlify(infohash))
        return infohash

    def save_url2infohash(self, url, infohash):
        """Register a url -> infohash mapping on the service servers."""
        params = {'url': b64encode(url),
         'infohash': b64encode(infohash)}
        self.send_request('putu2t', params)

    def get_infohash_from_adid(self, adid):
        """Resolve an advert id to its torrent infohash (bytes), or None."""
        params = {'id': str(adid)}
        response = self.send_request('geta2i', params, use_random=True)
        if response is None:
            if DEBUG:
                log('tsservice::get_infohash_from_adid: request failed: adid', adid)
            return
        infohash = domutils.get_tag_value(response, 'infohash')
        if infohash is None:
            return
        infohash = b64decode(infohash)
        if DEBUG:
            log('tsservice::get_infohash_from_adid: got data: infohash', binascii.hexlify(infohash))
        return infohash

    def send_torrent(self, torrent_data, developer_id = None, affiliate_id = None, zone_id = None, protected = False, infohash = None):
        """Upload raw torrent data; returns the assigned player id or None."""
        params = {}
        if developer_id is not None:
            params['d'] = str(developer_id)
        if affiliate_id is not None:
            params['a'] = str(affiliate_id)
        if zone_id is not None:
            params['z'] = str(zone_id)
        if protected:
            params['protected'] = '1'
        if infohash is not None:
            params['infohash'] = binascii.hexlify(infohash)
        response = self.send_request('puttorrent', params, data=torrent_data, content_type='application/octet-stream', use_random=True)
        if response is None:
            if DEBUG:
                log('tsservice::send_torrent: request failed')
            return
        try:
            player_id = domutils.get_tag_value(response, 'id')
        except Exception as e:
            if DEBUG:
                log('tsservice::send_torrent: failed to parse response: ' + str(e))
            return
        return player_id

    def get_torrent(self, infohash = None, player_id = None):
        """Fetch a torrent by player id (preferred) or infohash.

        Returns a dict with 'tdef' (TorrentDef) and 'checksum', plus
        developer/affiliate/zone ids when looked up by player id;
        None on any failure.
        """
        if infohash is None and player_id is None:
            raise ValueError, 'Infohash or player id must be specified'
        params = {}
        if player_id is not None:
            params['pid'] = player_id
        elif infohash is not None:
            params['infohash'] = b64encode(infohash)
        response = self.send_request('gettorrent', params, use_random=True)
        if response is None:
            return
        torrent_data = domutils.get_tag_value(response, 'torrent')
        if torrent_data is None:
            return
        torrent_checksum = domutils.get_tag_value(response, 'checksum')
        if torrent_checksum is None:
            return
        torrent_data = b64decode(torrent_data)
        buf = StringIO(torrent_data)
        tdef = TorrentDef._read(buf)
        player_data = {'tdef': tdef,
         'checksum': binascii.unhexlify(torrent_checksum)}
        if player_id is not None:
            # Extra attribution ids only exist for player-id lookups; a
            # parse failure here is non-fatal by design.
            try:
                developer_id = int(domutils.get_tag_value(response, 'developer_id'))
                affiliate_id = int(domutils.get_tag_value(response, 'affiliate_id'))
                zone_id = int(domutils.get_tag_value(response, 'zone_id'))
                player_data['developer_id'] = developer_id
                player_data['affiliate_id'] = affiliate_id
                player_data['zone_id'] = zone_id
            except:
                if DEBUG:
                    print_exc()
        return player_data

    def check_torrent(self, torrent_checksum = None, infohash = None, player_id = None, developer_id = 0, affiliate_id = 0, zone_id = 0):
        """Validate a torrent with the service.

        Returns (player_id, metadata, http_seeds); metadata is a dict of
        per-file 'duration'/'prebuf_pieces'/'rpmp4mt' entries keyed
        'f<index>', http_seeds a list of URLs -- either may be None.
        """
        if infohash is None and player_id is None:
            raise ValueError, 'Infohash or player id must be specified'
        params = {}
        if player_id is not None:
            params['pid'] = player_id
        elif infohash is not None:
            if torrent_checksum is not None:
                params['checksum'] = binascii.hexlify(torrent_checksum)
            params['infohash'] = b64encode(infohash)
            params['d'] = str(developer_id)
            params['a'] = str(affiliate_id)
            params['z'] = str(zone_id)
        response = self.send_request('checktorrent', params, use_random=True)
        if response is None:
            return
        player_id = domutils.get_tag_value(response, 'id')
        metadata = None
        http_seeds = None
        if player_id is not None:
            root = response.documentElement
            # Optional <httpseeds><url>...</url>...</httpseeds> section.
            e_http_seeds = domutils.get_single_element(root, 'httpseeds', False)
            if e_http_seeds is not None:
                http_seeds = []
                e_urls = domutils.get_children_by_tag_name(e_http_seeds, 'url')
                for e_url in e_urls:
                    url = domutils.get_node_text(e_url)
                    http_seeds.append(url)
            # Optional <metadata> with per-file <file id="..."> children.
            e_metadata = domutils.get_single_element(root, 'metadata', False)
            if e_metadata is not None:
                metadata = {}
                e_duration = domutils.get_single_element(e_metadata, 'duration', False)
                e_prebuf_pieces = domutils.get_single_element(e_metadata, 'prebuf_pieces', False)
                e_rpmp4mt = domutils.get_single_element(e_metadata, 'rpmp4mt', False)
                if e_duration is not None:
                    metadata['duration'] = {}
                    files = domutils.get_children_by_tag_name(e_duration, 'file')
                    for f in files:
                        idx = f.getAttribute('id')
                        try:
                            idx = int(idx)
                        except:
                            continue
                        value = domutils.get_node_text(f)
                        metadata['duration']['f' + str(idx)] = value
                if e_prebuf_pieces is not None:
                    metadata['prebuf_pieces'] = {}
                    files = domutils.get_children_by_tag_name(e_prebuf_pieces, 'file')
                    for f in files:
                        idx = f.getAttribute('id')
                        try:
                            idx = int(idx)
                        except:
                            continue
                        value = domutils.get_node_text(f)
                        metadata['prebuf_pieces']['f' + str(idx)] = value
                if e_rpmp4mt is not None:
                    metadata['rpmp4mt'] = {}
                    files = domutils.get_children_by_tag_name(e_rpmp4mt, 'file')
                    for f in files:
                        idx = f.getAttribute('id')
                        try:
                            idx = int(idx)
                        except:
                            continue
                        value = domutils.get_node_text(f)
                        metadata['rpmp4mt']['f' + str(idx)] = value
            if DEBUG:
                log('tsservice::check_torrent: got metadata: metadata', metadata)
        return (player_id, metadata, http_seeds)

    def send_metadata(self, infohash, metadata):
        """Upload per-file metadata for an infohash as an XML document.

        Accepts the same 'duration'/'prebuf_pieces'/'rpmp4mt' dicts with
        'f<index>' keys produced by check_torrent.
        """
        params = {'infohash': b64encode(infohash)}
        doc = Document()
        e_metadata = doc.createElement('metadata')
        doc.appendChild(e_metadata)
        if metadata.has_key('duration'):
            e_duration = doc.createElement('duration')
            for idx, duration in metadata['duration'].iteritems():
                # Keys are 'f<index>'; strip the prefix for the XML attribute.
                idx = idx.replace('f', '')
                e_file = doc.createElement('file')
                e_file.setAttribute('id', idx)
                e_file.appendChild(doc.createTextNode(str(duration)))
                e_duration.appendChild(e_file)
            e_metadata.appendChild(e_duration)
        if metadata.has_key('prebuf_pieces'):
            e_prebuf_pieces = doc.createElement('prebuf_pieces')
            for idx, prebuf_pieces in metadata['prebuf_pieces'].iteritems():
                idx = idx.replace('f', '')
                e_file = doc.createElement('file')
                e_file.setAttribute('id', idx)
                e_file.appendChild(doc.createTextNode(prebuf_pieces))
                e_prebuf_pieces.appendChild(e_file)
            e_metadata.appendChild(e_prebuf_pieces)
        if metadata.has_key('rpmp4mt'):
            e_rpmp4mt = doc.createElement('rpmp4mt')
            for idx, rpmp4mt in metadata['rpmp4mt'].iteritems():
                idx = idx.replace('f', '')
                e_file = doc.createElement('file')
                e_file.setAttribute('id', idx)
                e_file.appendChild(doc.createTextNode(rpmp4mt))
                e_rpmp4mt.appendChild(e_file)
            e_metadata.appendChild(e_rpmp4mt)
        xmldata = doc.toxml()
        if DEBUG:
            log('tsservice::send_metadata: infohash', binascii.hexlify(infohash), 'xmldata', xmldata)
        self.send_request('putmeta', params, data=xmldata, content_type='text/xml', timeout=10)

    def send_request(self, method, params = {}, data = None, content_type = None, use_random = False, use_timestamp = False, timeout = 5, server_type = SERVER_TYPE_SERVICE, parse_response = True):
        """Sign and send a request, retrying across the shuffled server pool.

        Reserved params: 'r' (random), 't' (timestamp), 'v' (version).
        Returns the parsed/verified XML Document (or the raw body when
        parse_response is False), or None when every server fails.

        NOTE(review): `params = {}` is a mutable default that this method
        mutates (params['v'] = VERSION); a second no-params call would then
        trip the reserved-parameter 'v' check.  All current callers pass a
        fresh dict, but this is a latent bug.
        """
        if data is not None and content_type is None:
            raise ValueError, 'Data passed without content type'
        if params.has_key('r'):
            raise ValueError, "Cannot use reserved parameter 'r'"
        if params.has_key('t'):
            raise ValueError, "Cannot use reserved parameter 't'"
        if params.has_key('v'):
            raise ValueError, "Cannot use reserved parameter 'v'"
        params['v'] = VERSION
        if use_random:
            # Echoed back by the server; check_response verifies it matches.
            request_random = random.randint(1, sys.maxint)
            params['r'] = str(request_random)
        else:
            request_random = None
        if use_timestamp:
            params['t'] = str(long(time.time()))
        get_params = []
        payload = []
        if len(params):
            # Sorted keys so the signature is deterministic.
            for k in sorted(params.keys()):
                v = params[k]
                get_params.append(k + '=' + urllib.quote_plus(v))
                payload.append(k + '=' + v)
            if DEBUG:
                log('tsservice::send_request: got params: get_params', get_params, 'payload', payload)
        if data is not None:
            payload.append(data)
            if DEBUG:
                log('tsservice::send_request: got data')
        if len(payload):
            # Signature: sha1 over '#'-joined payload plus the shared secret.
            payload = '#'.join(payload)
            payload += self.REQUEST_SECRET
            signature = hashlib.sha1(payload).hexdigest()
            get_params.append('s=' + signature)
            if DEBUG:
                log('tsservice::send_request: sign data: signature', signature)
        query = '/' + method
        if len(get_params):
            query += '?' + '&'.join(get_params)
        if DEBUG:
            log('tsservice::send_request: query', query)
        # Try the pool in random order; first successful response wins.
        servers = self.get_servers(server_type)
        random.shuffle(servers)
        response = None
        for serv in servers:
            try:
                url = serv + query
                if DEBUG:
                    log('tsservice::send_request: url', url)
                stream = urlOpenTimeout(url, timeout, content_type, None, data)
                response = stream.read()
                stream.close()
                if DEBUG:
                    log('tsservice::send_request: got response: url', url, 'response', response)
                if parse_response:
                    response = self.check_response(response, request_random)
                break
            except BadResponseException as e:
                response = None
                if DEBUG:
                    log('tsservice::send_request: bad response: ' + str(e))
            except (URLError, HTTPError) as e:
                response = None
                if DEBUG:
                    log('tsservice::send_request: http error: ' + str(e))
            except:
                response = None
                if DEBUG:
                    print_exc()
        return response

    def get_servers(self, server_type):
        """Return the host pool for a SERVER_TYPE_* constant."""
        if server_type == SERVER_TYPE_SERVICE:
            return self.SERVICE_SERVERS
        if server_type == SERVER_TYPE_PREMIUM_SERVICE:
            return self.PREMIUM_SERVICE_SERVERS
        if server_type == SERVER_TYPE_PREMIUM_STATISTICS:
            return self.PREMIUM_STATISTICS_SERVERS
        if server_type == SERVER_TYPE_AUTH:
            return self.AUTH_SERVERS
        raise ValueError, 'Unknown server type ' + str(server_type)

    def check_response(self, response, request_random = None):
        """Parse a response body and verify its random echo and signature.

        Returns the XML Document; raises BadResponseException on any check
        failure.
        """
        if len(response) == 0:
            raise BadResponseException, 'Empty response'
        doc = parseString(response)
        root = doc.documentElement
        if root.tagName != 'response':
            raise BadResponseException, 'Bad response tagname: ' + doc.tagName
        if not root.hasAttribute('sig'):
            raise BadResponseException, 'Missing signature'
        if request_random is not None:
            if not root.hasAttribute('r'):
                raise BadResponseException, 'Missing random'
            try:
                response_random = int(root.getAttribute('r'))
            except ValueError:
                raise BadResponseException, 'Cannot parse random'
            if response_random != request_random:
                if DEBUG:
                    log('tsservice::check_response: bad random: response_random', response_random, 'request_random', request_random)
                raise BadResponseException, 'Bad random'
        response_sig = root.getAttribute('sig')
        # The signature covers the body with its own sig attribute removed.
        payload = response.replace(' sig="' + response_sig + '"', '', 1)
        if DEBUG:
            log('tsservice::check_response: response', response)
            log('tsservice::check_response: response_sig', response_sig)
            log('tsservice::check_response: payload', payload)
        check_sig = hashlib.sha1(payload + self.RESPONSE_SECRET).hexdigest()
        if check_sig != response_sig:
            if DEBUG:
                log('tsservice::check_response: bad sig: response_sig', response_sig, 'check_sig', check_sig)
            raise BadResponseException, 'Bad signature'
        return doc
if __name__ == '__main__':
    # Smoke test: resolve the infohash of a dummy URL (Python 2 print).
    service = TSService(None)
    url = 'http://test.com'
    infohash = service.get_infohash_from_url(url)
    print infohash
|
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
"""Executes only the train part of a biometric pipeline"""
import logging
import click
from clapper.click import ConfigCommand, ResourceOption, verbosity_option
from bob.pipelines.distributed import VALID_DASK_CLIENT_STRINGS
logger = logging.getLogger(__name__)

# Help epilog appended to this command's --help output by click.
EPILOG = """\b
Command line examples\n
-----------------------
$ bob bio pipeline train -vv DATABASE PIPELINE
See the help of the CONFIG argument on top of this help message
for a list of available configurations.
It is possible to provide database and pipeline through a configuration file.
Generate an example configuration file with:
$ bob bio pipeline train --dump-config my_experiment.py
and execute it with:
$ bob bio pipeline train -vv my_experiment.py
my_experiment.py must contain the following elements:
>>> pipeline = ... # A scikit-learn pipeline wrapped with bob.pipelines' SampleWrapper\n
>>> database = .... # Biometric Database (class that implements the methods: `background_model_samples`, `references` and `probes`)"
\b"""
@click.command(
    name="train",
    entry_point_group="bob.bio.config",
    cls=ConfigCommand,
    epilog=EPILOG,
)
@click.option(
    "--pipeline",
    "-p",
    required=True,
    entry_point_group="bob.bio.pipeline",
    help="A PipelineSimple or an sklearn.pipeline",
    cls=ResourceOption,
)
@click.option(
    "--database",
    "-d",
    entry_point_group="bob.bio.database",
    required=True,
    help="Biometric Database connector (class that implements the methods: `background_model_samples`, `references` and `probes`)",
    cls=ResourceOption,
)
@click.option(
    "--dask-client",
    "-l",
    entry_point_group="dask.client",
    string_exceptions=VALID_DASK_CLIENT_STRINGS,
    default="single-threaded",
    help="Dask client for the execution of the pipeline.",
    cls=ResourceOption,
)
@click.option(
    "--output",
    "-o",
    show_default=True,
    default="results",
    help="Name of output directory where output files will be saved.",
    cls=ResourceOption,
)
@click.option(
    "--memory",
    "-m",
    is_flag=True,
    help="If set, it will run the experiment keeping all objects on memory with nothing checkpointed. If not set, checkpoints will be saved in `--output`.",
    cls=ResourceOption,
)
@click.option(
    "--checkpoint-dir",
    "-c",
    show_default=True,
    default=None,
    help="Name of output directory where the checkpoints will be saved. In case --memory is not set, checkpoints will be saved in this directory.",
    cls=ResourceOption,
)
@click.option(
    "--dask-partition-size",
    "-s",
    help="If using Dask, this option defines the max size of each dask.bag.partition. "
    "Use this option if the current heuristic that sets this value doesn't suit your experiment. "
    "(https://docs.dask.org/en/latest/bag-api.html?highlight=partition_size#dask.bag.from_sequence).",
    default=None,
    type=click.INT,
    cls=ResourceOption,
)
@click.option(
    "--dask-n-partitions",
    "-n",
    help="If using Dask, this option defines a fixed number of dask.bag.partition for "
    "each set of data. Use this option if the current heuristic that sets this value "
    "doesn't suit your experiment."
    "(https://docs.dask.org/en/latest/bag-api.html?highlight=partition_size#dask.bag.from_sequence).",
    default=None,
    type=click.INT,
    cls=ResourceOption,
)
@click.option(
    "--dask-n-workers",
    "-w",
    help="If using Dask, this option defines the number of workers to start your experiment. "
    "Dask automatically scales up/down the number of workers due to the current load of tasks to be solved. "
    "Use this option if the current amount of workers set to start an experiment doesn't suit you.",
    default=None,
    type=click.INT,
    cls=ResourceOption,
)
@click.option(
    "--force",
    "-f",
    is_flag=True,
    help="If set, it will force generate all the checkpoints of an experiment. This option doesn't work if `--memory` is set",
    cls=ResourceOption,
)
@click.option(
    "--no-dask",
    is_flag=True,
    help="If set, it will not use Dask to run the experiment.",
    cls=ResourceOption,
)
@click.option(
    "--split-training",
    is_flag=True,
    help="Splits the training set in partitions and trains the pipeline in multiple steps.",
    cls=ResourceOption,
)
@click.option(
    "--n-splits",
    default=3,
    help="Number of partitions to split the training set in. "
    "Each partition will be trained in a separate step.",
    cls=ResourceOption,
)
@verbosity_option(cls=ResourceOption, logger=logger)
def pipeline_train(
    pipeline,
    database,
    dask_client,
    output,
    memory,
    checkpoint_dir,
    dask_partition_size,
    dask_n_workers,
    dask_n_partitions,
    force,
    no_dask,
    split_training,
    n_splits,
    **kwargs,
):
    """Runs the training part of a biometrics pipeline.
    This pipeline consists only of one component, contrary to the ``simple`` pipeline.
    This component is a scikit-learn ``Pipeline``, where a sequence of transformations
    of the input data is defined.
    The pipeline is trained on the database and the resulting model is saved in the
    output directory.
    It is possible to split the training data in multiple partitions that will be
    used to train the pipeline in multiple steps, helping with big datasets that would
    not fit in memory if trained all at once. Passing the ``--split-training`` option
    will split the training data in ``--n-splits`` partitions and train the pipeline
    sequentially with each partition. The pipeline must support "continuous learning",
    (a call to ``fit`` on an already trained pipeline should continue the training).
    """
    # Imported lazily so `--help` stays fast.
    from bob.bio.base.pipelines import execute_pipeline_train

    # --no-dask: run everything in-process without a scheduler.
    if no_dask:
        dask_client = None
    # Checkpointing is on unless --memory was requested.
    checkpoint = not memory
    logger.debug("Executing pipeline training with:")
    logger.debug(f"pipeline: {pipeline}")
    logger.debug(f"database: {database}")
    execute_pipeline_train(
        pipeline=pipeline,
        database=database,
        dask_client=dask_client,
        output=output,
        checkpoint=checkpoint,
        dask_partition_size=dask_partition_size,
        dask_n_partitions=dask_n_partitions,
        dask_n_workers=dask_n_workers,
        checkpoint_dir=checkpoint_dir,
        force=force,
        split_training=split_training,
        n_splits=n_splits,
        **kwargs,
    )
    logger.info(f"Experiment finished ! ({output=})")
|
import numpy as np
import matplotlib.pyplot as plt
import datetime
import cal_obs
import jdutil
dm_folder = '/data1/Daniele/B2217+47/Analysis/DM/'
def DM_evolution():
    #Plot DM evolution with archival data
    # Each saved array holds rows [date, DM, DM error]; 'inter' carries an
    # extra 4th row with a telescope label -- assumed from the indexing
    # below, TODO confirm against the files' writers.
    core = np.load(dm_folder+'CORE_DM.npy')
    inter = np.load(dm_folder+'dm_INTER.npy')
    GMRT = np.load(dm_folder+'dm_GMRT.npy')
    # Literature values, stored inline in the same [date, DM, err] layout.
    LWA = np.array([ [datetime.date(2014,7,4),], [43.4975,], [0.0005,] ])
    ATNF = np.array([ [datetime.date(1986,6,18),], [43.519,], [0.012,] ])
    JB = np.array([ [datetime.date(2007,9,29),datetime.date(2004,1,26),datetime.date(1999,10,4),datetime.date(1998,12,18),datetime.date(1998,3,22),datetime.date(1997,12,29),datetime.date(1997,10,15),datetime.date(1997,7,5),datetime.date(1996,9,16),datetime.date(1995,4,30),datetime.date(1993,12,29),datetime.date(1992,10,29),datetime.date(1991,12,10),datetime.date(1990,11,23),datetime.date(1989,8,5),datetime.date(1988,8,14),datetime.date(1984,9,3)], [43.4862,43.5052,43.5038,43.4838,43.506,43.4963,43.503,43.5138,43.5130,43.5157,43.5185,43.5287,43.5139,43.5183,43.520,43.5112,43.5277], [0.0098,0.0075,0.0028,0.0041,0.015,0.0073,0.017,0.0028,0.0034,0.0028,0.0017,0.0050,0.0022,0.0052,0.015,0.0014,0.0035] ])
    plt.errorbar(GMRT[0], GMRT[1], yerr=GMRT[2], fmt='go', label='GMRT')
    plt.errorbar(inter[0], inter[1], yerr=inter[2], fmt='co', label='LOFAR international')
    plt.errorbar(core[0],core[1],yerr=core[2],fmt='ko',label='LOFAR core')
    plt.errorbar(LWA[0], LWA[1], yerr=LWA[2], fmt='ro', label='LWA1 (ATNF)')
    plt.errorbar(ATNF[0], ATNF[1], yerr=ATNF[2], fmt='bo', label='JB (ATNF)')
    plt.errorbar(JB[0], JB[1], yerr=JB[2], fmt='mo', label='JB new')
    # Merge all data sets and sort by epoch so the dashed connector is
    # drawn in time order.
    all_dm = np.hstack((core,GMRT,ATNF,JB,inter))
    idx = np.argsort(all_dm[0])
    all_dm = all_dm[:,idx]
    plt.plot(all_dm[0], all_dm[1], 'k--')
    #Hobbs04 relation
    # Expected DM drift of ~0.0002*sqrt(<DM>) per year, anchored at the
    # most recent measurement.
    ordinals = np.array( [n.toordinal() for n in all_dm[0]] )
    y = all_dm[1,-1] - 0.0002*np.sqrt(np.mean(all_dm[1]))*(ordinals-ordinals[-1])/365.
    plt.plot(ordinals,y,'r')
    plt.legend()
    plt.show()
    #Plot LOFAR data
    plt.errorbar(core[0],core[1],yerr=core[2],fmt='ko',label='Core')
    # One errorbar series per international-station label (row 3 of inter).
    for label in np.unique(inter[3]):
        idx = np.where(inter[3]==label)[0]
        plt.errorbar(inter[0,idx], inter[1,idx], yerr=inter[2,idx], fmt='o', label=label)
    LOFAR_dm = np.hstack((core,inter[:3]))
    idx = np.argsort(LOFAR_dm[0])
    LOFAR_dm = LOFAR_dm[:,idx]
    plt.plot(LOFAR_dm[0], LOFAR_dm[1], 'k--')
    plt.legend()
    plt.show()
def fluxes():
    """Plot LOFAR DM (top), LOFAR mean flux (middle) and Jodrell Bank mean
    flux (bottom) on a shared date axis."""
    #Plot DM vs Fluxes
    JB_flux = np.load(dm_folder+'JB_FLUX.npy')
    inter = np.load(dm_folder+'dm_INTER.npy')
    core = np.load(dm_folder+'CORE_DM.npy')
    # Per-observation polarisation products from cal_obs; only total
    # intensity I is used here.
    date,I,L,V,PA = cal_obs.pol_analysis()
    flux = np.mean(I,axis=1)
    # Flux error is taken as 50% of the mean flux -- presumably a rough
    # calibration uncertainty; TODO confirm.
    LOFAR_flux = np.vstack((date,flux,flux/2.))
    f, axarr = plt.subplots(3, sharex=True)
    #axarr[0].errorbar(core[0],core[1],yerr=core[2],fmt='ko--')
    axarr[0].errorbar(core[0],core[1],yerr=core[2],fmt='ko',label='Core')
    # One errorbar series per international-station label (row 3 of inter).
    for label in np.unique(inter[3]):
        idx = np.where(inter[3]==label)[0]
        axarr[0].errorbar(inter[0,idx], inter[1,idx], yerr=inter[2,idx], fmt='o', label=label)
    LOFAR_dm = np.hstack((core,inter[:3]))
    idx = np.argsort(LOFAR_dm[0])
    LOFAR_dm = LOFAR_dm[:,idx]
    axarr[0].plot(LOFAR_dm[0], LOFAR_dm[1], 'k--')
    axarr[0].set_ylabel("LOFAR DM (pc/cm3)")
    axarr[0].legend()
    axarr[1].errorbar(LOFAR_flux[0],LOFAR_flux[1],yerr=LOFAR_flux[2],fmt='ko--')
    axarr[1].set_ylabel("LOFAR MEAN FLUX (mJ)")
    axarr[2].errorbar(JB_flux[0],JB_flux[1],yerr=JB_flux[2],fmt='ko--')
    axarr[2].set_ylabel("JODRELL BANK MEAN FLUX (mJ)")
    axarr[2].set_xlabel("Date")
    plt.show()
def DM_crab(ref_dm_max=None):
    """Plot the Crab pulsar DM time series.

    Bug fix: the original body referenced ``all_dm``, which is a local
    variable of ``DM_evolution`` and is undefined here, so every call
    raised NameError.  The reference DM level is now an optional
    parameter (backward compatible: ``DM_crab()`` now works instead of
    crashing).

    Parameters
    ----------
    ref_dm_max : float, optional
        Maximum DM of the reference data set.  When given, the Crab
        curve is shifted so its minimum lines up with this value
        (the originally intended offset); when omitted the curve is
        plotted unshifted.
    """
    crab = np.load(dm_folder+'DM_crab.npy')
    # Row 0 holds MJDs; convert to datetime objects for the date axis.
    x = np.array([jdutil.mjd2date(n) for n in crab[0]])
    y = crab[1]
    if ref_dm_max is None:
        dm_var = 0.
    else:
        dm_var = y.min() - ref_dm_max
    plt.plot(x,y-dm_var,'ko--', label = 'Crab')
    plt.legend()
    plt.show()
|
import cv2
import numpy as np
# Load the image, grayscale it and binarize (pixels > 100 become white).
im = cv2.imread(r'D:\Users\yl_gong\Desktop\abc.jpg')
im = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
thresh, im = cv2.threshold(im, 100, 255, cv2.THRESH_BINARY)
# Bug fix / compatibility: cv2.findContours returns
# (image, contours, hierarchy) in OpenCV 3.x but only
# (contours, hierarchy) in OpenCV 2.x and 4.x; the original 3-tuple
# unpack raises ValueError on those versions.  Taking the last two
# items works everywhere.
contours, hierarchy = cv2.findContours(im, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)[-2:]
# Remove small specks: fill any contour with area < 40 px with white.
for contour in contours:
    if cv2.contourArea(contour) < 40:
        cv2.fillPoly(im, [contour], (255, 255, 255))
# cv2.imshow('image',im)
# cv2.waitKey()
cv2.imwrite('01.jpg', im)
from saltjob.salt_http_api import SaltAPI
from devops.settings import SALT_REST_URL
from saltjob.get_api_token import get_token
def transfer_script(tgt, script_dir, script_name):
    """Copy a script from the salt fileserver onto the target minions.

    Runs ``cp.get_file`` so that ``salt://scripts/<script_name>`` ends up
    at ``<script_dir>/<script_name>`` on every minion matched by ``tgt``.
    Returns the first element of the salt-api "return" payload.
    """
    payload = {
        "tgt": tgt,
        "fun": "cp.get_file",
        "arg": [
            "salt://scripts/{}".format(script_name),
            "{}/{}".format(script_dir, script_name),
        ],
        "expr_form": "list",
    }
    client = SaltAPI(payload, SALT_REST_URL, get_token())
    return client.cmdrun()["return"][0]
def execute_script(tgt, script_name, args=None):
    """Run a fileserver script on the target minions via ``cmd.script``.

    Args:
        tgt: Salt target expression.
        script_name: Script filename under ``salt://scripts/``.
        args: Optional list of command-line arguments for the script.

    Returns:
        The raw salt-api response from ``cmdrun``.
    """
    # Bug fix: the original used a mutable default argument (args=[]),
    # which is shared between calls; use the None sentinel instead.
    if args is None:
        args = []
    data = {
        "tgt": tgt,
        "fun": "cmd.script",
        "arg": [
            "salt://scripts/{}".format(script_name),
            " ".join(args)
        ]
    }
    token = get_token()
    salt_api_object = SaltAPI(data, SALT_REST_URL, token)
    res = salt_api_object.cmdrun()
    return res
def get_minions():
    """Return the list of accepted minion ids known to the salt master."""
    request = {"tgt": "*", "fun": "key.list_all"}
    client = SaltAPI(request, SALT_REST_URL, get_token())
    response = client.wheelrun()
    return response["return"][0]["data"]["return"]["minions"]
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
from circle_finder import circle_finder
CMB_DIST = 14000   # distance to the CMB surface -- units per circle_finder; TODO confirm
CELL_SIZE = 320
# Angular radii sampled in 1-degree steps from 1 degree up to 90 degrees.
ang_rad = np.arange((1/360)*2*np.pi, np.pi/2, (2*np.pi)/(360))
data = np.genfromtxt('/opt/local/l4astro/rbbg94/data/ngp_corr.csv', dtype = complex, delimiter = ',')
# Restrict to the 40-65 degree window where the candidate peaks lie.
peaks_ind= np.argwhere(np.logical_and(ang_rad>=((40/360)*2*np.pi), ang_rad<=((65/360)*2*np.pi)))
peaks_rad_pred = circle_finder(CELL_SIZE, peaks_ind, 0)
"""ang_rad = np.arange((1/360)*2*np.pi, np.pi/2, (2*np.pi)/(360*3))
peaks_ind= np.argwhere(np.logical_and(ang_rad>=((40/360)*2*np.pi), ang_rad<=((65/360)*2*np.pi)))"""
peaks_rad = ang_rad[peaks_ind]
peaks_data = data[peaks_ind]
#std_dev = np.genfromtxt('/opt/local/l4astro/rbbg94/data/ngp_lag_err.csv', delimiter = ',')
#peaks_err = std_dev[peaks_ind]
peaks_err = 0
# Convert radians to degrees for plotting.
peaks_rad = peaks_rad*(360/(2*np.pi))
peaks_rad_pred = peaks_rad_pred*(360/(2*np.pi))
fig, ax = plt.subplots(figsize=(18, 10))
ax.errorbar(peaks_rad, peaks_data, yerr = peaks_err, ecolor = 'red')
ax.set_xlabel(r'$\alpha/^\circ$')
ax.set_ylabel('$S$')
# Raw string so the '\c' in the TeX markup is not treated as an escape.
ax.annotate(r'Phase=0$^\circ$', xy = (0.02,0.95), xycoords = 'axes fraction', bbox=dict(facecolor='none', edgecolor='black'))
ax.axhline(0, color = 'black')
#plt.xticks(np.arange(0, 91, 10))
plt.xlim(40,65)
#for i in range(len(peaks_rad_pred)):
#	ax.axvline(peaks_rad_pred[i], color = 'red', ls = '--')
plt.tight_layout()
# Bug fix: Figure.savefig has no 'overwrite' keyword ('overwrite' belongs
# to astropy's writeto); passing it raises TypeError on current
# matplotlib.  savefig overwrites existing files by default anyway.
fig.savefig('/opt/local/l4astro/rbbg94/figures/corr_0_ngp_no_err_peaks.png')
plt.show()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Creates the PlayerTypeDetails table (per-player skill metadata)."""
    dependencies = [
        ('players', '0004_auto_20160602_1959'),
    ]
    operations = [
        migrations.CreateModel(
            name='PlayerTypeDetails',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # Plain integer rather than a ForeignKey -- presumably a
                # soft reference to a player row; confirm before adding
                # constraints.
                ('player_id', models.IntegerField(null=True)),
                ('region', models.CharField(max_length=255)),
                ('skill_index', models.IntegerField()),
                ('main_skill_type', models.CharField(max_length=255)),
                ('sub_skill_type', models.CharField(max_length=255)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
def _add_tpu_models_to_path():
    """Prepend the sibling ``tpu/models`` checkout to ``sys.path``.

    Makes the TPU reference models importable from this repository.
    Idempotent: the path is inserted only if not already present.
    """
    # Renamed from 'dir', which shadowed the builtin of the same name.
    this_dir = os.path.dirname(os.path.realpath(__file__))
    tpu_models_dir = os.path.abspath(os.path.join(this_dir, '..', 'tpu', 'models'))
    if tpu_models_dir not in sys.path:
        sys.path.insert(0, tpu_models_dir)
_add_tpu_models_to_path()
|
from PIL import Image, ImageEnhance, ImageFilter
import cv2
import numpy as np
import pytesseract
# Read the input directly as grayscale.
gray= cv2.imread('noisyNumbers.png',cv2.IMREAD_GRAYSCALE)
# Binarize for OCR.  With THRESH_OTSU the threshold value (150) is
# ignored and computed automatically; the third argument is the output
# level for "white" pixels.  Bug fix: the original passed 150 as maxval,
# producing a grey rather than white background -- a proper binary image
# for tesseract needs 255.
gray = cv2.threshold(gray, 150, 255,
        cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]
cv2.imshow('Final', gray)
cv2.imwrite('NosiyNoThreshold.jpg', gray)
#applying canny edge detection
'''
JobEdges= cv2.Canny(im,100,40)
cv2.imshow("Edge Detection", JobEdges)
'''
#save image in jpg format
#cv2.imwrite('cardCanny.jpg', JobEdges)
# Run tesseract on the cleaned-up image and print the recognised text.
print(pytesseract.image_to_string(Image.open('NosiyNoThreshold.jpg')))
#print(pytesseract.image_to_string(Image.open('cardCanny.jpg')))
|
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#get_ipython().run_line_magic('matplotlib', 'inline')
#import mpld3
#mpld3.enable_notebook()
# In[ ]:
data=pd.read_csv("dataset_1.csv") #reading data
# data.head()
# In[3]:
# x=data['X']
# y=data['Y']
# plt.scatter(x,y,c=data["target"])
# In[4]:
data_groupby=data.groupby("target") ##grouping data according to the target value
data_groupby.first()
# print(data_groupby)
# In[5]:
data0=data_groupby.get_group(0) # storing two classes in two different dataframes
data1=data_groupby.get_group(1)
# In[6]:
# data0.head()
# In[7]:
plt.scatter(data0['X'],data0["Y"],color="r") ##plotting scatter graph using matplotlib
plt.scatter(data1['X'],data1["Y"],color="b")
# In[8]:
# Column-wise means.  Note mean0/mean1 still include the 'target'
# column; only the first two entries (X, Y) survive the resize below.
mean0=np.mean(data0) ##finding means of both classes
mean1=np.mean(data1)
# print(mean0.shape)
overallmean=np.mean(data.drop(['target'],axis=1)) ##finding overall mean
# In[9]:
# mean0=mean0.as_matrix()
# mean1=mean1.as_matrix()
# overallmean=overallmean.as_matrix()
#print(mean0)
# In[10]:
# Reshape each mean into a 2x1 column vector (keeps only X and Y).
mean0=np.resize(mean0,(2,1)) ##converting and storing mean as matrix
mean1=np.resize(mean1,(2,1))
overallmean=np.resize(overallmean,(2,1))
#print(mean0)
# In[11]:
# print(mean0.shape)
# print(mean1.shape)
# print(overallmean.shape)
# In[12]:
n0=data0["X"].count() #counting number of rows in both dataset
n1=data1["X"].count()
# print(n1)
# print(n0)
# In[13]:
# Between-class scatter contributions n_k*(m_k - m)(m_k - m)^T.
# NOTE(review): the inline comment says "Sw" but this is the
# between-class scatter (summed into sb below) -- comments look swapped.
s0=np.matmul((mean0-overallmean)*n0,(mean0-overallmean).T) ## finding Sw
# print(s0)
s1=np.matmul((mean1-overallmean)*n1,(mean1-overallmean).T)
# print(s1)
# In[14]:
sb=s0+s1
sb=np.array(sb) ##finding Sb
# print(sb.shape)
# print(sb)
# In[15]:
# Within-class scatter of class 0: sum over samples of (x - m0)(x - m0)^T.
sw0=np.zeros(sb.shape)
for i in data0.itertuples():
    mat=np.zeros((2,1))
    mat[0][0]=i.X
    mat[1][0]=i.Y
    sw0=np.add(np.matmul((mat-mean0),(mat-mean0).T),sw0)
# In[16]:
# print(sw0)
# In[17]:
# Same for class 1.
sw1=np.zeros(sb.shape)
for i in data1.itertuples():
    mat=np.zeros((2,1))
    mat[0][0]=i.X
    mat[1][0]=i.Y
    sw1=np.add(np.matmul((mat-mean1),(mat-mean1).T),sw1)
# In[18]:
# print(sw1)
# In[19]:
sw=sw0+sw1
# print(sw)
# In[20]:
swinv=np.linalg.inv(sw) ##finding inverse of Sw
# print(swinv)
# In[21]:
# Fisher discriminant direction w = Sw^-1 (m1 - m0).
direction=np.matmul(swinv,(mean1-mean0)) ## finding direction of linear separable line
# print(direction)
# print(direction.shape)
# In[22]:
proj_zero=[]
# direc=direction[0][0]**2+direction[1][0]**2
# print(direc)
# In[23]:
# Scalar projection w.x of every class-0 sample onto the direction.
for i in data0.itertuples():
    mat=np.zeros((2,1))
    mat[0][0]=i.X
    mat[1][0]=i.Y
    val=mat[0][0]*direction[0][0]+mat[1][0]*direction[1][0]
#     val=val/direc
    proj_zero.append(val)
# c=np.dot(direction,direction)
# In[24]:
# print(proj_zero)
# In[25]:
proj_one=[] ##finding projection of class zero element on line
for i in data1.itertuples():
    mat=np.zeros((2,1))
    mat[0][0]=i.X
    mat[1][0]=i.Y
    val=mat[0][0]*direction[0][0]+mat[1][0]*direction[1][0]
#     val=val/direc
    proj_one.append(val)
# c=np.dot(direction,direction)
# In[26]:
# print(proj_one)
# In[27]:
# x=data['X']
# y=data['Y']
# plt.scatter(x,y,c=data["target"])
# Plot the 1-D projections of both classes along y=0.
for i in proj_zero:
    plt.scatter(i,0,color="r")
for i in proj_one:
    plt.scatter(i,0,color="b")
# In[28]:
# Projection of each class mean, and their midpoint (decision threshold).
val=mean0[0][0]*direction[0][0]+mean0[1][0]*direction[1][0]
mean0_proj=val
# print(mean0_proj)
# In[29]:
val=mean1[0][0]*direction[0][0]+mean1[1][0]*direction[1][0]
mean1_proj=val
mean=(mean1_proj+mean0_proj)/2
# print(mean)
# In[30]:
# for i in proj_zero:
#     plt.scatter(i,0,color="r")
# for i in proj_one:
#     plt.scatter(i,0,color="b")
# plt.scatter(mean,0,color="black",marker='*')
# print(mean)
# In[31]:
# Variance of the projected samples, used as the Gaussian widths below.
proj_one=np.array(proj_one);
proj_zero=np.array(proj_zero);
# print(proj_zero)
var0=np.var(proj_zero)
# print(var0)
# In[32]:
var1=np.var(proj_one)
# print(var1)
# In[33]:
meannew0=np.mean(proj_zero)
meannew1=np.mean(proj_one)
# print(meannew0)
# In[34]:
proj_one.sort()
# In[35]:
from scipy.stats import norm
# In[36]:
# proj_one
# In[37]:
# meannew1
# In[38]:
sigma_one = var1**0.5
# In[39]:
sigma_zero = var0**0.5
# In[40]:
# norm.pdf(proj_one, meannew1, sigma_one)
# In[41]:
# Sample each fitted normal over +/- 4 sigma around its mean.
X_axis_one = np.linspace(meannew1-4*sigma_one, meannew1+4*sigma_one,1000)
# In[42]:
X_axis_zer = np.linspace(meannew0-4*sigma_zero, meannew0+4*sigma_zero,1000)
# In[43]:
plt.plot(X_axis_one, norm.pdf(X_axis_one, meannew1, sigma_one)) ##plotting the normal curve
plt.plot(X_axis_zer, norm.pdf(X_axis_zer, meannew0, sigma_zero))
# In[44]:
# proj_zero.sort()
# In[45]:
# plt.figure(figsize=(10,6))
# plt.plot(proj_one, )
# plt.ylabel('gaussian distribution')
plt.show()
|
class JsonException(Exception):
GROUP = None
ID = None
def __dict__(self):
return {
'error': True,
'name': self.__class__.__name__,
'message': self.message,
'group': self.__class__.GROUP,
'id': self.__class__.ID
}
class UserFriendlyException(JsonException):
GROUP = 'user'
def __init__(
self,
message='Non critical error occured.'
):
self.message = message
class MissingValueException(UserFriendlyException):
def __init__(
self,
message="Required value '{0}' is missed!", value=''
):
self.message = message.format(value)
class NotFoundException(UserFriendlyException):
def __init__(
self,
message='Requested {0} was not found!',
value='item'
):
self.message = message.format(value)
class InternalServerException(JsonException):
GROUP = 'internal'
def __init__(
self,
message="Internal server error detected. We are sorry for this."
):
self.message = message
|
"""
The MIT License (MIT)
@author: Stephen J. Maher
"""
import sys
import os.path
import instancegen as ig
if __name__ == "__main__":
    # printing the help message
    if (len(sys.argv) == 2 and sys.argv[1] == "--help")\
        or len(sys.argv) < 4 or len(sys.argv) == 1:
        print("Usage: %s instance-class instance-name numscenarios [type]"%sys.argv[0])
        print(" instance-class : the instance class. Available classes (%s)"%", ".join(map(str, ig.instances.keys())))
        print(" instance-name : the name of the instance (without extension)")
        print(" numscenarios : the number of scenarios to generate")
        print(" type : the type of stochasticity. Available types (%s) (default %s)"\
            %(", ".join(map(str, ig.STOCH_TYPES)), str(ig.STOCH_TYPES[0])))
        exit(1)
    print("Arguments:", sys.argv)
    instanceclass = sys.argv[1]
    instancename = sys.argv[2]
    # The instance is defined by a .cor and a .tim file sharing this name.
    extensions = ["cor", "tim"]
    # Kept as a string here; converted with int() when writing the .sto file.
    numscenarios = sys.argv[3]
    stochtype = ig.STOCH_RHS
    if len(sys.argv) == 5:
        stochtype = sys.argv[4]
    # verifying the inputs for the script
    if not ig.validInputs(instanceclass, instancename, extensions, numscenarios,
        stochtype):
        exit(1)
    # initialising the instance
    instance = ig.instances[instanceclass](
        "%s.cor"%(instancename),"%s.tim"%(instancename),
        "%s_%s.sto"%(instancename, numscenarios))
    # reading the instance core and time-stages files
    instance.readInstance(readCor = True, readTim = True)
    # writing the stochastic file
    instance.writeStoFile(int(numscenarios), stochtype)
    # writing the SMPS file (used by SCIP).
    instance.writeSmpsFile()
|
#! /usr/bin/env python
########################################################################
# #
# Resums the non-global logarithms, needs ngl_resum.py #
# #
# If using ngl_resum, please cite #
# doi:10.1007/JHEP09(2020)029 #
# https://inspirehep.net/literature/1798660 #
# #
########################################################################
__author__ = 'Marcel Balsiger'
__email__ = 'marcel.balsiger@hotmail.com'
__date__ = 'October 19, 2020'
import time
import numpy as np
import argparse
import pylhe
import ngl_resum as ngl
parser = argparse.ArgumentParser(description='This code shows how to '\
        'use ngl_resum in combination with LHE-files, considering '\
        'top-pair production. First, each event gets tested whether '\
        'it fulfills the conditions of Table 1 from the ATLAS paper '\
        'arXiv:1203.5015 [hep-ex] and then showers the dipoles with '\
        'the outside region defined by the symmetric rapidity gap '\
        'from -y to y with areas around the bottom quarks cut away. '\
        'Similar code was used to resum the non-global logarithms in '\
        'Section 5 of arXiv:2006.00014')
parser.add_argument('-f','--file', help='lhe event file to shower',\
        default=None,required=True)
parser.add_argument('-y','--ymax', help='ymax of outside region', \
        default=0.8, type=float)
parser.add_argument('-n','--nsh', help='number of showers per dipole', \
        default=100, type=int)
parser.add_argument('-t','--tmax', help='maximal shower time tmax', \
        default=0.1, type=float)
parser.add_argument('-m','--nbins', help='number of bins in hists', \
        default=100, type=int)
parser.add_argument('-c','--cutoff', help='cutoff of shower', \
        default=5, type=float)
parser.add_argument('-s','--seed', help='random seed', \
        default=None, type=int)
parser.add_argument('-b','--break', help='stop after so many events', \
        default=100000, type=int)
# vars() is used because 'break' is a Python keyword, so args.break would
# be a syntax error; all options are read through the dict instead.
args = vars(parser.parse_args())
eventFile=args['file']
# Seed the RNG only when requested, for reproducible showers.
if not(args['seed'] is None) : np.random.seed(args['seed'])
showerCutoff=float(args['cutoff'])
nbins=int(args['nbins'])
tmax=float(args['tmax'])
nsh=int(args['nsh'])
def _outside(self, v):
    """Return True when ``v`` lies in the observable 'outside' region.

    The region is the rapidity band |y| < ymax (from the command line)
    with a cone of radius 0.4 around each b-jet axis removed.
    """
    jet_radius = 0.4
    rap_max = float(args['ymax'])
    rap_min = 0.0
    axis_a = self.event.outgoingBottom[0] / self.event.outgoingBottom[0].e
    axis_b = self.event.outgoingBottom[1] / self.event.outgoingBottom[1].e
    if v.R2(axis_a) <= jet_radius ** 2:
        return False
    if v.R2(axis_b) <= jet_radius ** 2:
        return False
    return rap_min <= abs(v.rap) < rap_max
def validEvent(ev):
    """Return True when ``ev`` passes the ATLAS dilepton ttbar selection.

    Implements the cuts of Table 1 of arXiv:1203.5015 [hep-ex]: exactly
    two intermediate tops, two b quarks, two charged leptons and two
    neutrinos, with the pT/rapidity/isolation cuts coded below.

    Idiom fix: comparisons with None now use ``is``/``is not`` instead of
    ``==``/``not ... ==`` (identity, not equality).
    """
    # check whether we have the necessary particles
    if ev.intermediateTop is None : return False
    if ev.outgoingBottom is None : return False
    if (ev.outgoingElectron is None) and (ev.outgoingMuon is None): \
        return False
    if len(ev.intermediateTop) != 2 : return False
    if len(ev.outgoingBottom) != 2 : return False
    momentaLeptonsOut=[]
    momentaNeutrinoOut=[]
    # True only when both an electron and a muon are present (e-mu channel).
    electronmuonevent=True
    if ev.outgoingElectron is not None:
        for i in ev.outgoingElectron:
            momentaLeptonsOut.append(i)
            # checks on electron(s)
            if i.eT< 25: return False
            if abs(i.rap)>2.47: return False
        for i in ev.outgoingENeutrino:
            momentaNeutrinoOut.append(i)
    else:
        electronmuonevent=False
    if ev.outgoingMuon is not None:
        for i in ev.outgoingMuon:
            momentaLeptonsOut.append(i)
            # checks on muon(s)
            if i.pT< 20: return False
            if abs(i.rap)>2.5: return False
        for i in ev.outgoingMNeutrino:
            momentaNeutrinoOut.append(i)
    else:
        electronmuonevent=False
    # check number of leptons and neutrinos
    if len(momentaLeptonsOut) != 2 : return False
    if len(momentaNeutrinoOut) != 2 : return False
    dileptonmass=np.sqrt((momentaLeptonsOut[0]+momentaLeptonsOut[1])*\
                         (momentaLeptonsOut[0]+momentaLeptonsOut[1]))
    missingMomentum=(momentaNeutrinoOut[0]+momentaNeutrinoOut[1])
    if not electronmuonevent:
        # same-flavour channel: cut on missing ET and remove low-mass
        # and Z-window dilepton events
        if missingMomentum.eT<40 : return False
        if (dileptonmass<15 or abs(dileptonmass-91)<10) : return False
    else:
        # e-mu channel: cut on the scalar sum of visible transverse momenta
        if (momentaLeptonsOut[0].pT+momentaLeptonsOut[1].pT+\
                ev.outgoingBottom[0].pT+ev.outgoingBottom[1].pT)<130:
            return False
    # checks on bottom quarks: pT, rapidity and lepton isolation
    for i in ev.outgoingBottom:
        if i.pT<25: return False
        if abs(i.rap)>2.4: return False
        for j in momentaLeptonsOut:
            if i.R2(j)<0.4**2: return False
    return True # only gets reached, if no check failed.
evtFile = pylhe.readLHE(eventFile)
# Accumulators over all valid events: the LL histogram plus the one- and
# two-loop NGL coefficients (squared sums feed the statistical errors).
fullResultLL=ngl.Hist(nbins,tmax,errorHistCalc=True)
fullNGL1Loop=0.
fullNGL1LoopSq=0.
fullNGL2Loop=0.
fullNGL2LoopSq=0.
eventWeight=0.
numberEvents=0
numberValidEvents=0
timeStart = time.time()
for event in evtFile:
    numberEvents+=1
    ev=ngl.Event(eventFromFile=event,productionDipoles='intermediate',\
                    decayDipoles=False)
    # The averages below assume uniform event weights; remember the first
    # weight and warn if any later event differs.
    if not eventWeight > 0:
        eventWeight=ev.weight
    if not eventWeight==ev.weight:
        print("Warning: events not of equal weight!")
    if validEvent(ev):
        numberValidEvents+=1
        outsideRegion=ngl.OutsideRegion(ev)
        # Bind the analysis-specific _outside() onto this instance,
        # overriding the library's default outside-region definition.
        outsideRegion.outside = _outside.__get__(outsideRegion,\
                                                 ngl.OutsideRegion)
        shower=ngl.Shower(ev,outsideRegion,nsh,nbins,tmax,showerCutoff)
        shower.shower()
        fullResultLL+=shower.resLL
        fullNGL1Loop+=shower.ngl1Loop
        fullNGL1LoopSq+=shower.ngl1LoopSq
        fullNGL2Loop+=shower.ngl2Loop
        fullNGL2LoopSq+=shower.ngl2LoopSq
    # honour the --break event limit
    if numberEvents >= int(args['break']):break
print('runtime=', time.time()-timeStart,' sec')
print("of ", numberEvents," events, ", numberValidEvents," were valid.")
print("Weight of each event:", eventWeight)
print('\n\n' )
print('*************************************')
print('* t LL(t) dS(t) * ')
print('*************************************\n')
print('*** Binned Result ***\n\n')
# Per-bin LL result averaged over valid events, with statistical error.
for i in range(0,fullResultLL.nbins):
    print( round(fullResultLL.centerBinValue[i],4),' ', \
            fullResultLL.entries[i]/numberValidEvents,' ', \
            np.sqrt(fullResultLL.squaredError[i])/numberValidEvents)
print('\n' )
snlo=fullNGL1Loop/numberValidEvents
snloError=np.sqrt((fullNGL1LoopSq/numberValidEvents-\
                   (fullNGL1Loop/numberValidEvents)**2)\
                  /(nsh*numberValidEvents))
print('snlo=',snlo)
print('snloError=',snloError)
print('\n')
snnlo=fullNGL2Loop/numberValidEvents+0.5*snlo**2
#Error(snnlo)=|d(snnlo)/d(fullNGL2Loop)*Error(fullNGL2Loop)|
#            + |d(snnlo)/d(snlo)*Error(snlo)|
snnloError=abs(np.sqrt((fullNGL2LoopSq/numberValidEvents-\
                        (fullNGL2Loop/numberValidEvents)**2)/\
                       (nsh*numberValidEvents)))\
           +abs(snlo*snloError)
print('snnlo=',snnlo)
print('snnloError=',snnloError)
print('\n')
|
# -*- coding: utf-8 -*-
import scrapy
from abcrawler import models
from abcrawler.items import QuoteItem
import datetime
class QuotesSpider(scrapy.Spider):
    """Spider scraping quotes (text, author, tags) from a user-supplied site."""
    name = "quotes"
    # NOTE(review): input() runs at class-definition (import) time, so the
    # process prompts on stdin whenever this module is imported -- confirm
    # this is intended before running under scrapyd/CI.
    url = input("please enter a valid url : ")
    # url = 'http://quotes.toscrape.com/page/1/'
    start_urls = [
        url,
    ]
    def parse(self, response):
        """Yield one QuoteItem per quote on the page, then follow pagination."""
        quotes = response.css('div.quote')
        for quote in quotes:
            text = quote.css('span.text::text').extract_first()
            author = quote.css('small.author::text').extract_first()
            tags = quote.css('div.tags a.tag::text').extract()
            # Strip the surrounding curly-quote characters from the text.
            mod_text = text[1:-1]
            quote_data = QuoteItem()
            quote_data['quote'] = ''.join(mod_text)
            quote_data['author'] =''.join(author)
            # NOTE(review): ''.join concatenates tag names with no
            # separator -- verify that a delimiter isn't wanted here.
            quote_data['tag'] =''.join(tags)
            quote_data['created_date'] = datetime.datetime.now()
            yield quote_data
        # Follow the "next" link until pagination runs out.
        next_page = response.css('li.next a::attr(href)').extract_first()
        if next_page is not None:
            next_page = response.urljoin(next_page)
            yield scrapy.Request(next_page, callback=self.parse)
|
from toontools import Toon
import sys
import argparse
import logging
def setlogging(args):
    """Configure root logging from the parsed --DEBUG flag in ``args``."""
    if not args.DEBUG:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        return
    print("Set Logging to DEBUG")
    logging.basicConfig(level=logging.DEBUG, format='[%(levelname)s] %(message)s')
def main(args):
    """Demo: list Toon devices, then toggle one smart plug on and off."""
    setlogging(args)
    t = Toon.load_from_config('conf/toon.json')
    logging.info(t.get_devices())
    # Hard-coded device id of one specific smart plug -- TODO make
    # configurable (e.g. via --getusage or a new CLI option).
    dev = t.get_device('o-001-101133:happ_smartplug_6634E9BC69E')
    # Turn on device
    t.set_device(dev['uuid'], state='1')
    # Or turn off
    t.set_device(dev['uuid'], state='0')
    t.get_devices()
    return()
if __name__ == '__main__':
    prog = 'python ' + sys.argv[0]
    # Bug fix: the original created two ArgumentParsers; the second one
    # replaced the first and silently discarded its description.  Build a
    # single parser carrying prog, usage and description together.
    parser = argparse.ArgumentParser(
        prog=prog,
        usage='%(prog)s [options]',
        description='This App Creates an Application Network Profile with all its EPS, Contracts etc')
    parser.add_argument("--getusage", required=False, default='both', help='''gas/ electricity/ both''')
    parser.add_argument("--DEBUG", action='store_true')
    args = parser.parse_args()
    main(args)
|
from django.urls import path, include
# https://docs.djangoproject.com/en/dev/topics/auth/default/#module-django.contrib.auth.views
# URL namespace, so views are reversed as e.g. reverse("account:login").
app_name = "account"
urlpatterns = [
    # Mount Django's stock auth views (login, logout, password flows);
    # the resulting names are listed in the comment block below.
    path("", include("django.contrib.auth.urls")),
]
# This is a list of the included urls
# accounts/login/ [name='login']
# accounts/logout/ [name='logout']
# accounts/password_change/ [name='password_change']
# accounts/password_change/done/ [name='password_change_done']
# accounts/password_reset/ [name='password_reset']
# accounts/password_reset/done/ [name='password_reset_done']
# accounts/reset/<uidb64>/<token>/ [name='password_reset_confirm']
# accounts/reset/done/ [name='password_reset_complete']
|
#!/usr/bin/python
from setuptools import setup
from distutils.extension import Extension
from Pyrex.Distutils import build_ext
# Build configuration: two Pyrex-generated C extensions linking against
# the Athena Moira client libraries.
setup(
    name="PyMoira",
    version="4.3.0",
    description="PyMoira - Python bindings for the Athena Moira library",
    author="Evan Broder",
    author_email="broder@mit.edu",
    license="MIT",
    py_modules=['moira'],
    ext_modules=[
        # Low-level wrapper around libmoira (needs Kerberos for auth).
        Extension("_moira",
                  ["_moira.pyx"],
                  libraries=["moira", "krb5"]),
        # Higher-level client wrapper around libmrclient.
        Extension("mrclient",
                  ["mrclient.pyx"],
                  libraries=["mrclient", "moira"]),
    ],
    scripts=['qy'],
    cmdclass= {"build_ext": build_ext}
)
|
"""
decodeした画像を確認
"""
from char_img_autoencoder import CharImgAutoencoder
from img_loader import ImgLoader
import sys
sys.path.append("../")
from img_char.img_char_opt import ImgCharOpt
from matplotlib import pylab as plt
from PIL import Image
import numpy as np
# グラフに日本語を表示するために必要
import matplotlib
font = {'family': 'AppleGothic'}
matplotlib.rc('font', **font)
feat_shape = 8
feat_pict_size = 4
font_size = 32
def get_concat_h(im1, im2):
    """Return a new RGB image with ``im2`` pasted to the right of ``im1``."""
    canvas = Image.new('RGB', (im1.width + im2.width, im1.height))
    canvas.paste(im1, (0, 0))
    canvas.paste(im2, (im1.width, 0))
    return canvas
def get_concat_v(im1, im2):
    """Return a new RGB image with ``im2`` pasted below ``im1``."""
    canvas = Image.new('RGB', (im1.width, im1.height + im2.height))
    canvas.paste(im1, (0, 0))
    canvas.paste(im2, (0, im1.height))
    return canvas
def toPILarray(np_arr):
    """Convert a float array with values in [0, 1] to an 8-bit PIL image."""
    scaled = np.uint8(np_arr * 255)
    return Image.fromarray(scaled)
def main():
    """Run the autoencoder on random glyph images and save an
    original-vs-reconstruction comparison sheet to ./debug_data/test.png."""
    predict_num = 10
    train_x, _ = ImgLoader.make_train_data_random(
        predict_num, "../font_img/image/hiragino/")
    img_char_opt = ImgCharOpt(
        "../font_img/image/hiragino/", "../img_char/image_save_dict/")
    # train_yomi_list = []
    # for t in train_x:
    #     t = t.reshape(font_size, font_size)
    #     train_yomi_list.append(img_char_opt.image2char(t))
    # print(train_yomi_list)
    # Load the trained weights (init_model=False skips re-training).
    char_img = CharImgAutoencoder(
        "./weight/char_feature.hdf5", init_model=False)
    predicted = char_img.autoencoder.predict(train_x)
    # For each sample, place original and reconstruction side by side...
    compare_img_list = []
    for train, predict in zip(train_x, predicted):
        train = train.reshape(font_size, font_size)
        predict = predict.reshape(font_size, font_size)
        train = toPILarray(train)
        predict = toPILarray(predict)
        compare_img_list.append(get_concat_h(train, predict))
    # ...then stack all pairs vertically into a single sheet.
    concat = compare_img_list[0]
    for i in range(1, len(compare_img_list)):
        concat = get_concat_v(compare_img_list[i], concat)
    concat.save("./debug_data/test.png")
if __name__ == "__main__":
    main()
|
import app.model as model
class Task(object):
    """Base task: prepares the model layer before work runs."""
    def initialize(self):
        # Open the model-layer database connection.
        model.init_connection()
|
"""Represents a refused transaction message."""
from marshmallow import EXCLUDE, fields
from .....messaging.agent_message import AgentMessage, AgentMessageSchema
from .....messaging.valid import UUID4_EXAMPLE
from ..message_types import PROTOCOL_PACKAGE, REFUSED_TRANSACTION_RESPONSE
HANDLER_CLASS = (
f"{PROTOCOL_PACKAGE}.handlers"
".refused_transaction_response_handler.RefusedTransactionResponseHandler"
)
class RefusedTransactionResponse(AgentMessage):
    """Class representing a refused transaction response message."""
    class Meta:
        """Metadata for a refused transaction response message."""
        handler_class = HANDLER_CLASS
        message_type = REFUSED_TRANSACTION_RESPONSE
        schema_class = "RefusedTransactionResponseSchema"
    def __init__(
        self,
        *,
        transaction_id: str = None,
        thread_id: str = None,
        signature_response: dict = None,
        state: str = None,
        endorser_did: str = None,
        **kwargs,
    ):
        """Initialize a refused transaction response object.
        All fields are optional; unset fields are simply omitted from the
        serialized message (see RefusedTransactionResponseSchema).
        Args:
            transaction_id: The id of the transaction record
            thread_id: The thread id of the transaction record
            signature_response: The response created to refuse the transaction
            state: The state of the transaction record
            endorser_did: The public did of the endorser who refuses the transaction
        """
        super().__init__(**kwargs)
        self.transaction_id = transaction_id
        self.thread_id = thread_id
        self.signature_response = signature_response
        self.state = state
        self.endorser_did = endorser_did
class RefusedTransactionResponseSchema(AgentMessageSchema):
    """Refused transaction response schema class."""
    class Meta:
        """Refused transaction response schema metadata."""
        model_class = RefusedTransactionResponse
        # Ignore (rather than reject) unknown fields during deserialization.
        unknown = EXCLUDE
    transaction_id = fields.Str(
        required=False,
        metadata={
            "description": (
                "The transaction id of the agent who this response is sent to"
            ),
            "example": UUID4_EXAMPLE,
        },
    )
    thread_id = fields.Str(
        required=False,
        metadata={
            "description": (
                "The transaction id of the agent who this response is sent from"
            ),
            "example": UUID4_EXAMPLE,
        },
    )
    # Free-form refusal details produced by the endorser; the example shows
    # the expected key set (message_id, context, method, signer_goal_code).
    signature_response = fields.Dict(
        required=False,
        metadata={
            "example": {
                "message_id": "143c458d-1b1c-40c7-ab85-4d16808ddf0a",
                "context": "did:sov",
                "method": "add-signature",
                "signer_goal_code": "transaction.refuse",
            }
        },
    )
    state = fields.Str(
        required=False,
        metadata={
            "description": "The State of the transaction Record",
            "example": "refused",
        },
    )
    endorser_did = fields.Str(
        required=False,
        metadata={
            "description": "The public did of the endorser",
            "example": "V4SGRU86Z58d6TV7PBUe6f",
        },
    )
|
from distutils.core import setup
from Cython.Build import cythonize
setup(
    # Compile both modules into C extensions via Cython.
    ext_modules = cythonize(["kambing.py", "dawet.py"])
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : mofei
# @Time : 2018/10/12 14:59
# @File : t22FindFirstCommonNode.py
# @Software: PyCharm
# 两个链表的第一个公共结点
# https://www.nowcoder.com/practice/6ab1d9a29e88450685099d45c9e31e46?tpId=13&tqId=11189&tPage=2&rp=2&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking
class ListNode:
    """Singly linked list node."""
    def __init__(self, x):
        self.val = x      # payload value
        self.next = None  # successor node; None at the tail
class Solution:
    """Locate the first node shared by two singly linked lists."""

    def length(self, head):
        """Return the number of nodes reachable from ``head``."""
        count = 0
        node = head
        while node is not None:
            count += 1
            node = node.next
        return count

    def FindFirstCommonNode(self, pHead1, pHead2):
        """Return the first common node of the two lists, or None.

        Advances the longer list by the length difference first, so both
        cursors are the same distance from the end, then walks them in
        lockstep until they meet.
        """
        longer, shorter = pHead1, pHead2
        len1 = self.length(pHead1)
        len2 = self.length(pHead2)
        if len1 < len2:
            longer, shorter = shorter, longer
        for _ in range(abs(len2 - len1)):
            longer = longer.next
        while longer is not None and shorter is not None:
            if longer == shorter:
                return longer
            longer = longer.next
            shorter = shorter.next
        return None
# Demo: list 1->2->3 and list 4->2->3 share node2 as their first common node.
node = ListNode(1)
node2 = ListNode(2)
node3 = ListNode(3)
node4 = ListNode(4)
node.next = node2
node2.next = node3
node4.next = node2
print(Solution().FindFirstCommonNode(node, node4).val)
|
import json
import argparse
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Path to the ontology/value-dictionary JSON file; edited in place.
    parser.add_argument('--otgy', type=str)
    # MultiWOZ hotel-book stay,hotel-book people,hotel-stars,train-book people
    # SMD distance,temperature
    parser.add_argument('--remove_fields',
                        type=str,
                        default='hotel-book stay,hotel-book people,hotel-stars,train-book people,distance,temperature')
    args = parser.parse_args()
    with open(args.otgy, 'rt') as fd:
        data = json.load(fd)
    # Drop each requested top-level field, ignoring ones that are absent.
    for field in args.remove_fields.split(','):
        if field in data:
            del data[field]
    # Write the pruned dictionary back to the same file.
    with open(args.otgy, 'wt') as fd:
        json.dump(data, fd)
|
from graphene_django import DjangoObjectType
import graphene
from .models import Projectil
class ProjectilType(DjangoObjectType):
    """GraphQL object type exposing the Projectil Django model."""
    class Meta:
        model = Projectil
class CreateProjectil(graphene.Mutation):
    """Mutation creating a Projectil row from its scalar fields."""
    class Arguments:
        name=graphene.String()
        sprite=graphene.String()
        speed=graphene.Decimal()
        hitboxSize=graphene.Decimal()
        damage=graphene.Int()
        range=graphene.Decimal()
    ok = graphene.Boolean()
    # Bug fix: the output field must reference the GraphQL type
    # (ProjectilType), not the Django model class.
    projectil = graphene.Field(lambda: ProjectilType)
    def mutate(root, info, name, sprite, speed, hitboxSize, damage, range):
        projectil = Projectil.objects.create(
            name=name,
            sprite=sprite,
            speed=speed,
            hitboxSize=hitboxSize,
            damage=damage,
            range=range)
        ok = True
        # Bug fix: the original passed person=..., but no 'person' field
        # exists on this mutation, raising TypeError at runtime.
        return CreateProjectil(projectil=projectil, ok=ok)
class UpdateProjectil(graphene.Mutation):
    """Mutation updating fields of an existing Projectil by primary key."""
    class Arguments:
        # Bug fix: 'id' was read from kwargs in mutate() but never
        # declared, so the schema never accepted it.
        id=graphene.ID()
        name=graphene.String()
        sprite=graphene.String()
        speed=graphene.Decimal()
        hitboxSize=graphene.Decimal()
        damage=graphene.Int()
        range=graphene.Decimal()
    ok = graphene.Boolean()
    # Bug fix: reference the GraphQL type, not the Django model.
    projectil = graphene.Field(lambda: ProjectilType)
    def mutate(root, info, **kwargs):
        projectil = Projectil.objects.get(pk=kwargs['id'])
        for field_name, value in kwargs.items():
            if field_name == 'id':
                continue
            # Bug fix: 'projectil.k = v' assigned a literal attribute
            # named 'k'; setattr applies the value to the named field.
            setattr(projectil, field_name, value)
        projectil.save()
        ok = True
        # Bug fix: the original passed person=..., a field that does not
        # exist on this mutation.
        return UpdateProjectil(projectil=projectil, ok=ok)
class DeleteProjectil(graphene.Mutation):
    """Mutation that deletes a Projectil by primary key."""

    class Arguments:
        id = graphene.ID()

    ok = graphene.Boolean()

    def mutate(root, info, id):
        """Delete the row and report success."""
        projectil = Projectil.objects.get(pk=id)
        projectil.delete()
        # `ok` is the only declared output field; the old
        # `person=projectil` referenced a field that does not exist
        # (and a row that was just deleted).
        return DeleteProjectil(ok=True)
class Mutations(graphene.ObjectType):
    """Root mutation type exposing create/update/delete for Projectil."""
    create_projectil = CreateProjectil.Field()
    update_projectil = UpdateProjectil.Field()
    delete_projectil = DeleteProjectil.Field()
class Query(graphene.ObjectType):
    """Root query type for Projectil lookups."""
    projectil = graphene.Field(ProjectilType,
                               id=graphene.Int(),
                               # `name` was missing from the schema even
                               # though the resolver accepts it as a filter.
                               name=graphene.String(),
                               sprite=graphene.String(),
                               speed=graphene.Decimal(),
                               hitboxSize=graphene.Decimal(),
                               damage=graphene.Int(),
                               range=graphene.Decimal())
    projectils = graphene.List(ProjectilType)

    def resolve_projectil(self, context, id=None, name=None):
        """Fetch a single Projectil by id (preferred) or by name."""
        if id is not None:
            return Projectil.objects.get(pk=id)
        if name is not None:
            return Projectil.objects.get(name=name)
        return None

    def resolve_projectils(self, context):
        """Fetch all Projectil rows."""
        return Projectil.objects.all()
from backend.core.model.dictionary.hash.HashDictionary import HashDictionary
from backend.core.model.preprocess.TokenizingPorter2Stemmer import TokenizingPorter2Stemmer
from backend.core.model.semantics.ISemanticsStrategy import ISemanticsStrategy
from backend.core.util.util import *
from backend.shared.NodeCommunicator import NodeCommunicator
ln = getModuleLogger(__name__)
from backend.core.util import config
from gensim import models
import itertools
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
from twisted.internet.task import LoopingCall
from Queue import Queue
# Where this strategy node registers itself and listens for work.
CORE_IP = "localhost"
REGISTER_PORT = config.strategyregisterport
LISTEN_PORT = config.lsa_strategy_port
# SETTINGS
NUM_TOPICS = 200       # dimensionality of the LSA topic space (num_topics for LsiModel)
CHUNK_SIZE = 10000     # gensim training chunk size (documents per update)
DECAY = 1.0            # passed to LsiModel(decay=...)
DISTRIBUTED = False    # single-machine gensim training
ONE_PASS = True        # one-pass SVD (no multi-pass refinement)
DICTIONARY = HashDictionary  # dictionary implementation created per source type
def silenceGensim():
    """Raise gensim's chattiest loggers to INFO to cut down log noise."""
    import logging

    for logger_name in ("gensim.models.lsimodel", "gensim.matutils"):
        logging.getLogger(logger_name).setLevel(logging.INFO)
class LSAStrategy(ISemanticsStrategy):
    """Semantic-vector strategy backed by gensim LSA, one model per source type."""

    NAME = "LSA"

    def __init__(self):
        """
        Initialize the model. This doesn't add any documents yet.

        NOTE(review): this constructor also registers with the core node,
        starts a 5-second polling loop and calls reactor.run(), so it
        blocks for the lifetime of the process.
        """
        silenceGensim()
        self.dictionaries = dict()
        self.preprocessor = TokenizingPorter2Stemmer()
        #this dict keeps a model for every source type
        # (since e.g. RSS feeds should be treated separately from twitter feeds)
        self.models = dict()
        #this dict keeps a dictionary for every source type
        # NOTE(review): self.dictionaries was already assigned above; the
        # second assignment is redundant but harmless.
        self.dictionaries = dict()
        self.queue = Queue()
        self.modelQueue = Queue()
        self.nodeCommunicator = NodeCommunicator(self, LISTEN_PORT)
        self.nodeCommunicator.registerWithNode(CORE_IP, REGISTER_PORT) # register this node with the core
        ln.info("LSA Initialized")
        self.updating = False
        # Poll the work queue every 5 seconds on the twisted reactor.
        loop = LoopingCall(self.update)
        loop.start(5)
        reactor.run()

    def createDictionary(self, sourceType):
        # Create, register and return a fresh token dictionary for sourceType.
        ln.info("creating a new dictionary for sourceType %s.", sourceType)
        dictionary = DICTIONARY()
        self.dictionaries[sourceType] = dictionary
        return dictionary

    def createModel(self, sourceType, dictionary):
        # Create, register and return a fresh (empty) LSA model for sourceType.
        ln.info("creating a new LSA model for sourceType %s.", sourceType)
        model = models.lsimodel.LsiModel(corpus=None, num_topics=NUM_TOPICS, chunksize=CHUNK_SIZE, id2word=dictionary,
                                         decay=DECAY, distributed=DISTRIBUTED, onepass=ONE_PASS)
        self.models[sourceType] = model
        return model

    def queueDocuments(self, returnTo, documents, relabel):
        # Enqueue a batch of work; the LoopingCall-driven update() drains it.
        self.queue.put((returnTo, documents, relabel))

    def update(self):
        # Run the queue drain on a worker thread so the reactor stays responsive.
        deferToThread(self.__update)

    def __update(self):
        # Process at most one queued batch; self.updating is a best-effort
        # guard against overlapping runs from successive LoopingCall ticks.
        if self.updating:
            return
        self.updating = True
        if not self.queue.empty():
            returnTo, docs, relabel = self.queue.get()
            self._handleDocuments(returnTo, docs, relabel)
        self.updating = False

    def _handleDocuments(self, returnTo, docs, relabel=False):
        """
        Add documents to the model, and send back their vector representations.

        :param returnTo: address handed back to nodeCommunicator.respond()
        :param docs: list of Document __dict__ payloads
        :param relabel: when True, only re-project documents (no model update)
                        unless the model turns out to be uninitialized
        """
        ln.info("LSA tasked with %s documents.", len(docs))
        if not self.models:
            self.load()
        updatedModel = not relabel
        documents = []
        for docDict in docs:
            # Rehydrate Document objects from their serialized dict form.
            document = Document("")
            document.__dict__ = docDict
            documents.append(document)
        # NOTE(review): itertools.groupby only groups *consecutive* items;
        # documents are not sorted by sourceType first, so a source type may
        # show up in several groups (extra passes, but not incorrect).
        documentGroups = itertools.groupby(documents, lambda d: d.sourceType)
        results = []
        for sourceType, iterator in documentGroups:
            documents = list(iterator)
            #retrieve the dict and LSA model for this source, or create new ones
            dictionary = self.dictionaries.get(sourceType, None)
            if dictionary is None:
                dictionary = self.createDictionary(sourceType)
            model = self.models.get(sourceType, None)
            if model is None:
                model = self.createModel(sourceType, dictionary)
            for doc in documents:
                self.preprocessor.preprocess(doc, dictionary)
            # Lazy stream of preprocessed token lists; consumed at most once.
            prep = (doc.preprocessed for doc in documents)
            if not relabel:
                model.add_documents(prep)
            else:
                try:
                    assert model.projection.u is not None
                except:
                    ln.debug("LSA was not properly initialized. Reinitializing, adding all documents for model.")
                    model = self.createModel(sourceType, dictionary)
                    model.add_documents(prep)
                    updatedModel = True
            # add the document vector space representations
            sourceTypeTag = self.NAME # +"_"+document.sourceType
            results += [{"_id": document._id, "strategy": sourceTypeTag, "vector": model[document.preprocessed]}
                        for document in documents]
        if updatedModel:
            self.save()
        self.nodeCommunicator.respond(returnTo, {"vectors": results})

    def handleOne(self, text, sourceType="RSSFeed"):
        """Project a single text into every loaded model's topic space."""
        if not self.models:
            self.load()
        dictionary = self.dictionaries.get(sourceType, None)
        if dictionary is None:
            dictionary = self.createDictionary(sourceType)
        text = self.preprocessor.preprocess(text, dictionary)
        res = {"vectors": []}
        for modelName in self.models:
            res["vectors"].append(
                {
                    "_id": None,
                    "vector": self.models[modelName][text],
                    "strategy": "LSA_" + modelName
                }
            )
        return res

    def save(self):
        # Persist every per-source model under persist/LSA_model_<sourceType>.
        ln.debug("saving models")
        for sourceType in self.models:
            model = self.models[sourceType]
            model.save("persist/LSA_model_" + sourceType)
        ln.debug("done saving models")

    def load(self):
        # Load only the RSSFeed model from the persist/ directory, if present.
        ln.debug("Loading models..")
        import os
        for filename in os.listdir(os.getcwd()+"/persist"):
            if filename == "LSA_model_RSSFeed":
                loadfilename = "persist/" + filename
                model = models.lsimodel.LsiModel.load(loadfilename)
                sourceType = "RSSFeed"
                self.models[sourceType] = model
                ln.info("loaded model %s for sourceType %s", filename, sourceType)
        ln.debug("Done loading models.")
# Number of documents to train synchronously before CustomLsiModel switches
# to background (threaded) updates.
NUM_TRAIN = 500
class CustomLsiModel(models.lsimodel.LsiModel):
    """LsiModel variant that trains synchronously for the first NUM_TRAIN
    documents, then batches later updates into background threads.
    """
    # TODO make this work

    def __init__(self, corpus, num_topics, chunksize, id2word, decay, distributed, onepass):
        super(CustomLsiModel, self).__init__(corpus=corpus, num_topics=num_topics, chunksize=chunksize, id2word=id2word,
                                             decay=decay, distributed=distributed, onepass=onepass)
        self._currentChunk = []
        self._numTrained = 0
        # BUG FIX: _updating was never initialized, so the first call to
        # _updateInBackground() raised AttributeError.
        self._updating = False

    def add_documents(self, documents):
        """Buffer documents; train in the foreground until NUM_TRAIN docs
        have been seen, afterwards only once a full chunk has accumulated."""
        self._currentChunk += documents
        chunkSize = len(self._currentChunk)
        if self._numTrained < NUM_TRAIN:
            ln.debug("Training LSI model in foreground")
            super(CustomLsiModel, self).add_documents(self._currentChunk)
            self._numTrained += chunkSize
            # BUG FIX: clear the buffer after training; previously the same
            # documents were re-added on every subsequent call.
            self._currentChunk = []
        elif chunkSize > CHUNK_SIZE:
            ln.debug("Training LSI model in background")
            self._updateInBackground()

    def _updateInBackground(self):
        """Hand the buffered chunk to a worker thread, at most one at a time."""
        if self._updating:
            return
        self._updating = True
        d = deferToThread(super(CustomLsiModel, self).add_documents, self._currentChunk[:])
        self._currentChunk = []
        # BUG FIX: the guard used to be reset immediately, which made it
        # useless; release it only once the background update finishes.
        d.addBoth(self._backgroundDone)

    def _backgroundDone(self, result):
        # Deferred callback/errback: clear the in-progress flag.
        self._updating = False
        return result
|
from flask import Flask, redirect, session, request, render_template
import random
# create a site that when a user loads it creates a random number between 1-100
# stores the number in a session
# allow the user to guess at the number and tell them when they are too high or too low
app = Flask(__name__)
# Secret key enables signed session cookies (dev-only value).
app.secret_key='secret123'
@app.route('/')
def index():
    """Show the guessing page, initializing session state on first visit."""
    if 'guess_number' not in session:
        session['guess_number'] = None
    if 'random_number' not in session:
        # BUG FIX: randrange(1, 100) never produced 100; the stop argument is
        # exclusive, so 101 makes the secret number span 1-100 as specified.
        session['random_number'] = random.randrange(1, 101)
    return render_template('index.html', guess_number=session['guess_number'], random_number=session['random_number'])
@app.route('/results', methods=['POST'])
def result():
    # Store the submitted guess in the session, then re-render via the index.
    # NOTE(review): int() raises ValueError on non-numeric input — TODO confirm
    # the form restricts input, or add validation here.
    session['guess_number']=int(request.form['guess'])
    return redirect('/')
@app.route('/finish')
def finish():
    """Reset the game: clear the guess and draw a new secret number."""
    session['guess_number']= None
    # BUG FIX: randrange's stop is exclusive; use 101 so the range is 1-100.
    session['random_number'] = random.randrange(1, 101)
    return redirect('/')
# Development server only; debug=True must not be used in production.
app.run(debug=True)
|
import sys
import struct
import argparse
import numpy as np
from builtins import range
# Reads an idx file and stores it in a numpy array
def read_idx( filename ):
    """Read an IDX-format file (the MNIST container format) into a numpy array.

    :param filename: path to the IDX file
    :return: numpy.ndarray of dtype uint8, reshaped to the dimensions
             declared in the IDX header
    :raises: re-raises any I/O or parse error after printing it
    """
    try:
        with open(filename, 'rb') as f:
            # First two bytes are ignored, second byte is the data type and last byte is the number of dimensions
            zero, data_type, dims = struct.unpack( '>HBB', f.read(4) )
            # Read <dims> integers (4 bytes each) to get the size of each dimension
            shape = tuple( struct.unpack( '>I', f.read(4) )[0] for d in range(dims) )
            # np.frombuffer replaces np.fromstring, which is deprecated and
            # removed in modern numpy; .copy() keeps the array writable,
            # matching fromstring's behavior.
            return np.frombuffer( f.read(), dtype = np.uint8 ).copy().reshape(shape)
    except Exception as err:
        print( 'Error reading IDX file: \'%s\':\n%s' % ( filename, err ) )
        raise
# Simple Neural Network definition
class SimpleNN(object):
    """Fully-connected feed-forward network with logistic activations,
    trained by plain gradient descent (forward / backward / update)."""

    def __init_layers(self, layers, weight_factor = 1):
        # Allocate one weight matrix and one bias row per layer transition,
        # initialized from a standard normal scaled by weight_factor.
        self.weights = []
        self.biases = []
        for i in range( len(layers)-1 ):
            self.weights.append(weight_factor * np.random.normal( 0, 1, (layers[i], layers[i+1] )))
            self.biases.append(weight_factor * np.random.normal( 0, 1, (1, layers[i+1] )))

    def __init__(self, input_size, output_size, mid_layers = [300], weight_factor = 1):
        # NOTE(review): mid_layers is a mutable default; safe here because it
        # is only read (list concatenation below), never mutated.
        self.__init_layers([input_size] + mid_layers + [output_size], weight_factor)

    # Forward computation
    def forward(self, ipt):
        """Run a forward pass; caches each layer's input for backward()."""
        x = ipt
        self.inputs = []
        for weight, bias in zip(self.weights, self.biases):
            self.inputs.append(x)
            # Multiply by weights and add bias
            wb = np.matmul(x, weight) + bias
            # Logistic activation
            x = 1 / (1 + np.exp(-wb))
        return x

    # Backpropagation of error
    def backward(self, output, expected):
        """Compute per-layer error gradients for a squared-error loss.

        Must be called after forward(); consumes the cached self.inputs.
        """
        self.gradients = []
        # Error gradient on the output layer
        grad = output * (1 - output) * (output - expected)
        # Store the gradients
        self.gradients.insert(0, grad)
        for i in range(len(self.weights)-1, 0, -1):
            # Propagate the error
            grad = self.inputs[i] * (1 - self.inputs[i]) * np.reshape( np.sum(grad * self.weights[i], 1), (1, self.weights[i].shape[0]) )
            # Store the gradients
            self.gradients.insert(0, grad)

    # Update weights and biases
    def update(self, lr = 0.5):
        """Apply one gradient-descent step using the gradients from backward()."""
        for weight, bias, grad, inpt in zip(self.weights, self.biases, self.gradients, self.inputs):
            weight -= lr * np.matmul(np.transpose(inpt), grad)
            bias -= lr * grad
# Command line parameters
parser = argparse.ArgumentParser( description = 'Learn from the MNIST dataset' )
parser.add_argument( '-d', dest = 'directory', metavar = 'MNIST location', default = '.', help = 'Location of MNIST IDX files (default: current directory)' )
parser.add_argument( '-lr', dest = 'lr', metavar = 'Learning Rate', type = float, default = 0.2, help = 'Learning Rate during training (default: 0.2)' )
parser.add_argument( '-n', dest = 'num_models', metavar = 'Num. Models', type = int, default = 15, help = 'Number of different models to learn in Bagging (default: 15)' )
parser.add_argument( '-e', dest = 'epoch', metavar = 'Epochs', type = int, default = 1000, help = 'Number of epochs to learn for (default: 1000)' )
parser.add_argument( '--nn', dest = 'use_nn', action = 'store_true', help = 'Use Simple 3 layer Neural Network' )
parser.add_argument( '--bag', dest = 'use_bagging', action = 'store_true', help = 'Use bagging of weaker networks' )
args = parser.parse_args()
mnist_dir = args.directory
learning_rate = args.lr
epochs = args.epoch
# BUG FIX: `epochs / 10` is a float on Python 3, which later breaks
# range(epochs_bagging) in the bagging loop; floor division keeps it an int
# on both Python 2 and 3.
epochs_bagging = epochs // 10
n_models = args.num_models
use_nn = args.use_nn
use_bagging = args.use_bagging
# Read the datasets
try:
    train_data = read_idx( '%s/train-images.idx3-ubyte' % mnist_dir )
    train_labels = read_idx( '%s/train-labels.idx1-ubyte' % mnist_dir )
    test_data = read_idx( '%s/t10k-images.idx3-ubyte' % mnist_dir )
    test_labels = read_idx( '%s/t10k-labels.idx1-ubyte' % mnist_dir )
except Exception as err:
    # read_idx already printed the error; exit without a traceback.
    sys.exit()
# Standardize the data for faster convergence during training
all_data = np.vstack( (train_data, test_data) )
all_data = ( all_data - all_data.mean() ) / all_data.std()
# Split the standardized stack back into train/test partitions.
train_data = all_data[0:train_data.shape[0]]
test_data = all_data[train_data.shape[0]:]
# Generate one-hot encodings for the labels
nclasses = 10
train_one_hot = np.zeros((train_labels.shape[0], 10))
test_one_hot = np.zeros((test_labels.shape[0], 10))
# Fancy indexing: set a 1 at each sample's label column.
train_one_hot[ np.arange(train_labels.shape[0]), train_labels] = 1
test_one_hot[ np.arange(test_labels.shape[0]), test_labels] = 1
# Reshape images as 1D arrays
train_data = np.reshape(train_data, (train_data.shape[0], train_data.shape[1] * train_data.shape[2] ))
test_data = np.reshape(test_data, (test_data.shape[0], test_data.shape[1] * test_data.shape[2] ))
# Split train dataset into train (80%) and validation (20%) datasets
train_perc = 0.8
cutoff = int(train_perc * train_data.shape[0])
perm = np.random.permutation(train_data.shape[0])
val_data = train_data[ perm[cutoff:] ]
train_data = train_data[ perm[0:cutoff] ]
val_one_hot = train_one_hot[ perm[cutoff:] ]
train_one_hot = train_one_hot[ perm[0:cutoff] ]
###########################################################
"""
Simple learning with only one neural network.
In each epoch we run the full forward-backward-update pass
through every instance in the training set.
We can observe overfitting with a high enough number of epochs.
Experimental result:
Network trained over 1000 epochs and
learning rate of 0.2 achieves an accuracy of 0.9315
"""
###########################################################
if( use_nn ):
    # Create Neural Network (784 inputs = 28x28 pixels, 10 output classes)
    nn = SimpleNN( 784, 10, mid_layers = [30], weight_factor = np.sqrt(2) )
    # Simple training
    print('\n-----Starting Training ( Single NN )-----\n')
    # In each epoch...
    for i in range( epochs ):
        # ...pass through the training set
        tr_error = 0
        for j in range(train_data.shape[0]):
            # One-sample "mini-batch": slicing keeps the 2-D shape.
            ipt_data = train_data[j:j+1]
            fwd = nn.forward(ipt_data)
            # Mean squared error over the 10 output units.
            tr_error += np.sum( ( train_one_hot[j:j+1] - fwd )**2 ) / 10
            nn.backward(fwd, train_one_hot[j:j+1])
            nn.update( learning_rate )
        # Pass through the validation set (only forward pass)
        val_error = 0
        for j in range(val_data.shape[0]):
            fwd = nn.forward(val_data[j:j+1])
            val_error += np.sum(( val_one_hot[j:j+1] - fwd )**2) / 10
        print( 'epoch %d, train error: %f, val error: %f' % (i, tr_error / train_data.shape[0], val_error / val_data.shape[0]))
    print('\n-----End of Training ( Single NN )-----\n')
    # Accuracy
    print( 'Single NN - Accuracy on test data: %f' % (np.sum( np.argmax( nn.forward(test_data),1 ) == test_labels ) / float( test_data.shape[0] )) )
###########################################################
"""
Learning using an ensemble approach ( bagging ).
Using a single network we observe overfitting, so we try
to learn several weaker classifiers ( using random subsamples
of the training set ) and aggregate their predictions to
obtain a lower variance model that outperforms the previous
result with a single neural network trained over the whole
dataset.
Experimental result:
Ensemble with 15 models trained over 50 epochs each and
learning rate of 0.2 achieves an accuracy of 0.9504
"""
###########################################################
if( use_bagging ):
    print('\n-----Starting Training ( Bagging )-----\n')
    nets = []
    for n_classifier in range( n_models ):
        print('\nTraining classifier %d\n' % n_classifier)
        net = SimpleNN( 784, 10, mid_layers = [30], weight_factor = np.sqrt(2) )
        # Train each network over 60% of the training instances (weak classifier - reduces correlation between models)
        instance_perm = np.random.permutation( train_data.shape[0] )[ 0:int( train_data.shape[0] * 0.6 ) ]
        # In each epoch... (int() guards against epochs_bagging being a float
        # from `epochs / 10` on Python 3)
        for i in range( int(epochs_bagging) ):
            # ...pass through the training set
            for j in instance_perm:
                ipt_data = train_data[j:j+1]
                fwd = net.forward(ipt_data)
                net.backward(fwd, train_one_hot[j:j+1])
                net.update( learning_rate )
            print('Classifier %d, Epoch %d' % (n_classifier, i))
        nets.append(net)
    # Training
    print('\n-----End of Training ( Bagging )-----\n')
    # Generate final prediction by summing all of the prediction
    # BUG FIX: size the accumulator to the actual test set instead of a
    # hard-coded 10000, and aggregate over ALL trained models instead of
    # only the first 5.
    result_sum = np.zeros((test_data.shape[0], 10))
    for n_classifier in range(len(nets)):
        result_sum += nets[n_classifier].forward(test_data)
    # Accuracy
    print( 'Bagging - Accuracy on test data: %f' % (np.sum( np.argmax(result_sum, 1) == test_labels ) / float(test_data.shape[0]) ) )
|
from sklearn.externals import joblib
import numpy as np
import faiss
import time
from sklearn.decomposition import PCA
from sklearn.preprocessing import normalize
from multiprocessing import Pool
query_file_path = "test_gem.pkl"
index_file_path = "index_gem.pkl"
# Each pickle holds (image_paths, feature_matrix); features are stored
# feature-dim-first and transposed to row-per-vector below.
query_images, query_features = joblib.load(query_file_path)
index_images, index_features = joblib.load(index_file_path)
# Image id = basename without extension.
query_ids = [x.split("/")[-1].split(".")[0] for x in query_images]
index_ids = [x.split("/")[-1].split(".")[0] for x in index_images]
query_ids = np.array(query_ids)
index_ids = np.array(index_ids)
# faiss requires float32, C-contiguous, one vector per row.
query_features = query_features.astype(np.float32).T
index_features = index_features.astype(np.float32).T
query_features = np.ascontiguousarray(query_features)
index_features = np.ascontiguousarray(index_features)
def get_invert_index(feature):
    """Build a GPU-backed exact (flat) L2 faiss index over the given vectors.

    :param feature: 2-D float32 C-contiguous array, one row per vector
    :return: faiss GPU index with all rows added
    """
    res = faiss.StandardGpuResources() # use a single GPU
    ## Using a flat index
    index_flat = faiss.IndexFlatL2(len(feature[0])) # build a flat (CPU) index
    # make it a flat GPU index
    gpu_index_flat = faiss.index_cpu_to_gpu(res, 0, index_flat)
    gpu_index_flat.add(feature)
    return gpu_index_flat
# Database-side augmentation (DBA): each vector is replaced by itself plus a
# weighted sum of its top-`recall_num` neighbours in the index.
recall_num = 5
# Diagonal of descending weights (1 ... 10^-1.5): nearer neighbours contribute
# more. (NOTE(review): "weigths" is a typo kept as-is; it is referenced below.)
weigths = np.diag(np.logspace(0, -1.5, recall_num))
print(" building index")
since = time.time()
invert_index = get_invert_index(index_features)
print("build index used {} s".format(str(time.time() - since)))
print("searching for query vect ...")
since = time.time()
# D = L2 distances, I = neighbour row indices into index_features.
D, I = invert_index.search(query_features, recall_num)
print("search used {} s".format(str(time.time() - since)))
augment_query_features = np.zeros(query_features.shape)
for i, index in enumerate(I):
    augment_query_features[i, :] = query_features[i] + np.sum(np.dot(weigths, index_features[index]), axis=0)
# L2-normalize rows so downstream dot products act as cosine similarity.
augment_query_features = normalize(augment_query_features)
print("searching for index vect ...")
since = time.time()
D, I = invert_index.search(index_features, recall_num)
print("search used {} s".format(str(time.time() - since)))
augment_index_features = np.zeros(index_features.shape)
for i, index in enumerate(I):
    augment_index_features[i, :] = index_features[i] + np.sum(np.dot(weigths, index_features[index]), axis=0)
augment_index_features = normalize(augment_index_features)
joblib.dump((query_ids, augment_query_features, index_ids, augment_index_features), "dba_2.pkl")
|
"""
Contain functions related to the creation, manipulation and the retrival of any useful information of the
tic-tac-toe board.
"""
import numpy as np
from copy import deepcopy
# Cell states used throughout the board module.
BLANK_STATE = 0  # empty cell
HUMAN_STATE = 1  # cell occupied by the human player
BOT_STATE = 2    # cell occupied by the bot
def create_board():
    """Create an empty 3x3 Tic Tac Toe board.

    :return: type: numpy.ndarray
        A 3x3 integer array with every cell set to BLANK_STATE
    """
    board = np.full((3, 3), BLANK_STATE, dtype=int)
    return board
def get_possible_moves(board):
    """Collect the indexes of every blank cell on the board.

    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :return: type: list
        (row_index, column_index) tuples for each BLANK_STATE cell,
        in row-major order
    """
    return [(row_index, col_index)
            for row_index, row in enumerate(board)
            for col_index, cell in enumerate(row)
            if cell == BLANK_STATE]
def win_check(board):
    """Determine whether either player owns a complete line.

    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :return: type: bool
        True if any row, column or diagonal is fully occupied by one player
    """
    size = len(board)

    def _owned(cells):
        # A line wins when every cell belongs to the same single player.
        cells = list(cells)
        return (all(cell == HUMAN_STATE for cell in cells)
                or all(cell == BOT_STATE for cell in cells))

    lines = [list(row) for row in board]                      # straight rows
    lines += [list(col) for col in zip(*board)]               # vertical columns
    lines.append([board[i][i] for i in range(size)])          # diagonal \
    lines.append([board[i][-(i + 1)] for i in range(size)])   # diagonal /
    return any(_owned(line) for line in lines)
def get_winning_combination_index(board):
    """Find the indexes of the winning line, if any.

    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :return: type: list
        List of (row_index, column_index) tuples forming the winning line,
        or None when no player has won
    """
    size = len(board)

    def _owned(cells):
        # A line wins when every cell belongs to the same single player.
        cells = list(cells)
        return (all(cell == HUMAN_STATE for cell in cells)
                or all(cell == BOT_STATE for cell in cells))

    # Straight row win
    for row_index, row in enumerate(board):
        if _owned(row):
            return [(row_index, box_index) for box_index in range(size)]
    # Vertical column win
    for col_index, col in enumerate(zip(*board)):
        if _owned(col):
            return [(row_index, col_index) for row_index in range(size)]
    # Diagonal win \ type
    if _owned(board[i][i] for i in range(size)):
        return [(index, index) for index in range(size)]
    # Diagonal win / type
    if _owned(board[i][-(i + 1)] for i in range(size)):
        return [(row_index, box_index)
                for row_index, box_index in zip(range(size), reversed(range(size)))]
    return None
def get_turn_number(board):
    """
    Calculates the total number of moves on the board. The number of moves on the board is the same as the turn number
    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :return: type: int
        The number of moves on the board or the turn number
    """
    # Vectorized count replaces the hand-rolled double loop; int() keeps the
    # return type a plain Python int as before.
    return int(np.count_nonzero(np.asarray(board) != BLANK_STATE))
def is_board_full(board):
    """Report whether no moves remain (terminal position).

    Use win_check() before this function to distinguish a draw from a win.

    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :return: type: bool
        True if no BLANK_STATE cell remains, else False
    """
    # The board is full exactly when no row still contains a blank cell.
    return not any(BLANK_STATE in row for row in board)
def display_board(board, players):
    """
    Converts all human & bot states on the board to mark and prints the board.
    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :param players: type: list
        List containing both human & bot player class instances
    """
    # Convert numpy array into python list
    # Create a copy of the board
    # NOTE(review): board.tolist() already returns fresh nested lists, so the
    # deepcopy here is redundant (but harmless).
    board_copy = deepcopy(board.tolist())
    # Loop board
    for row_index, row in enumerate(board, start=0):
        for box_index, box in enumerate(row, start=0):
            if box == HUMAN_STATE:
                # Human mark
                board_copy[row_index][box_index] = next(
                    player.mark for player in players if player.state == HUMAN_STATE)
            elif box == BOT_STATE:
                # Bot mark
                board_copy[row_index][box_index] = next(player.mark for player in players if player.state == BOT_STATE)
            else:
                # Blank mark
                board_copy[row_index][box_index] = "_"
    # Print board
    for row in board_copy:
        print(row)
def update_board(board, move, player):
    """
    Check if the selected move is valid and not occupied by other player state on the board.
    :param board: type: numpy.ndarray
        The current state of the Tic Tac Toe board game
    :param move: type: tuple or list
        Contain the column and row index of the selected move
        Selected move index is in numpy array format (<row_index>, <column_index>)
    :param player: type: player class instance
        The class instance of the player who is making the move (Bot or Human instance)
    :return: type: numpy.ndarray
        The updated board; on an occupied cell the player is re-prompted
        until a valid move is made
    """
    # Check if the selected move is valid (BLANK_STATE)
    if board[move[0]][move[1]] == BLANK_STATE:
        # Replace blank with player state
        board[move[0]][move[1]] = player.state
        # BUG FIX: the docstring promised the updated board, but the valid
        # branch previously fell through and returned None.
        return board
    # Invalid move, prompt the user to select a move again
    print("Invalid move, please re-enter move...")
    return update_board(board, player.make_move(board), player)
|
import discord
import requests
from discord.ext import commands
from near.database import get_embeds
class Crypto(commands.Cog):
    """Cog with crypto price-lookup commands and a hastebin uploader.

    Every command follows the same shape: post a "please wait" embed, fetch
    data via requests, then replace the placeholder with a result embed (or
    an error embed if anything raised).

    NOTE(review): requests is a blocking HTTP client used inside async
    command handlers; this stalls the event loop for the duration of each
    request — consider aiohttp. TODO confirm acceptable for this bot's load.
    """
    def __init__(self, client: commands.Bot):
        self.client = client
        # This is the please-wait/Loading embed
        self.please_wait_emb = discord.Embed(title=get_embeds.PleaseWait.TITLE, description=get_embeds.PleaseWait.DESCRIPTION, color=get_embeds.PleaseWait.COLOR)
        self.please_wait_emb.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
        self.please_wait_emb.set_thumbnail(url=get_embeds.PleaseWait.THUMBNAIL)
        self.please_wait_emb.set_footer(text=get_embeds.PleaseWait.FOOTER)
    @commands.command()
    async def bitcoin(self, ctx):
        """Show the current BTC price in USD and EUR."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.get('https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD,EUR')
            r = r.json()
            usd = r['USD']
            eur = r['EUR']
            embed=discord.Embed(title="Bitcoin", color=get_embeds.Common.COLOR)
            embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed.set_thumbnail(url="https://cdn.pixabay.com/photo/2013/12/08/12/12/bitcoin-225079_960_720.png")
            embed.add_field(name="USD", value=f"{usd}$", inline=False)
            embed.add_field(name="EUR", value=f"{eur}€", inline=False)
            embed.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
    @commands.command()
    async def eth(self, ctx):
        """Show the current ETH price in USD and EUR."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.get('https://min-api.cryptocompare.com/data/price?fsym=ETH&tsyms=USD,EUR')
            r = r.json()
            usd = r['USD']
            eur = r['EUR']
            embed=discord.Embed(title="Ethereum", color=get_embeds.Common.COLOR)
            embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/271256875205525504/374282740218200064/2000px-Ethereum_logo.png")
            embed.add_field(name="USD", value=f"{usd}$", inline=False)
            embed.add_field(name="EUR", value=f"{eur}€", inline=False)
            embed.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
    @commands.command()
    async def hastebin(self, ctx, *, message):
        """Upload *message* to hastebin and post the resulting link."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.post("https://hastebin.com/documents", data=message).json()
            try:
                embed=discord.Embed(title="Hastebin", color=get_embeds.Common.COLOR)
                embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
                embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/877796755234783273/879586340520480768/large.png")
                embed.add_field(name="Link", value=f"https://hastebin.com/{r['key']}", inline=False)
                embed.add_field(name=f"Text by {ctx.author.name}", value=f"{message}", inline=False)
                embed.set_footer(text=f"Requested by {ctx.author.name}")
                await loading_message.delete()
                await ctx.send(embed=embed)
            except:
                # Fallback: send the link without echoing the text (e.g. when
                # the message does not fit in an embed field).
                embed=discord.Embed(title="Hastebin", color=get_embeds.Common.COLOR)
                embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
                embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/877796755234783273/879586340520480768/large.png")
                embed.add_field(name="Link", value=f"https://hastebin.com/{r['key']}", inline=False)
                embed.set_footer(text=f"Requested by {ctx.author.name}")
                await loading_message.delete()
                await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
    @commands.command()
    async def xmr(self, ctx):
        """Show the current XMR (Monero) price in USD and EUR."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.get("https://min-api.cryptocompare.com/data/price?fsym=XMR&tsyms=USD,EUR")
            NegroPuket = r.json()
            eur = NegroPuket['EUR']
            usd = NegroPuket['USD']
            embed=discord.Embed(title="XMR", color=get_embeds.Common.COLOR)
            embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/877796755234783273/879739662837633074/monero-logo-png-transparent.png")
            embed.add_field(name="USD", value=f"{usd}", inline=False)
            embed.add_field(name="EUR", value=f"{eur}", inline=True)
            embed.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
    @commands.command()
    async def doge(self, ctx):
        """Show the current DOGE price in USD and EUR."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.get("https://min-api.cryptocompare.com/data/price?fsym=DOGE&tsyms=USD,EUR")
            NegroPuketDOGE = r.json()
            eur = NegroPuketDOGE['EUR']
            usd = NegroPuketDOGE['USD']
            embed=discord.Embed(title="Doge Coin", color=get_embeds.Common.COLOR)
            embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/877796755234783273/879741979183968286/Dogecoin_Logo.png")
            embed.add_field(name="USD", value=f"{usd}", inline=False)
            embed.add_field(name="EUR", value=f"{eur}", inline=True)
            embed.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
    @commands.command()
    async def xrp(self, ctx):
        """Show the current XRP (Ripple) price in USD and EUR."""
        loading_message = await ctx.send(embed=self.please_wait_emb)
        try:
            r = requests.get("https://min-api.cryptocompare.com/data/price?fsym=XRP&tsyms=USD,EUR")
            kekistan = r.json()
            eur = kekistan['EUR']
            usd = kekistan['USD']
            embed=discord.Embed(title="Ripple", color=get_embeds.Common.COLOR)
            embed.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed.set_thumbnail(url="https://cdn.discordapp.com/attachments/877796755234783273/879741815237017680/52.png")
            embed.add_field(name="USD", value=f"{usd}", inline=False)
            embed.add_field(name="EUR", value=f"{eur}", inline=True)
            embed.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed)
        except Exception as e:
            embed3=discord.Embed(title=get_embeds.ErrorEmbeds.TITLE, description=get_embeds.ErrorEmbeds.DESCRIPTION, color=get_embeds.ErrorEmbeds.COLOR)
            embed3.set_author(name=get_embeds.Common.AUTHOR_NAME, icon_url=get_embeds.Common.AUTHOR_URL)
            embed3.set_thumbnail(url=get_embeds.ErrorEmbeds.THUMBNAIL)
            embed3.add_field(name=get_embeds.ErrorEmbeds.FIELD_NAME, value=f"{e}", inline=False)
            embed3.set_footer(text=f"Requested by {ctx.author.name}")
            await loading_message.delete()
            await ctx.send(embed=embed3)
def setup(client: commands.Bot):
    """Extension entry point used by discord.py's loader: register the Crypto cog."""
    client.add_cog(Crypto(client))
|
import copy
from numpy import float
import random
"""
Entrospector
Research project for Dr. Bryan Pickett
Developed by Piotr Senkow
November 10, 2017
"""
class Entropy:
    """Entropy bookkeeping for a gene-interaction network and a duplicated copy.

    ``system`` maps each node to a list of connected nodes and
    ``system_counts`` maps each node to its connection count.  The duplicate
    network is deep-copied so simulated degeneration never mutates the
    caller's original data structures.
    """

    def __init__(self, system, system_counts, duplicate, duplicate_counts):
        self.genome = system
        self.genome_counts = system_counts
        # Deep copies: degeneration() mutates these; the caller's dicts stay intact.
        self.duplicated_system = copy.deepcopy(duplicate)
        self.duplicate_counts = copy.deepcopy(duplicate_counts)

    def send_dictionary(self):
        """Return the original network's adjacency mapping."""
        return self.genome

    def print_network(self):
        """Print each node's connections and connection count for the original network."""
        for x in self.genome:
            print("{} : {}".format(x, self.genome[x]))
        for x in self.genome_counts:
            print("Gene {} connects to {} genes".format(x, self.genome_counts[x]))

    def print_duplicate_network(self):
        """Print each node's connections and connection count for the duplicate network."""
        for x in self.duplicated_system:
            print("{} : {}".format(x, self.duplicated_system[x]))
        for x in self.duplicate_counts:
            print("Gene {} connects to {} genes".format(x, self.duplicate_counts[x]))

    def calculate_network_entropy(self):
        """Print and return the entropy of the original network.

        Entropy is modelled as 1 / (sum of connection counts), where a node
        with exactly one connection contributes nothing to the denominator.
        Raises ZeroDivisionError when every node has <= 1 connection, exactly
        as the original code did.
        """
        total_entropy = 0
        for x in self.genome_counts:
            number_of_connections = self.genome_counts[x]
            if number_of_connections == 1:  # a lone connection adds no entropy
                number_of_connections = 0
            total_entropy += number_of_connections  # grows the denominator
        print("1/{}".format(total_entropy))
        # Bug fix: this previously called float() imported via
        # ``from numpy import float``, an alias removed in NumPy >= 1.24.
        # Python 3's true division already produces a float.
        total_entropy = 1 / total_entropy
        print("The total entropy of the entire system is {}\n".format(total_entropy))
        return total_entropy  # returned for symmetry with calculate_duplicate_entropy

    def calculate_duplicate_entropy(self):
        """Print and return the entropy of the (possibly degenerated) duplicate network."""
        total_entropy = 0
        for x in self.duplicate_counts:
            number_of_connections = self.duplicate_counts[x]
            if number_of_connections == 1:  # a lone connection adds no entropy
                number_of_connections = 0
            total_entropy += number_of_connections
        print("1/{}".format(total_entropy))
        # Same numpy.float fix as calculate_network_entropy.
        total_entropy = 1 / total_entropy
        print("The total entropy of the duplicated system is {}\n".format(total_entropy))
        return total_entropy

    def duplicated_length(self):
        """Print how many nodes remain in the duplicated network."""
        print("The duplicated graph currently has {} nodes".format(len(self.duplicated_system)))

    def degeneration(self):
        """Randomly delete ~10% of the duplicate network's nodes in place.

        Each chosen node is removed together with every reference to it in
        its neighbours' adjacency lists; a neighbour left with no connections
        is deleted as well (a node cannot exist alone).  Counts are updated
        so entropy calculations reflect the degenerated network.
        """
        print("Degenerating 10% of the graph\n")
        degeneration_constant = len(self.duplicated_system)
        constant = int(degeneration_constant * 0.10)  # how many nodes to delete
        j = 0  # deletions performed so far
        while j < constant:
            # Pick a random surviving node to delete.
            key, val = random.choice(list(self.duplicated_system.items()))
            # Snapshot its neighbours before mutating the adjacency lists.
            temp_list = copy.copy(self.duplicated_system.get(key))
            for i in range(0, len(self.duplicated_system.get(key))):
                # Remove this node from each neighbour's connection list.
                self.duplicated_system.get(temp_list[i]).remove(key)
                if not self.duplicated_system.get(temp_list[i]):
                    # Neighbour has no connections left: delete it too.
                    self.duplicated_system.pop(temp_list[i])
                    self.duplicate_counts.pop(temp_list[i])
                    # NOTE(review): j is also incremented for cascaded
                    # deletions, so a pass may pick fewer than ``constant``
                    # random nodes — preserved as-is.
                    j = j + 1
            # Finally drop the chosen node itself from both mappings.
            self.duplicated_system.pop(key, None)
            self.duplicate_counts.pop(key, None)
            j += 1
|
# Generated by Django 2.2.10 on 2020-03-20 11:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make Registrations.email unique with a custom
    duplicate-email error message."""
    dependencies = [
        ('registration', '0003_auto_20200320_1645'),
    ]
    operations = [
        migrations.AlterField(
            model_name='registrations',
            name='email',
            field=models.EmailField(default='', error_messages={'unique': 'This email Id already exist.'}, max_length=254, unique=True),
        ),
    ]
|
# !/usr/local/python/bin/python
# -*- coding: utf-8 -*-
# (C) Wu Dong, 2020
# All rights reserved
# @Author: 'Wu Dong <wudong@eastwu.cn>'
# @Time: '2020-06-23 11:00'
"""
给出集合 [1,2,3,…,n],其所有元素共有 n! 种排列。
按大小顺序列出所有排列情况,并一一标记,当 n = 3 时, 所有排列如下:
"123"
"132"
"213"
"231"
"312"
"321"
给定 n 和 k,返回第 k 个排列。
说明:
给定 n 的范围是 [1, 9]。
给定 k 的范围是[1, n!]。
示例 1:
输入: n = 3, k = 3
输出: "213"
"""
class Solution(object):
    def getPermutation(self, n, k):
        """Return the k-th (1-based) lexicographic permutation of "1".."n".

        The previous implementation backtracked through permutations one by
        one until the k-th was reached — O(k) work, very slow for large k
        (the script below times k=296662).  This version decodes k-1 in the
        factorial number system: when ``slots`` digits remain, each choice of
        the next digit accounts for slots! permutations, so divmod selects
        every digit directly.  Overall O(n^2) due to list.pop.

        :type n: int  (1 <= n <= 9)
        :type k: int  (1 <= k <= n!)
        :rtype: str
        """
        # factorials[i] == i!
        factorials = [1] * (n + 1)
        for i in range(1, n + 1):
            factorials[i] = factorials[i - 1] * i
        remaining = [str(d) for d in range(1, n + 1)]
        index = k - 1  # switch to a 0-based rank
        digits = []
        for slots in range(n - 1, -1, -1):
            # slots! permutations share each choice of the next digit.
            choice, index = divmod(index, factorials[slots])
            digits.append(remaining.pop(choice))
        return "".join(digits)
if __name__ == "__main__":
    import time

    # Time a large query (n=9 has 362880 permutations) as a quick benchmark.
    started = time.time()
    print(Solution().getPermutation(9, 296662))
    print(time.time() - started)
|
from sstcam_sandbox import get_checs
from CHECLabPy.plotting.setup import Plotter
from TargetCalibSB.pedestal import PedestalTargetCalib
import numpy as np
from os.path import join
class Hist2D(Plotter):
    """2D image plot of per-cell values, masking out cells with no hits."""
    def plot(self, values, hits, clabel):
        """Render ``values`` as a colour image, hiding cells where ``hits`` is zero.

        values -- 2D array to display (block vs. blockphase + position)
        hits   -- 2D array of sample counts; zero-hit cells are masked
        clabel -- colorbar label
        """
        # Masked cells are transparent, revealing the hatched axes background.
        masked = np.ma.masked_where(hits == 0, values)
        # NOTE(review): self.ax / self.fig are presumably provided by the
        # Plotter base class (not visible here) — confirm.
        im = self.ax.imshow(
            masked, cmap="viridis", origin='lower', aspect='auto'
        )
        cbar = self.fig.colorbar(im)
        # Hatch the background so masked (no-data) cells are visually distinct.
        self.ax.patch.set(hatch='xx')
        self.ax.set_xlabel("Blockphase + Waveform position")
        self.ax.set_ylabel("Block")
        cbar.set_label(clabel)
class HistPlot(Plotter):
    """Step histogram of residuals, with summary statistics in the legend."""
    def plot(self, hist, edges, mean, std):
        """Draw a step histogram from precomputed counts and bin edges.

        hist  -- bin counts (len == len(edges) - 1)
        edges -- bin edges as produced by np.histogram
        mean, std -- summary statistics shown in the legend label
        """
        label = f"\nMean: {mean:.3f} StdDev: {std:.3f}"
        # Plot bin centres weighted by the precomputed counts so np.histogram
        # output can be fed straight to ax.hist.
        between = (edges[1:] + edges[:-1]) / 2
        self.ax.hist(
            between, bins=edges, weights=hist, label=label,
            histtype='step',
        )
    def finish(self):
        """Final axis labelling and legend.

        NOTE(review): presumably invoked by the Plotter base class during
        save() — confirm against Plotter's implementation.
        """
        self.ax.set_xlabel("Residuals (ADC)")
        self.add_legend('best')
def process(pedestal_path, output_dir):
    """Produce summary plots for one pedestal calibration (.tcal) file.

    pedestal_path -- path to the TargetCalib pedestal file
    output_dir    -- directory receiving hist.pdf, hist2d_mean.pdf and
                     hist2d_std.pdf
    """
    pedestal = PedestalTargetCalib.from_tcal(pedestal_path)
    # Summary statistics over all per-cell standard deviations.
    std_mean = np.mean(pedestal.std)
    std_std = np.std(pedestal.std)
    std_hist, std_edges = np.histogram(pedestal.std, bins=100)
    p_hist = HistPlot()
    p_hist.plot(std_hist, std_edges, std_mean, std_std)
    p_hist.save(join(output_dir, "hist.pdf"))
    # 2D maps use index [0] only — presumably the first module/channel slice;
    # TODO confirm against PedestalTargetCalib's array layout.
    p_hist2d_pedestal = Hist2D()
    p_hist2d_pedestal.plot(pedestal.pedestal[0], pedestal.hits[0], "Pedestal Mean (ADC)")
    p_hist2d_pedestal.save(join(output_dir, "hist2d_mean.pdf"))
    p_hist2d_std = Hist2D()
    p_hist2d_std.plot(pedestal.std[0], pedestal.hits[0], "Pedestal Stddev (ADC)")
    p_hist2d_std.save(join(output_dir, "hist2d_std.pdf"))
def main():
    """Run the pedestal analysis for every measured temperature setting.

    The five datasets were previously handled by copy-pasted path pairs;
    only the temperature tag differs, so they are generated in a loop.
    """
    for temperature in ("10deg", "15deg", "23deg", "30deg", "40deg"):
        pedestal_path = get_checs(
            f"d181203_erlangen/pedestal/Pedestal_{temperature}_ped.tcal"
        )
        output_dir = get_checs(
            f"d181203_erlangen/pedestal/Pedestal_{temperature}_r0"
        )
        process(pedestal_path, output_dir)
if __name__ == '__main__':
    # Allow running the pedestal analysis directly as a script.
    main()
|
# -*- coding: utf-8 -*-
from odoo import http, _
from odoo.addons.portal.controllers.portal import CustomerPortal, pager as portal_pager
from odoo.exceptions import AccessError, MissingError
from collections import OrderedDict
from odoo.http import request
class PortalMembership(CustomerPortal):
    """Customer-portal pages listing the logged-in user's gym memberships.

    Memberships are res.partner records linked to the current user via
    ``user_ids``.
    """
    def _prepare_home_portal_values(self, counters):
        """Add the membership counter shown on the portal home page."""
        values = super()._prepare_home_portal_values(counters)
        if 'membership_count' in counters:
            # NOTE(review): request.env.user would be the idiomatic way to
            # obtain the current user instead of browsing session.uid.
            logged_in_user = request.env['res.users'].browse([request.session.uid])
            membership_count = request.env['res.partner'].search_count([('user_ids', '=', logged_in_user.id)])
            values['membership_count'] = membership_count
        return values
    def _membership_get_page_view_values(self, membership, access_token, **kwargs):
        """Build the rendering context for a single membership detail page."""
        values = {
            'page_name': 'membership',
            'membership': membership,
        }
        return self._get_page_view_values(membership, access_token, values, 'my_membership_history', False, **kwargs)
    @http.route(['/my/membership', '/my/membership/page/<int:page>'], type='http', auth="user", website=True)
    def portal_my_membership(self, page=1, date_begin=None, date_end=None, sortby=None, filterby=None, **kw):
        """Paginated list of the current user's memberships.

        NOTE(review): date_begin/date_end/sortby/filterby are accepted and
        echoed back to the template but are not applied to the search domain.
        """
        values = self._prepare_portal_layout_values()
        Membership = request.env['res.partner']
        logged_in_user = request.env['res.users'].browse([request.session.uid])
        membership_count = Membership.search_count([('user_ids', '=', logged_in_user.id)])
        pager = portal_pager(
            url="/my/membership",
            url_args={'date_begin': date_begin, 'date_end': date_end, 'sortby': sortby},
            total=membership_count,
            page=page,
            step=self._items_per_page
        )
        memberships = Membership.search([('user_ids', '=', logged_in_user.id)], order=None, limit=self._items_per_page, offset=pager['offset'])
        # Remember the records just listed so detail-page prev/next navigation works.
        request.session['my_membership_history'] = memberships.ids[:100]
        values.update({
            'date': date_begin,
            'memberships': memberships,
            'page_name': 'membership',
            'pager': pager,
            'default_url': '/my/membership',
            'sortby': sortby,
            'filterby': filterby,
        })
        return request.render("custom_gym_app.portal_my_membership", values)
    @http.route(['/my/membership/<int:membership_id>'], type='http', auth="public", website=True)
    def portal_my_membership_detail(self, membership_id, access_token=None, report_type=None, download=False, **kw):
        """Detail page, or printable member-card report, for one membership."""
        try:
            membership_sudo = self._document_check_access('res.partner', membership_id, access_token)
        except (AccessError, MissingError):
            # No access / record gone: bounce back to the portal home.
            return request.redirect('/my')
        if report_type in ('html', 'pdf', 'text'):
            return self._show_report(model=membership_sudo, report_type=report_type, report_ref='custom_gym_app.report_member_card', download=download)
        values = self._membership_get_page_view_values(membership_sudo, access_token, **kw)
        return request.render("custom_gym_app.portal_my_membership_page", values)
|
import http.server
import sys
class RequestHandler(http.server.BaseHTTPRequestHandler):
    """Tiny echo server: "/" returns OK, "/get-my-path/<rest>" returns "/<rest>"."""

    def get_response(self):
        """Return the response body for known paths, or None for a 404."""
        if self.path.startswith("/get-my-path/"):
            # Echo back everything after the prefix, with a leading slash.
            return b"/" + self.path.split("/", maxsplit=2)[2].encode()
        if self.path == "/":
            return b"OK"
        return None

    def do_HEAD(self):
        """Send status and headers; body (if any) is written by do_GET."""
        response = self.get_response()
        if response is not None:
            self.send_response(200)
            self.send_header("Content-Type", "text/plain")
            # Header values should be strings, not ints.
            self.send_header("Content-Length", str(len(response)))
        else:
            self.send_response(404)
            self.send_header("Content-Type", "text/plain")
        self.end_headers()

    def do_GET(self):
        self.do_HEAD()
        response = self.get_response()
        # Bug fix: unknown paths used to call wfile.write(None), raising a
        # TypeError after the 404 headers were sent.
        if response is not None:
            self.wfile.write(response)
# Listen on all interfaces; the port is taken from the first CLI argument.
httpd = http.server.HTTPServer(("0.0.0.0", int(sys.argv[1])), RequestHandler)
httpd.serve_forever()
|
from __future__ import print_function
import os
import sys
import json
from lxml import etree
# Lookup tables loaded once at import time from JSON files next to this module.
# Bug fix: open() replaces the Python-2-only file() builtin, and the
# with-blocks close each handle promptly.
with open(os.path.join(os.path.dirname(__file__), 'languages.json')) as _fp:
    LANGUAGES = json.load(_fp)
with open(os.path.join(os.path.dirname(__file__), 'countries.json')) as _fp:
    COUNTRIES = json.load(_fp)
with open(os.path.join(os.path.dirname(__file__), 'plurals.json')) as _fp:
    PLURALS = json.load(_fp)
def indent(elem, level=0):
    """Pretty-print an XML element tree in place.

    Rewrites whitespace-only ``text``/``tail`` values so that each nesting
    level is indented by two spaces; existing non-whitespace content is
    left untouched.
    """
    pad = "\n" + level * "  "
    if len(elem):
        # Element with children: open onto a new, deeper-indented line.
        if not (elem.text and elem.text.strip()):
            elem.text = pad + "  "
        if not (elem.tail and elem.tail.strip()):
            elem.tail = pad
        child = None
        for child in elem:
            indent(child, level + 1)
        # The last child's tail brings the closing tag back to this level.
        if not (child.tail and child.tail.strip()):
            child.tail = pad
    elif level and not (elem.tail and elem.tail.strip()):
        # Leaf element (not the root): align the following sibling/closing tag.
        elem.tail = pad
def traverse(rootdir, visitor):
    """Walk an Android res/ tree and apply ``visitor`` to every translatable string.

    Visits <string>, <string-array> and <plurals> entries in values*/ XML
    resource files, skipping build output directories and malformed locale
    qualifiers.  The visitor is called as
    visitor(element, item, kind, index, locale, name, canonic_name, text,
    filename, priority, context) and returns truthy if it modified the tree;
    modified files are re-indented and written back in place.
    """
    for dirpath, dirnames, filenames in os.walk(rootdir):
        locale = None
        lastpart = os.path.split(dirpath)[1]
        # Skip anything under build/ or bin/ output directories.
        path_splits = set(dirpath.split(os.path.sep))
        avoided = set(['build','bin'])
        if len(path_splits.intersection(avoided)) > 0:
            continue
        if lastpart.startswith('values'):
            # Resource qualifier grammar: values[-<language>[-r<REGION>]].
            parts = lastpart.split('-')
            if len(parts)==1:
                locale=None  # default (untranslated) values/ directory
            else:
                language = parts[1]
                if language not in LANGUAGES:
                    continue
                locale = language.lower()
                if len(parts)>2:
                    region = parts[2]
                    if not region.startswith('r'):
                        continue
                    if not region[1:] in COUNTRIES:
                        continue
                    region = region[1:]
                    locale = locale+'-r'+region.upper()
        for fn in filter(lambda x:x.endswith('.xml'),filenames):
            modified = False
            fn = os.path.join(dirpath,fn)
            original_filename = fn.replace(rootdir,'')
            # remove_blank_text lets us re-indent cleanly on write-back.
            parser = etree.XMLParser(remove_blank_text=True)
            try:
                tree = etree.parse(fn,parser)
            except Exception as e:
                # Bug fix: "except Exception,e" was Python-2-only syntax.
                print("Error in file %s, %s" % (fn,e),file=sys.stderr)
                continue
            root = tree.getroot()
            if root.tag!='resources':
                continue
            # A bare XML comment sets the context for subsequent entries.
            globalcontext = None
            for el in root:
                if el.tag == 'string':
                    name = el.get('name')
                    priority = el.get('priority',4)
                    context = el.get('context',globalcontext)
                    translatable = el.get('translatable') != 'false'
                    text = el.text
                    if translatable:
                        modified = visitor(el,el,None,None,locale,name,name,text,original_filename,priority,context) or modified
                elif el.tag == 'string-array':
                    # Items inherit the array's priority/context/translatable
                    # attributes unless they override them.
                    array_name = el.get('name')
                    array_priority = el.get('priority',4)
                    array_context = el.get('context',globalcontext)
                    array_translatable = el.get('translatable')
                    items = el.xpath('item')
                    for index,item in enumerate(el):
                        priority = item.get('priority',array_priority)
                        context = item.get('context',array_context)
                        translatable = item.get('translatable',array_translatable) != 'false'
                        text = item.text
                        if translatable:
                            canonic_name = "%s::A::%03d" % (array_name,index)
                            modified = visitor(el,item,'array',index,locale,array_name,canonic_name,text,original_filename,priority,context) or modified
                elif el.tag == 'plurals':
                    plural_name = el.get('name')
                    plural_priority = el.get('priority',4)
                    plural_context = el.get('context',globalcontext)
                    plural_translatable = el.get('translatable')
                    for item in el:
                        quantity = item.get('quantity')
                        priority = item.get('priority',plural_priority)
                        context = item.get('context',plural_context)
                        translatable = item.get('translatable',plural_translatable) != 'false'
                        text = item.text
                        if translatable:
                            canonic_name = "%s::P::%s" % (plural_name,quantity)
                            modified = visitor(el,item,'quantity',quantity,locale,plural_name,canonic_name,text,original_filename,priority,context) or modified
                elif el.tag == etree.Comment:
                    globalcontext = el.text
                else:
                    pass
            if modified:
                indent(root)
                # Bug fix: write binary and close via a context manager —
                # etree.tostring(..., encoding='UTF-8') yields bytes, and the
                # old code leaked the handle on write errors.
                with open(fn, 'wb') as out:
                    out.write(etree.tostring(root,encoding='UTF-8'))
|
from django.urls import path
from .import views
# Route table for the site's top-level pages.
# NOTE(review): the index route is named "home" while the others follow a
# "<name>.page" convention — confirm before unifying, since templates may
# reverse() either style.
urlpatterns = [
    path('', views.index,name="home"),
    path('portfolio/', views.portfolio,name='portfolio.page'),
    path('price/', views.price,name='price.page'),
    path('blog/', views.blog,name='blog.page'),
    path('about/', views.about,name='about.page'),
]
def maxSum(arr):
    """Maximum sum obtainable by picking non-adjacent elements of ``arr``.

    The empty selection (sum 0) is always allowed, so an all-negative
    input yields 0.  Runs in O(n) time and O(1) space.
    """
    # with_last: best sum that uses the most recent element.
    # without_last: best sum that skips it.
    with_last, without_last = 0, 0
    for value in arr:
        with_last, without_last = without_last + value, max(with_last, without_last)
    return max(with_last, without_last)
# Read whitespace-separated integers from stdin and print the maximum sum
# obtainable without picking two adjacent elements.
inputList = [int(item) for item in input("Enter the list items: ").split()]
print(maxSum(inputList))
|
# GCS Utility Functions
# Copyright (C) 2014 Mitchell Barry
# Adapted from content distributed through Apache 2.0 License by Google
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
"""Google Cloud Storage Utility functions"""
__author__ = 'Mitchell Barry'
__email__ = 'mitch.barry@gmail.com'
import os
import logging
import cloudstorage as gcs
from google.appengine.api import app_identity
def load_from_gcs_bucket(filename,bucket=None):
    """
    Return file payload from Google Cloud Storage.

    filename -- object name within the bucket (no leading slash)
    bucket   -- bucket name; when None, falls back to the BUCKET_NAME
                environment variable or the app's default GCS bucket

    Returns an open cloudstorage read handle, or None if the object or
    bucket is unavailable.  Callers are responsible for closing the handle.
    """
    file = None
    # Retry can help overcome transient urlfetch or GCS issues, such as timeouts.
    my_default_retry_params = gcs.RetryParams(initial_delay=0.2,
                                              max_delay=5.0,
                                              backoff_factor=2,
                                              max_retry_period=15)
    # NOTE(review): this sets the retry parameters process-wide, not just for
    # this call — confirm that is intended.
    gcs.set_default_retry_params(my_default_retry_params)
    if bucket is None:
        # Default bucket name
        bucket = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
    try:
        if bucket is not None:
            file = gcs.open('/' + bucket + '/' + filename, 'r')
        else:
            logging.error('Bucket does not exist. Consider adding default bucket through GAE console')
    except gcs.NotFoundError:
        logging.error(filename + ' not found in default bucket')
    return file
|
import sys
import numpy as np
import scipy
from scipy import ndimage
def sigmoid(z):
    """Elementwise logistic function 1 / (1 + e^-z); works on scalars and arrays."""
    return 1 / (1 + np.exp(-z))
def initialize_with_zeros(dim):
    """Return a zero weight column vector of shape (dim, 1) and a zero bias."""
    weights = np.zeros((dim, 1))
    bias = 0
    # Sanity checks on the shapes/types handed back to the caller.
    assert weights.shape == (dim, 1)
    assert isinstance(bias, (float, int))
    return weights, bias
def propagate(w, b, X, Y):
    """Forward and backward pass for logistic regression.

    w -- weights, shape (n, 1)
    b -- bias, scalar
    X -- data, shape (n, m)
    Y -- labels in {0, 1}, shape (1, m)

    Returns (grads, cost): grads holds "dw" (shape of w) and "db" (scalar);
    cost is the mean cross-entropy over the m examples.
    """
    m = X.shape[1]
    # Bug fix: the forward pass previously computed sigmoid(w.T X + 1),
    # hard-coding the bias as 1 and silently ignoring the b parameter.
    # (Logistic activation written out so the block is self-contained.)
    A = 1 / (1 + np.exp(-(np.dot(w.T, X) + b)))
    cost = -np.sum(Y*np.log(A)+(1-Y)*np.log(1-A))/m
    dz = A-Y
    dw = np.dot(X, dz.T)/m
    db = np.sum(dz)/m
    assert(dw.shape == w.shape)
    assert(db.dtype == float)
    cost = np.squeeze(cost)
    assert(cost.shape == ())
    grads = {"dw":dw,
             "db":db
             }
    return grads, cost
def optimize(w, b, X, Y, num_iterations, learning_rate, print_cost=False):
    """Run gradient descent on (w, b).

    Returns (params, grads, costs): the final parameters, the gradients from
    the last iteration, and the cost sampled every 100 iterations.
    """
    costs = []
    for step in range(num_iterations):
        grads, cost = propagate(w, b, X, Y)
        dw, db = grads["dw"], grads["db"]
        # Rebind (not in-place) so the caller's arrays are left untouched.
        w = w - learning_rate * dw
        b = b - learning_rate * db
        if step % 100 == 0:
            costs.append(cost)
            if print_cost:
                print("Cost after iteration %i: %f" % (step, cost))
    params = {"w": w, "b": b}
    grads = {"dw": dw, "db": db}
    return params, grads, costs
def predict(w, b, X):
    """Return a (1, m) array of 0.0/1.0 class predictions for the columns of X.

    A column is labelled 1 when its logistic activation exceeds 0.5.
    """
    sample_count = X.shape[1]
    w = w.reshape(X.shape[0], 1)
    # Logistic activation (the sigmoid helper, inlined).
    activations = 1 / (1 + np.exp(-(np.dot(w.T, X) + b)))
    predictions = (activations > 0.5).astype(np.float64)
    assert predictions.shape == (1, sample_count)
    return predictions
def accuracy(Y_prediction, Y_label):
    """Percentage accuracy: 100 minus the mean absolute prediction error * 100."""
    mean_abs_error = np.mean(np.abs(Y_prediction - Y_label))
    return 100 - mean_abs_error * 100
def model(X_train, Y_train, X_test, Y_test, num_iterations=2000, learning_rate=0.5, print_cost=False):
    """Train logistic regression on the training set and report accuracies.

    X_* -- data arrays of shape (n_features, m); Y_* -- labels of shape (1, m).
    Returns a dict with the learned parameters, predictions, cost history
    and the hyperparameters used.
    """
    feature_count = X_train.shape[0]
    w, b = initialize_with_zeros(feature_count)
    parameters, grads, costs = optimize(
        w, b, X_train, Y_train,
        num_iterations=num_iterations,
        learning_rate=learning_rate,
        print_cost=print_cost,
    )
    w, b = parameters["w"], parameters["b"]
    print("b:" + str(b))
    print("w[0,0]:" + str(w[0, 0]))
    Y_prediction_test = predict(w, b, X_test)
    Y_prediction_train = predict(w, b, X_train)
    print("train accuracy: {}%".format(accuracy(Y_prediction_train, Y_train)))
    print("test_accuracy: {}%".format(accuracy(Y_prediction_test, Y_test)))
    return {
        "costs": costs,
        "Y_prediction_test": Y_prediction_test,
        "Y_prediction_train": Y_prediction_train,
        "w": w,
        "b": b,
        "learning_rate": learning_rate,
        "num_iterations": num_iterations,
    }
if __name__ == "__main__":
    # step 1 : sigmoid sanity check
    print("sigmoid([0,2]) = "+str(sigmoid(np.array([0,2]))))
    print("------------------------------------------------")
    # step 2 : parameter-initialization check
    dim = 2
    w,b = initialize_with_zeros(dim)
    print("w = "+str(w))
    print("b = "+str(b))
    print("------------------------------------------------")
    # step 3 : single-iteration check (forward prop --> cost --> backward prop)
    w, b, X, Y = np.array([[1.],[2.]]), 2., np.array([[1.,2.,-1.],[3.,4.,-3.2]]), np.array([[1,0,1]])
    grads, cost = propagate(w, b, X, Y)
    print ("dw = " + str(grads["dw"]))
    print ("db = " + str(grads["db"]))
    print ("cost = " + str(cost))
    print("------------------------------------------------")
    # step 4 : full training loop
    params, grads, costs = optimize(w, b, X, Y, num_iterations= 100, learning_rate = 0.009, print_cost = False)
    print ("w = " + str(params["w"]))
    print ("b = " + str(params["b"]))
    print ("dw = " + str(grads["dw"]))
    print ("db = " + str(grads["db"]))
    print("------------------------------------------------")
    # step 5 : prediction
    w = np.array([[0.1124579],[0.23106775]])
    b = -0.3
    X = np.array([[1.,-1.1,-3.2],[1.2,2.,0.1]])
    print ("predictions = " + str(predict(w, b, X)))
    # step 6 : full end-to-end test on the cat/non-cat dataset
    import dataset
    import matplotlib.pyplot as plt
    train_file = "../data/train_catvnoncat.h5"
    test_file = "../data/test_catvnoncat.h5"
    train_set_x_orig, train_set_y, test_set_x_orig, test_set_y, classes = dataset.load_dataset(train_file, test_file)
    train_set_x_flatten = dataset.vectorize4images(train_set_x_orig)
    test_set_x_flatten = dataset.vectorize4images(test_set_x_orig)
    train_set_x = dataset.normalize4images(train_set_x_flatten)
    test_set_x = dataset.normalize4images(test_set_x_flatten)
    d = model(train_set_x, train_set_y,
              test_set_x, test_set_y,
              num_iterations=4000, learning_rate=0.005,
              print_cost=True)
    #costs = np.squeeze(d["costs"])
    #print("costs.shape = "+str(costs.shape))
    #plt.plot(costs)
    #plt.ylabel("cost")
    #plt.xlabel("iterations(per handreds)")
    #plt.title("Learning rate = "+str(d["learning_rate"]))
    #plt.show()
    print("------------------------------------------------")
    # step 7 : try different learning rates
    #learning_rates = [0.01, 0.005, 0.001, 0.0001]
    #models = {}
    #for i in learning_rates:
    #    print("learning rate is "+str(i))
    #    models[str(i)] = model(train_set_x, train_set_y, test_set_x, test_set_y, num_iterations=1500, learning_rate=i, print_cost=False)
    #    print("------------------------------------------------")
    #for i in learning_rates:
    #    plt.plot(np.squeeze(models[str(i)]["costs"]), label=str(models[str(i)]["learning_rate"]))
    #plt.ylabel("cost")
    #plt.xlabel("iterations(hundreds)")
    #legend = plt.legend(loc="upper center", shadow=True)
    #frame = legend.get_frame()
    #frame.set_facecolor("0.9")
    #plt.show()
    # step 8 : test with your own image (read a file path from stdin each loop)
    num_px = train_set_x_orig[0].shape[0]
    while True:
        print("your image file : ")
        infile = sys.stdin.readline().strip()
        # NOTE(review): scipy.ndimage.imread and scipy.misc.imresize were
        # removed in modern SciPy — if this breaks, switch to imageio.imread
        # and PIL-based resizing.
        image = np.array(ndimage.imread(infile, flatten=False))
        image = image/255
        print("image.shape"+str(image.shape))
        print("image.shape="+str(image.shape))
        my_image = scipy.misc.imresize(image, size=(num_px, num_px)).reshape((1, num_px*num_px*3)).T
        print("my_image.shape"+str(my_image.shape))
        my_predicted_image = predict(d["w"], d["b"], my_image)
        print("y = " + str(np.squeeze(my_predicted_image)) + ", your algorithm predicts a \"" + classes[int(np.squeeze(my_predicted_image)),].decode("utf-8") + "\" picture.")
        plt.imshow(image)
|
import re
# Parse purchase lines of the form ">>Product<<12.50!3" — a product name,
# a unit price (integer or decimal) and a quantity — until the sentinel
# line "Purchase", then report the product names and the total spend.
text = input()
pattern = r'>>(?P<product>[A-Za-z]+)<<(?P<price>\d+(\.\d+)?)!(?P<quantity>\d+)'
total_spend = 0
print(f"Bought furniture:")
while not text == "Purchase":
    # fullmatch: the whole line must match, not just a prefix.
    match = re.fullmatch(pattern, text)
    if match is None:
        # Malformed lines are silently skipped.
        text = input()
        continue
    print(match.group("product"))
    price = float(match.group("price"))
    quantity = int(match.group("quantity"))
    total_spend += price * quantity
    text = input()
print(f"Total money spend: {total_spend:.2f}")
|
# Generated by Django 2.1.5 on 2019-03-04 14:45
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration adjusting Tutorial/TutorialCategory fields.

    NOTE(review): tutorial_published's default is a datetime literal frozen at
    the moment makemigrations ran — every new row gets this same timestamp.
    If "now at creation time" was intended, the model should use
    ``default=timezone.now`` (callable) instead; confirm before changing.
    """
    dependencies = [
        ('main', '0003_auto_20190304_1139'),
    ]
    operations = [
        migrations.AlterField(
            model_name='tutorial',
            name='tutorial_published',
            field=models.DateTimeField(default=datetime.datetime(2019, 3, 4, 14, 45, 32, 936854, tzinfo=utc), verbose_name='date published'),
        ),
        migrations.AlterField(
            model_name='tutorialcategory',
            name='category_slug',
            field=models.CharField(max_length=200),
        ),
    ]
|
# 1). The students in class A are named as follows:
#
# Bob
# John
# Sara
# Jack
# John
# Paul
# Belinda
# Jessica
#
# Organize the data above into a list.
x = '''Bob
John
Sara
Jack
John
Paul
Belinda
Jessica '''
# Strip the stray spaces, then split the multi-line string into a name list.
list1 = x.replace(' ', '').split('\n')
print(list1)
# Build list A with the student names converted to upper case.
x = x.upper()
list1 = x.replace(' ', '').split('\n')
print(list1)
# 3). Build a combined list AB of classes A and B (concatenation).
#     How many students are there in total?
# 2). The students in class B are named as follows:
#
# John
# John
# Rebecca
# Paula
# Brandon
# Elilzabeth
# Sara
#
# As with class A, build an upper-cased list B.
y = '''John
John
Rebecca
Paula
Brandon
Elilzabeth
Sara '''
y = y.upper()
list2 = y.replace(' ', '').split('\n')
print(list2)
list_total = list1 + list2
# 4). Sort the combined list AB alphabetically.
list_total.sort()
print(list_total)
# 5). How many distinct names appear in the combined list AB?
# sol 1 : using a set
set_st = set()
for i in list_total:
    set_st.add(i)
print(type(set_st), len(set_st), set_st)
# To convert the set back into a list:
result = list(set_st)
print(type(result), len(result), result)
# sol 2 : loop over the list, compare, and either store matches into a result
#         list or remove them in place
# skipped: the code gets long and adds little value
# Each student could instead be modelled as an object with its own ID;
# then loop over IDs and skip appending students whose name already appeared...
# the loop would be a nested for:
# for 1 : iterate the original list
# for 2 : compare each element against the list being built
# 6). Print only the names in AB that start with 'J'.
for i in list_total:
    if i[0] == 'J':
        print(i)
# 7). Print only the names in AB that end with 'A'.
for i in list_total:
    if i[-1] == 'A':
        print(i)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.