hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringdate 2015-01-01 00:00:47 2022-03-31 23:42:18 ⌀ | max_issues_repo_issues_event_max_datetime stringdate 2015-01-01 17:43:30 2022-03-31 23:59:58 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e11a8e425c834148530d1f4e74a6a8f4d690673a | 146 | py | Python | Curso Python/ex009.py | sandro-fidelis/Cursos | cee1960181b1309be93034694cab8cf2878e2194 | [
"MIT"
] | null | null | null | Curso Python/ex009.py | sandro-fidelis/Cursos | cee1960181b1309be93034694cab8cf2878e2194 | [
"MIT"
] | null | null | null | Curso Python/ex009.py | sandro-fidelis/Cursos | cee1960181b1309be93034694cab8cf2878e2194 | [
"MIT"
] | null | null | null | n = int(input('Qual tabuada deseja ver: '))
# Print the multiplication table (1..10) for the number held in `n`
# (read from user input earlier in the script).
divider = 11*'='
print(divider)
counter = 1
while counter <= 10:
    print('{} x {:2} = {}'.format(n, counter, counter*n))
    counter += 1
print(divider)
| 18.25 | 43 | 0.493151 |
e11b19ef6b4d98bab620857b523abf42ea96c9a9 | 8,782 | py | Python | train.py | genisplaja/tf-diffwave | 32b0b403e7ca157f015f9af9f7dcdfa79e312a6a | [
"MIT"
] | 23 | 2020-09-29T08:38:09.000Z | 2022-03-16T03:00:44.000Z | train.py | genisplaja/tf-diffwave | 32b0b403e7ca157f015f9af9f7dcdfa79e312a6a | [
"MIT"
] | 1 | 2020-10-03T08:36:48.000Z | 2020-10-03T08:36:48.000Z | train.py | genisplaja/tf-diffwave | 32b0b403e7ca157f015f9af9f7dcdfa79e312a6a | [
"MIT"
] | 7 | 2020-09-29T19:11:53.000Z | 2022-01-06T14:29:21.000Z | import argparse
import json
import os
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import tqdm
from config import Config
from dataset import LJSpeech
from model import DiffWave
class Trainer:
    """DiffWave trainer.

    (The original docstring said "WaveGrad"; this class trains the DiffWave
    model imported from `model.py`.)
    """
    def __init__(self, model, lj, config):
        """Initializer.
        Args:
            model: DiffWave, diffwave model.
            lj: LJSpeech, LJ-speech dataset
                which provides already batched and normalized speech dataset.
            config: Config, unified configurations.
        """
        self.model = model
        self.lj = lj
        self.config = config
        # number of training batches per epoch (rest of the dataset is test)
        self.split = config.train.split // config.data.batch
        self.trainset = self.lj.dataset().take(self.split) \
            .shuffle(config.train.bufsiz) \
            .prefetch(tf.data.experimental.AUTOTUNE)
        self.testset = self.lj.dataset().skip(self.split) \
            .prefetch(tf.data.experimental.AUTOTUNE)
        self.optim = tf.keras.optimizers.Adam(
            config.train.lr(),
            config.train.beta1,
            config.train.beta2,
            config.train.eps)
        # convert sample-count intervals from the config into batch-step intervals
        self.eval_intval = config.train.eval_intval // config.data.batch
        self.ckpt_intval = config.train.ckpt_intval // config.data.batch
        self.train_log = tf.summary.create_file_writer(
            os.path.join(config.train.log, config.train.name, 'train'))
        self.test_log = tf.summary.create_file_writer(
            os.path.join(config.train.log, config.train.name, 'test'))
        self.ckpt_path = os.path.join(
            config.train.ckpt, config.train.name, config.train.name)
        # cumulative product of (1 - beta_t): diffusion noise schedule
        self.alpha_bar = np.cumprod(1 - config.model.beta())
        # viridis color table used to render mel-spectrogram summary images
        self.cmap = tf.constant(plt.get_cmap('viridis').colors, dtype=tf.float32)

    def compute_loss(self, signal, logmel):
        """Compute loss for noise estimation.
        Args:
            signal: tf.Tensor, [B, T], raw audio signal segment.
            logmel: tf.Tensor, [B, T // hop, mel], mel-spectrogram.
        Returns:
            loss: tf.Tensor, [], L1-loss between noise and estimation.
        """
        # [B]
        bsize = tf.shape(signal)[0]
        # [B], random diffusion timestep in [1, iter] for each sample
        timesteps = tf.random.uniform(
            [bsize], 1, self.config.model.iter + 1, dtype=tf.int32)
        # [B], alpha_bar is 0-indexed while timesteps start at 1
        noise_level = tf.gather(self.alpha_bar, timesteps - 1)
        # [B, T], [B, T]
        noised, noise = self.model.diffusion(signal, noise_level)
        # [B, T]
        eps = self.model.pred_noise(noised, timesteps, logmel)
        # []
        loss = tf.reduce_mean(tf.abs(eps - noise))
        return loss

    def train(self, step=0, ir_unit=5):
        """Train diffwave model.
        Args:
            step: int, starting step.
            ir_unit: int, log every `ir_unit`-th intermediate representation.
        """
        for _ in tqdm.trange(step // self.split, self.config.train.epoch):
            with tqdm.tqdm(total=self.split, leave=False) as pbar:
                for signal, logmel in self.trainset:
                    with tf.GradientTape() as tape:
                        tape.watch(self.model.trainable_variables)
                        loss = self.compute_loss(signal, logmel)

                    grad = tape.gradient(loss, self.model.trainable_variables)
                    self.optim.apply_gradients(
                        zip(grad, self.model.trainable_variables))

                    # mean gradient norm, logged for training diagnostics
                    norm = tf.reduce_mean([tf.norm(g) for g in grad])
                    del grad

                    step += 1
                    pbar.update()
                    pbar.set_postfix(
                        {'loss': loss.numpy().item(),
                         'step': step,
                         'grad': norm.numpy().item()})

                    with self.train_log.as_default():
                        tf.summary.scalar('loss', loss, step)
                        tf.summary.scalar('grad norm', norm, step)
                        if step % self.eval_intval == 0:
                            # sample audio conditioned on the current batch's mel
                            pred, _ = self.model(logmel)
                            tf.summary.audio(
                                'train', pred[..., None], self.config.data.sr, step)
                            tf.summary.image(
                                'train mel', self.mel_img(pred), step)
                            del pred

                    if step % self.ckpt_intval == 0:
                        self.model.write(
                            '{}_{}.ckpt'.format(self.ckpt_path, step),
                            self.optim)

            # per-epoch evaluation: mean L1 loss over the held-out split
            loss = [
                self.compute_loss(signal, logmel).numpy().item()
                for signal, logmel in self.testset
            ]
            loss = sum(loss) / len(loss)

            with self.test_log.as_default():
                tf.summary.scalar('loss', loss, step)

                gt, pred, ir = self.eval()
                tf.summary.audio(
                    'gt', gt[None, :, None], self.config.data.sr, step)
                tf.summary.audio(
                    'eval', pred[None, :, None], self.config.data.sr, step)

                tf.summary.image(
                    'gt mel', self.mel_img(gt[None]), step)
                tf.summary.image(
                    'eval mel', self.mel_img(pred[None]), step)

                # log every ir_unit-th intermediate diffusion state
                for i in range(0, len(ir), ir_unit):
                    tf.summary.audio(
                        'ir_{}'.format(i),
                        np.clip(ir[i][None, :, None], -1., 1.),
                        self.config.data.sr, step)
                del gt, pred, ir

    def mel_img(self, signal):
        """Generate mel-spectrogram images.
        Args:
            signal: tf.Tensor, [B, T], speech signal.
        Returns:
            tf.Tensor, [B, mel, T // hop, 3], mel-spectrogram in viridis color map.
        """
        # [B, T // hop, mel]
        _, mel = self.lj.mel_fn(signal)
        # [B, mel, T // hop]
        mel = tf.transpose(mel, [0, 2, 1])
        # minmax norm in range(0, 1)
        mel = (mel - tf.reduce_min(mel)) / (tf.reduce_max(mel) - tf.reduce_min(mel))
        # in range(0, 255)
        mel = tf.cast(mel * 255, tf.int32)
        # [B, mel, T // hop, 3], map intensity to viridis RGB
        mel = tf.gather(self.cmap, mel)
        # make origin lower
        mel = tf.image.flip_up_down(mel)
        return mel

    def eval(self):
        """Generate evaluation purpose audio.
        Returns:
            speech: np.ndarray, [T], ground truth.
            pred: np.ndarray, [T], predicted.
            ir: List[np.ndarray], config.model.iter x [T],
                intermediate representations.
        """
        # [T]
        # FIX: use the dataset held by the trainer; the original referenced the
        # module-level global `lj`, which only exists when run via this script.
        speech = next(iter(self.lj.rawset))
        # [1, T // hop, mel]
        _, logmel = self.lj.mel_fn(speech[None])
        # [1, T], iter x [1, T]
        pred, ir = self.model(logmel)
        # [T]
        pred = tf.squeeze(pred, axis=0).numpy()
        # config.model.iter x [T]
        ir = [np.squeeze(i, axis=0) for i in ir]
        return speech.numpy(), pred, ir
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', default=None)
    parser.add_argument('--load-step', default=0, type=int)
    parser.add_argument('--ir-unit', default=10, type=int)
    parser.add_argument('--data-dir', default=None)
    parser.add_argument('--download', default=False, action='store_true')
    parser.add_argument('--from-raw', default=False, action='store_true')
    args = parser.parse_args()

    # optional JSON config overrides the built-in defaults
    config = Config()
    if args.config is not None:
        print('[*] load config: ' + args.config)
        with open(args.config) as f:
            config = Config.load(json.load(f))

    # make sure log and checkpoint directories exist
    log_path = os.path.join(config.train.log, config.train.name)
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    ckpt_path = os.path.join(config.train.ckpt, config.train.name)
    if not os.path.exists(ckpt_path):
        os.makedirs(ckpt_path)

    lj = LJSpeech(config.data, args.data_dir, args.download, not args.from_raw)
    diffwave = DiffWave(config.model)
    trainer = Trainer(diffwave, lj, config)

    if args.load_step > 0:
        # resolve the checkpoint for the requested step by scanning the
        # checkpoint directory for the matching TF '.index' file
        super_path = os.path.join(config.train.ckpt, config.train.name)
        ckpt_path = '{}_{}.ckpt'.format(config.train.name, args.load_step)
        ckpt_path = next(
            name for name in os.listdir(super_path)
            if name.startswith(ckpt_path) and name.endswith('.index'))
        ckpt_path = os.path.join(super_path, ckpt_path[:-6])  # strip '.index'
        print('[*] load checkpoint: ' + ckpt_path)
        trainer.model.restore(ckpt_path, trainer.optim)

    # persist the effective config next to the checkpoints for reproducibility
    with open(os.path.join(config.train.ckpt, config.train.name + '.json'), 'w') as f:
        json.dump(config.dump(), f)

    trainer.train(args.load_step, args.ir_unit)
| 37.370213 | 86 | 0.535186 |
e11c13c2da24636a124e7f9a0bd4c8ced1cf20aa | 1,122 | py | Python | src/foxdot/research/boudoir/180617_1936_chuchotement_pantophobique.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | [
"Apache-2.0"
] | 4 | 2018-06-29T18:39:34.000Z | 2021-06-20T16:44:29.000Z | src/foxdot/research/boudoir/180617_1936_chuchotement_pantophobique.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | [
"Apache-2.0"
] | null | null | null | src/foxdot/research/boudoir/180617_1936_chuchotement_pantophobique.py | Neko250/aisthesis | 1d4a2c3070d10596c28b25ea2170523583e7eff0 | [
"Apache-2.0"
] | null | null | null | # boudoir - chuchotement pantophobique
# https://www.youtube.com/watch?v=KL2zW6Q5hWs
# https://gist.github.com/jf-parent/c8ea7e54e30593af01512f4e21b54670
# FoxDot live-coding session: C major, root 0, 120 BPM.
# All names (Scale, Clock, glass, bass, soft, sawbass, p1..p3, b1, P, var,
# linvar) come from the FoxDot runtime environment.
Scale.default = Scale.major
Root.default = 0
Clock.bpm = 120

# Intro: one sustained glass hit, stopped after 16 beats.
b1.reset() >> glass(
    [0],
    dur = 16,
).after(16, 'stop')

def play1():
    # Low-pass filtered bass line with heavy reverb; self-stops after 320 beats.
    print('play1')
    p1.reset() >> bass(
        [0, [0, 1, 2]],
        dur = 8,
        oct = 5,
        lpf = 600,
        room = 1,
        mix = 1,
    ).after(320, 'stop')

def play2():
    # Soft pad with drifting formant/vibrato; stutters every 16 beats,
    # self-stops after 290 beats.
    print('play2')
    p2.reset() >> soft(
        [0],
        formant = linvar([0, 1], 8),
        sus = 2,
        vib = linvar([0, 2], 4),
        oct = var([3, 4, 5], 4),
    ).every(
        16,
        'stutter'
    ).after(290, 'stop')

def play3():
    # Saw-bass arpeggio; stops after 48 beats (re-triggered by the schedule below).
    print('play3')
    p3.reset() >> sawbass(
        P[0, 2, 4, [5, 6, 7]],
        dur = [1, [1, 2], 2, [2, 4]],
        oct = [4, 5, 6, 6],
        vib = [0, 0, 0, [0, 1]],
        formant = var([0, 1], 8),
    ).after(48, 'stop')

# Arrangement: schedule the sections on the global clock (times in beats).
Clock.set_time(0)
Clock.future(0, play1)
Clock.future(30, play2)
Clock.future(60, play3)
Clock.future(120, play3)
Clock.future(240, play3)
e1215b8a95ad1e693c4f500b1993173740393e02 | 14,101 | py | Python | cogs/fun.py | Der-Eddy/discord_bot | bc2511e6d030ee2e099410bd846ea871fe3f109d | [
"MIT"
] | 122 | 2016-08-05T02:27:31.000Z | 2022-03-21T07:53:10.000Z | cogs/fun.py | Der-Eddy/discord_bot | bc2511e6d030ee2e099410bd846ea871fe3f109d | [
"MIT"
] | 15 | 2017-12-07T14:28:20.000Z | 2021-11-19T13:03:37.000Z | cogs/fun.py | Der-Eddy/discord_bot | bc2511e6d030ee2e099410bd846ea871fe3f109d | [
"MIT"
] | 100 | 2016-08-21T18:12:29.000Z | 2022-02-19T11:21:23.000Z | import random
import urllib.parse
import sqlite3
import asyncio
import aiohttp
import discord
from discord.ext import commands
import loadconfig
class fun(commands.Cog):
    """Fun/meme commands for the bot: jokes, random pickers, cat pictures, xkcd."""

    def __init__(self, bot):
        self.bot = bot

    async def cog_command_error(self, ctx, error):
        # Central error hook for this cog: log to stdout only.
        print('Error in {0.command.qualified_name}: {1}'.format(ctx, error))

    def userOnline(self, memberList):
        """Return all non-bot members of `memberList` that are currently online."""
        return [member for member in memberList
                if member.status == discord.Status.online and not member.bot]

    @commands.command(aliases=['javascript', 'nodejs', 'js'])
    async def java(self, ctx):
        '''Because Java != JavaScript'''
        await ctx.send(':interrobang: Meintest du jQuery, Javascript oder Node.js? https://abload.de/img/2016-05-102130191kzpu.png')

    @commands.command(aliases=['c++', 'c#', 'objective-c'])
    async def csharp(self, ctx):
        '''How is anyone supposed to keep these apart???'''
        await ctx.send(':interrobang: Meintest du C, C++, C# oder Objective-C? https://i.imgur.com/Nd4aAXO.png')

    @commands.command()
    async def praise(self, ctx):
        '''Praise the Sun'''
        await ctx.send('https://i.imgur.com/K8ySn3e.gif')

    @commands.command()
    async def css(self, ctx):
        '''Counter Strike: Source'''
        await ctx.send('http://i.imgur.com/TgPKFTz.gif')

    @commands.command()
    async def countdown(self, ctx):
        '''It's the final countdown'''
        countdown = ['five', 'four', 'three', 'two', 'one']
        for num in countdown:
            await ctx.send('**:{0}:**'.format(num))
            await asyncio.sleep(1)
        await ctx.send('**:ok:** DING DING DING')

    @commands.command(aliases=['cat', 'randomcat'])
    async def neko(self, ctx):
        '''Random cat pictures nyan~'''
        # http://discordpy.readthedocs.io/en/latest/faq.html#what-does-blocking-mean
        async with aiohttp.ClientSession() as cs:
            async with cs.get('http://aws.random.cat/meow') as r:
                res = await r.json()
        emojis = [':cat2: ', ':cat: ', ':heart_eyes_cat: ']
        await ctx.send(random.choice(emojis) + res['file'])

    @commands.command(aliases=['rand'])
    async def random(self, ctx, *arg):
        '''Returns a random number or member

        Usage:
        -----------
        :random
            Returns a random number between 1 and 100
        :random coin
            Flips a coin (heads or tails)
        :random 6
            Returns a random number between 1 and 6
        :random 10 20
            Returns a random number between 10 and 20
        :random user
            Returns a random member that is currently online
        :random choice Dani Eddy Shinobu
            Picks one entry from the given list
        '''
        if ctx.invoked_subcommand is None:
            if not arg:
                start = 1
                end = 100
            elif arg[0] == 'flip' or arg[0] == 'coin':
                coin = ['Kopf', 'Zahl']
                await ctx.send(f':arrows_counterclockwise: {random.choice(coin)}')
                return
            elif arg[0] == 'choice':
                choices = list(arg)
                choices.pop(0)
                await ctx.send(f':congratulations: The winner is {random.choice(choices)}')
                return
            elif arg[0] == 'user':
                online = self.userOnline(ctx.guild.members)
                randomuser = random.choice(online)
                # only mention the winner if the caller may mention everyone
                if ctx.channel.permissions_for(ctx.author).mention_everyone:
                    user = randomuser.mention
                else:
                    user = randomuser.display_name
                await ctx.send(f':congratulations: The winner is {user}')
                return
            elif len(arg) == 1:
                start = 1
                end = int(arg[0])
            elif len(arg) == 2:
                start = int(arg[0])
                end = int(arg[1])
            await ctx.send(f'**:arrows_counterclockwise:** Zufällige Zahl ({start} - {end}): {random.randint(start, end)}')

    @commands.command()
    async def steinigt(self, ctx, member:str):
        '''Monty Python'''
        await ctx.send(f'R.I.P. {member}\nhttps://media.giphy.com/media/l41lGAcThnMc29u2Q/giphy.gif')

    @commands.command(aliases=['hypu', 'train'])
    async def hype(self, ctx):
        '''HYPE TRAIN CHOO CHOO'''
        hypu = ['https://cdn.discordapp.com/attachments/102817255661772800/219514281136357376/tumblr_nr6ndeEpus1u21ng6o1_540.gif',
                'https://cdn.discordapp.com/attachments/102817255661772800/219518372839161859/tumblr_n1h2afSbCu1ttmhgqo1_500.gif',
                'https://gfycat.com/HairyFloweryBarebirdbat',
                'https://i.imgur.com/PFAQSLA.gif',
                'https://abload.de/img/ezgif-32008219442iq0i.gif',
                'https://i.imgur.com/vOVwq5o.jpg',
                'https://i.imgur.com/Ki12X4j.jpg',
                'https://media.giphy.com/media/b1o4elYH8Tqjm/giphy.gif']
        msg = f':train2: CHOO CHOO {random.choice(hypu)}'
        await ctx.send(msg)

    @commands.command()
    async def xkcd(self, ctx, *searchterm: str):
        '''Shows the latest or a random XKCD comic

        Example:
        -----------
        :xkcd
        :xkcd random
        '''
        apiUrl = 'https://xkcd.com{}info.0.json'
        async with aiohttp.ClientSession() as cs:
            # fetch the latest comic first; its number is the upper bound for 'random'
            async with cs.get(apiUrl.format('/')) as r:
                js = await r.json()
            if ''.join(searchterm) == 'random':
                randomComic = random.randint(0, js['num'])
                async with cs.get(apiUrl.format('/' + str(randomComic) + '/')) as r:
                    if r.status == 200:
                        js = await r.json()
            comicUrl = 'https://xkcd.com/{}/'.format(js['num'])
            date = '{}.{}.{}'.format(js['day'], js['month'], js['year'])
            msg = '**{}**\n{}\nAlt Text:```{}```XKCD Link: <{}> ({})'.format(js['safe_title'], js['img'], js['alt'], comicUrl, date)
            await ctx.send(msg)

    @commands.command(aliases=['witz', 'joke'])
    async def pun(self, ctx):
        '''Because everyone likes bad jokes'''
        #ToDo: Add some way to fetch https://github.com/derphilipp/Flachwitze
        puns = ['Was sagt das eine Streichholz zum anderen Streichholz?\n Komm, lass uns durchbrennen',
                'Wieviele Deutsche braucht man um eine Glühbirne zu wechseln?\n Einen, wir sind humorlos und effizient.',
                'Wo wohnt die Katze?\n Im Miezhaus.',
                'Wie begrüßen sich zwei plastische Chirurgen?\n "Was machst du denn heute für ein Gesicht?"',
                'Warum essen Veganer kein Huhn?\n Könnte Ei enthalten',
                '85% der Frauen finden ihren Arsch zu dick, 10% zu dünn, 5% finden ihn so ok, wie er ist und sind froh, dass sie ihn geheiratet haben...',
                'Meine Freundin meint, ich wär neugierig...\n...zumindest\' steht das in ihrem Tagebuch.',
                '"Schatz, Ich muss mein T-Shirt waschen! Welches Waschmaschinen Programm soll ich nehmen?" - "Was steht denn auf dem T-Shirt drauf?"\n "Slayer!"',
                'Gestern erzählte ich meinem Freund, dass ich schon immer dieses Ding aus Harry Potter reiten wollte.\n"einen Besen?" "nein, Hermine."',
                'Warum gehen Ameisen nicht in die Kirche?\nSie sind in Sekten.',
                'Was steht auf dem Grabstein eines Mathematikers?\n"Damit hat er nicht gerechnet."',
                'Wenn ein Yogalehrer seine Beine senkrecht nach oben streckt und dabei furzt, welche Yoga Figur stellt er da?\n Eine Duftkerze',
                'Warum ging der Luftballon kaputt?\n Aus Platzgründen.',
                'Ich wollte Spiderman anrufen, aber er hatte kein Netz und beim Bäcker war alles belegt.',
                'Was vermisst eine Schraube am meisten? Einen Vater',
                'Geht ein Panda über die Straße. Bam....Bus!',
                'Unterhalten sich zwei Gletscher. Sagt der eine: "Was meinst du, was wird die Zukunft bringen?" Sagt der Andere: "Naja, wir werden Seen."',
                'Wenn sich ein Professor ein Brot macht ist das dann wissenschaftlich belegt?',
                'Knabbern zwei Männern an einer Eisenbahnschiene. Sagt der eine: "Ganz schön hart, oder?"\nSagt der andere: "Aber guck mal, da drübern ist ne Weiche"',
                'Warum sind bei IKEA Pfeile auf dem Boden?\nWeil es ein Einrichtungshaus ist',
                'Was macht die Security in der Nudelfabrik?\nDie Pasta auf.',
                'Wie nennt man einen kleinwüchsigen Securitymenschen?\nSicherheitshalber',
                # FIX: the next two entries were one concatenated string because of a
                # missing comma, which merged two unrelated jokes into a single pun.
                'Habe bei Weight Watchers angerufen. Hat keiner abgenommen.\nDanach beim DJ. Hat aber aufgelegt.',
                'Meine Schwester hat eine Tochter bekommen.\nDa wurde mein Wunsch nach einem Neffen zur Nichte gemacht.',
                'Praktizieren sie Inzest?\n"Mitnichten"',
                'Wann sinkt ein U-Boot?\nAm Tag der offenen Tür.',
                'Auf St. Pauli wurde letztens ein Sarg gefunden. Er konnte aber nicht geöffnet werden, war ein Zuhälter drin!',
                'Treffen sich zwei Anwälte. Fragt der eine "Na, wie geht\'s?" Antwortet der andere "Schlecht. Ich kann nicht klagen"',
                'Treffen sich zwei Jäger. Beide tot.',
                'Treffen sich zwei Päpste.',
                'Treffen sich zwei Psychologen, sagt der eine: "Dir geht\'s gut, wie geht\'s mir?"',
                'Treffen sich zwei Linksextreme in einer Bar, kommen drei Splittergruppen raus.',
                'Was macht man mit nem Hund ohne Beine?\nUm die Häuser ziehen.',
                'Wo findest du nen Hund ohne Beine?\nDa wo du ihn liegen lassen hast.',
                'Was macht eine Bombe im Bordell?\nPuff',
                'Und was macht eine Bombe im Treppenhaus?\nHochgehen',
                'Wo war Lucy nach der Explosion?\nÜberall',
                'Egal, wie dicht du bist. Göthe war dichter!',
                'Egal, wie gut du fährst. Züge fahren Güter!',
                'Egal, wie sauer du bist, Dinos sind Saurier!',
                'Egal, wie leer du bist, es gibt Menschen die sind Lehrer.',
                'Wissenschaftler haben herausgefunden\nund sind dann wieder reingegangen.',
                'Was ist klein, braun, rund und sitzt hinter Gittern? Eine Knastanie.',
                'Was liegt am Strand und kann nicht richtig reden? - Eine Nuschel!',
                'Was ist grün und klopft an die Tür? - Klopfsalat',
                'Was ist rot und steht am Straßenrand? Eine Hagenutte',
                'Und was ist blau und steht am Wegesrand? Eine Frostituierte',
                'Was ist rosa und schwimmt durchs Meer? Eine Meerjungsau.',
                'Was ist braun und schwimmt auch im Meer? Ein U-Brot.',
                'Was raucht und springt durch den Wald? Ein Kaminchen.',
                'Was machen Bits am liebsten? Busfahren.',
                'Warum ist der Programmierer in der Dusche gestorben? Auf der Flasche stand “einschäumen, ausspülen, wiederholen"',
                'Wo gehen Datenspeicher hin, wenn sie sich prügeln wollen? In den Byte Club.\n Und Regel Nummer Eins: Ihr verliert kein dword über den Byte Club!',
                'Wer wohnt im Dschungel und schummelt? Mogli',
                'Geht ein Mann zum Arzt weil er sich schlecht fühlt. Sagt der Arzt: "Sie müssen mit dem Masturbieren aufhören!"\nSagt der Mann: "Wieso das denn?!".\nSagt der Arzt: "Ja, sonst kann ich Sie nicht untersuchen."',
                'Wie heißt ein Spanier ohne Auto?\nCarlos',
                'Wie nennt man ein Cowboy ohne Pferd?\nSattelschlepper',
                'Kommt ein Cowboy aus dem Frisiersalon heraus\nPony weg',
                'Wie nennt man einen Schäfer, der seine Schafe schlägt?\nMähdrescher',
                'Was trinkt die Chefin?\nLeitungswasser',
                'Vampir in der Verkehrskontrolle.\n"Haben Sie was getrunken?"\n"Ja, zwei Radler."',
                'Wie nennt man jemanden, der DIN A4 Blätter scannt?\nScandinavier',
                'Wie nennt man einen Europäer aus Lettland?\nEuropalette',
                'Hab nem Hipster ins Bein geschossen\nJetzt hopster',
                'Wie viel wiegt ein Influencer?\nEin Instagramm',
                'Was ist gelb und kann nicht schwimmen?\nEin Bagger\nUnd warum nicht?\nHat nur einen Arm',
                'Was hat ein Mann ohne Beine?\nErdnüsse',
                'Welcher Vogel hat Darth Vader auf denn Kopf geschissen?\nDer Star wars',
                'Wie heißt ein Veganer Russe?\nMooskauer',
                'Was ist der Unterschied zwischen Grießbrei und einem Epileptiker?\nDer Grießbrei liegt in Zucker und Zimt, der Epileptiker liegt im Zimmer und zuckt.',
                'Was macht ein Clown im Büro?\nFaxen',
                'Was ist grūn und nuschelt im Gurkensalat?\nDill Schweiger',
                'Was ist die Vergangenheitsform von Tomate? Passierte Tomate',
                'Gehören abgetriebene Babys eigentlich auch zu den entfernen Verwandten?',
                'Kommt ein Dachdecker in ne Bar\nDa sagt der Barkeeper: "Der geht aufs Haus!"',
                'Was spricht man in der Sauna? Schwitzerdeutsch.',
                'Was ist grün und wird auf Knopfdruck rot?\nEin Frosch im Mixer',
                'Was ist weiß und fliegt über die Wiese?\nBiene Majo',
                'Warum trinken Veganer kein Leitungswasser?\nWeil es aus dem Hahn kommt']
        emojis = [':laughing:', ':smile:', ':joy:', ':sob:', ':rofl:']
        msg = f'{random.choice(emojis)} {random.choice(puns)}'
        await ctx.send(msg)
def setup(bot):
    # Entry point used by discord.py's extension loader to register this cog.
    bot.add_cog(fun(bot))
| 55.956349 | 226 | 0.582654 |
e121641fdd16503ebb092e218a41471693799a5f | 3,362 | py | Python | src/service/plugins/ssrs/ssr.py | awesome-archive/ssrs | 29c6e02d08270b3d9ca2174f29d4d32733acfdb6 | [
"Apache-2.0"
] | 32 | 2018-05-09T06:08:34.000Z | 2022-02-18T14:21:23.000Z | src/service/plugins/ssrs/ssr.py | awesome-archive/ssrs | 29c6e02d08270b3d9ca2174f29d4d32733acfdb6 | [
"Apache-2.0"
] | 1 | 2019-08-08T07:24:31.000Z | 2019-08-08T07:24:31.000Z | src/service/plugins/ssrs/ssr.py | awesome-archive/ssrs | 29c6e02d08270b3d9ca2174f29d4d32733acfdb6 | [
"Apache-2.0"
] | 19 | 2018-08-02T08:11:05.000Z | 2021-07-07T02:10:18.000Z | # !/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import json
import copy
import socket
import subprocess
import six
class SSR:
    """Parse a shadowsocksR server config file and build shareable service blobs."""

    class Service:
        """A single host/port service entry derived from the server config."""

        def __init__(self, conf):
            self.conf = conf

        def update(self, array):
            # Merge extra fields (port, password, remarks, ...) into the entry.
            self.conf.update(array)

        def port_open(self, first=True):
            """Probe the local service port, restarting the service once on failure."""
            probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            for _attempt in range(2):
                try:
                    probe.connect(('127.0.0.1', int(self.conf['port'])))
                    probe.shutdown(2)
                    return True
                except socket.error:
                    if not first:
                        return False
                    # First failure: try to restart the service, then probe once more.
                    self.restart()
                    return self.port_open(first=False)
            return False

        def restart(self):
            # Run the configured restart command, if one was provided.
            if 'restart' not in self.conf:
                return
            return subprocess.call(self.conf['restart'], shell=True)

        def get_data(self):
            """Return this service as a urlsafe-base64 JSON blob, or None if unreachable."""
            if not self.port_open():
                return None
            # the restart command is local-only and must not be shared
            self.conf.pop('restart')
            payload = json.dumps(self.conf).encode()
            return base64.urlsafe_b64encode(payload).decode()

    def __init__(self, conf, host, group, remarks, restart):
        self.data_list = []
        self.conf = conf
        with open(self.conf, 'r') as config_file:
            try:
                self.config = json.load(config_file)
            except Exception:
                raise ValueError('SSR config is not json: %s' % conf)
        self.host = host
        self.group = group
        self.remarks = remarks
        self.restart = restart

    def get_services(self):
        """Build the list of encoded service entries for every configured port."""
        if ('server_port' not in self.config or 'password' not in self.config) and 'port_password' not in self.config:
            raise KeyError('SSR config is incorrect')

        base_service = SSR.Service(
            {
                'host': self.host,
                'protocol': self.config.get('protocol', 'origin'),
                'protoparam': self.config.get('protocol_param', ''),
                'method': self.config.get('method', 'none'),
                'obfs': self.config.get('obfs', 'plain'),
                'obfsparam': self.config.get('obfs_param', ''),
                'remarks': self.remarks,
                'group': self.group,
                'restart': self.restart,
                'password': '',
                'port': 0
            }
        )

        if 'port_password' not in self.config:
            # Single-port config: one entry built directly from the base service.
            base_service.update({'port': self.config['server_port'], 'password': self.config['password']})
            url = base_service.get_data()
            if url is not None:
                self.data_list.append(url)
            return self.data_list

        # Multi-port config: one service per (port, password-or-dict) pair.
        for port, data in self.config['port_password'].items():
            service = copy.deepcopy(base_service)
            if type(data) is str:
                array = {'password': data}
            elif type(data) is dict:
                array = data
            else:
                continue
            array['remarks'] = self.remarks + ('_%s' % str(port))
            array['port'] = port
            service.update(array)
            url = service.get_data()
            if url is None:
                continue
            self.data_list.append(url)
        return self.data_list
| 34.659794 | 118 | 0.496133 |
e122eb0c0e3191c6ed28f670de3cb045fb8a32e8 | 1,866 | py | Python | asconnect/models/beta_detail.py | guojiubo/asconnect | 1c725dc2036f0617854f19b9a310a91c42239c72 | [
"MIT"
] | 14 | 2020-09-30T14:45:38.000Z | 2022-03-04T09:49:26.000Z | asconnect/models/beta_detail.py | guojiubo/asconnect | 1c725dc2036f0617854f19b9a310a91c42239c72 | [
"MIT"
] | 8 | 2020-09-30T14:50:18.000Z | 2022-01-25T06:18:20.000Z | asconnect/models/beta_detail.py | guojiubo/asconnect | 1c725dc2036f0617854f19b9a310a91c42239c72 | [
"MIT"
] | 7 | 2020-10-09T18:06:18.000Z | 2022-01-25T05:21:12.000Z | """Build beta detail models for the API"""
import enum
from typing import Dict, Optional
import deserialize
from asconnect.models.common import BaseAttributes, Links, Relationship, Resource
class ExternalBetaState(enum.Enum):
    """External beta state.

    Possible states of a build's external (public TestFlight) beta
    distribution; values mirror the App Store Connect API string constants.
    """

    PROCESSING = "PROCESSING"
    PROCESSING_EXCEPTION = "PROCESSING_EXCEPTION"
    MISSING_EXPORT_COMPLIANCE = "MISSING_EXPORT_COMPLIANCE"
    READY_FOR_BETA_TESTING = "READY_FOR_BETA_TESTING"
    IN_BETA_TESTING = "IN_BETA_TESTING"
    EXPIRED = "EXPIRED"
    READY_FOR_BETA_SUBMISSION = "READY_FOR_BETA_SUBMISSION"
    IN_EXPORT_COMPLIANCE_REVIEW = "IN_EXPORT_COMPLIANCE_REVIEW"
    WAITING_FOR_BETA_REVIEW = "WAITING_FOR_BETA_REVIEW"
    IN_BETA_REVIEW = "IN_BETA_REVIEW"
    BETA_REJECTED = "BETA_REJECTED"
    BETA_APPROVED = "BETA_APPROVED"
class InternalBetaState(enum.Enum):
    """Internal beta state.

    Possible states of a build's internal (team-only) beta distribution;
    a subset of the external states — internal builds skip beta review.
    """

    PROCESSING = "PROCESSING"
    PROCESSING_EXCEPTION = "PROCESSING_EXCEPTION"
    MISSING_EXPORT_COMPLIANCE = "MISSING_EXPORT_COMPLIANCE"
    READY_FOR_BETA_TESTING = "READY_FOR_BETA_TESTING"
    IN_BETA_TESTING = "IN_BETA_TESTING"
    EXPIRED = "EXPIRED"
    IN_EXPORT_COMPLIANCE_REVIEW = "IN_EXPORT_COMPLIANCE_REVIEW"
@deserialize.key("identifier", "id")
class BuildBetaDetail(Resource):
"""Represents a build localization."""
@deserialize.key("auto_notify_enabled", "autoNotifyEnabled")
@deserialize.key("external_build_state", "externalBuildState")
@deserialize.key("internal_build_state", "internalBuildState")
class Attributes(BaseAttributes):
"""Represents beta build localization attributes."""
auto_notify_enabled: bool
external_build_state: ExternalBetaState
internal_build_state: InternalBetaState
identifier: str
attributes: Attributes
relationships: Optional[Dict[str, Relationship]]
links: Links
| 32.172414 | 81 | 0.758307 |
e126ebf5b69520889633dea016ffe4b49b9b61da | 922 | py | Python | code2.py | cskurdal/VRSurfing | 6d3dae816a59b5949cac29d60b05ed75616c97f9 | [
"MIT"
] | null | null | null | code2.py | cskurdal/VRSurfing | 6d3dae816a59b5949cac29d60b05ed75616c97f9 | [
"MIT"
] | null | null | null | code2.py | cskurdal/VRSurfing | 6d3dae816a59b5949cac29d60b05ed75616c97f9 | [
"MIT"
] | null | null | null | import math
from plotter import Plotter
from plots import LinePlot
import board
import digitalio
import busio
import adafruit_sdcard
import storage
from adafruit_bitmapsaver import save_pixels
def plot():
    # Draw one full sine period (sampled every 4 degrees) with MicroPlot.
    sines = list(math.sin(math.radians(x))
        for x in range(0, 361, 4))
    lineplot = LinePlot([sines],'MicroPlot line')
    plotter = Plotter()
    lineplot.plot(plotter)

def save():
    # Mount the SD card over SPI and dump the current display to a BMP file.
    # NOTE(review): assumes CircuitPython hardware (board.SD_CS etc.) is present.
    spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO)
    cs = digitalio.DigitalInOut(board.SD_CS)
    sdcard = adafruit_sdcard.SDCard(spi, cs)
    vfs = storage.VfsFat(sdcard)
    storage.mount(vfs, "/sd")
    save_pixels("/sd/screenshot2.bmp")

plot()
#save()
print('done')
#import jax.numpy as np
#
#def periodic_spikes(firing_periods, duration: int):
# return 0 == (1 + np.arange(duration))[:, None] % firing_periods
#
#
#periodic_spikes(5, 22)
| 23.641026 | 71 | 0.659436 |
e1284d4bbaf6bf582868bfb66265b4397932b66a | 385 | py | Python | scripts/compile-tests.py | PENGUINLIONG/liella | d0d4bc3e05419705712384b15d1c5db00ee12f73 | [
"Apache-2.0",
"MIT"
] | null | null | null | scripts/compile-tests.py | PENGUINLIONG/liella | d0d4bc3e05419705712384b15d1c5db00ee12f73 | [
"Apache-2.0",
"MIT"
] | null | null | null | scripts/compile-tests.py | PENGUINLIONG/liella | d0d4bc3e05419705712384b15d1c5db00ee12f73 | [
"Apache-2.0",
"MIT"
] | null | null | null | from os import listdir
import subprocess

# Compile every GLSL test shader in tests/vulkan to SPIR-V via glslangValidator.
for f in listdir("tests/vulkan"):
    # Skip artifacts produced by previous runs.
    if f.endswith(".spv"):
        continue
    print(f"-- compiling test {f}")
    # FIX: the original passed an argument LIST together with shell=True; on
    # POSIX that executes only the first list element and silently drops all
    # arguments. Passing the list with the default shell=False runs the
    # command correctly on every platform.
    p = subprocess.run(
        ["glslangValidator", f"tests/vulkan/{f}", "-H", "-o", f"tests/vulkan/{f}.spv"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if p.returncode != 0:
        # glslangValidator reports compile errors on stdout.
        print(bytes.decode(p.stdout))
| 29.615385 | 162 | 0.644156 |
e1297aff417ed953bdd6f0365aac41a401e15814 | 5,769 | py | Python | collections/nemo_asr/nemo_asr/parts/dataset.py | petermartigny/NeMo | b20821e637314940e36b63d32c601c43d1b74051 | [
"Apache-2.0"
] | 1 | 2020-03-22T11:23:11.000Z | 2020-03-22T11:23:11.000Z | collections/nemo_asr/nemo_asr/parts/dataset.py | petermartigny/NeMo | b20821e637314940e36b63d32c601c43d1b74051 | [
"Apache-2.0"
] | null | null | null | collections/nemo_asr/nemo_asr/parts/dataset.py | petermartigny/NeMo | b20821e637314940e36b63d32c601c43d1b74051 | [
"Apache-2.0"
] | 1 | 2020-08-25T06:43:34.000Z | 2020-08-25T06:43:34.000Z | # Taken straight from Patter https://github.com/ryanleary/patter
# TODO: review, and copyright and fix/add comments
import torch
from torch.utils.data import Dataset
from .manifest import Manifest
def seq_collate_fn(batch):
    """Collate (audio, audio_len, transcript, transcript_len) samples into
    zero-padded batch tensors.

    Args:
        batch: iterable of tuples (audio [T] or None, audio length,
            transcript [L], transcript length).
    Returns:
        (audio_signal, audio_lengths, transcript, transcript_lengths);
        the first two are None when the batch carries no audio.
    """
    batch_size = len(batch)

    audio_signal, audio_lengths = None, None
    if batch[0][0] is not None:
        # pad every audio segment to the longest one in the batch
        max_audio_len = max(sample[0].size(0) for sample in batch)
        audio_signal = torch.zeros(batch_size, max_audio_len,
                                   dtype=torch.float)
        lengths = []
        for row, sample in enumerate(batch):
            audio_signal[row].narrow(0, 0, sample[0].size(0)).copy_(sample[0])
            lengths.append(sample[1])
        audio_lengths = torch.tensor(lengths, dtype=torch.long)

    # pad every transcript to the longest one in the batch
    max_transcript_len = max(sample[2].size(0) for sample in batch)
    transcript = torch.zeros(batch_size, max_transcript_len, dtype=torch.long)
    token_lengths = []
    for row, sample in enumerate(batch):
        transcript[row].narrow(0, 0, sample[2].size(0)).copy_(sample[2])
        token_lengths.append(sample[3])
    transcript_lengths = torch.tensor(token_lengths, dtype=torch.long)

    return audio_signal, audio_lengths, transcript, transcript_lengths
def audio_seq_collate_fn(batch):
"""
collate a batch (iterable of (sample tensor, label tensor) tuples) into
properly shaped data tensors
:param batch:
:return: inputs (batch_size, num_features, seq_length), targets,
input_lengths, target_sizes
"""
# sort batch by descending sequence length (for packed sequences later)
batch.sort(key=lambda x: -x[0].size(0))
minibatch_size = len(batch)
# init tensors we need to return
inputs = torch.zeros(minibatch_size, batch[0][0].size(0))
input_lengths = torch.zeros(minibatch_size, dtype=torch.long)
target_sizes = torch.zeros(minibatch_size, dtype=torch.long)
targets = []
metadata = []
# iterate over minibatch to fill in tensors appropriately
for i, sample in enumerate(batch):
input_lengths[i] = sample[0].size(0)
inputs[i].narrow(0, 0, sample[0].size(0)).copy_(sample[0])
target_sizes[i] = len(sample[1])
targets.extend(sample[1])
metadata.append(sample[2])
targets = torch.tensor(targets, dtype=torch.long)
return inputs, targets, input_lengths, target_sizes, metadata
class AudioDataset(Dataset):
    def __init__(self, manifest_filepath, labels, featurizer,
                 max_duration=None,
                 min_duration=None, max_utts=0, normalize=True,
                 trim=False, eos_id=None, logger=False, load_audio=True):
        """
        Dataset that loads tensors via a json file containing paths to audio
        files, transcripts, and durations
        (in seconds). Each new line is a different sample. Example below:
        {"audio_filepath": "/path/to/audio.wav", "text_filepath":
        "/path/to/audio.txt", "duration": 23.147}
        ...
        {"audio_filepath": "/path/to/audio.wav", "text": "the
        transcription", offset": 301.75, "duration": 0.82, "utt":
        "utterance_id",
        "ctm_utt": "en_4156", "side": "A"}
        Args:
            manifest_filepath: Path to manifest json as described above. Can
                be comma-separated paths.
            labels: String containing all the possible characters to map to
            featurizer: Initialized featurizer class that converts paths of
                audio to feature tensors
            max_duration: If audio exceeds this length, do not include in
                dataset
            min_duration: If audio is less than this length, do not include
                in dataset
            max_utts: Limit number of utterances
            normalize: whether to normalize transcript text (default): True
            trim: whether to pass trim=True to the featurizer when loading
            eos_id: Id of end of sequence symbol to append if not None
            logger: optional logger object; if truthy, dataset hours are logged
            load_audio: Boolean flag indicate whether do or not load audio
        """
        # A manifest may be split across several files (comma-separated).
        m_paths = manifest_filepath.split(',')
        self.manifest = Manifest(m_paths, labels,
                                 max_duration=max_duration,
                                 min_duration=min_duration, max_utts=max_utts,
                                 normalize=normalize)
        self.featurizer = featurizer
        self.trim = trim
        self.eos_id = eos_id
        self.load_audio = load_audio
        if logger:
            logger.info(
                "Dataset loaded with {0:.2f} hours. Filtered {1:.2f} "
                "hours.".format(
                    self.manifest.duration / 3600,
                    self.manifest.filtered_duration / 3600))

    def __getitem__(self, index):
        """Return one sample as (features, feature_len, transcript, transcript_len).

        features/feature_len are None when load_audio is False.
        """
        sample = self.manifest[index]
        if self.load_audio:
            # Missing duration/offset fall back to 0 — the featurizer decides
            # what 0 means (presumably "whole file"/"from start"; not shown here).
            duration = sample['duration'] if 'duration' in sample else 0
            offset = sample['offset'] if 'offset' in sample else 0
            features = self.featurizer.process(sample['audio_filepath'],
                                               offset=offset,
                                               duration=duration,
                                               trim=self.trim)
            f, fl = features, torch.tensor(features.shape[0]).long()
            # f = f / (torch.max(torch.abs(f)) + 1e-5)
        else:
            f, fl = None, None
        t, tl = sample["transcript"], len(sample["transcript"])
        if self.eos_id is not None:
            # Append the end-of-sequence token and count it in the length.
            t = t + [self.eos_id]
            tl += 1
        return \
            f, fl, \
            torch.tensor(t).long(), torch.tensor(tl).long()

    def __len__(self):
        # Number of (filtered) utterances in the manifest.
        return len(self.manifest)
| 39.244898 | 78 | 0.600277 |
e129ccfbd3be47531b273fb3289a20523a49c675 | 5,277 | py | Python | HandSComp.py | CRZaug/NonlinearWaves | 2adfc2cc5e0c18576c6b73420a913ef1ce23000d | [
"MIT"
] | null | null | null | HandSComp.py | CRZaug/NonlinearWaves | 2adfc2cc5e0c18576c6b73420a913ef1ce23000d | [
"MIT"
] | null | null | null | HandSComp.py | CRZaug/NonlinearWaves | 2adfc2cc5e0c18576c6b73420a913ef1ce23000d | [
"MIT"
] | null | null | null | """
~~~ IMPORT EXPERIMENTAL DATA, PROCESS, AND NONDIMENSIONALIZE ~~~
This code reads in the rescaled Snodgrass data and compares parameters
to known parameters found in the Henderson and Segur paper.
1. Get distances
2. Read in the gauge data for each event (get frequencies and Fourier magnitudes)
3. Adjust the y axis units
4. Get the k vector using integer division and clean up
5. Get the carrier wave location (requires some restricting)
6. Factor out carrier wave
7. Get the energies at each gauge
8. Get nondimensionalization constants
"""
import numpy as np
import os
import glob
import matplotlib.pyplot as plt
from numpy.fft import fft, ifft
import NLS
import random as rand
from scipy import interpolate
### STEP 1: Get distance information
distv = np.array([0.0,2400000.0,4200000.0,8700000.0]) # Distances between gauges in METERS
### STEP 2: Read in information at each gauge for each event
subdirs = ['Aug1Data','Aug2Data','JulyData']
# Define something that will list directories that are not hidden
def listdirNH(path):
    return glob.glob(os.path.join(path, '*'))
# Read in the data
j = 0  # NOTE(review): never used below
for sd in subdirs:
    files = listdirNH(sd+'/Rescaled')
    # Initialize some values
    n = 0
    pi =0  # NOTE(review): never used below; this is NOT math pi
    fig1,ax1 = plt.subplots(4,1)
    plt.suptitle(sd)
    # Get files
    Deltavals = []
    for f in files:
        datavals = np.transpose(np.loadtxt(f).view(float))
        N = len(datavals[1])
        x = datavals[0] # Frequencies
        sly = datavals[1] # Magnitudes
        #ly = np.sqrt(sly*x)*0.01 #MULTIPLY VERSION (get the amplitude in meters)
        # Mean spacing between consecutive frequency samples.
        mns = []
        for w in range(N-1):
            mns.append(np.abs(x[w+1]-x[w]))
        #mns.append(np.mean(mns))
        ### STEP 3: Adjust the y axis units
        ly = np.sqrt(sly*np.mean(mns))*0.01 # INTEGRATED VERSION
        ### STEP 4: Get the k vector using integer division and clean up
        L = 3600*3 # The period
        k = (x*0.001)//(2*np.pi/L) # Convert to mHz, then divide by 2pi/L to get the k vector
        # REMOVE DUPLICATE VALUES
        ndk = np.array(())
        for fi in range(len(k)):
            num = k[fi]
            if num not in ndk:
                ndk = np.append(ndk,num)
        # Group the indices that share each (deduplicated) k value...
        lll =[]
        for h in ndk:
            l1=np.where(k==h)[0]
            lll.append(l1)
        # ...and average the amplitudes within each group.
        ndy = np.array(())
        for ar in lll:
            val = np.mean(ly[ar])
            ndy=np.append(ndy,val)
        ### STEP 5: Get the location of the carrier wave (defined by the first gauge)
        if n == 0:
            m = max(ndy)
            i = np.where(ndy == m)
            # If the maximum occurs more than once, take the middle occurrence.
            if len(i[0]) > 1:
                newi = i[0][len(i[0])//2]
                carriermode = np.array(newi)
                carrierloc = ndk[carriermode]
            else:
                newi = i[0][0]
                carriermode = np.array(newi)
                carrierloc = ndk[carriermode]
        # First, find the carrier mode in ANY file, not just the first one
        loc = np.where(np.logical_and(ndk>carrierloc*0.99, ndk<carrierloc*1.001))
        #loc = np.where(np.logical_and(ndk>carrierloc-1, ndk<carrierloc+1))
        # Be a little more restrictive
        # NOTE(review): both branches are identical; the if/else is redundant.
        if len(loc[0])>1:
            loc = loc[0][0]
        else:
            loc = loc[0][0]
        ### STEP 6: Redefine the k vector so that the carrier mode is at 0 (factor it out)
        knew = ndk-ndk[loc]
        xnew = x-x[loc]
        ### STEP 7: Get the "Energy" integrals
        fnc = interpolate.interp1d(x, sly,kind ='cubic')
        longx = np.linspace(x[0],x[-1],1000)
        newy = fnc(longx)
        A0 = np.sqrt(2*NLS.trapezoid(newy,(x[-1]-x[0])))*0.01
        figg,axx = plt.subplots()
        axx.plot(x,sly,'.',markersize=7)
        axx.plot(longx,newy)
        plt.show()  # NOTE(review): blocks inside the loop until each figure is closed
        # NOTE(review): the mask upper bound (75.6) and the integration width
        # (74.6-41.2) disagree — confirm which bound is intended.
        M000 = NLS.trapezoid(newy[np.where(np.logical_and(longx>41.2,longx<75.6))],(74.6-41.2))
        Deltavals.append(M000)
        ### STEP 8: Get nondimensionalization constants
        g = 9.81 #(m/s^2)
        if n==0:
            w0 = (2*np.pi)**2/L*ndk[loc] # Get the value from the integer
            k0 = w0**2/g # The carrier wavenumber
            m = max(ndy)
            epsilon = 2*m*k0 # The nondimensionalization constant epsilon
            heps = A0*k0
            print(f,'Special Values')
            print('2A0',A0)
            print('Maximum value',m)
            print('Carrier frequency',w0)
            print('Wavenumber',k0)
            print('MY epsilon',epsilon)
            print('HENDERSON EPSILON', heps)
            print('period',L)
        n = n+1
    M0 = Deltavals[0]
    MX = Deltavals/M0
    energyy = np.log(MX)
    # Get the fit and define a new y vector
    A = np.vstack([distv, np.ones(len(distv))]).T
    m, b = np.linalg.lstsq(A, energyy,rcond=-1)[0] # m is delta
    hdeltab = -m
    hdeltas = hdeltab/(2*heps**2*k0)
    xplot = np.linspace(distv[0],distv[-1],100)
    newy = m*xplot+b  # NOTE(review): reuses 'newy' from the spectrum fit above
    print('HENDERSON BIG Delta ',hdeltab, 'b ', b)
    print('HENDERSON LITTLE delta', hdeltas)
    print()
e12ad429759f61a8d7e2d053224398fdfc9dad67 | 19 | py | Python | pkgs/conf-pkg/src/genie/libs/conf/rip/__init__.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 94 | 2018-04-30T20:29:15.000Z | 2022-03-29T13:40:31.000Z | pkgs/conf-pkg/src/genie/libs/conf/rip/__init__.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 67 | 2018-12-06T21:08:09.000Z | 2022-03-29T18:00:46.000Z | pkgs/conf-pkg/src/genie/libs/conf/rip/__init__.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 49 | 2018-06-29T18:59:03.000Z | 2022-03-10T02:07:59.000Z | from .rip import *
| 9.5 | 18 | 0.684211 |
e12b12da65a67d3755d54e62e2738980186e27db | 15,542 | py | Python | src/services/explorer/core.py | solomonricky/epic-awesome-gamer | a6ecff90a716bb145931bb4042f9510e68698694 | [
"Apache-2.0"
] | null | null | null | src/services/explorer/core.py | solomonricky/epic-awesome-gamer | a6ecff90a716bb145931bb4042f9510e68698694 | [
"Apache-2.0"
] | null | null | null | src/services/explorer/core.py | solomonricky/epic-awesome-gamer | a6ecff90a716bb145931bb4042f9510e68698694 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Time : 2022/1/17 15:20
# Author : QIN2DIM
# Github : https://github.com/QIN2DIM
# Description:
import os.path
import time
from hashlib import sha256
from typing import List, Optional, Union, Dict
import cloudscraper
import yaml
from lxml import etree # skipcq: BAN-B410 - Ignore credible sources
from selenium.common.exceptions import WebDriverException, InvalidCookieDomainException
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from services.settings import DIR_EXPLORER, EPIC_EMAIL
from services.settings import logger
from services.utils import ToolBox, ChallengerContext, StandardContext
from .exceptions import DiscoveryTimeoutException, ProtocolOutdatedWarning
class EpicAwesomeExplorer:
    """Game-store explorer: collects free-game data and promotion info."""

    # Platform endpoints.
    URL_STORE_HOME = "https://store.epicgames.com/zh-CN/"
    URL_FREE_GAMES = "https://store.epicgames.com/zh-CN/free-games"
    URL_STORE_PREFIX = "https://store.epicgames.com/zh-CN/browse?"
    URL_STORE_FREE_GAME = (
        f"{URL_STORE_PREFIX}sortBy=releaseDate&sortDir=DESC&priceTier=tierFree&count=40"
    )
    URL_STORE_FREE_DLC = f"{URL_STORE_PREFIX}sortBy=releaseDate&sortDir=DESC&priceTier=tierFree&category=GameAddOn&count=40&start=0"  # noqa
    URL_PROMOTIONS = (
        "https://store-site-backend-static.ak.epicgames.com/freeGamesPromotions?locale=zh-CN"
    )
    URL_PRODUCT_PAGE = "https://store.epicgames.com/zh-CN/p/"

    def __init__(self, silence: bool = None):
        # Headless-mode flag; defaults to True when not specified.
        self.silence = True if silence is None else silence
        # Driver bookkeeping.
        self.action_name = "AwesomeFreeGirl"
        # Runtime cache.
        self.runtime_workspace = None
        self.path_free_games = "ctx_store.yaml"
        self.game_objs = {}  # {index0: {name: value, url: value}, ...}
        self.category_details = {
            "game": {"url": self.URL_STORE_FREE_GAME, "flag": "免费游戏"},
            "dlc": {"url": self.URL_STORE_FREE_DLC, "flag": "免费附加内容"},
        }
        # Initialize the workspace.
        self._init_workspace()

    def _init_workspace(self) -> None:
        """Initialize the working directory used to cache store data."""
        self.runtime_workspace = "." if not os.path.exists(DIR_EXPLORER) else DIR_EXPLORER
        self.path_free_games = os.path.join(self.runtime_workspace, self.path_free_games)

    def _discovery_free_games(
        self,
        ctx: Union[ChallengerContext, StandardContext],
        ctx_cookies: List[dict],
        category: str = "game",
    ) -> None:
        """Discover the always-free games visible in the player's region.

        Args:
            ctx: selenium driver context.
            ctx_cookies: player cookies; when present the search runs logged
                in ("deep" search), otherwise logged out ("broad" search).
            category: "game" or "dlc" (keys of ``category_details``).
        Raises:
            DiscoveryTimeoutException: collecting the links took too long.
        """
        url = self.category_details[category]["url"]
        flag = self.category_details[category]["flag"]
        # Reload the player's token.
        if ctx_cookies:
            ctx.get(self.URL_STORE_FREE_GAME)
            for cookie_dict in ctx_cookies:
                try:
                    ctx.add_cookie(cookie_dict)
                except InvalidCookieDomainException:
                    # Skip cookies that do not belong to this domain.
                    pass
        _mode = "(深度搜索)" if ctx_cookies else "(广度搜索)"
        logger.debug(
            ToolBox.runtime_report(
                motive="DISCOVERY",
                action_name=self.action_name,
                message=f"📡 正在为玩家搜集{flag}{_mode}...",
            )
        )
        # Collect the free-game links, page by page.
        _start = time.time()
        _url_store_free = url
        while True:
            ctx.get(_url_store_free)
            time.sleep(1)
            WebDriverWait(ctx, 10, ignored_exceptions=WebDriverException).until(
                EC.presence_of_element_located(
                    (By.XPATH, "//section[@data-testid='section-wrapper']")
                )
            )
            # Scroll to the bottom of the page.
            action = ActionChains(ctx)
            action.send_keys(Keys.END)
            action.perform()
            # Detect an unexpected redirect away from the free tier.
            if "tierFree" not in ctx.current_url:
                break
            if time.time() - _start > 80:
                raise DiscoveryTimeoutException(f"获取{flag}链接超时")
            # Assert pagination is ready; remember the last page's link.
            WebDriverWait(ctx, 5, ignored_exceptions=WebDriverException).until(
                EC.element_to_be_clickable((By.XPATH, "//a[@data-component='PaginationItem']"))
            )
            page_switcher = ctx.find_elements(By.XPATH, "//a[@data-component='PaginationItem']")[-1]
            # Extract the valuable information.
            game_objs = ctx.find_elements(By.XPATH, "//a[@class='css-1jx3eyg']")
            for game_obj in game_objs:
                name = game_obj.get_attribute("aria-label")
                url = game_obj.get_attribute("href")
                self.game_objs.update(
                    {
                        self.game_objs.__len__(): {
                            "name": name.split(",")[0].replace("\n", "").strip(),
                            "url": url.strip(),
                            "in_library": None,
                        }
                    }
                )
            # Stop once the last page has been reached.
            page_end = page_switcher.get_attribute("href")
            if page_end in ctx.current_url:
                break
            # Otherwise jump to the next page.
            _url_store_free = page_end
        logger.success(
            ToolBox.runtime_report(
                motive="DISCOVERY",
                action_name=self.action_name,
                message=f"{flag}搜集完毕",
                qsize=len(self.game_objs),
            )
        )

    def stress_expressions(self, ctx: Union[ChallengerContext, StandardContext]) -> Dict[str, str]:
        """Locate this week's free games on the store home page.

        Returns:
            Mapping of product-page URL -> game title ("null" when the title
            element could not be read on the retry pass).
        """
        logger.debug(
            ToolBox.runtime_report(
                motive="DISCOVERY", action_name=self.action_name, message="📡 使用应力表达式搜索周免游戏..."
            )
        )
        # URL -> title of each weekly free game.
        pending_games = {}
        for i in range(2):  # one retry on driver/attribute errors
            try:
                ctx.get(self.URL_STORE_HOME)
                time.sleep(3)
                # Locate the absolute position of the weekly free games.
                WebDriverWait(ctx, 45, ignored_exceptions=WebDriverException).until(
                    EC.presence_of_element_located((By.XPATH, "//a[contains(string(),'当前免费')]"))
                )
                # Basic info of the weekly free games.
                stress_operator = ctx.find_elements(By.XPATH, "//a[contains(string(),'当前免费')]")
                title_seq = ctx.find_elements(
                    By.XPATH,
                    "//a[contains(string(),'当前免费')]//span[@data-testid='offer-title-info-title']",
                )
                # Re-assemble the weekly free-game info.
                for index, _ in enumerate(stress_operator):
                    href = stress_operator[index].get_attribute("href")
                    try:
                        pending_games[href] = f"{title_seq[index].text}".strip()
                    except AttributeError as err:
                        # First pass: retry the whole page; second pass: degrade.
                        if i == 0:
                            raise AttributeError from err
                        pending_games[href] = "null"
                break
            except (WebDriverException, AttributeError):
                continue
        return pending_games
class GameLibManager(EpicAwesomeExplorer):
    """Game-object manager: caches store data and tracks in-library state."""

    def __init__(self):
        super().__init__()
        self.action_name = "GameLibManager"
        self.email = EPIC_EMAIL
        self.auth_str = "explorer"

    def _z(self) -> str:
        # Per-account cache key derived from a scrambled slice of the e-mail;
        # empty string when no e-mail is configured.
        return (
            sha256(f"{self.email[-3::-2]}{self.auth_str}".encode("utf-8")).hexdigest()
            if self.email
            else ""
        )

    def _check_protocol(self):
        """
        Read the protocol (cache) file and validate its header.

        :except ProtocolOutdatedWarning: cache file missing or malformed —
            callers should treat this as "no cached data".
        :return: stream
        """
        try:
            with open(self.path_free_games, "r", encoding="utf8") as file:
                ctx_protocol = yaml.safe_load(file)
        except FileNotFoundError as err:
            raise ProtocolOutdatedWarning from err
        else:
            # An empty or non-dict payload counts as an outdated protocol.
            if not ctx_protocol or not isinstance(ctx_protocol, dict):
                raise ProtocolOutdatedWarning
            return ctx_protocol

    @staticmethod
    def _update_status(game_objs, ctx_content=None, runtime: bool = False):
        """
        Merge entity information with the previously cached entries.

        - case 1: cache missing/empty/outdated — skip reading, initialize the
          protocol object, create and write the content.
        - case 2: mirror the cached protocol, extend with the new entities,
          compare entry by entry to add new content and refresh the
          ``in_library`` status.

        :param game_objs: see `Game Object`
        :param ctx_content: effectively the game_objs saved last round
        :param runtime: default False; whether this is a runtime update of
            instance state.
            IF runtime is False — full update: compare against the whole
            cached library.
            IF runtime is True — partial update: new_objs is usually far
            smaller than memory_objs, so after comparison the entries missing
            from new_objs are copied back from the cache.
        :type game_objs: List[Dict[str, str|bool]]
        :type ctx_content: List[Dict[str, str|bool]]
        :return:
        """
        if not ctx_content:
            return game_objs
        runtime = bool(runtime)
        new_objs_map = {game["url"]: game for game in game_objs}
        memory_objs_map = {c["url"]: c for c in ctx_content}
        new_content_objs = []
        # Iterating over the new content automatically drops expired
        # weekly-free entries.
        for flag in new_objs_map.keys():
            # Add games not seen in the previous round.
            if not memory_objs_map.get(flag):
                new_content_objs.append(new_objs_map[flag])
            # Compare entry by entry; refresh the in-library status.
            else:
                memory = memory_objs_map[flag]
                new_obj = new_objs_map[flag]
                if new_obj.get("in_library") is None:
                    new_obj["in_library"] = memory.get("in_library")
                # Keep the entry with its refreshed in-library status.
                new_content_objs.append(new_obj)
        # Copy back the entries missing from new_objs.
        if runtime is True:
            for flag in memory_objs_map.keys():
                if not new_objs_map.get(flag):
                    new_content_objs.append(memory_objs_map[flag])
        return new_content_objs

    def save_game_objs(self, game_objs, category: str, runtime: Optional[bool] = None):
        """
        Cache the free-store data.

        :param runtime: see :meth:`_update_status`
        :param game_objs:
        :param category: "game" or "dlc"
        :type game_objs: List[Dict[str, str|bool]]
        :return:
        """
        if not game_objs:
            return
        try:
            ctx_protocol = self._check_protocol()
        except ProtocolOutdatedWarning:
            # No usable cache — write a fresh protocol file.
            content = {self._z(): {category: game_objs}}
            with open(self.path_free_games, "w", encoding="utf8") as file:
                yaml.dump(content, file, allow_unicode=True)
            logger.success(
                ToolBox.runtime_report(
                    motive="SAVE",
                    action_name=self.action_name,
                    message="Cache Epic store information.",
                )
            )
        else:
            # Account switch — fill in the missing per-account key.
            if not ctx_protocol.get(self._z()):
                ctx_protocol[self._z()] = {}
            # Compare and merge cached entries with the new ones.
            merged_content = self._update_status(
                game_objs=game_objs,
                ctx_content=ctx_protocol[self._z()].get(category),
                runtime=runtime,
            )
            ctx_protocol[self._z()].update({category: merged_content})
            # Persist the updated cache.
            with open(self.path_free_games, "w", encoding="utf8") as file:
                yaml.dump(ctx_protocol, file, allow_unicode=True)
            logger.success(
                ToolBox.runtime_report(
                    motive="MERGE",
                    action_name=self.action_name,
                    message="Update Epic store information.",
                )
            )

    def load_game_objs(self, category: str, only_url: bool = None):
        """
        Load the free-game objects cached locally.

        :param category: "game" or "dlc"
        :param only_url: when True return only the URLs
        :return: list of cached entries (or URLs); empty when no usable cache
        """
        try:
            ctx_protocol = self._check_protocol()
        except ProtocolOutdatedWarning:
            return []
        else:
            ctx_content = ctx_protocol.get(self._z(), {}).get(category, [])
            if not ctx_content:
                return []
            if only_url is True:
                return [obj["url"] for obj in ctx_content]
            return ctx_content

    @staticmethod
    def is_my_game(ctx_cookies, page_link: str, pre_assert_content: bytes = None) -> Optional[dict]:
        """
        Determine the in-library status of a game.

        :param pre_assert_content: pre-fetched response body. Moves the slow
            network request upstream (e.g. into a coroutine task) so this
            function only parses the static response handed down.
        :param ctx_cookies:
        :param page_link:
        :type ctx_cookies: List[dict]|str
        :return:
            status None  — abnormal state
            status True  — skip the task
            status False — continue the task
            Only a False status means the task may continue; AjaxLoadingReject
            targets can then be filtered out further.
        """
        def response_wrapper(new_params: dict):
            # Normalized result envelope.
            resp_ = {"assert": "", "status": None, "warning": ""}
            resp_.update(new_params)
            return resp_

        # Mode match --> context hand-off | fresh handshake.
        if pre_assert_content is None:
            headers = {
                "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/100.0.4896.75 Safari/537.36 Edg/100.0.1185.36",
                "cookie": ctx_cookies
                if isinstance(ctx_cookies, str)
                else ToolBox.transfer_cookies(ctx_cookies),
            }
            scraper = cloudscraper.create_scraper()
            response = scraper.get(page_link, headers=headers)
            content = response.content
        else:
            content = pre_assert_content

        # Parse the purchase call-to-action off the page.
        tree = etree.HTML(content)
        assert_obj = tree.xpath(
            "//span[@data-component='PurchaseCTA']//span[@data-component='Message']"
        )
        # Mode match --> hand the resource status downstream.
        # 1. Drop resources already owned, paid, or not yet released;
        # 2. Drop restricted free resources;
        # 3. Hand over claimable free resources.
        # 🚧 Abnormal state: ignore game objects that are not released yet.
        if not assert_obj:
            return response_wrapper({"assert": "AssertObjectNotFound", "status": None})
        assert_message = assert_obj[0].text
        # 🚧 Skip "cannot claim" entries (owned / buy-now / coming-soon labels).
        if assert_message in ["已在游戏库中", "已在库中", "立即购买", "购买", "即将推出"]:
            return response_wrapper({"assert": assert_message, "status": True})
        # 🚧 Lazy loading; this upstream node does not handle dynamic elements.
        if assert_message in ["正在载入"]:
            return response_wrapper({"assert": "AjaxLoadingReject", "status": False})
        # 🍟 Free game not yet claimed.
        if assert_message in ["获取"]:
            # No blocking warning — continue the task.
            warning_obj = tree.xpath("//h1[@class='css-1gty6cv']//span")
            if not warning_obj:
                return response_wrapper({"assert": assert_message, "status": False})
            # Adult content is still claimable.
            warning_message = warning_obj[0].text
            if warning_message in ["成人内容"]:
                return response_wrapper(
                    {"assert": assert_message, "warning": warning_message, "status": False}
                )
            # Region-locked — cannot claim.
            return response_wrapper(
                {"assert": assert_message, "warning": warning_message, "status": None}
            )
class _Game:
    """Base game object."""

    # Game title.
    name: str = "《堡垒之夜》"
    # Store page URL.
    url: str = "https://store.epicgames.com/zh-CN/p/fortnite"
    # In-library state: True owned, False not owned, None unknown (initial).
    in_library: bool = None
class _Dlc:
    """Game add-on (DLC) object."""

    # Add-on title.
    name: str = "《消逝的光芒》-《求生之路 2》Weapon Pack"
    # Store page URL.
    url: str = "https://store.epicgames.com/zh-CN/p/dying-light--left-4-dead-2-weapon-pack"
    # In-library state: True owned, False not owned, None unknown (initial).
    in_library: bool = None
class _Report:
    """Message-push / resource wrapper object."""

    # Resource name.
    name: str = ""
    # Resource URL.
    url: str = ""
    # Execution result.
    status: str = ""
| 33.786957 | 140 | 0.559452 |
e12b30211ce2a1a3e4ccda61c62066c6b101ba25 | 7,312 | py | Python | model/utils.py | Tiamat-Tech/VAENAR-TTS | 69b6b5be1ab5168cfd3c6ab902075638e76a3b8d | [
"MIT"
] | 62 | 2021-07-15T10:09:56.000Z | 2022-03-31T02:53:09.000Z | model/utils.py | Tiamat-Tech/VAENAR-TTS | 69b6b5be1ab5168cfd3c6ab902075638e76a3b8d | [
"MIT"
] | 3 | 2021-07-19T14:45:26.000Z | 2022-03-31T02:38:57.000Z | model/utils.py | Tiamat-Tech/VAENAR-TTS | 69b6b5be1ab5168cfd3c6ab902075638e76a3b8d | [
"MIT"
] | 10 | 2021-07-19T03:20:44.000Z | 2022-02-21T07:07:38.000Z | import torch
import torch.nn as nn
from torch.nn import functional as F
class LinearNorm(nn.Module):
    """Fully-connected layer with configurable initialization.

    Weight init: 'glorot_uniform' (Xavier) or 'zeros'; any other value keeps
    PyTorch's default. Bias init supports only 'zeros' (anything else raises
    NotImplementedError). An optional activation is applied to the output.
    """

    def __init__(self, in_features, out_features, activation=None,
                 use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros'):
        super(LinearNorm, self).__init__()
        self.linear = nn.Linear(in_features=in_features,
                                out_features=out_features,
                                bias=use_bias)

        if kernel_initializer == 'glorot_uniform':
            nn.init.xavier_uniform_(self.linear.weight)
        elif kernel_initializer == 'zeros':
            nn.init.zeros_(self.linear.weight)

        if use_bias:
            if bias_initializer != 'zeros':
                raise NotImplementedError
            nn.init.constant_(self.linear.bias, 0.0)

        # None -> identity, so forward never branches.
        self.activation = nn.Identity() if activation is None else activation

    def forward(self, x):
        """Apply the linear projection followed by the activation."""
        return self.activation(self.linear(x))
class ConvNorm(nn.Module):
    """1-D convolution with configurable initialization.

    Mirrors :class:`LinearNorm`: 'glorot_uniform' or 'zeros' weight init,
    zero bias init, and an optional activation applied to the output.
    ``padding=None`` (the default) selects "same"-style padding for odd
    kernels.
    """

    def __init__(
            self, in_channels, out_channels, kernel_size=1, stride=1,
            padding=None, dilation=1, activation=None,
            use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros'):
        super(ConvNorm, self).__init__()
        # Fix: padding=None used to be forwarded straight to nn.Conv1d and
        # crashed. Default to "same"-style padding instead (odd kernels only).
        if padding is None:
            if kernel_size % 2 == 0:
                raise ValueError("padding=None requires an odd kernel_size")
            padding = int(dilation * (kernel_size - 1) / 2)
        self.conv = nn.Conv1d(in_channels, out_channels, kernel_size=kernel_size,
                              stride=stride, padding=padding, dilation=dilation, bias=use_bias
                              )
        # Weight init: 'glorot_uniform'/'zeros'; other values keep the default.
        if kernel_initializer == 'glorot_uniform':
            nn.init.xavier_uniform_(self.conv.weight)
        elif kernel_initializer == 'zeros':
            nn.init.zeros_(self.conv.weight)
        # Bias init: only 'zeros' is handled; other values keep the default.
        if use_bias:
            if bias_initializer == 'zeros':
                nn.init.constant_(self.conv.bias, 0.0)
        # The original accepted `activation` but silently ignored it; apply it
        # to the output (None -> identity) for consistency with LinearNorm.
        self.activation = activation if activation is not None else nn.Identity()

    def forward(self, signal):
        """Convolve a (batch, channels, time) signal and apply the activation."""
        return self.activation(self.conv(signal))
class PreNet(nn.Module):
    """Two-layer pre-net: (LinearNorm -> dropout) applied twice.

    The same dropout module is shared by both layers.
    """

    def __init__(self, in_features, units, drop_rate, activation):
        super(PreNet, self).__init__()
        self.dense1 = LinearNorm(in_features, units, activation=activation)
        self.dense2 = LinearNorm(units, units, activation=activation)
        self.dropout_layer = nn.Dropout(p=drop_rate)

    def forward(self, inputs):
        """Run inputs through both dense layers with dropout after each."""
        hidden = self.dropout_layer(self.dense1(inputs))
        return self.dropout_layer(self.dense2(hidden))
class ConvPreNet(nn.Module):
    """Stack of Conv1D blocks followed by a linear projection.

    NOTE: the ``activation`` default is a module instance evaluated once at
    class-definition time and shared across instances that rely on it.
    """

    def __init__(self, nconv, hidden, conv_kernel, drop_rate,
                 activation=nn.ReLU(), bn_before_act=True):
        super(ConvPreNet, self).__init__()
        conv_layers = [
            Conv1D(in_channels=hidden, out_channels=hidden, kernel_size=conv_kernel,
                   activation=activation, drop_rate=drop_rate,
                   bn_before_act=bn_before_act)
            for _ in range(nconv)
        ]
        self.conv_stack = nn.ModuleList(conv_layers)
        self.projection = LinearNorm(hidden, hidden)

    def forward(self, inputs, mask=None):
        """Apply every conv block in order, then project."""
        hidden_states = inputs
        for conv_layer in self.conv_stack:
            hidden_states = conv_layer(hidden_states, mask)
        return self.projection(hidden_states)
class FFN(nn.Module):
    """Feed-forward block with residual connection and LayerNorm.

    NOTE: the residual add requires hidden2 == in_features.
    """

    def __init__(self, in_features, hidden1, hidden2):
        super(FFN, self).__init__()
        self.dense1 = LinearNorm(in_features, hidden1, activation=nn.ReLU())
        self.dense2 = LinearNorm(hidden1, hidden2, activation=None)
        self.layer_norm = nn.LayerNorm(hidden2)

    def forward(self, inputs, mask=None):
        """Project, add the residual, normalize, and zero out masked steps."""
        projected = self.dense2(self.dense1(inputs))
        outputs = self.layer_norm(projected + inputs)
        if mask is not None:
            outputs = outputs.masked_fill(mask.unsqueeze(-1), 0.0)
        return outputs
class Conv1D(nn.Module):
    """Conv block on (batch, time, channels) inputs: conv + batch-norm +
    activation + dropout, with optional masking of padded steps.

    ``bn_before_act`` chooses between BN->activation and activation->BN.
    """

    def __init__(self, in_channels, out_channels, kernel_size, activation, drop_rate,
                 bn_before_act=False, strides=1):
        super(Conv1D, self).__init__()
        self.conv1d = ConvNorm(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=strides,
                               padding=int((kernel_size - 1) / 2),
                               dilation=1,
                               activation=None)
        self.activation = nn.Identity() if activation is None else activation
        self.bn = nn.BatchNorm1d(out_channels)
        self.dropout = nn.Dropout(p=drop_rate)
        self.bn_before_act = bn_before_act

    def forward(self, inputs, mask=None):
        """Convolve (batch, time, channels) inputs; zero masked positions."""
        # (B, T, C) -> (B, C, T) for Conv1d / BatchNorm1d.
        hidden = self.conv1d(inputs.contiguous().transpose(1, 2))
        if self.bn_before_act:
            hidden = self.activation(self.bn(hidden))
        else:
            hidden = self.bn(self.activation(hidden))
        # Back to (B, T, C) after dropout.
        outputs = self.dropout(hidden).contiguous().transpose(1, 2)
        if mask is not None:
            outputs = outputs.masked_fill(mask.unsqueeze(-1), 0.0)
        return outputs
class PostNet(nn.Module):
    """Stack of Conv1D blocks: Tanh on every layer except the last (linear)."""

    def __init__(self, n_conv, hidden, conv_filters, conv_kernel,
                 drop_rate):
        super(PostNet, self).__init__()
        # Tanh for all layers but the final one, which stays linear.
        activations = [nn.Tanh()] * (n_conv - 1) + [nn.Identity()]
        layers = []
        for idx in range(n_conv):
            layers.append(
                Conv1D(in_channels=hidden if idx == 0 else conv_filters,
                       out_channels=conv_filters,
                       kernel_size=conv_kernel,
                       activation=activations[idx],
                       drop_rate=drop_rate))
        self.conv_stack = nn.ModuleList(layers)

    def forward(self, inputs, mask=None):
        """Apply every conv block in order."""
        hidden = inputs
        for layer in self.conv_stack:
            hidden = layer(hidden, mask)
        return hidden
class PositionalEncoding(nn.Module):
    """Sinusoidal positional-encoding helper."""

    def __init__(self):
        super(PositionalEncoding, self).__init__()

    @staticmethod
    def positional_encoding(len, dim, device, step=1.):
        """Build a [len, dim] sinusoidal position table.

        :param len: int scalar — number of positions
        :param dim: int scalar — embedding dimension
        :param device: device on which to create the table
        :param step: spacing between consecutive positions
        :return: [len, dim] tensor — sin(pos / 10000^(d/dim)) on even
            dimensions d, cos(pos / 10000^((d-1)/dim)) on odd dimensions
        """
        # Column of positions and row of dimension indices; broadcasting
        # replaces the explicit torch.tile of the original formulation.
        positions = (torch.arange(0, len, dtype=torch.float32, device=device) * step).unsqueeze(-1)
        dim_idx = torch.arange(0, dim, dtype=torch.float32, device=device).unsqueeze(0)
        is_even = torch.eq(torch.fmod(dim_idx.type(torch.int32), 2), 0)
        sin_table = torch.sin(positions / torch.pow(10000., dim_idx / float(dim)))
        cos_table = torch.cos(positions / torch.pow(10000., (dim_idx - 1) / float(dim)))
        # torch.where broadcasts the [1, dim] condition over [len, dim] values.
        return torch.where(is_even, sin_table, cos_table)
| 37.88601 | 112 | 0.583425 |
e12bf8233ff1f13a2dd46e4e371f37801c0e563f | 2,186 | py | Python | fedjax/legacy/core/federated_algorithm.py | alshedivat/fedjax | ff46ba9955f167160353d7be72f6f5e1febee32c | [
"Apache-2.0"
] | null | null | null | fedjax/legacy/core/federated_algorithm.py | alshedivat/fedjax | ff46ba9955f167160353d7be72f6f5e1febee32c | [
"Apache-2.0"
] | null | null | null | fedjax/legacy/core/federated_algorithm.py | alshedivat/fedjax | ff46ba9955f167160353d7be72f6f5e1febee32c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface definitions for federated algorithms."""
import abc
from typing import Generic, List, TypeVar
from fedjax.legacy.core.model import Model
from fedjax.legacy.core.typing import FederatedData
T = TypeVar('T')
class FederatedAlgorithm(Generic[T], metaclass=abc.ABCMeta):
  """Interface for federated algorithms.
  This interface structures federated algorithms at the per round level.
  Algorithm state contains any round specific parameters (e.g. model parameters)
  that will be passed from round to round. This state is initialized by
  `init_state` and passed as input into and returned as output from `run_round`.
  We suggest defining state using `typing.NamedTuple` as this provides
  immutability, type hinting, and works by default with JAX transformations.
  Anything else that is static and doesn't update at each round can be defined
  in the `__init__`.
  The expected usage of FederatedAlgorithm is as follows:
  ```
  algorithm = FederatedAlgorithm()
  state = algorithm.init_state()
  for i in range(num_rounds):
    client_ids = sample_clients(i)
    state = algorithm.run_round(state, client_ids)
  ```
  """

  @abc.abstractproperty
  def federated_data(self) -> FederatedData:
    """Returns federated data used in federated training."""

  @abc.abstractproperty
  def model(self) -> Model:
    """Returns model used in federated training."""

  @abc.abstractmethod
  def init_state(self) -> T:
    """Returns initial state of algorithm."""

  @abc.abstractmethod
  def run_round(self, state: T, client_ids: List[str]) -> T:
    """Runs one round of federated training.

    Args:
      state: current algorithm state (round-specific parameters).
      client_ids: ids of the clients participating in this round.

    Returns:
      Updated algorithm state after the round.
    """
| 34.15625 | 80 | 0.747027 |
0100201d7067edc12b14792aa66df0f99a8f5f65 | 2,306 | py | Python | lib/galaxy/webapps/galaxy/services/jobs.py | itisAliRH/galaxy | b3b693ea0788f773442c8481472a87f43ccb10d7 | [
"CC-BY-3.0"
] | null | null | null | lib/galaxy/webapps/galaxy/services/jobs.py | itisAliRH/galaxy | b3b693ea0788f773442c8481472a87f43ccb10d7 | [
"CC-BY-3.0"
] | 6 | 2021-11-11T20:57:49.000Z | 2021-12-10T15:30:33.000Z | lib/galaxy/webapps/galaxy/services/jobs.py | itisAliRH/galaxy | b3b693ea0788f773442c8481472a87f43ccb10d7 | [
"CC-BY-3.0"
] | null | null | null | from enum import Enum
from typing import (
Any,
Dict,
)
from galaxy import (
exceptions,
model,
)
from galaxy.managers import hdas
from galaxy.managers.context import ProvidesUserContext
from galaxy.managers.jobs import (
JobManager,
JobSearch,
view_show_job,
)
from galaxy.schema.fields import EncodedDatabaseIdField
from galaxy.schema.schema import JobIndexQueryPayload
class JobIndexViewEnum(str, Enum):
    """Views available when listing jobs."""

    collection = "collection"
    admin_job_list = "admin_job_list"
class JobIndexPayload(JobIndexQueryPayload):
    """Job index query payload; defaults to the non-admin collection view."""

    view: JobIndexViewEnum = JobIndexViewEnum.collection
class JobsService:
    """Service-layer operations over jobs (show / index)."""

    job_manager: JobManager
    job_search: JobSearch
    hda_manager: hdas.HDAManager

    def __init__(
        self,
        job_manager: JobManager,
        job_search: JobSearch,
        hda_manager: hdas.HDAManager,
    ):
        self.job_manager = job_manager
        self.job_search = job_search
        self.hda_manager = hda_manager

    def show(
        self,
        trans: ProvidesUserContext,
        id: EncodedDatabaseIdField,
        full: bool = False,
    ) -> Dict[str, Any]:
        """Return a dictionary view of one job the user can access.

        ``id`` arrives encoded and is decoded before lookup; ``full`` selects
        the detailed view.
        """
        id = trans.app.security.decode_id(id)
        job = self.job_manager.get_accessible_job(trans, id)
        return view_show_job(trans, job, bool(full))

    def index(
        self,
        trans: ProvidesUserContext,
        payload: JobIndexPayload,
    ):
        """List jobs matching ``payload``.

        The admin_job_list view requires an admin user and always includes
        user details.
        """
        security = trans.security
        is_admin = trans.user_is_admin
        # Admin view always carries per-user details.
        if payload.view == JobIndexViewEnum.admin_job_list:
            payload.user_details = True
        user_details = payload.user_details
        if payload.view == JobIndexViewEnum.admin_job_list and not is_admin:
            raise exceptions.AdminRequiredException("Only admins can use the admin_job_list view")
        query = self.job_manager.index_query(trans, payload)
        out = []
        view = payload.view
        # Stream rows in batches to bound memory on large result sets.
        for job in query.yield_per(model.YIELD_PER_ROWS):
            job_dict = job.to_dict(view, system_details=is_admin)
            j = security.encode_all_ids(job_dict, True)
            if view == JobIndexViewEnum.admin_job_list:
                # Admins also get the raw (decoded) database id.
                j["decoded_job_id"] = job.id
            if user_details:
                j["user_email"] = job.get_user_email()
            out.append(j)
        return out
| 28.469136 | 98 | 0.662186 |
0101f0c173a9caa73adb1fcaf5f05657435355f6 | 1,984 | py | Python | tests/deephub/trainer/test_early_stopping.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | [
"Apache-2.0"
] | 8 | 2019-10-17T12:46:13.000Z | 2020-03-12T08:09:40.000Z | tests/deephub/trainer/test_early_stopping.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | [
"Apache-2.0"
] | 12 | 2019-10-22T13:11:56.000Z | 2022-02-10T00:23:30.000Z | tests/deephub/trainer/test_early_stopping.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | [
"Apache-2.0"
] | 1 | 2019-10-17T13:21:27.000Z | 2019-10-17T13:21:27.000Z | import pytest
import numpy as np
from deephub.models.registry.toy import DebugToyModel
from deephub.models.feeders import MemorySamplesFeeder
from deephub.trainer import Trainer
@pytest.mark.slow
def test_early_stopping(tmpdir):
    """Training with early stopping should halt before the full epoch budget
    (checked via the model's final global_step)."""
    model_params = {
        'type': 'toy:DebugToyModel',
        'model_dir': str(tmpdir / 'test_early_stopping/'),
        'learning_rate': 0.01,
        'num_classes': 2,
        'num_steps_per_epoch': 1,
        'hidden_neurons': 512
    }
    # Initialize the model
    model = DebugToyModel(**model_params)
    # Read training data
    train_feeder = MemorySamplesFeeder(np.asarray(np.arange(0, 10).reshape(10, 1), dtype=np.float32),
                                       np.array([int(0 if i < 5 else 1)
                                                 for i in range(10)], dtype=np.int64, ndmin=1),
                                       batch_size=10, feed_as_dict=False)
    # Read validation data
    validation_feeder = MemorySamplesFeeder(np.asarray(np.arange(10, 20).reshape(10, 1), dtype=np.float32),
                                            np.array([int(0 if i < 5 else 1)
                                                      for i in range(10)], dtype=np.int64, ndmin=1),
                                            batch_size=10, feed_as_dict=False)
    # Initialize the Trainer
    trainer = Trainer()
    # Aggressive early-stopping settings so the hook can trigger quickly.
    train_params = {
        'epochs': 12,
        'save_summary_steps': 1,
        'save_checkpoint_steps': 1,
        'early_stopping_metric': 'loss',
        'early_stopping_steps_without_decrease': 3,
        'early_stopping_min_steps': 1,
        'early_stopping_hook_run_every_steps': 1
    }
    # Start training process
    trainer.train(model=model, train_feeder=train_feeder, eval_feeder=validation_feeder,
                  **train_params)
    # Grab global step from model.estimator object
    global_step = model.estimator().get_variable_value('global_step')
    # With 1 step per epoch, fewer steps than epochs means stopping fired early.
    assert global_step < train_params['epochs']
| 36.072727 | 107 | 0.594758 |
0102028974c26fedb9d3e8e681861c033e610fbc | 2,157 | py | Python | tests/switchconfig/conftest.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | tests/switchconfig/conftest.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | tests/switchconfig/conftest.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | from typing import TYPE_CHECKING
import pytest
from . import CapturedOutput
from utsc.switchconfig import config
from prompt_toolkit.application import create_app_session
from prompt_toolkit.input import create_pipe_input
if TYPE_CHECKING:
from .. import MockedUtil
from pytest_mock import MockerFixture
@pytest.fixture()
def mock_config(mocker: "MockerFixture", mock_util: "MockedUtil"):
    """Return the switchconfig ``config`` module with its ``util`` attribute
    patched to the ``mock_util`` fixture for the duration of the test."""
    mocker.patch.object(config, "util", mock_util)
    return config
@pytest.fixture(scope="function")
def mock_pt_app():
    """Yield a prompt_toolkit app session wired to a pipe input and a
    capturing output, so tests can feed keystrokes and inspect output.

    The pipe input is closed after the test even if the session raises.
    """
    pipe_input = create_pipe_input()
    pipe_output = CapturedOutput()
    try:
        with create_app_session(input=pipe_input, output=pipe_output) as app:
            yield app
    finally:
        pipe_input.close()
# endregion
# region failed interactive fixture experiment
# def pytest_addoption(parser):
# parser.addoption(
# "--interactive", action="store_true", default=False, help="run interactive tests"
# )
# @pytest.fixture()
# def interactive(request, capfd: 'CaptureFixture'):
# if request.config.getoption("--interactive") or os.getenv("VSCODE_DEBUGGER"):
# # here we reach directly into capsys._capture,
# # because the capsys.disabled context manager
# # does not suspend capturing of stdin.
# capmanager: 'CaptureManager' = capfd.request.config.pluginmanager.getplugin("capturemanager")
# capmanager.suspend(in_=True)
# assert capfd._capture # noqa
# capfd._capture.suspend_capturing(in_=True) # noqa
# yield
# capmanager.resume()
# capfd._capture.resume_capturing() # noqa
# else:
# pytest.skip("This test can only be run with the --interactive option")
# def pytest_collection_modifyitems(config, items):
# if config.getoption("--interactive"):
# # --interactive given in cli: do not skip interactive tests
# return
# skip_interactive = pytest.mark.skip(reason="need --interactive option to run")
# for item in items:
# if "interactive" in item.keywords and not os.getenv("VSCODE_DEBUGGER"):
# item.add_marker(skip_interactive)
# endregion
| 31.26087 | 103 | 0.696801 |
0104208de3be81be65db916a9965b3d5c0b060ef | 10,742 | py | Python | hf/protocol/frame.py | HashFast/hashfast-tools | 9617691ac997f12085b688c3ecc6746e8510976d | [
"BSD-3-Clause"
] | 1 | 2020-12-15T02:49:36.000Z | 2020-12-15T02:49:36.000Z | hf/protocol/frame.py | HashFast/hashfast-tools | 9617691ac997f12085b688c3ecc6746e8510976d | [
"BSD-3-Clause"
] | null | null | null | hf/protocol/frame.py | HashFast/hashfast-tools | 9617691ac997f12085b688c3ecc6746e8510976d | [
"BSD-3-Clause"
] | 3 | 2015-09-02T00:31:06.000Z | 2020-12-15T02:52:06.000Z | # Copyright (c) 2014, HashFast Technologies LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of HashFast Technologies LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL HASHFAST TECHNOLOGIES LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from abc import ABCMeta, abstractmethod
from ..load import crc
from ..util import with_metaclass, int_to_lebytes, lebytes_to_int
# Operation codes from hf_protocol.h.
opcodes = {
# Serial protocol operation codes (Second header byte)
'OP_NULL': 0,
'OP_ROOT': 1,
'OP_RESET': 2,
'OP_PLL_CONFIG': 3,
'OP_ADDRESS': 4,
'OP_READDRESS': 5,
'OP_HIGHEST': 6,
'OP_BAUD': 7,
'OP_UNROOT': 8,
'OP_HASH': 9,
'OP_NONCE': 10,
'OP_ABORT': 11,
'OP_STATUS': 12,
'OP_GPIO': 13,
'OP_CONFIG': 14,
'OP_STATISTICS': 15,
'OP_GROUP': 16,
'OP_CLOCKGATE': 17,
# Factory Codes
'OP_SERIAL': 50, # Serial number read/write
'OP_LIMITS': 51, # Operational limits read/write
'OP_HISTORY': 52, # Read operational history data
'OP_CHARACTERIZE': 53, # Characterize one or more die
'OP_CHAR_RESULT': 54, # Characterization result
'OP_SETTINGS': 55, # Read or write settings
'OP_FAN_SETTINGS': 56,
'OP_POWER': 57,
'OP_BAD_CORE': 58, # Set or clear bad core status
# USB interface specific operation codes
'OP_USB_INIT': 128, # Initialize USB interface details
'OP_GET_TRACE': 129, # Send back the trace buffer if present
'OP_LOOPBACK_USB': 130,
'OP_LOOPBACK_UART': 131,
'OP_DFU': 132, # Jump into the boot loader
'OP_USB_SHUTDOWN': 133, # Initialize USB interface details
'OP_DIE_STATUS': 134, # Die status. There are 4 die per ASIC
'OP_GWQ_STATUS': 135, # Global Work Queue protocol status
'OP_WORK_RESTART': 136, # Stratum work restart regime
'OP_USB_STATS1': 137, # Statistics class 1
'OP_USB_GWQSTATS': 138, # GWQ protocol statistics
'OP_USB_NOTICE': 139, # Asynchronous notification event
'OP_PING': 140, # Echo
'OP_CORE_MAP': 141, # Return core map
'OP_VERSION': 142, # Version information
'OP_FAN': 143, # Set Fan Speed
'OP_NAME': 144, # System name write/read
'OP_USB_DEBUG': 255
}
opnames = {}
for opcode_name, opcode in opcodes.items():
assert opcode not in opnames
opnames[opcode] = opcode_name
known_opcodes = set(opcodes.keys())
known_opnames = set(opnames.keys())
def check_framebytes(framebytes):
assert {x >= 0 and x < 256 for x in framebytes} == set([True])
assert len(framebytes) >= 8
assert framebytes[0] == 0xaa
assert framebytes[7] == crc.crc8(framebytes[1:7])
if framebytes[6] == 0:
assert len(framebytes) == 8
else:
data_length = 4 * framebytes[6]
# Eight byte frame header, data, plus 4 crc32 bytes.
# Fix: Restore when using serial line directly
# expected_framebytes_length = 8 + data_length + 4
expected_framebytes_length = 8 + data_length
assert expected_framebytes_length == len(framebytes)
data = framebytes[8:8+data_length]
# Fix: Restore when using serial line directly
# crc32 = framebytes[-4:]
# if crc32 != crc.crc32_to_bytelist(crc.crc32(data)):
# raise HF_Error("Bad CRC32 checksum.")
class hf_frame_data(with_metaclass(ABCMeta, object)):
    """Abstract payload carried inside a frame.

    Subclasses implement parsing from raw bytes and generation back to bytes.
    Constructing with ``bytes=None`` generates default data; otherwise the
    supplied bytes are parsed.
    """

    def __init__(self, bytes=None):
        self.initialize()
        if bytes is not None:
            self.parse_frame_data(bytes)
        else:
            self.generate_frame_data()

    @abstractmethod
    def initialize(self):
        pass

    @abstractmethod
    def parse_frame_data(self, bytes):
        pass

    @abstractmethod
    def generate_frame_data(self):
        pass
class hf_frame_data_base(hf_frame_data):
    """Minimal concrete frame payload: generates a single zero byte."""

    LENGTH = 0  # minimum number of payload bytes accepted by parse_frame_data

    def initialize(self):
        # Bug fix: this method was previously misspelled "intialize", which
        # left the abstract hf_frame_data.initialize unimplemented and made
        # this class impossible to instantiate.
        pass

    def parse_frame_data(self, bytes):
        assert len(bytes) >= self.LENGTH

    def generate_frame_data(self):
        self.frame_data = [0x00]
        return self.frame_data
# Fix: Document terminology: frame is the whole thing and consists of up to
# three parts: the header, the data, and the CRC32 checksum.
# Fix: Wants to verify checksums and throw exception if they are not right.
# And check for 0xaa.
# Fix: Wants to make all the fields of the header accessible, but also provide raw bytes.
# Fix: Should be able to initialize with stream of bytes or by filling in fields
# and asking for the bytes. Throw exception if field values are out of bounds.
# Fix: Maybe want something which checks for known opcode and whether fields are
# plausible for that opcode -- problem is that if we are using this to report
# what was seen on the wire, we need to make those assumptions, maybe.
# Fix: The really pure way to do this is to create a subclass for every opcode type
# and then have specific methods for that type. Probably more trouble than
# its worth, but it would also let us have specific methods for parameters
# that just occupy a couple bits.
class HF_Frame():
def __init__(self, initial_state):
self.initialize()
if initial_state is None:
pass
elif isinstance(initial_state, list):
self.off_the_wire(initial_state)
elif isinstance(initial_state, dict):
self.buildframe(initial_state)
else:
raise HF_Error("Argument type not supported: %s" % (inital_state))
def initialize(self):
self.framebytes = []
self.operation_code = None
self.chip_address = 0
self.core_address = 0
self.hdata = 0
self.data_length_field = 0
self.crc8 = 0
self.data = None
# Fix: Restore when using serial line directly
# self.crc32 = None
self.data_length = 0;
    def off_the_wire(self, framebytes):
        """Populate the frame fields by parsing raw bytes received off the wire.

        The bytes are validated by check_framebytes() first; invalid frames
        raise AssertionError.
        """
        check_framebytes(framebytes)
        self.framebytes = framebytes
        self.operation_code = framebytes[1]
        self.chip_address = framebytes[2]
        self.core_address = framebytes[3]
        # hdata is a little-endian 16-bit value in header bytes 4-5.
        self.hdata = lebytes_to_int(framebytes[4:6])
        # The wire format counts payload length in 32-bit words.
        self.data_length_field = framebytes[6]
        self.data_length = 4 * self.data_length_field
        self.crc8 = framebytes[7]
        if self.data_length > 0:
            assert {x >= 0 and x < 256 for x in framebytes} == set([True])
            self.data = framebytes[8:8+self.data_length]
            # Fix: Restore when using serial line directly
            # self.crc32 = framebytes[8+self.data_length:]
def set_data(self, data):
self.data = data
self.data_length = len(data)
self.data_length_field = int(self.data_length / 4)
# Fix: Restore when using serial line directly
# self.crc32 = crc.crc32(self.data)
    def construct_framebytes(self):
        """Serialize the current fields into wire bytes (header [+ payload]).

        Recomputes and stores the header CRC-8, caches the result in
        self.framebytes and returns it.
        """
        # CRC-8 covers header bytes 1..6 (everything after the 0xaa preamble).
        crc8_input = [self.operation_code]
        crc8_input += [self.chip_address]
        crc8_input += [self.core_address]
        crc8_input += int_to_lebytes(self.hdata, 2)
        crc8_input += [self.data_length_field]
        self.crc8 = crc.crc8(crc8_input)
        frameheader = [0xaa, self.operation_code, self.chip_address, self.core_address] + \
                      int_to_lebytes(self.hdata, 2) + [self.data_length_field, self.crc8]
        if self.data_length > 0:
            # Fix: Restore when using serial line directly
            # return frameheader + self.data + crc.crc32_to_bytelist(self.crc32)
            self.framebytes = frameheader + self.data
        else:
            self.framebytes = frameheader
        return self.framebytes
    def buildframe(self, framedict):
        """Populate the frame from a dict of field values and return wire bytes.

        Only 'operation_code' is mandatory and must be a known opcode.
        Byte-valued fields are range checked; 'data' must be at most 1020
        bytes and a multiple of 4 (payload length is counted in words).
        """
        legal_fields = set(['operation_code', 'chip_address', 'core_address', 'hdata', 'data'])
        received_fields = set(framedict.keys())
        assert received_fields.issubset(legal_fields)
        assert 'operation_code' in framedict
        assert framedict['operation_code'] in opnames
        self.operation_code = framedict['operation_code']
        if 'chip_address' in framedict:
            if framedict['chip_address'] < 0 or framedict['chip_address'] > 255:
                raise HF_Error("chip_address is out of range: %d" % (framedict['chip_address']))
            self.chip_address = framedict['chip_address']
        if 'core_address' in framedict:
            if framedict['core_address'] < 0 or framedict['core_address'] > 255:
                raise HF_Error("core_address is out of range: %d" % (framedict['core_address']))
            self.core_address = framedict['core_address']
        if 'hdata' in framedict:
            if framedict['hdata'] < 0 or framedict['hdata'] > 65535:
                raise HF_Error("hdata is out of range: %d" % (framedict['hdata']))
            self.hdata = framedict['hdata']
        if 'data' in framedict:
            assert len(framedict['data']) == 0 or {x >= 0 and x < 256 for x in framedict['data']} == set([True])
            assert len(framedict['data']) <= 1020 and len(framedict['data']) % 4 == 0
            if len(framedict['data']) > 0:
                self.set_data(framedict['data'])
        return self.construct_framebytes()
    def __str__(self):
        """Render the header fields as a multi-line debug string."""
        string = ""
        #string += "framebytes: {}\n".format(self.framebytes)
        string += "operation_code: {:#x}\n".format(self.operation_code)
        string += "chip_address: {:#x}\n".format(self.chip_address)
        string += "core_address: {:#x}\n".format(self.core_address)
        string += "hdata: {:#x}\n".format(self.hdata)
        # NOTE(review): the label says data_length_field but the value printed
        # is data_length (bytes, = 4 * data_length_field) -- confirm intent.
        string += "data_length_field: {}\n".format(self.data_length)
        #string += "data: {}\n".format(self.data)
return string | 41.474903 | 106 | 0.67129 |
01044352dba301fc4c0e8b880755aef7cda79a1f | 561 | py | Python | page/models.py | Dynamicist-handa/EscuelaLingua | 198abfcc14204d8ecd2706f2de2650293219662e | [
"Apache-2.0"
] | null | null | null | page/models.py | Dynamicist-handa/EscuelaLingua | 198abfcc14204d8ecd2706f2de2650293219662e | [
"Apache-2.0"
] | null | null | null | page/models.py | Dynamicist-handa/EscuelaLingua | 198abfcc14204d8ecd2706f2de2650293219662e | [
"Apache-2.0"
] | null | null | null | from django.db import models
from django.conf import settings
from courses.models import Course
# Create your models here.
class Update(models.Model):
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
null=True
)
course = models.ForeignKey(Course, on_delete=models.CASCADE)
update_title = models.CharField(max_length=200)
update_description = models.TextField()
#course_icon = models.CharField(max_length=200)
update_created_datetime = models.DateTimeField(auto_now_add=True)
| 26.714286 | 69 | 0.741533 |
010633c8fe4a1f8f50f2cbc160f034fdd91b60e5 | 11,162 | py | Python | src/command.py | 2minchul/chip-helper | 437d33938a19bab7e7380ff9dd0e7e98ec26fdb7 | [
"Apache-2.0"
] | 2 | 2020-05-12T06:11:39.000Z | 2020-07-17T10:45:20.000Z | src/command.py | 2minchul/chip-helper | 437d33938a19bab7e7380ff9dd0e7e98ec26fdb7 | [
"Apache-2.0"
] | 3 | 2021-06-08T21:30:59.000Z | 2022-03-12T00:28:26.000Z | src/command.py | 2minchul/chip-helper | 437d33938a19bab7e7380ff9dd0e7e98ec26fdb7 | [
"Apache-2.0"
] | null | null | null | import argparse
import os
import re
import sys
from operator import itemgetter
from typing import Optional
import sentry_sdk
import youtube_dl
from selenium.common.exceptions import SessionNotCreatedException
from cmd_tool import (
get_execution_path,
exit_enter,
get_input_path_or_exit,
get_chrome_driver_path_or_exit,
get_resource_path,
cd
)
from imagetools import Size
from qrcode import NaverQrCode, make_qr_image, make_redirect_html
from thumbnail import composite_thumbnail, capture_video
from youtube_uploader import YoutubeUploader, YoutubeUploaderException
sentry_sdk.init("https://1ff694f9169a4fa383a867fe10ed9329@o342398.ingest.sentry.io/5243685")
def make_dirs():
    """Interactively create zero-padded numeric folders under a user-given path.

    Existing folders are left untouched (exist_ok=True).
    """
    os.chdir(get_execution_path())
    print(os.path.abspath(os.curdir))
    print('폴더 생성기')
    print('존재하는 폴더는 건너뜀')
    target_root = input('생성할 경로: ')
    if not os.path.isdir(target_root):
        print('없는 경로입니다')
        exit_enter(1)
    first = int(input('시작할 숫자: '))
    last = int(input('끝나는 숫자: '))
    for number in range(first, last + 1):
        os.makedirs(os.path.join(target_root, f'{number:04}'), exist_ok=True)
    print('완료')
    exit_enter()
def make_thumbnail():
    """Capture a still from each numbered folder's video and composite a thumbnail.

    For every directory input/<n> containing exactly one .mp4 file, a frame is
    captured to NNNN.jpg and composited into pNNNN.jpg. Folders with zero or
    multiple videos, or with non-numeric names, are skipped.
    """
    input_path = os.path.join(get_execution_path(), 'input')
    for cur_dir, _, files in os.walk(input_path):
        dir_name = os.path.basename(cur_dir)

        def _is_mp4(filename):
            _, ext = os.path.splitext(filename)
            return ext == '.mp4'

        mp4_files = tuple(filter(_is_mp4, files))
        if 1 < len(mp4_files):
            print(f'pass: "{dir_name}" 안에 한개 이상의 mp4 파일이 존재합니다')
            # Bug fix: previously fell through and processed mp4_files[0] anyway.
            continue
        if not dir_name.isnumeric():
            print(f'skip: "{dir_name}" 는 숫자로 구성된 폴더이름이 아닙니다')
            continue
        if not mp4_files:
            # Bug fix: previously crashed with IndexError on mp4_files[0]
            # when a numeric folder contained no video.
            continue
        idx_text = f'{int(dir_name):04}'
        mp4_filename = mp4_files[0]
        jpg_filename = f'{idx_text}.jpg'
        jpg_filepath = os.path.join(cur_dir, jpg_filename)
        print(f'capture:\t{mp4_filename} to {jpg_filename}')
        capture_video(os.path.join(cur_dir, mp4_filename), jpg_filepath)
        target_filename = f'p{idx_text}.jpg'
        print(f'composite:\t{jpg_filename} to {target_filename} ...')
        composite_thumbnail(jpg_filepath, os.path.join(cur_dir, target_filename))
    print('완료되었습니다!')
    exit_enter()
def upload_videos():
    """Scan input/ for numbered folders holding a video + thumbnail pair and upload them.

    Folders that already contain youtube_url.txt are skipped. The channel id
    discovered during upload is cached in .mychannelid for update_youtube_urls().
    Requires cookies.txt in the top-level folder.
    """
    path = get_execution_path()
    chrome_driver_path = get_chrome_driver_path_or_exit()
    input_path = get_input_path_or_exit()
    cookie_path = os.path.join(get_execution_path(), 'cookies.txt')
    if not os.path.isfile(cookie_path):
        print('최상위 폴더에 cookies.txt 를 작성해야 합니다')
        exit_enter(1)
    uploader = None
    video_dirs = {}
    to_upload = {}
    # Pass 1: collect (video, thumbnail) pairs keyed by folder number.
    for cur_dir, _, files in os.walk(input_path):
        dir_name = os.path.basename(cur_dir)
        video_path = video_name = thumbnail_path = None
        if 1 < len(tuple(filter(lambda s: s.endswith('.mp4'), files))):
            print(f'"{cur_dir}" 에 여러개의 .mp4 파일이 존재합니다!')
            continue
        for filename in files:
            if filename == 'youtube_url.txt':
                # Marker written by update_youtube_urls(); this folder is done.
                video_path = thumbnail_path = None
                print(f'already uploaded: {dir_name}')
                break
            current_video_name, ext = os.path.splitext(filename)
            if ext == '.mp4':
                if not dir_name.isnumeric():
                    print(f'skip: "{dir_name}" 는 숫자로 구성된 폴더이름이 아닙니다')
                    break
                # Normalize the video file name to the zero-padded folder number.
                video_name = f'{int(dir_name):04}'
                video_path = os.path.join(cur_dir, f'{video_name}.mp4')
                if current_video_name != video_name:
                    print(f'rename "(unknown)" to "{video_name}.mp4"')
                    os.rename(os.path.join(cur_dir, filename), video_path)
                video_dirs[video_name] = cur_dir
            elif ext == '.jpg' and re.match(r'^\d+[.]jpg$', filename):
                thumbnail_path = os.path.join(cur_dir, filename)
        if not (video_path and thumbnail_path):
            continue
        to_upload[int(dir_name)] = (video_name, video_path, thumbnail_path)
    # Pass 2: upload in ascending folder-number order; a fresh browser session
    # is created for every video.
    for dir_number, (video_name, video_path, thumbnail_path) in sorted(to_upload.items(), key=lambda e: e[0]):
        # if not uploader:
        uploader = YoutubeUploader()
        try:
            my_channel_id = uploader.init(chrome_driver_path, cookie_path)
        except SessionNotCreatedException as e:
            print(e)
            print('컴퓨터에 설치된 chrome 과 chromedriver 의 버전이 일치하지 않습니다.')
            print('https://chromedriver.chromium.org/downloads 에서 다시 chromedriver 를 다운로드 해주세요.')
            break
        # Cache the channel id for update_youtube_urls().
        with open(os.path.join(path, '.mychannelid'), 'w') as f:
            f.write(my_channel_id)
        print(f'uploading {video_name}')
        try:
            if uploader.upload_video(video_path, thumbnail_path):
                print(f'success: {video_name}')
            else:
                print(f'failure: {video_name}')
        except YoutubeUploaderException as e:
            print(e)
            print(f'failure: {video_name}')
        try:
            uploader.browser.close()
        except:
            pass
    print('모든 업로드 작업을 마쳤습니다.')
    exit_enter()
def update_youtube_urls(my_channel_id=None):
    """Match uploaded channel videos to local numbered folders and write youtube_url.txt.

    The channel id defaults to the one cached in .mychannelid by upload_videos().
    Existing youtube_url.txt files are only overwritten when the user confirms.
    """
    path = get_execution_path()
    input_path = get_input_path_or_exit()
    cookie_path = os.path.join(get_execution_path(), 'cookies.txt')
    if not my_channel_id:
        mychannelid_path = os.path.join(path, '.mychannelid')
        if os.path.isfile(mychannelid_path):
            with open(mychannelid_path, 'r') as f:
                my_channel_id = f.read()
        else:
            print('youtube upload 를 먼저 실행해주세요')
            exit_enter(1)
    yn = input('기존에 존재하는 youtube_url.txt 도 덮어쓰시겠습니까? [y/n]: ')
    overwrite = yn == 'y'
    # Map zero-padded video name -> folder path for folders still needing a url file.
    video_dirs = {}
    for cur_dir, _, files in os.walk(input_path):
        if not overwrite and os.path.isfile(os.path.join(cur_dir, 'youtube_url.txt')):
            continue
        dir_name = os.path.basename(cur_dir)
        for filename in files:
            name, ext = os.path.splitext(filename)
            if ext == '.mp4' and dir_name.isnumeric():
                video_dirs[name] = cur_dir
    yt = youtube_dl.YoutubeDL(dict(cookiefile=cookie_path))
    # Resolve the channel page to its uploads playlist without downloading.
    my_channel_playlist = yt.extract_info(
        f'https://www.youtube.com/channel/{my_channel_id}', download=False, process=False
    ).get('url')
    is_created = False
    video_urls = {}
    for video in yt.extract_info(my_channel_playlist, download=False, process=False).get('entries'):
        title = video['title']
        # Uploaded videos are titled with the zero-padded folder number.
        if title.isnumeric() and video_dirs.get(title):
            is_created = True
            video_urls[int(title)] = (title, f"https://www.youtube.com/watch?v={video['id']}")
    if not is_created:
        print('새로 업로드 된 동영상이 없거나, 아직 업로드가 완전히 완료되지 않았습니다.')
        print('잠시 후 다시 시도해주세요.')
    else:
        for _, (title, url) in sorted(video_urls.items(), key=itemgetter(0)):
            print(f'make youtube_url.txt: {title}')
            with open(os.path.join(video_dirs[title], 'youtube_url.txt'), 'w') as f:
                f.write(url)
    exit_enter()
def qrcode():
    """Create a Naver QR code image and qrcode.html for each folder holding a youtube_url.txt.

    Folders already containing qrcode.html are skipped. Login to Naver happens
    lazily, only when at least one QR code must be created.
    """
    input_path = get_input_path_or_exit()
    chrome_driver_path = get_chrome_driver_path_or_exit()
    resource_path = get_resource_path()
    if not os.path.isfile(os.path.join(resource_path, 'DXGulimB-KSCpc-EUC-H.ttf')):
        print('폰트 파일을 찾을 수 없습니다.')
        print('DXGulimB-KSCpc-EUC-H.ttf 파일을 "font/" 안에 넣어주세요!')
        exit_enter(1)
    naver_qr: Optional[NaverQrCode] = None

    def walk_dir():
        # Collect numeric folders with a youtube_url.txt but no qrcode.html yet,
        # yielding them in ascending numeric order.
        walk_dirs = {}
        for cur_dir, dirs, files in os.walk(input_path):
            dir_name = os.path.basename(cur_dir)
            if not dir_name.isnumeric():
                continue
            if 'youtube_url.txt' not in files:
                continue
            if 'qrcode.html' in files:
                print(f'already created: {dir_name}')
                continue
            walk_dirs[int(dir_name)] = (cur_dir, dirs, files)
        return (v for k, v in sorted(walk_dirs.items(), key=itemgetter(0)))

    for cur_dir, _, files in walk_dir():
        dir_name = os.path.basename(cur_dir)
        idx = int(dir_name)
        idx_text = f'{idx:04}'
        with open(os.path.join(cur_dir, 'youtube_url.txt'), 'r') as f:
            youtube_url = f.read()
        if not naver_qr:
            # First QR code to create: start the browser and log in once.
            naver_qr = NaverQrCode()
            naver_qr.init(chrome_driver_path)
            print('waiting login ...')
            naver_qr.login()
            print('login success')
        qr_data = naver_qr.create_qr(idx_text, youtube_url).get('QRCodeData', {})
        qr_url = qr_data.get('qrCodeUrl')
        qr_id = qr_data.get('qrcdNo')
        if not qr_url:
            print(f'{idx_text}: QR CODE 생성에 실패했습니다')
            continue
        with cd(resource_path):
            print(f'creating "{idx_text}.png"')
            image = make_qr_image(Size(591, 738), qr_url, idx)  # 5cm x 6.25cm (300dpi)
            with open(os.path.join(cur_dir, f'{idx_text}.png'), 'wb') as f:
                image.save(f, format='PNG', dpi=(300, 300))
        # Redirect page pointing at the QR admin form so the target URL can be
        # edited later.
        make_redirect_html(
            os.path.join(cur_dir, 'qrcode.html'),
            f'https://qr.naver.com/code/updateForm.nhn?qrcdNo={qr_id}'
        )
    if naver_qr:
        naver_qr.visit_admin_page()
    print('모든 작업이 끝났습니다.')
    input('press enter to exit...')
    if naver_qr:
        naver_qr.close()
    sys.exit(0)
def organize():
    """Move loose numeric-named files in input/ into per-number subfolders."""
    input_path = get_input_path_or_exit()
    for entry in os.listdir(input_path):
        entry_path = os.path.join(input_path, entry)
        if 'README.txt' == entry or not os.path.isfile(entry_path):
            continue
        stem, _ = os.path.splitext(entry)
        if not stem.isnumeric():
            print(f'pass: "(unknown)" 은 숫자로 이루어진 이름이 아닙니다')
            continue
        destination = os.path.join(input_path, f'{int(stem)}')
        os.makedirs(destination, exist_ok=True)
        try:
            os.rename(entry_path, os.path.join(destination, entry))
            print(f'move "(unknown)" to "{int(stem)}/(unknown)"')
        except Exception as err:
            print(f'"(unknown)" 을 옮기는데 실패하였습니다: {err}')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Chip Helper')
    # required=True makes argparse itself reject a missing sub-command.
    subparsers = parser.add_subparsers(help='commands', dest='command', required=True)
    subparsers.add_parser('makedirs', help='Create dirs like "nnnn" format in a specific path')
    subparsers.add_parser('organize', help='Create numeric dirs and move video files in it')
    subparsers.add_parser('thumbnail', help='Create thumbnails')
    subparsers.add_parser('upload', help='Upload videos to youtube')
    subparsers.add_parser('youtube-url', help='Make youtube_url.txt in input dirs')
    subparsers.add_parser('qrcode', help='Generate Naver QR and composite qr image')
    args = parser.parse_args()
    # Dispatch table: sub-command name -> handler function.
    func = {
        'makedirs': make_dirs,
        'thumbnail': make_thumbnail,
        'upload': upload_videos,
        'youtube-url': update_youtube_urls,
        'qrcode': qrcode,
        'organize': organize,
    }.get(args.command)
    func()
    # NOTE(review): most handlers call exit_enter()/sys.exit() themselves, so
    # these final two lines are normally not reached.
    print('모든 작업이 완료되었습니다.')
    exit_enter()
| 34.450617 | 110 | 0.610554 |
0108aa0614cb046c0695b8425a9b7d179e4c447f | 1,338 | py | Python | code/get.py | tionn/holo-at-on | 8bda6e73d94184fa6fde3c1d26640e96341ae2a2 | [
"CC0-1.0"
] | null | null | null | code/get.py | tionn/holo-at-on | 8bda6e73d94184fa6fde3c1d26640e96341ae2a2 | [
"CC0-1.0"
] | null | null | null | code/get.py | tionn/holo-at-on | 8bda6e73d94184fa6fde3c1d26640e96341ae2a2 | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import io
import urllib2
import string
from BeautifulSoup import BeautifulSoup
import pandas as pd
import sys
city_url = 'http://twblg.dict.edu.tw/holodict_new/index/xiangzhen_level1.jsp?county=1'
def extract_items(base_url):
    """Download *base_url* and return the non-empty cell texts of each data row."""
    page = urllib2.urlopen(base_url).read()
    soup = BeautifulSoup(page)
    #print(soup.prettify())
    rows = soup.findAll('tr', attrs={'class': ['all_space1', 'all_space2']})
    extracted = []
    for row in rows:
        cells = [cell.text.strip() for cell in row.findAll('td')]
        extracted.append([cell for cell in cells if cell])  # Get rid of empty values
    return extracted
def get_area_url():
    """Return the 26 per-county township listing URLs (county codes A-Z)."""
    base_url = 'http://twblg.dict.edu.tw/holodict_new/index/xiangzhen_level1.jsp?county=%s'
    return [base_url % letter for letter in string.ascii_uppercase]
if __name__=='__main__':
    # NOTE: Python 2 script (print statements, urllib2).
    # County and city names
    data = extract_items(city_url)
    data.pop() # ignore trailing data row for '其他' ("other")
    print 'Cities and countries are done.'
    # Township and district names
    area_url = get_area_url()
    for i in area_url:
        area_data = extract_items(i)
        data.extend(area_data)
    print 'Townships are done.'
    #df = pd.DataFrame(data, columns=['name', 'holo'])
    df = pd.DataFrame(data)
    df.to_csv('moe_mapping.csv', encoding='utf-8', index=False, header=0)
print 'csv file done.' | 24.777778 | 89 | 0.672646 |
01092c860365112e2ab6bab4644a012763fb75a9 | 3,729 | py | Python | Soccer_league_project1.py | denisela1/Soccer_League_P1 | 5bc6de71259643ed2a6d9791ddbc70773f1c259d | [
"BSD-3-Clause-Clear"
] | 1 | 2018-02-26T08:47:15.000Z | 2018-02-26T08:47:15.000Z | Soccer_league_project1.py | denisela1/Soccer_League_P1 | 5bc6de71259643ed2a6d9791ddbc70773f1c259d | [
"BSD-3-Clause-Clear"
] | null | null | null | Soccer_league_project1.py | denisela1/Soccer_League_P1 | 5bc6de71259643ed2a6d9791ddbc70773f1c259d | [
"BSD-3-Clause-Clear"
] | null | null | null | import csv
#global variables for teams:
sharks = []
dragons = []
raptors = []
#read the csv file with the player info and create a player dictionary:
def read_players():
    """Read soccer_players.csv into {player_name: [remaining columns]}.

    The header row (keyed by 'Name') is dropped before returning.
    """
    player_dictionary = {}
    # Bug fix: the file handle was previously opened without ever being
    # closed; the context manager guarantees it is released.
    with open('soccer_players.csv') as csv_file:
        for row in csv.reader(csv_file):
            player_dictionary[row[0]] = row[1:]
    # Delete the first row with column titles.
    del player_dictionary['Name']
    return player_dictionary
#distribute kids based on experience:
def experienced_players():
    """Return the names of players whose experience flag is "YES"."""
    roster = read_players()
    return [name for name, info in roster.items() if info[1] == "YES"]
def inexperienced_players():
    """Return the names of players whose experience flag is "NO"."""
    roster = read_players()
    return [name for name, info in roster.items() if info[1] == "NO"]
#finalize teams:
def make_teams():
    """Fill the three global teams with 3 experienced + 3 inexperienced players each."""
    skilled = experienced_players()
    novice = inexperienced_players()
    for team, start in zip((sharks, dragons, raptors), (0, 3, 6)):
        team.extend(skilled[start:start + 3])
        team.extend(novice[start:start + 3])
    return sharks, dragons, raptors
#update the player dictionary to include the assigned teams:
def final_league():
    """Return the player dictionary with each player's team name appended."""
    roster = read_players()
    for player, info in roster.items():
        for team, label in ((sharks, "Sharks"), (dragons, "Dragons"), (raptors, "Raptors")):
            if player in team:
                info.append(label)
    return roster
#write the league info into the text file:
def create_textfile():
    """Write teams.txt with one section per team listing its players."""
    roster = final_league()
    with open("teams.txt", "w") as textfile:
        for team_name in ("Sharks", "Dragons", "Raptors"):
            textfile.write(team_name + "\n")
            for keys, values in roster.items():
                if values[3] == team_name:
                    textfile.write(str(keys) + ", " + str(values[1]) + ", " + str(values[2]) + "\n")
            textfile.write("\n")
#generate letters to send the guardians:
def letter_generator():
    """Write one welcome letter per player, addressed to the guardian.

    The file name is the lower-cased player name with spaces replaced by
    underscores.
    """
    # Practice start time per team.
    practice_times = {"Sharks": '8am', "Dragons": '9am', "Raptors": '10am'}
    letters = final_league()
    for keys, values in letters.items():
        # Bug fix: the practice time was previously computed in a separate
        # loop, so every letter used whatever team was processed last; look
        # it up per player instead.
        practice_start = practice_times[values[3]]
        # Bug fix: added the missing newline before "Thanks," so the closing
        # does not run into the previous sentence.
        letter = ("Dear {},\n"
                  "Congratulations! Your child, {}, is selected to play on the {} team. "
                  "The first practice is at {} next Saturday, July 1st.\n"
                  "Thanks,\n"
                  "Coach Deniz".format(values[2], keys, values[3], practice_start))
        with open("{}.txt".format((keys.lower()).replace(" ", "_")), "w") as textfile:
            textfile.write(letter)
if __name__ == "__main__":
    # NOTE(review): read_players/experienced_players/inexperienced_players are
    # also invoked internally by the later steps; these first three calls only
    # re-read the CSV and discard their results.
    read_players()
    experienced_players()
    inexperienced_players()
    make_teams()
    create_textfile()
    final_league()
    letter_generator()
| 33.594595 | 97 | 0.61196 |
010b4ad2a97b357a77ffe35ad3089e6223aec664 | 2,312 | py | Python | Gobot-Mecanum/robot.py | FRC1076/2019-Parade | 3824449ed10e33b401efb646fd2e6470c3941c8b | [
"MIT"
] | null | null | null | Gobot-Mecanum/robot.py | FRC1076/2019-Parade | 3824449ed10e33b401efb646fd2e6470c3941c8b | [
"MIT"
] | 2 | 2019-06-17T23:38:23.000Z | 2019-06-17T23:39:43.000Z | Gobot-Mecanum/robot.py | FRC1076/2019-Parade | 3824449ed10e33b401efb646fd2e6470c3941c8b | [
"MIT"
] | null | null | null | import wpilib
import wpilib.drive
import ctre
import robotmap
from wpilib.interfaces import GenericHID
RIGHT_HAND = GenericHID.Hand.kRight
LEFT_HAND = GenericHID.Hand.kLeft
class Robot(wpilib.TimedRobot):
    def robotInit(self):
        """Create the mecanum drivetrain, controllers and gyro at code start."""
        front_left_motor = ctre.WPI_TalonSRX(robotmap.mecanum['front_left_motor'])
        back_left_motor = ctre.WPI_TalonSRX(robotmap.mecanum['back_left_motor'])
        front_right_motor = ctre.WPI_TalonSRX(robotmap.mecanum['front_right_motor'])
        back_right_motor = ctre.WPI_TalonSRX(robotmap.mecanum['back_right_motor'])
        # Only the front-left controller is inverted here.
        front_left_motor.setInverted(True)
        #back_left_motor.setInverted(True)
        self.drive = wpilib.drive.MecanumDrive(
            front_left_motor,
            back_left_motor,
            front_right_motor,
            back_right_motor
        )
        # Motor-safety watchdog: stop output if not updated within 0.1 s.
        self.drive.setExpiration(0.1)
        self.lstick = wpilib.XboxController(0)
        self.rstick = wpilib.XboxController(1)
        self.gyro = wpilib.AnalogGyro(1)
#def teleopInit(self):
# self.xforward = 0
# self.yforward = 0
"""def operatorControl(self):
Called when operation control mode is enabled
while self.isOperatorControl() and self.isEnabled():
self.drive.driveCartesian(
self.lstick.getX(), self.lstick.getY(), self.rstick.getX(), 0
)
wpilib.Timer.delay(0.04)
"""
    def teleopPeriodic(self):
        """Called when operation control mode is enabled"""
        # Drive normally unless both X buttons are held. NOTE(review): the
        # `or` means a single pressed X button does NOT stop the drive --
        # confirm this is the intended kill-switch behavior.
        if not self.rstick.getXButton() or not self.lstick.getXButton():
            lspeed = deadzone(self.lstick.getX(LEFT_HAND), 0.2)
            rspeed = deadzone(self.lstick.getY(LEFT_HAND), 0.2)
            rotate = self.lstick.getX(RIGHT_HAND)
        else:
            rotate = 0
            lspeed = 0
            rspeed = 0
        # Drive with the gyro angle supplied for field-oriented control.
        self.drive.driveCartesian(
            lspeed, rspeed, rotate, self.gyro.getAngle()
        )
def deadzone(val, deadzone):
    """Rescale a joystick axis value, ignoring input inside the dead zone.

    Values with magnitude below ``deadzone`` map to 0; the remaining
    range is stretched linearly so full deflection still maps to +/-1.
    """
    magnitude = abs(val)
    if magnitude < deadzone:
        return 0
    scaled = (magnitude - deadzone) / (1 - deadzone)
    return -scaled if val < 0 else scaled
if __name__ == "__main__":
wpilib.run(Robot,physics_enabled=True) | 27.2 | 86 | 0.608564 |
01122030ff57d9377ddf61352858ba09c5197d30 | 139 | py | Python | blog/urls.py | 31-13/portfolio | 86d69abc05ead28823db5def49622f04af0ebfd2 | [
"MIT"
] | null | null | null | blog/urls.py | 31-13/portfolio | 86d69abc05ead28823db5def49622f04af0ebfd2 | [
"MIT"
] | null | null | null | blog/urls.py | 31-13/portfolio | 86d69abc05ead28823db5def49622f04af0ebfd2 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.urls import path
from .views import blog
urlpatterns = [
path('', blog, name='blog'),
]
| 15.444444 | 32 | 0.705036 |
0112992950dc4c577579c050f7017281022ccc42 | 139 | py | Python | iris_sdk/models/maps/local_rate_center_list.py | NumberAI/python-bandwidth-iris | 0e05f79d68b244812afb97e00fd65b3f46d00aa3 | [
"MIT"
] | 2 | 2020-04-13T13:47:59.000Z | 2022-02-23T20:32:41.000Z | iris_sdk/models/maps/local_rate_center_list.py | bandwidthcom/python-bandwidth-iris | dbcb30569631395041b92917252d913166f7d3c9 | [
"MIT"
] | 5 | 2020-09-18T20:59:24.000Z | 2021-08-25T16:51:42.000Z | iris_sdk/models/maps/local_rate_center_list.py | bandwidthcom/python-bandwidth-iris | dbcb30569631395041b92917252d913166f7d3c9 | [
"MIT"
] | 5 | 2018-12-12T14:39:50.000Z | 2020-11-17T21:42:29.000Z | #!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class LocalRateCenterListMap(BaseMap):
rate_center_id = None | 19.857143 | 49 | 0.791367 |
01157eaf40b4347f7763196480bf6b81341c469b | 5,374 | py | Python | webapp/services/hexun_service.py | myfreshcity/mystock | 3a8832e8c498128683b6af528da92d7fda32386d | [
"MIT"
] | 2 | 2016-09-19T09:18:17.000Z | 2022-02-16T14:55:51.000Z | webapp/services/hexun_service.py | myfreshcity/mystock | 3a8832e8c498128683b6af528da92d7fda32386d | [
"MIT"
] | 2 | 2020-04-29T13:01:45.000Z | 2020-04-29T13:01:45.000Z | webapp/services/hexun_service.py | myfreshcity/mystock | 3a8832e8c498128683b6af528da92d7fda32386d | [
"MIT"
] | 2 | 2018-06-29T15:09:36.000Z | 2019-09-05T09:26:06.000Z | import re
import traceback
import urllib2
import pandas as pd
import json,random,time,datetime
from bs4 import BeautifulSoup
from pandas.tseries.offsets import YearEnd
from sqlalchemy import text
from webapp import db, app
from webapp.models import FinanceBasic
headers = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'}
def updateFinanceBasic(code):
    """Fetch and store all missing quarterly finance reports for ``code``.

    Looks up the most recent report date already stored for the stock and
    then pulls every quarter from that date (or 2000-01-01 when nothing is
    stored yet) up to today, in batches of 16 quarters, pausing a random
    2-5 seconds between batches to avoid hammering the remote site.
    """
    sql = "select max(report_type) from hexun_finance_basic where code=:code"
    resultProxy = db.session.execute(text(sql), {'code': code})
    s_date = resultProxy.scalar()
    if s_date is None:
        s_date = '2000.01.01'  # no data yet: start from fiscal year 2000
    # All quarter-end dates from the last stored report up to now.
    # BUG FIX: the module does ``import datetime``, so ``today`` lives on
    # ``datetime.datetime`` -- the original ``datetime.today()`` raised
    # AttributeError.
    index = pd.date_range(s_date, datetime.datetime.today(), freq='Q')
    periods = [x.strftime('%Y.%m.%d') for x in index]
    # Process 16 quarters (4 years) per batch to limit network pressure.
    total = len(periods)
    i = 0
    while i < total:  # ``<`` (not ``<=``) avoids a useless empty final batch
        time.sleep(random.randint(2, 5))  # throttle remote requests
        updateFinanceBasicByPeriod(periods[i:i + 16], code)
        i = i + 16
def updateFinanceBasicByPeriod(periods,code):
    """Download and persist finance data for the given report periods.

    For each period in ``periods`` not already stored for ``code``, scrape
    the report from hexun, compute the TTM columns and append the rows to
    the ``hexun_finance_basic`` table.

    Returns True when at least one new row was written, False otherwise.
    """
    df = pd.DataFrame()
    for x in periods:
        # Skip periods already present in the database.
        fb = db.session.query(FinanceBasic).filter_by(code=code, report_type=x).first()
        if (not fb):
            fd = findFinanceData(code, x)
            if not fd.empty:
                df = df.append(fd)
    if df.size > 0:
        # The scrape returns positional columns; name them before storing.
        df.columns = ['code', 'report_type', 'yysr', 'jlr', 'lrze', 'kjlr', 'zzc', 'gdqy', 'jyjxjl', 'mgsy', 'roe',
                      'mgjyxjl', 'mgjzc']
        # Empty placeholder TTM columns, filled by calculateTTMValue below.
        tpd = pd.DataFrame({
            'mgsy_ttm': '',
            'mgjyxjl_ttm': ''
        }, index=df.index)
        df1 = pd.concat([df, tpd], axis=1)
        ndf = calculateTTMValue(df1, code)
        ndf.to_sql('hexun_finance_basic', db.engine, if_exists='append', index=False, chunksize=1000)
        app.logger.info(code + ' finance update done...')
        return True
    else:
        return False
def findFinanceData(code,period):
    """Scrape the key finance summary table for one stock and period.

    Fetches the hexun "zxcwzb" page for ``code``/``period`` and returns a
    single-row DataFrame: [code, report date, 11 numeric indicators].
    Returns an empty DataFrame when the page has no usable data.
    """
    # Q1 pages are published under the .03.15 date rather than .03.31.
    period = re.sub('03.31', '03.15', period)
    url = "http://stockdata.stock.hexun.com/2008/zxcwzb.aspx?stockid="+code+"&accountdate="+period
    app.logger.info('query stock(' + code + ') finance data url is:' + url)
    req = urllib2.Request(url =url,headers = headers)
    feeddata = urllib2.urlopen(req).read()
    soup = BeautifulSoup(feeddata, "html5lib")
    paper_name = soup.html.body.find(id="zaiyaocontent").table.tbody
    if paper_name is None:
        return pd.DataFrame()
    else:
        paper_name = paper_name.find_all('tr')
    # One row per indicator; the value is in the second cell of each row.
    data = [code]
    for e in paper_name:
        s = e.find_all('td')
        # '--' marks a missing value; strip thousands separators otherwise.
        i = '0' if s[1].div.text == '--' else re.sub(',', '',s[1].div.text)
        data.append(i)
    df = pd.DataFrame(data).T
    # Discard the row when the report-date cell is empty.
    if df.iat[0, 1]=='':
        return pd.DataFrame()
    else:
        return df.iloc[:,:13]
# Fill in trailing-twelve-month (TTM) values for newly scraped rows.
def calculateTTMValue(in_df,code):
    """Compute TTM earnings/cash-flow per share for the rows in ``in_df``.

    Combines the new rows with the history already stored for ``code`` so
    that, for a non-Q4 quarter, TTM = last full year - same quarter last
    year + current quarter. Returns only the rows that arrived via
    ``in_df`` (selected by report date), with mgsy_ttm/mgjyxjl_ttm filled.

    NOTE: this module is Python 2 only (``except Exception, ex`` below).
    """
    in_df_date = in_df['report_type'].map(lambda x: pd.to_datetime(x))
    df = pd.read_sql_query(
        "select code,report_type,yysr,jlr,lrze,kjlr,zzc,gdqy,jyjxjl,mgsy,roe,mgjyxjl,mgjzc,mgsy_ttm,mgjyxjl_ttm \
          from hexun_finance_basic \
         where code=%(name)s",
        db.engine, params={'name': code})
    df = df.append(in_df)
    i = df['report_type'].map(lambda x: pd.to_datetime(x))
    df3 = df.set_index(i)
    for index, row in df3.iterrows():
        # Only rows whose TTM columns are still empty need computing.
        if row.mgjyxjl_ttm is None or row.mgjyxjl_ttm == '':
            # End of the previous fiscal year ...
            lastYearEnd = YearEnd().rollback(index)
            # offset = offset.strftime('%Y-%m-%d')
            # ... and the same quarter one year earlier.
            lastYearQuart = index - pd.DateOffset(months=12)
            app.logger.debug(index.strftime('%Y-%m-%d') + ':' + lastYearEnd.strftime('%Y-%m-%d') + ':' + lastYearQuart.strftime(
                '%Y-%m-%d'))
            try:
                if index.quarter != 4:
                    # TTM = last full year - same quarter last year + this quarter.
                    n_mgsy = float(df3.loc[lastYearEnd].mgsy) - float(df3.loc[lastYearQuart].mgsy) + float(row.mgsy)
                    n_mgjyxjl = float(df3.loc[lastYearEnd].mgjyxjl) - float(df3.loc[lastYearQuart].mgjyxjl) + float(
                        row.mgjyxjl)
                else:
                    # A Q4 report already covers the whole year.
                    n_mgsy = float(row.mgsy)
                    n_mgjyxjl = float(row.mgjyxjl)
                df3.mgsy_ttm.loc[index] = n_mgsy
                df3.mgjyxjl_ttm.loc[index] = n_mgjyxjl
            except Exception, ex:
                # Missing history rows: fall back to the quarter's own value.
                app.logger.warn(traceback.format_exc())
                df3.mgsy_ttm.loc[index] = float(row.mgsy)
                df3.mgjyxjl_ttm.loc[index] = float(row.mgjyxjl)
            # Truncate to two decimal places for storage.
            v_mgsy_ttm = round(df3.mgsy_ttm.loc[index],2)
            v_mgjyxjl_ttm = round(df3.mgjyxjl_ttm.loc[index],2)
            # Replace exact zeros with 0.01 -- presumably to avoid zero
            # denominators in downstream ratios; TODO confirm.
            v_mgsy_ttm = 0.01 if v_mgsy_ttm==0 else v_mgsy_ttm
            v_mgjyxjl_ttm = 0.01 if v_mgjyxjl_ttm == 0 else v_mgjyxjl_ttm
            df3.mgsy_ttm.loc[index] = v_mgsy_ttm
            df3.mgjyxjl_ttm.loc[index] = v_mgjyxjl_ttm
    return df3.iloc[df3.index.isin(in_df_date)]
# Latest per-share earnings (TTM), net assets and operating cash flow (TTM).
def getPerStockRevenue():
    """Return the most recent per-share figures for every stock.

    Reads all rows from hexun_finance_basic, keeps the newest report of
    each code, and returns a DataFrame indexed by stock code.
    """
    frame = pd.read_sql_query(
        "select code,report_type,mgsy_ttm,mgjzc,mgjyxjl_ttm from hexun_finance_basic",
        db.engine)
    report_dates = frame['report_type'].map(lambda x: pd.to_datetime(x))
    # Index by report date, newest first, then keep one row per code.
    frame = frame.set_index(report_dates).sort_index(ascending=False)
    frame = frame.groupby([frame['code']]).first().reset_index()
    return frame.set_index(frame['code'])
| 36.067114 | 128 | 0.595646 |
0118814a3663bee91c59984af98f47d72c8f9e4c | 2,555 | py | Python | machine-learning-and-ai/handwriting-classifier/neural_network_handwriting_classifier.py | fraserlove/python | b449259c02e73102e37a4cd42018dbcc6b04d0ba | [
"Apache-2.0"
] | 16 | 2020-06-11T16:54:55.000Z | 2022-01-07T01:36:05.000Z | machine-learning-and-ai/handwriting-classifier/neural_network_handwriting_classifier.py | fraserlove/python-projects | b449259c02e73102e37a4cd42018dbcc6b04d0ba | [
"Apache-2.0"
] | null | null | null | machine-learning-and-ai/handwriting-classifier/neural_network_handwriting_classifier.py | fraserlove/python-projects | b449259c02e73102e37a4cd42018dbcc6b04d0ba | [
"Apache-2.0"
] | 15 | 2020-06-14T08:29:50.000Z | 2021-08-05T17:25:42.000Z | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot = True)
# Network hyperparameters
learning_rate = 0.0001 # 1.95 for sigmoid activation function
batch_size = 10
update_step = 10
input_nodes = 784 # 28x38 images as input
layer_1_nodes = 500
layer_2_nodes = 500
layer_3_nodes = 500
output_nodes = 10
network_input = tf.placeholder(tf.float32, [None, input_nodes])
target_output = tf.placeholder(tf.float32, [None, output_nodes])
# Network model, weights and biases
layer_1 = tf.Variable(tf.random_normal([input_nodes, layer_1_nodes]))
layer_2 = tf.Variable(tf.random_normal([layer_1_nodes, layer_2_nodes]))
layer_3 = tf.Variable(tf.random_normal([layer_2_nodes, layer_3_nodes]))
output_layer = tf.Variable(tf.random_normal([layer_3_nodes, output_nodes]))
layer_1_bias = tf.Variable(tf.random_normal([layer_1_nodes]))
layer_2_bias = tf.Variable(tf.random_normal([layer_2_nodes]))
layer_3_bias = tf.Variable(tf.random_normal([layer_3_nodes]))
output_layer_bias = tf.Variable(tf.random_normal([output_nodes]))
# Feedforward calculations
layer_1_out = tf.nn.relu(tf.matmul(network_input, layer_1) + layer_1_bias)
layer_2_out = tf.nn.relu(tf.matmul(layer_1_out, layer_2) + layer_2_bias)
layer_3_out = tf.nn.relu(tf.matmul(layer_2_out, layer_3) + layer_3_bias)
network_out_1 = tf.matmul(layer_3_out, output_layer) + output_layer_bias
network_out_2 = tf.nn.softmax(network_out_1)
cost_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits = network_out_1, labels = target_output))
training_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost_function)
predicitions = tf.equal(tf.argmax(network_out_2, 1), tf.argmax(target_output, 1))
accuracy = tf.reduce_mean(tf.cast(predicitions, tf.float32))
# Running the neural network
with tf.Session() as session:
session.run(tf.global_variables_initializer())
no_epochs = 10
for epoch in range(no_epochs):
total_cost = 0
no_batches = int(mnist.train.num_examples / batch_size)
for batch in range(no_batches):
input_data, labels = mnist.train.next_batch(batch_size)
step, cost = session.run([training_step, cost_function], feed_dict = {network_input: input_data, target_output: labels})
total_cost += cost
print('Epoch {} out of {} completed, loss: {}'.format(epoch, no_epochs, total_cost))
print('Accuracy: {}'.format(accuracy.eval({network_input: mnist.test.images, target_output: mnist.test.labels})))
| 46.454545 | 132 | 0.768689 |
011929cc6bf535432cf049cfeb608476447f32f5 | 1,157 | py | Python | 202_happyNumber.py | stuti-rastogi/leetcode-python-solutions | 73593fe642a06a83cde974ba5e6de3a7b396ec84 | [
"MIT"
] | 4 | 2018-07-24T08:36:42.000Z | 2019-08-25T17:48:47.000Z | 202_happyNumber.py | stuti-rastogi/leetcodesolutions | 73593fe642a06a83cde974ba5e6de3a7b396ec84 | [
"MIT"
] | null | null | null | 202_happyNumber.py | stuti-rastogi/leetcodesolutions | 73593fe642a06a83cde974ba5e6de3a7b396ec84 | [
"MIT"
] | null | null | null | class Solution(object):
def isHappy(self, n):
"""
:type n: int
:rtype: bool
"""
seen = set()
while (True):
sumOfSquaredDigits = 0
while n > 0:
digit = n % 10
n = n // 10
sumOfSquaredDigits += digit**2
if sumOfSquaredDigits == 1:
return True
if sumOfSquaredDigits in seen:
return False
seen.add(sumOfSquaredDigits)
n = sumOfSquaredDigits
# seen = []
# while (True):
# print (seen)
# digits = self.getDigits(n)
# total = 0
# print ("Digits: " + str(digits))
# for i in digits:
# total += int(pow(i,2))
# if (total in seen):
# return False
# if (total == 1):
# return True
# seen.append(total)
# n = total
# def getDigits(self, n):
# digits = []
# while (n > 0):
# digits.append(n%10)
# n = n//10
# return digits | 26.906977 | 46 | 0.395851 |
011acc08c0fc9cd09faf7e3c06fdec11827adac8 | 434 | py | Python | dataclazzes/playlist.py | navrudh/youtube-music-helper-scripts | 7bae74d698e15e11bac427e42bd0a21e08163f88 | [
"MIT"
] | null | null | null | dataclazzes/playlist.py | navrudh/youtube-music-helper-scripts | 7bae74d698e15e11bac427e42bd0a21e08163f88 | [
"MIT"
] | null | null | null | dataclazzes/playlist.py | navrudh/youtube-music-helper-scripts | 7bae74d698e15e11bac427e42bd0a21e08163f88 | [
"MIT"
] | null | null | null | from dataclasses import dataclass, field
from dataclazzes.track import Track
@dataclass
class Playlist:
"""Class for storing playlist information."""
id: str
title: str
tracks: [Track] = field(default_factory=list)
@staticmethod
def from_raw(raw_playlists: list):
return [
Playlist(id=playlist['playlistId'], title=playlist['title'])
for playlist in raw_playlists
]
| 22.842105 | 72 | 0.663594 |
011b749c0cb7168d1d612e734d1940a1245eb56c | 9,091 | py | Python | entity.py | PIRXrav/pyhack | af5c86fb721053d8a3e819ab772c8144a23b86bf | [
"MIT"
] | null | null | null | entity.py | PIRXrav/pyhack | af5c86fb721053d8a3e819ab772c8144a23b86bf | [
"MIT"
] | null | null | null | entity.py | PIRXrav/pyhack | af5c86fb721053d8a3e819ab772c8144a23b86bf | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# pylint: disable=C0103
"""
Définie la classe entity
Permet de modeliser le personnage et des monstre
"""
from random import choice
from vect import Vect
from astar import calc_path_astart
import chars
class Player():
    """
    Player: the controllable character (position, facing, HP, ammo, money).
    """
    BULLET_MAX = 10  # ammo capacity
    HP_MAX = 10      # health capacity
    START_MONEY = 0
    def __init__(self):
        """
        Initialise the character with its default stats at the origin.
        """
        self.pos = Vect(0, 0)
        self.direction = Vect(1, 0)  # current facing direction
        self.distance_view = 7       # sight radius, in tiles
        self.bullet = self.BULLET_MAX
        self.hp = self.HP_MAX
        self.level = 0
        self.money = self.START_MONEY
        self.sword_damage = 1
        self.gun_damage = 2
    def level_up(self, pos):
        """
        Advance the character one level and move it to ``pos``.
        """
        self.level += 1
        self.pos = pos
    def g_case_visible(self, mat_collide):
        """
        Yield every tile of ``mat_collide`` visible from the player:
        Bresenham rays towards the border of the view square, stopped at
        ``distance_view`` or at the first blocking (falsy) cell, which is
        still yielded once before the ray stops.
        """
        # NB: takes the segments from a circle and not a rect
        # which is not OK (original author's note, kept as-is)
        border = self.pos.g_rect(Vect(self.distance_view, self.distance_view))
        for bordure_pos in border:
            for pos in self.pos.g_bresenham_line(bordure_pos):
                if self.pos.distance(pos) >= self.distance_view:
                    break
                # Stop at the map boundary.
                if not Vect(0, 0) <= pos < Vect(len(mat_collide),
                                                len(mat_collide[0])):
                    break
                if not mat_collide[pos.x][pos.y]:
                    yield pos
                    break
                yield pos
    def shoot(self):
        """
        Fire a new bullet in the facing direction (consumes one round).
        """
        self.bullet -= 1
        return Bullet(self.pos, self.direction, self.gun_damage)
    def strike(self, mat_collide):
        """
        Swing the sword on the tile in front of the player.

        ``mat_collide`` is unused here -- presumably kept for signature
        symmetry with the other actions; TODO confirm before removing.
        """
        return Sword(self.pos + self.direction, self.sword_damage)
    def add_money(self, value):
        """
        Add coins to the player. Always succeeds and returns True.
        """
        assert value >= 0
        self.money += value
        return True
    def add_hp(self, value):
        """
        Add HP to the player, capped at HP_MAX.
        Returns False when already at full health.
        """
        assert value >= 0
        if self.hp == self.HP_MAX:
            return False
        self.hp = min(self.hp + value, self.HP_MAX)
        return True
    def add_bullets(self, value):
        """
        Add rounds to the player, capped at BULLET_MAX.
        Returns False when the clip is already full.
        """
        assert value >= 0
        if self.bullet == self.BULLET_MAX:
            return False
        self.bullet = min(self.bullet + value, self.BULLET_MAX)
        return True
    def update(self, mat_collide, depl_vect):
        """
        Update the character position from the movement vector
        ``depl_vect`` and the collision map ``mat_collide`` (truthy
        cells are walkable), sliding along walls on diagonal moves.
        """
        if depl_vect != Vect(0, 0):
            self.direction = depl_vect
        new_pos = self.pos + depl_vect
        # Collision tests, axis by axis (handles diagonals)
        if mat_collide[new_pos.x][self.pos.y]:
            # first path free along x
            if mat_collide[new_pos.x][new_pos.y]:
                # second path free along y: full move
                self.pos = new_pos
            else:
                # second path blocked along y: slide along x only
                self.pos.x = new_pos.x
        elif mat_collide[self.pos.x][new_pos.y]:
            # first path free along y
            if mat_collide[new_pos.x][new_pos.y]:
                # second path free along x: full move
                self.pos = new_pos
            else:
                # second path blocked along x: slide along y only
                self.pos.y = new_pos.y
        else:
            # No free path
            # Do nothing
            pass
    def render(self):
        """
        Return the char to display.
        """
        return chars.C_PLAYER
    def __str__(self):
        """
        Return the status-bar string (HP hearts and bullet gauge).
        """
        heal_str = ('\u2665' * int(self.hp / self.HP_MAX * 10)
                    + ' ' * (10-int(self.hp / self.HP_MAX * 10)))
        bullet_str = ('|' * int(self.bullet)
                      + ' ' * int(self.BULLET_MAX - self.bullet))
        return ('Position : {} | HP : ['
                + heal_str
                + '] | Bullets ['
                + bullet_str + ']')
class Bullet:
    """
    Bullet: a projectile moving one tile per update.
    """
    def __init__(self, pos, directions, dammage):
        """
        Create a bullet at ``pos`` moving along ``directions``.
        """
        self.pos = pos
        self.direction = directions
        self.dammage = dammage
    def update(self, mat_collide):
        """
        Advance the bullet one step and return the value of
        ``mat_collide`` at the new position.

        NOTE(review): the original comment said "returns 1 when it hits
        an obstacle", but in Player.update truthy cells of
        ``mat_collide`` mean "walkable" -- confirm the intended
        convention with the caller.
        """
        self.pos += self.direction
        return mat_collide[self.pos.x][self.pos.y]
    def render(self):
        """
        Return the char to display, chosen from the travel angle.
        """
        return chars.C_BULLETS[int(self.direction.angle()/2/3.1415 * 8)]
    def __str__(self):
        return "(*:{})".format(self.pos)
class Monster:
    """
    Monster: enemy driven by a small state machine
    (IDLE / RUN / DECOMPOSITION / SHOCKED).
    """
    IDLE = 0
    RUN = 1
    DECOMPOSITION = 2
    SHOCKED = 3
    def __init__(self, pos, dammage, health):
        """
        Create a monster at ``pos`` with the given damage and health.
        """
        self.pos = pos
        self.dammage = dammage
        self.health = health
        self.shocked = 0        # remaining stun ticks
        self.state = self.IDLE
        self.ttd = 8 # TIme to die (decomposition countdown)
        # TEMP
        # Path from the monster to the player
        self.path = []
    def update(self, mat_collide, player_pos):
        """
        Update the enemy. Returns True once the corpse has fully
        decomposed (ttd reached 0) so the caller can remove it.
        """
        if self.state == self.SHOCKED:
            # Stunned: count down, then go back to IDLE.
            if self.shocked:
                self.shocked -= 1
            else:
                self.state = self.IDLE
            return False
        if self.state == self.IDLE or self.state == self.RUN:
            # Chase the player when within 10 tiles.
            if self.pos.distance(player_pos) <= 10:
                self.state = self.RUN
            else:
                self.state = self.IDLE
            if self.state == self.RUN:
                # A* path towards the player; take one step along it.
                self.path = calc_path_astart(mat_collide, self.pos, player_pos)
                if self.path != []:
                    self.pos = self.path[0]
            if self.state == self.IDLE:
                # TODO: random wandering
                pass
            return False
        # DECOMPOSITION: count down until removal.
        self.ttd -= 1
        return self.ttd == 0  # dead and fully decomposed
    def render(self):
        """
        Return the char to display.
        """
        if self.state == self.RUN:
            return chars.C_MONSTER_RUN
        return chars.C_MONSTERS[self.ttd % len(chars.C_MONSTERS)]
    def kill(self):
        """
        Hit the monster: start decomposition when its health is already
        depleted, otherwise stun it for 4 ticks.

        NOTE(review): health is never decremented here -- presumably the
        caller subtracts damage before calling kill(); confirm.
        """
        if self.health <= 0:
            self.state = self.DECOMPOSITION
        else:
            self.state = self.SHOCKED
            self.shocked = 4
    def __str__(self):
        return "(*:{})".format(self.pos)
class Treasure:
    """
    Treasure chest. Depending on ``value`` it contains either one of
    three basic pickups (health, ammo, coins) or an upgrade
    (strength / gun power) -- see ``__init__``.
    """
    HEART = 0
    BULLET = 1
    GOLD = 2
    STRENGH = 3
    POWER = 4
    CHARS = [chars.C_HEART, chars.C_BULLET_CHRG, chars.C_MONEY]
    def __init__(self, pos, value):
        """
        Create a chest at ``pos``. ``value == 1`` picks a random basic
        pickup; ``value == 2`` is a strength upgrade; anything else a
        power (gun) upgrade.
        """
        self.pos = pos
        if value == 1:
            self.object = choice([self.HEART, self.BULLET, self.GOLD])
        else:
            self.cpt = -1  # animation frame counter (upgrade chests only)
            if value == 2:
                self.object = self.STRENGH
            else:
                self.object = self.POWER
        self.value = value
    def render(self):
        """
        Return the char to display (animated for upgrade chests).
        """
        if self.value == 1:
            return self.CHARS[self.object]
        self.cpt += 1
        if self.value == 2:
            return chars.C_TRE_WEAPON[self.cpt % len(chars.C_TRE_WEAPON)]
        return chars.C_TRE_GUN[self.cpt % len(chars.C_TRE_GUN)]
    def get_value(self):
        """
        Return the value/kind marker of the chest's content.
        """
        return self.value
class Sword:
    """
    A sword swing coming from the player: sweeps the tiles around the
    player over successive updates.
    """
    # Offsets of the tiles surrounding the player, swept in reverse order.
    DELTA_POSS = list(Vect(0, 0).g_rect(Vect(1, 1)))
    def __init__(self, pos, dammage):
        self.pos = pos
        self.cpt = len(self.DELTA_POSS)-1  # index into DELTA_POSS, counts down
        self.dammage = dammage
    def update(self, mat_collide, player_pos):
        """
        Move the swing to the next tile around ``player_pos``.
        Returns True once the swing is finished.
        """
        if self.cpt < 0:
            return True
        self.pos = player_pos + self.DELTA_POSS[self.cpt]
        self.cpt -= 1
        return False
    def render(self):
        """
        Return the char for the current swing frame.
        """
        return chars.C_SWORDS[- self.cpt % len(chars.C_SWORDS)]
class Door:
    """
    The exit door of the level (static position, animated rendering).
    """
    def __init__(self, pos):
        """
        Place the door at ``pos`` with its animation counter reset.
        """
        self.pos = pos
        self.cpt = 0
    def update(self):
        """
        Doors have no behaviour; kept for interface parity.
        """
    def render(self):
        """
        Return the next animation frame character.
        """
        self.cpt += 1
        frames = chars.C_DOORS
        return frames[self.cpt % len(frames)]
def main():
"""
Test unitaire
"""
if __name__ == '__main__':
main()
| 24.438172 | 79 | 0.494995 |
011dbd3f8e4f3dc4a3cd128fe4d90224e86d26f2 | 3,488 | py | Python | apps/Ipo.py | KiloSat/FirstNivesh | 0fe200e08bb9f7d89de91f59eb14448fa7b972b9 | [
"MIT"
] | null | null | null | apps/Ipo.py | KiloSat/FirstNivesh | 0fe200e08bb9f7d89de91f59eb14448fa7b972b9 | [
"MIT"
] | null | null | null | apps/Ipo.py | KiloSat/FirstNivesh | 0fe200e08bb9f7d89de91f59eb14448fa7b972b9 | [
"MIT"
] | 2 | 2021-04-03T16:39:23.000Z | 2021-08-15T08:09:21.000Z | import streamlit as st
def app():
import plotly.express as px
import plotly.graph_objects as go
from textblob import TextBlob
import tweepy
import sys
import pandas as pd
api_key = 'q7QHHHAKEwd5igoUvVrx5sCiw'
api_secret_key = 'i7uhcFirM38bnbYscv32beJnMpsmMxFdYSHitwfSCPIeMj7Lcs'
access_token = '916414257993879552-kWKlelyL9e6HGH40wcdawT8CiCvO3Hz'
access_token_secret= 'zYflOPrxysrdOsQiAhp8gmJjAtwRMUcSyX6KlexMk03eB'
auth_handler = tweepy.OAuthHandler(consumer_key = api_key,consumer_secret=api_secret_key)
auth_handler.set_access_token(access_token,access_token_secret)
api = tweepy.API(auth_handler)
searchwhat = st.sidebar.text_input("Search Term", 'Zomato IPO')
tweet_amount = int(st.sidebar.text_input('Tweet Amount', '50'))
tweets = tweepy.Cursor(api.search,q=searchwhat,lang='en').items(tweet_amount)
polarity,positive,negative,neutral = 0,0,0,0
tweetlist = []
polaritylist = []
for tweet in tweets:
final_text = tweet.text.replace('RT','')
if final_text.startswith(' @'):
position = final_text.index(':')
final_text = final_text[position+2:]
if final_text.startswith('@'):
position = final_text.index('')
final_text = final_text[position+2:]
analysis = TextBlob(final_text)
tweet_polarity = analysis.polarity
if tweet_polarity>0:
positive+=1
polaritylist.append('positive')
elif tweet_polarity<0:
negative+=1
polaritylist.append('negative')
else:
neutral+=1
polaritylist.append('neutral')
polarity += tweet_polarity
tweetlist.append(final_text)
labels = ['Positive','Negative','Neutral']
values = [positive,negative,neutral]
st.write(f'The Sentiment Analysis for Search Term : {searchwhat}')
# Use `hole` to create a donut-like pie chart
fig = go.Figure(data=[go.Pie(labels=labels, values=values, hole=.5)])
st.plotly_chart(fig)
# tweetcontainer = pd.DataFrame(list(zip(tweetlist,polaritylist)),columns=['Tweets','Sentiment'])
# st.write(tweetcontainer)
if len(tweetlist)<10:
def showTweets(index=0,limit=len(tweetlist)):
while(index<limit):
st.write(tweetlist[index])
st.write(polaritylist[index])
index+=1
with st.beta_expander('See the Tweets'):
showTweets()
else:
def showTweets(index=0,limit=10):
while(index<limit):
st.write(tweetlist[index])
st.write(polaritylist[index])
index+=1
with st.beta_expander('See the Tweets'):
showTweets()
st.subheader('RHP Analysis')
#st.write('Enter any text from the RHP here')
user_input = st.text_area('Enter any section from the RHP here')
rhpanalysis = TextBlob(user_input)
rhppolarity = rhpanalysis.polarity
if rhppolarity>0:
ipo_outlook = 'Positve'
elif rhppolarity<0:
ipo_outlook = 'Negative'
else:
ipo_outlook = 'Neutral'
with st.beta_expander('See the Analysis'):
st.subheader(f'Outlook of the Particular IPO is {ipo_outlook} with polarity :{rhppolarity}')
| 35.591837 | 109 | 0.610952 |
011ec6d9a369d9cd8fa960e87d7fc5aabbdb09f6 | 2,447 | py | Python | tests/test_unit_varfilter.py | gomibaya/pyVarfilter | 098414223e575dda3fabe7b8ccb1b16f6f8da3a0 | [
"MIT"
] | null | null | null | tests/test_unit_varfilter.py | gomibaya/pyVarfilter | 098414223e575dda3fabe7b8ccb1b16f6f8da3a0 | [
"MIT"
] | null | null | null | tests/test_unit_varfilter.py | gomibaya/pyVarfilter | 098414223e575dda3fabe7b8ccb1b16f6f8da3a0 | [
"MIT"
] | null | null | null | import unittest
import logging
from varfilter import varfilter, filter
class TestVarfilter(unittest.TestCase):
def setUp(self):
print("Preparando el contexto")
self.source1 = {"teststr1": "Value teststr1",
"testint1": 10}
self.source2 = {"teststr2": "Value teststr2",
"testint2": 10,
"teststr": "Value str"}
def test_fVar_default_nonexistent(self):
print("fVar con valor por defecto no existente")
t = varfilter.fVar("test1",
"default value",
None,
self.source1)
self.assertEqual(t, "default value")
def test_fVar_default_nonexistent_int(self):
print("fVar con valor por defecto no existente de tipo int")
t = varfilter.fVar("test1",
0,
"int",
self.source1)
self.assertEqual(t, 0)
def test_fVar_default_existent(self):
print("fVar con valor por defecto")
t = varfilter.fVar("teststr1",
"default value",
None,
self.source1)
self.assertEqual(t, "Value teststr1")
def test_fVar_default_existent_int(self):
print("fVar con valor por defecto existente de tipo int")
t = varfilter.fVar("testint1",
0,
"int",
self.source1)
self.assertEqual(t, 10)
def test_fVar_default_existent_int_filtererror(self):
print("fVar con valor por defecto erroneo existente de tipo int")
self.assertRaises(filter.IntegerError,
varfilter.fVar,
'test1',
'error',
'int',
self.source1)
def test_fVar_existent_severalsources(self):
print("fVar con valor existente varios sources")
t = varfilter.fVar("teststr",
"default value",
None,
self.source1,
self.source2)
self.assertEqual(t, "Value str")
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s - %(message)s',
level=logging.DEBUG)
unittest.main()
| 34.464789 | 73 | 0.495709 |
01203f70632858bfabcba480840b28432e8c773f | 4,989 | py | Python | tests/test_extensions/test_arithmatex.py | pawamoy/pymdown-extensions | 90de4c0c52456751141e898af3941c729914a80e | [
"MIT"
] | null | null | null | tests/test_extensions/test_arithmatex.py | pawamoy/pymdown-extensions | 90de4c0c52456751141e898af3941c729914a80e | [
"MIT"
] | null | null | null | tests/test_extensions/test_arithmatex.py | pawamoy/pymdown-extensions | 90de4c0c52456751141e898af3941c729914a80e | [
"MIT"
] | null | null | null | """Test cases for Arithmatex."""
from .. import util
class TestArithmatexLimit(util.MdCase):
"""Test limiting Arithmatex inline and block inputs."""
extension = [
'pymdownx.arithmatex'
]
extension_configs = {'pymdownx.arithmatex': {'inline_syntax': ['round'], 'block_syntax': ['square']}}
def test_round_only(self):
"""Test round only."""
self.check_markdown(
"\\(1 + 2 + 3\\)",
"""<p><span class="arithmatex"><span class="MathJax_Preview">1 + 2 + 3</span><script type="math/tex">1 + 2 + 3</script></span></p>""" # noqa: E501
)
def test_square_only(self):
"""Test square only."""
self.check_markdown(
r"""
\[
1 + 2 + 3
\]
""",
r"""
<div class="arithmatex">
<div class="MathJax_Preview">
1 + 2 + 3
</div>
<script type="math/tex; mode=display">
1 + 2 + 3
</script>
</div>
""",
True
)
class TestArithmatexBlockEscapes(util.MdCase):
"""Test escaping cases for Arithmatex blocks."""
extension = [
'pymdownx.arithmatex'
]
extension_configs = {}
def test_escaped_dollar_block(self):
"""Test escaping a dollar."""
self.check_markdown(
r'''
$$3+2\$$
''',
r'''
<p>$<span class="arithmatex"><span class="MathJax_Preview">3+2\$</span><script type="math/tex">3+2\$</script></span></p>
''', # noqa: E501
True
)
def test_escaped_dollar_dollar_block(self):
"""Test escaping both dollars."""
self.check_markdown(
r'''
$$3+2\$\$
''',
r'''
<p>$$3+2$$</p>
''',
True
)
def test_double_escaped_dollar_block(self):
"""Test double escaping a dollar."""
self.check_markdown(
r'''
$$3+2\\$$
''',
r'''
<div class="arithmatex">
<div class="MathJax_Preview">3+2\\</div>
<script type="math/tex; mode=display">3+2\\</script>
</div>
''',
True
)
def test_escaped_end_block(self):
"""Test escaping an end."""
self.check_markdown(
r'''
\begin{align}3+2\\end{align}
''',
r'''
<p>\begin{align}3+2\end{align}</p>
''',
True
)
def test_double_escaped_end_block(self):
"""Test double escaping an end."""
self.check_markdown(
r'''
\begin{align}3+2\\\end{align}
''',
r'''
<div class="arithmatex">
<div class="MathJax_Preview">\begin{align}3+2\\\end{align}</div>
<script type="math/tex; mode=display">\begin{align}3+2\\\end{align}</script>
</div>
''',
True
)
def test_escaped_bracket_block(self):
"""Test escaping a bracket."""
self.check_markdown(
r'''
\[3+2\\]
''',
r'''
<p>[3+2\]</p>
''',
True
)
def test_double_escaped_bracket_block(self):
"""Test double escaping a bracket."""
self.check_markdown(
r'''
\[3+2\\\]
''',
r'''
<div class="arithmatex">
<div class="MathJax_Preview">3+2\\</div>
<script type="math/tex; mode=display">3+2\\</script>
</div>
''',
True
)
class TestArithmatexHang(util.MdCase):
"""Test hang cases."""
def test_hang_dollar(self):
"""
We are just making sure this works.
Previously this pattern would hang. It isn't supposed to match due to the space before the last dollar,
but it definitely shouldn't hang the process.
"""
self.check_markdown(
r'''
$z^{[1]} = \begin{bmatrix}w^{[1]T}_1 \\ w^{[1]T}_2 \\ w^{[1]T}_3 \\ w^{[1]T}_4 \end{bmatrix} \begin{bmatrix}x_1 \\ x_2 \\ x_3 \end{bmatrix} + \begin{bmatrix}b^{[1]}_1 \\ b^{[1]}_2 \\ b^{[1]}_3 \\ b^{[1]}_4 \end{bmatrix}= \begin{bmatrix}w^{[1]T}_1 x + b^{[1]}_1 \\ w^{[1]T}_2 x + b^{[1]}_2\\ w^{[1]T}_3 x + b^{[1]}_3 \\ w^{[1]T}_4 x + b^{[1]}_4 \end{bmatrix} $
''', # noqa: E501
r'''
<p>$z^{[1]} = \begin{bmatrix}w^{[1]T}_1 \ w^{[1]T}_2 \ w^{[1]T}_3 \ w^{[1]T}_4 \end{bmatrix} \begin{bmatrix}x_1 \ x_2 \ x_3 \end{bmatrix} + \begin{bmatrix}b^{[1]}_1 \ b^{[1]}_2 \ b^{[1]}_3 \ b^{[1]}_4 \end{bmatrix}= \begin{bmatrix}w^{[1]T}_1 x + b^{[1]}_1 \ w^{[1]T}_2 x + b^{[1]}_2\ w^{[1]T}_3 x + b^{[1]}_3 \ w^{[1]T}_4 x + b^{[1]}_4 \end{bmatrix} $</p>
''', # noqa: E501
True
)
| 28.83815 | 371 | 0.453999 |
012152e2a37577150f9d63f073997bc92e0bc861 | 197 | py | Python | source/applications/advanced/hand_eye_calibration/ur_hand_eye_calibration/3rdParty/rtde-2.3.6/setup.py | ebruun/python-samples | 746e5090f45659c60f01bf831a0308966d713b21 | [
"BSD-3-Clause"
] | 10 | 2020-12-03T22:59:39.000Z | 2022-03-27T07:31:42.000Z | source/applications/advanced/hand_eye_calibration/ur_hand_eye_calibration/3rdParty/rtde-2.3.6/setup.py | ebruun/python-samples | 746e5090f45659c60f01bf831a0308966d713b21 | [
"BSD-3-Clause"
] | 55 | 2019-07-23T09:05:27.000Z | 2020-11-02T14:42:55.000Z | source/applications/advanced/hand_eye_calibration/ur_hand_eye_calibration/3rdParty/rtde-2.3.6/setup.py | ebruun/python-samples | 746e5090f45659c60f01bf831a0308966d713b21 | [
"BSD-3-Clause"
] | 4 | 2020-01-09T08:36:23.000Z | 2020-09-12T20:28:31.000Z | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name="UrRtde",
packages=["rtde"],
version=1.0,
description="Real-Time Data Exchange (RTDE) python client + examples",
)
| 19.7 | 74 | 0.639594 |
0121afa2ee5440a70a9a651bed1ddda312a2e7ae | 891 | py | Python | code/roman2int.py | wp-lai/xpython | 3d90362e56173052d8dd66817feffd67dc07db91 | [
"MIT"
] | 5 | 2016-11-17T07:35:05.000Z | 2018-04-07T16:34:16.000Z | code/roman2int.py | wp-lai/xpython | 3d90362e56173052d8dd66817feffd67dc07db91 | [
"MIT"
] | null | null | null | code/roman2int.py | wp-lai/xpython | 3d90362e56173052d8dd66817feffd67dc07db91 | [
"MIT"
] | null | null | null | """
Task:
Given a roman numeral, convert it to an integer.
Input is guaranteed to be within the range from 1 to 3999.
Symbol Value
I 1 (unus)
V 5 (quinque)
X 10 (decem)
L 50 (quinquaginta)
C 100 (centum)
D 500 (quingenti)
M 1,000 (mille)
>>> roman_to_int("DCXXI")
621
>>> roman_to_int("VI")
6
>>> roman_to_int("LXXVI")
76
>>> roman_to_int("XIII")
13
>>> roman_to_int("MMMCMXCIX")
3999
>>> roman_to_int("")
0
"""
def roman_to_int(s):
    """Convert a roman numeral string to its integer value.

    A symbol is subtracted when it is immediately followed by a larger
    one (the I in IV), otherwise added. The empty string maps to 0.
    """
    values = {'M': 1000, 'D': 500, 'C': 100, 'L': 50,
              'X': 10, 'V': 5, 'I': 1}
    total = 0
    for current, following in zip(s, s[1:]):
        if values[current] < values[following]:
            total -= values[current]
        else:
            total += values[current]
    if s:
        total += values[s[-1]]  # last symbol is always added
    return total
if __name__ == '__main__':
import doctest
doctest.testmod()
| 18.183673 | 62 | 0.539843 |
01243d69b6f9b70a1311214737f35975b0a644a4 | 2,082 | py | Python | test/test_upstream.py | bninja/rump | 3b6c4ff29974b3c04a260d8275567beebb296e5d | [
"0BSD"
] | 6 | 2015-07-27T09:02:36.000Z | 2018-07-18T11:11:33.000Z | test/test_upstream.py | bninja/rump | 3b6c4ff29974b3c04a260d8275567beebb296e5d | [
"0BSD"
] | null | null | null | test/test_upstream.py | bninja/rump | 3b6c4ff29974b3c04a260d8275567beebb296e5d | [
"0BSD"
] | null | null | null | import mock
import pytest
from rump import parser, Server, Upstream, Selection, exc
@pytest.fixture
def parse():
return parser.for_upstream()
def test_upstream_valid(parse):
cases = [
('hi',
Upstream(Selection(Server('http', 'hi'), 1))),
('https://hi',
Upstream(Selection(Server('https', 'hi'), 1))),
('https://hi,23',
Upstream(Selection(Server('https', 'hi'), 23))),
('https://hi,23 http://there/you,33 bye',
Upstream([
Selection(Server('https', 'hi'), 23),
Selection(Server('http', 'there/you'), 33),
Selection(Server('http', 'bye'), 1),
])),
]
for raw, expected in cases:
parsed = parse(raw)
assert expected == parsed
def test_upstream_invalid(parse):
cases = [
('hi,nope', exc.ParseException),
('hi,-1', exc.ParseException),
('', exc.ParseException),
]
for raw, ex in cases:
with pytest.raises(ex):
parse(raw)
def test_upstream_select(parse):
upstream = parse('http://1:81,1 http://2:82,5 https://4:84,3')
assert not upstream.uniform
servers = [selection.server for selection in upstream]
for _ in xrange(20):
server = upstream()
assert server in servers
def test_upstream_select_uniform(parse):
    """With no explicit weights every server is weighted 1 and selection is uniform."""
    upstream = parse('http://1:81 http://2:82 https://4:84')
    assert [
        Server('http', '1:81'),
        Server('http', '2:82'),
        Server('https', '4:84')
    ] == upstream.servers
    assert upstream.uniform
    servers = [selection.server for selection in upstream]
    # range (not the Python-2-only xrange) so the test also runs under Python 3;
    # behavior is identical under Python 2 for iteration.
    for _ in range(20):
        server = upstream()
        assert server in servers
def test_upstream_select_impossible(parse):
    # Force the RNG to return a value just outside the cumulative-weight range:
    # selection must fail; one below the total must succeed.
    upstream = parse('http://1:81,1 http://2:825 https://4:84,3')
    with mock.patch('rump.upstream.random.randint') as randint:
        randint.return_value = upstream.total
        with pytest.raises(Exception):
            upstream()
        randint.return_value = upstream.total - 1
        upstream()
| 28.135135 | 66 | 0.583573 |
0124ac8c7a202aa897f92f830d9e99028d3f1d5a | 1,113 | py | Python | places/admin.py | moshthepitt/shulezote | e903a208948ab5294183e2a8c2dac9360a184654 | [
"MIT"
] | 2 | 2015-12-02T08:14:34.000Z | 2020-12-16T19:56:46.000Z | places/admin.py | moshthepitt/shulezote | e903a208948ab5294183e2a8c2dac9360a184654 | [
"MIT"
] | 4 | 2016-10-04T12:15:42.000Z | 2021-06-10T19:47:39.000Z | places/admin.py | moshthepitt/shulezote | e903a208948ab5294183e2a8c2dac9360a184654 | [
"MIT"
] | 1 | 2018-08-20T14:19:32.000Z | 2018-08-20T14:19:32.000Z | from django.contrib import admin
from places.models import County, Constituency, Province, District
from places.models import Division, Location, SubLocation, SchoolZone
# One ModelAdmin subclass per geographic model; each exposes name search in
# the Django admin changelist.
class CountyAdmin(admin.ModelAdmin):
    search_fields = ['name']
class ProvinceAdmin(admin.ModelAdmin):
    search_fields = ['name']
class DistrictAdmin(admin.ModelAdmin):
    search_fields = ['name']
class DivisionAdmin(admin.ModelAdmin):
    search_fields = ['name']
class ConstituencyAdmin(admin.ModelAdmin):
    search_fields = ['name']
class LocationAdmin(admin.ModelAdmin):
    search_fields = ['name']
class SubLocationAdmin(admin.ModelAdmin):
    search_fields = ['name']
class SchoolZoneAdmin(admin.ModelAdmin):
    search_fields = ['name']
# Register each place model with its admin configuration.
admin.site.register(County, CountyAdmin)
admin.site.register(Province, ProvinceAdmin)
admin.site.register(District, DistrictAdmin)
admin.site.register(Division, DivisionAdmin)
admin.site.register(Constituency, ConstituencyAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(SubLocation, SubLocationAdmin)
admin.site.register(SchoolZone, SchoolZoneAdmin)
| 24.195652 | 69 | 0.779874 |
0125022bc1c32fb48c4660789e204445cd4abb92 | 520 | py | Python | pygeems/__init__.py | arkottke/pygeems | 5bfb563dbc151dc7d7581c31de0061e564cf7d84 | [
"MIT"
] | 3 | 2019-01-11T04:44:29.000Z | 2022-01-05T01:09:46.000Z | pygeems/__init__.py | arkottke/pygeems | 5bfb563dbc151dc7d7581c31de0061e564cf7d84 | [
"MIT"
] | null | null | null | pygeems/__init__.py | arkottke/pygeems | 5bfb563dbc151dc7d7581c31de0061e564cf7d84 | [
"MIT"
] | 1 | 2021-02-21T17:29:21.000Z | 2021-02-21T17:29:21.000Z | """pyGEEMs: Geotechnical earthquake engineering models implemented in Python."""
import pathlib
from pkg_resources import get_distribution
import scipy.constants
# Directory holding data files shipped inside the package.
FPATH_DATA = pathlib.Path(__file__).parent / "data"
# Conversion factor from kilopascals to standard atmospheres.
KPA_TO_ATM = scipy.constants.kilo / scipy.constants.atm
__author__ = "Albert Kottke"
__copyright__ = "Copyright 2018 Albert Kottke"
__license__ = "MIT"
__title__ = "pygeems"
# Version is resolved from the installed distribution metadata.
__version__ = get_distribution("pygeems").version
# Import submodules so `import pygeems` exposes the full public API.
from . import dyn_props
from . import ground_motion
from . import slope_disp
012579b5541f7896f0ff2928c89b8dec890eb8d1 | 414 | py | Python | Ago-Dic-2020/sena-martinez-angel-david/Primer Parcial/Gui.py | bryanbalderas/DAS_Sistemas | 1e31f088c0de7134471025a5730b0abfc19d936e | [
"MIT"
] | 41 | 2017-09-26T09:36:32.000Z | 2022-03-19T18:05:25.000Z | Ago-Dic-2020/sena-martinez-angel-david/Primer Parcial/Gui.py | bryanbalderas/DAS_Sistemas | 1e31f088c0de7134471025a5730b0abfc19d936e | [
"MIT"
] | 67 | 2017-09-11T05:06:12.000Z | 2022-02-14T04:44:04.000Z | Ago-Dic-2020/sena-martinez-angel-david/Primer Parcial/Gui.py | bryanbalderas/DAS_Sistemas | 1e31f088c0de7134471025a5730b0abfc19d936e | [
"MIT"
] | 210 | 2017-09-01T00:10:08.000Z | 2022-03-19T18:05:12.000Z | from tkinter import *
from tkinter.ttk import *
# Create the tkinter root window
root = Tk()
# Add widgets to the window
Label(root, text = 'PuntosExtra', font =(
    'Verdana', 15)).pack(side = TOP, pady = 10)
# Load the login image
# NOTE(review): hard-coded absolute Windows path; this fails on any other machine.
foto = PhotoImage(file = r"C:\Users\david\OneDrive\Imágenes\login.png")
Button(root, text = 'Click Me !', image = foto).pack(side = TOP)
mainloop()
012711b60afee7420df0f399f035f95d78d3df36 | 2,200 | py | Python | custom/ewsghana/urls.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | custom/ewsghana/urls.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | 1 | 2022-03-12T01:03:25.000Z | 2022-03-12T01:03:25.000Z | custom/ewsghana/urls.py | johan--/commcare-hq | 86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url, include
from corehq.apps.api.urls import CommCareHqApi
from custom.ewsghana.resources.v0_1 import EWSLocationResource
from custom.ewsghana.views import EWSConfigView, EWSGlobalStats, InputStockView, EWSUserExtensionView, \
DashboardRedirectReportView
# Tastypie-style API: expose EWS locations under the v0.3 API namespace.
hq_api = CommCareHqApi(api_name='v0.3')
hq_api.register(EWSLocationResource())
# Legacy string-prefix patterns() URLconf (pre-Django-1.10 style); string view
# names below resolve inside custom.ewsghana.views.
# NOTE(review): the 'inventory_managment' misspelling is part of the public
# URL and reverse() name -- keep it.
urlpatterns = patterns('custom.ewsghana.views',
    url(r'^ews_config/$', EWSConfigView.as_view(), name=EWSConfigView.urlname),
    url(r'^sync_ewsghana/$', 'sync_ewsghana', name='sync_ewsghana'),
    url(r'^global_stats/$', EWSGlobalStats.as_view(), name=EWSGlobalStats.urlname),
    # for testing purposes
    url(r'^ews_sync_stock_data/$', 'ews_sync_stock_data', name='ews_sync_stock_data'),
    url(r'^ews_clear_stock_data/$', 'ews_clear_stock_data', name='ews_clear_stock_data'),
    url(r'^configure_in_charge/$', 'configure_in_charge', name='configure_in_charge'),
    url(r'^ews_resync_web_users/$', 'ews_resync_web_users', name='ews_resync_web_users'),
    url(r'^inventory_managment/$', 'inventory_management', name='inventory_managment'),
    url(r'^stockouts_product/$', 'stockouts_product', name='stockouts_product'),
    url(r'^ews_fix_locations/$', 'ews_fix_locations', name='ews_fix_locations'),
    url(r'^ews_add_products_to_locs/$', 'ews_add_products_to_locs', name='ews_add_products_to_locs'),
    url(r'^migrate_email_settings/$', 'migrate_email_settings_view', name='migrate_email_settings'),
    url(r'^fix_sms_users/$', 'fix_sms_users', name='fix_sms_users'),
    url(r'^delete_last_stock_data/$', 'delete_last_stock_data', name='delete_last_stock_data'),
    url(r'^(?P<site_code>\w+)/input_stock/$', InputStockView.as_view(), name='input_stock'),
    url(r'^', include(hq_api.urls)),
    url(r'^convert_user_data_fields/$', 'convert_user_data_fields', name='convert_user_data_fields'),
    url(r'^non_administrative_locations/$', 'non_administrative_locations_for_select2'),
    url(r'^user_settings/(?P<user_id>[ \w-]+)/$', EWSUserExtensionView.as_view(), name='ews_user_settings'),
    url(r'^dashboard/(?P<site_code>\w+)/', DashboardRedirectReportView.as_view(), name='dashboard_report')
)
| 64.705882 | 108 | 0.753636 |
01274e1a8dfe413246f9258fed40ee9356e14195 | 9,288 | py | Python | ee559/hw2/classifier.py | chenying-wang/usc-ee-coursework-public | 5bc94c2350bcebf1036fb058fe7dc4f7e31e1de1 | [
"MIT"
] | 1 | 2021-03-24T10:46:20.000Z | 2021-03-24T10:46:20.000Z | ee559/hw2/classifier.py | chenying-wang/usc-ee-coursework-public | 5bc94c2350bcebf1036fb058fe7dc4f7e31e1de1 | [
"MIT"
] | null | null | null | ee559/hw2/classifier.py | chenying-wang/usc-ee-coursework-public | 5bc94c2350bcebf1036fb058fe7dc4f7e31e1de1 | [
"MIT"
] | 1 | 2021-03-25T09:18:45.000Z | 2021-03-25T09:18:45.000Z | import numpy as np
from scipy.spatial.distance import cdist
import sys
from plot_area import plot_area
COLOR = ['tab:blue', 'tab:orange', 'tab:green']
class BaseClassifier:
    """Common base for the classifiers in this module.

    `d` is the feature dimensionality; `c` is the number of classes.
    Both default to -1, which marks the classifier as untrained.
    """

    d = -1
    c = -1

    def __init__(self, d):
        super().__init__()
        if d <= 0:
            raise RuntimeError("Classifier.D/C cannot be negative or zero")
        self.d = d

    def evaluate(self, feature):
        """Validate *feature* (rows of d-dim samples) and return 0 (falsy).

        Subclasses call this as a guard before their own evaluation.
        Raises RuntimeError on a width mismatch or an untrained classifier.
        """
        feature_width = np.shape(feature)[1]
        if feature_width != self.d:
            raise RuntimeError("Size of feature does not fit")
        if self.c < 0:
            raise RuntimeError("Evaluate with an untrained classifier")
        return 0

    def train(self, data):
        """No-op training hook; returns self for chaining."""
        return self
class NearestMeanClassifier(BaseClassifier):
    """Nearest-class-mean classifier with linear decision regions.

    `weights_group` holds one stack of augmented linear discriminants per
    class; a sample belongs to class i when every discriminant in stack i
    evaluates positive on the bias-augmented feature.
    """
    # Class-level empty-array placeholders; populated by train() or
    # train_by_weights_group().
    training_features = np.array([])
    training_classifications = np.array([])
    feature_means = np.array([])
    classifications = np.array([])
    num_of_classifications = np.array([])
    weights_group = np.array([])
    def __init__(self, d):
        super().__init__(d)
    def evaluate(self, feature):
        """Classify each row of *feature*; rows in no region remain None."""
        if (super().evaluate(feature) or np.size(self.weights_group) == 0):
            return None
        length = np.shape(feature)[0]
        # NOTE(review): min_distance is computed but never used.
        min_distance = np.full(length, sys.maxsize)
        classification = np.full(length, None)
        for i in range(self.c):
            weights = self.weights_group[i]
            # Augment features with a leading 1 (bias term) and test every
            # discriminant of class i against all samples at once.
            result = np.matmul(weights, np.transpose(np.insert(feature, 0, 1, axis = 1)))
            classification[np.all(result > 0, axis = 0)] = self.classifications[i]
        return classification
    def test(self, feature, classification):
        """Return the accuracy of evaluate(feature) against the given labels."""
        eval_classification = self.evaluate(feature)
        return np.count_nonzero(eval_classification == classification) / \
            np.size(classification)
    def train_by_weights_group(self, weights_group, classifications):
        """Install externally computed discriminants instead of training."""
        self.weights_group = weights_group
        self.classifications = classifications
        self.c = len(self.classifications)
    def train(self, data, training_type = 'ovo'):
        """Fit class means from *data* (feature columns + label column) and
        build discriminants one-vs-one ('ovo'/'mvm') or one-vs-rest ('ovr')."""
        self.training_features = data[:, :-1]
        self.training_classifications = data[:, -1]
        if (np.shape(self.training_features)[1] != self.d):
            raise RuntimeError("Size of training data does not fit")
        self.classifications = np.unique(self.training_classifications)
        self.c = len(self.classifications)
        self.feature_means = np.zeros([self.c, self.d])
        # Per-class sample counts (histogram bins are the sorted class labels).
        self.num_of_classifications, _ = np.histogram(self.training_classifications, \
            bins = np.append(self.classifications, np.max(self.classifications) + 1))
        for i in range(self.c):
            self.feature_means[i] = np.mean(\
                self.training_features[self.training_classifications == self.classifications[i]], \
                axis = 0)
        if (training_type == 'ovo' or training_type == 'mvm'):
            # One discriminant per ordered class pair (i vs every j != i).
            weights_group = np.array([])
            for i in range(self.c):
                classification = self.classifications[i]
                weights = np.array([])
                for j in range(self.c):
                    if (i == j):
                        continue
                    another_classification = self.classifications[j]
                    weight = self.get_boundary_weight_by_ovo(classification, another_classification)
                    if (np.size(weights) == 0):
                        weights = np.array([weight])
                    else:
                        weights = np.append(weights, [weight], axis = 0)
                if (np.size(weights_group) == 0):
                    weights_group = np.array([weights])
                else:
                    weights_group = np.append(weights_group, [weights], axis = 0)
            self.weights_group = weights_group
        elif (training_type == 'ovr'):
            # One discriminant per class against the weighted mean of the rest;
            # each class's stack keeps its own row positive, others negated.
            weights = np.array([])
            for classification in self.classifications:
                weight = self.get_boundary_weight_by_ovr(classification)
                if (np.size(weights) == 0):
                    weights = np.array([weight])
                else:
                    weights = np.append(weights, [weight], axis = 0)
            weights_group = np.tile(-weights, (self.c, 1, 1))
            for i in range(self.c):
                weights_group[i][i] = -weights_group[i][i]
            self.weights_group = weights_group
        return self
    def get_boundary_weight_by_ovo(self, classification, another_classification):
        """Augmented boundary weights between two class means (one-vs-one)."""
        if (self.c < 0):
            raise RuntimeError("Get boundary with an untrained classifier")
        return self._get_boundary_weight_by_two_means(\
            self.feature_means[self.classifications == classification], \
            self.feature_means[self.classifications == another_classification])
    def get_boundary_weight_by_ovr(self, classification):
        """Augmented boundary weights between one class mean and the
        count-weighted mean of all other classes (one-vs-rest)."""
        if (self.c < 0):
            raise RuntimeError("Get boundary with an untrained classifier")
        i = np.argwhere(self.classifications == classification)
        feature_mean = self.feature_means[i]
        non_feature_means = np.delete(self.feature_means, i, axis = 0)
        num_of_non_classifications = np.delete(self.num_of_classifications, i)
        non_feature_mean = np.average(non_feature_means, weights = num_of_non_classifications, axis = 0)
        return self._get_boundary_weight_by_two_means(feature_mean, non_feature_mean)
    def plot_features(self, ax):
        """Scatter the 2-D training samples, one color per class."""
        if (np.size(self.training_features) == 0 or self.c < 0):
            raise RuntimeError('Plot with an untrained classifier or not applicable')
        for i in range(self.c):
            features = self.training_features[self.training_classifications == self.classifications[i], :]
            ax.scatter(features[:, 0], features[:, 1], label = 'Class ' + str(int(self.classifications[i])), \
                marker = 'x', alpha = 0.8, color = COLOR[i])
    def plot_means(self, ax):
        """Mark each 2-D class mean with a bold dot."""
        if (np.size(self.feature_means) == 0 or self.c < 0):
            raise RuntimeError('Plot with an untrained classifier or not applicable')
        for i in range(self.c):
            ax.scatter(self.feature_means[i][0], self.feature_means[i][1], label = 'Class ' + str(self.classifications[i]) + ' Mean', \
                linewidth = 8, marker = 'o', color = COLOR[i])
    def plot_decision_regions_boundaries(self, ax):
        """Shade each class's decision region over the plot linespace."""
        if (self.c < 0):
            raise RuntimeError('Plot with an untrained classifier')
        for i in range(self.c):
            plot_area(ax, [self.weights_group[i]], self.get_plot_linespace(), \
                label = 'Gamma_' + str(int(self.classifications[i])), \
                color = COLOR[i], alpha = 0.6)
    def plot_indeterminate_regions(self, ax):
        """Shade the area claimed by no class (complement of all regions)."""
        if (self.c < 0):
            raise RuntimeError('Plot with an untrained classifier')
        indeterminate_weights_group = self._get_complement_weights_group(self.weights_group)
        plot_area(ax, indeterminate_weights_group, self.get_plot_linespace(), \
            label = 'Indeterminate', \
            color = 'tab:grey')
    def get_plot_linespace(self):
        """Return the x sample grid used when rasterizing plot regions."""
        linespace_lim = self._get_plot_limit()[0]
        linespace = np.arange(linespace_lim[0], linespace_lim[1], 0.002)
        return linespace
    def plot_setlim(self, ax):
        """Apply the computed axis limits to *ax*."""
        plot_limit = self._get_plot_limit()
        ax.set_xlim(plot_limit[0][0], plot_limit[0][1])
        ax.set_ylim(plot_limit[1][0], plot_limit[1][1])
    def plot_legend(self, ax, **kwargs):
        """Draw the legend, dropping duplicate labels."""
        handles, labels = ax.get_legend_handles_labels()
        newLabels, newHandles = [], []
        for handle, label in zip(handles, labels):
            if label not in newLabels:
                newLabels.append(label)
                newHandles.append(handle)
        ax.legend(newHandles, newLabels, **kwargs)
    def _get_plot_limit(self):
        """Axis limits padded around the training data ([[xmin,xmax],[ymin,ymax]])."""
        if (np.size(self.training_features) == 0):
            return np.array([[0, 6], [0, 6]])
        # NOTE(review): `min`/`max` shadow the builtins within this method.
        min = np.min(self.training_features, axis = 0)
        max = np.max(self.training_features, axis = 0)
        min = min - np.maximum((max - min) * 0.25, [0.5, 0.5])
        max = max + np.maximum((max - min) * 0.25, [0.5, 0.5])
        return np.transpose([min, max])
    def _get_boundary_weight_by_two_means(self, a, b):
        # Augmented hyperplane [b.b - a.a, 2(a - b)]: positive on the side of
        # mean `a` (points nearer to `a` than to `b`).
        return np.append(np.sum(np.square(b)) - np.sum(np.square(a)), 2 * (a - b))
    def _get_complement_weights_group(self, weights_group, complement_weights_group = np.array([]), weights = np.array([])):
        """Complement of the union of class regions, via De Morgan: one region
        per combination picking a negated half-plane from every class stack.

        NOTE(review): the np.array([]) defaults are mutable-looking but are
        only ever rebound, never mutated, so they are safe here.
        """
        _weights = weights
        for weight in weights_group[0]:
            # Branch on this group's current half-plane (negated), extending
            # the path accumulated from the outer recursion levels.
            if (np.size(_weights) == 0):
                weights = np.array([-weight])
            else:
                weights = np.append(_weights, [-weight], axis = 0)
            if (np.shape(weights_group)[0] > 1):
                complement_weights_group = self._get_complement_weights_group(weights_group = weights_group[1:, :, :], \
                    complement_weights_group = complement_weights_group,
                    weights = weights)
            else:
                if (np.size(complement_weights_group) == 0):
                    complement_weights_group = np.array([weights])
                else:
                    complement_weights_group = np.append(complement_weights_group, [weights], axis = 0)
        return complement_weights_group
| 42.605505 | 135 | 0.602498 |
0127942a3e99b818d3bf03948c616cc5027b74c1 | 1,920 | py | Python | Web/discussManager.py | cmd2001/Open-TesutoHime | 2c30aa35650383adfb99496aebd425dffd287eda | [
"MIT"
] | 11 | 2020-11-28T16:45:35.000Z | 2021-08-31T07:56:26.000Z | Web/discussManager.py | cmd2001/Open-TesutoHime | 2c30aa35650383adfb99496aebd425dffd287eda | [
"MIT"
] | null | null | null | Web/discussManager.py | cmd2001/Open-TesutoHime | 2c30aa35650383adfb99496aebd425dffd287eda | [
"MIT"
] | 2 | 2021-05-16T03:09:58.000Z | 2021-08-21T07:24:58.000Z | import sys
from utils import *
class DiscussManager:
    """Data-access layer for the Discuss table (per-problem discussion posts).

    Each method opens its own connection via db_connect() and closes it
    before returning; write operations roll back and log on pymysql errors.
    """

    def add_discuss(self, problem_id: int, username: str, data: str):
        """Insert a new post for *problem_id*, timestamped with unix_nano()."""
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("INSERT INTO Discuss(Problem_ID, Username, Data, Time) VALUES(%s, %s, %s, %s)",
                           (problem_id, username, data, unix_nano()))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in DiscussManager: Add_Discuss\n")
        db.close()
        return

    def modify_discuss(self, discuss_id: int, new_data: str):
        """Replace the body of post *discuss_id* with *new_data*."""
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("UPDATE Discuss SET DATA = %s WHERE ID = %s", (new_data, discuss_id))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in DiscussManager: Modify_Discuss\n")
        db.close()
        return

    def get_author(self, discuss_id: int):
        """Return the username that authored post *discuss_id*.

        Raises TypeError if the post does not exist (fetchone() yields None).
        """
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT Username FROM Discuss WHERE ID = %s", discuss_id)
        ret = cursor.fetchone()
        # Fix: this connection was previously never closed (every other method
        # closes its connection), leaking one connection per call.
        db.close()
        return ret[0]

    def get_discuss_for_problem(self, problem_id: int):
        """Return all posts for *problem_id* as (ID, Username, DATA, Time) rows."""
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT ID, Username, DATA, Time FROM Discuss WHERE Problem_ID = %s", problem_id)
        ret = cursor.fetchall()
        db.close()
        return ret

    def delete_discuss(self, discuss_id: int):
        """Delete post *discuss_id*; rolls back and logs on SQL failure."""
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM Discuss WHERE ID = %s", discuss_id)
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in DiscussManager: Erase_Discuss\n")
        db.close()
        return
# Module-level singleton used by the web handlers.
Discuss_Manager = DiscussManager()
| 32 | 106 | 0.577604 |
0128d2dc205aef3ebf52d764565e44d09f889dd0 | 1,977 | py | Python | casimir/force_calc/force_calc_mc.py | charlesblakemore/opt_lev_analysis | 704f174e9860907de349688ed82b5812bbb07c2d | [
"MIT"
] | null | null | null | casimir/force_calc/force_calc_mc.py | charlesblakemore/opt_lev_analysis | 704f174e9860907de349688ed82b5812bbb07c2d | [
"MIT"
] | null | null | null | casimir/force_calc/force_calc_mc.py | charlesblakemore/opt_lev_analysis | 704f174e9860907de349688ed82b5812bbb07c2d | [
"MIT"
] | 1 | 2019-11-27T19:10:25.000Z | 2019-11-27T19:10:25.000Z | import math, sys, random, mcint
from scipy import integrate
import numpy as np
# Command-line arguments: attractor-bead gap and Yukawa length scale (um).
gap = float(sys.argv[1])
lam = float(sys.argv[2])
print(gap, lam)
## calculate the yukawa force over a distributed test mass assumed to be cube
D = 5 # diameter of bead (um)
rhob = 2e3 # density bead (kg/m^3)
rhoa = 19.3e3 # density attractor
rhosi = 2.3e3 # density attractor -- NOTE(review): 2.3e3 kg/m^3 matches silicon; presumably the substrate density, confirm
a = 10 # length of attractor cube side (um)
a_depth = 200 # depth of attractor cube side (um)
au_thick = 0.2 # shield layer thickness (um)
def dV(phi, theta, r):
    """Spherical-coordinate volume element, r**2 * sin(theta).

    The azimuthal angle *phi* is accepted for signature symmetry with the
    sampler but does not affect the element.
    """
    radial_part = r ** 2
    return radial_part * math.sin(theta)
alpha = 1.0  # Yukawa coupling strength relative to gravity (dimensionless)
G = 6.67398e-11  # Newtonian gravitational constant (m^3 kg^-1 s^-2)
#def Fg(phi, theta, r, currx, curry, currz):
def integrand(xin):
    """Yukawa-force integrand for a single Monte Carlo sample.

    ``xin`` packs (r, theta, phi) -- a point inside the bead in spherical
    coordinates -- followed by (currx, curry, currz) -- a point inside the
    attractor volume.  Uses the module globals (gap, lam, alpha, G, the
    densities and the geometry constants) set at the top of the script.
    """
    r, theta, phi, currx, curry, currz = xin
    # Separation between the bead point and the attractor point; the attractor
    # cube is centred at (gap + a_depth/2, 0, 0).
    dx = r*math.sin(theta)*math.cos(phi) - (gap + a_depth/2.0 + currx)
    dy = r*math.sin(theta)*math.sin(phi) - curry
    dz = r*math.cos(theta) - currz
    dist = math.sqrt( dx**2 + dy**2 + dz**2 )
    # Only the x component survives integration (others cancel by symmetry).
    rhat_dot_xhat = abs(dx)/dist
    # Yukawa-corrected inverse-square kernel for the density contrast.
    kernel = (alpha*G*(rhoa-rhosi)/dist**2)*math.exp(-dist/lam)*(1.0 + dist/lam)
    return kernel*rhob*dV(phi,theta,r)*rhat_dot_xhat
def sampler():
    """Yield an endless stream of uniform sample points for mcint.

    Coordinate order matches integrand(): (r, theta, phi) over the bead,
    then (x, y, z) over the shield-offset attractor volume.  The draw order
    is fixed so the module-level random.seed(1) gives reproducible streams.
    """
    while True:
        sample = (
            random.uniform(0., D/2.),
            random.uniform(0., 2.*math.pi),
            random.uniform(0., math.pi),
            random.uniform(-a_depth/2.0+au_thick, a_depth/2.0+au_thick),
            random.uniform(-a/2., a/2.),
            random.uniform(-a/2., a/2.),
        )
        yield sample
# Number of Monte Carlo samples.
nmc = 100000000
# Volume of the sampling domain: (D/2)*(2*pi)*(pi)*a_depth*a*a.
domainsize = D * math.pi**2 * a_depth * a**2
# Fixed seed so runs are reproducible.
random.seed(1)
result, error = mcint.integrate(integrand, sampler(), measure=domainsize, n=nmc)
print("integral is: ", result, error)
#fname = 'data/lam_arr_%.3f_%.3f.npy' % (gap*1e6,lam*1e6)
#np.save(fname,intval)
| 27.082192 | 121 | 0.616591 |
012a1022a18b104991ad25c5bfeca0df7e5858c1 | 8,823 | py | Python | robot-server/robot_server/robot/calibration/tip_length/user_flow.py | Axel-Jacobsen/opentrons | c543d95c25003f2e784560efaa6a91f051d4cd33 | [
"Apache-2.0"
] | 1 | 2022-03-17T20:38:04.000Z | 2022-03-17T20:38:04.000Z | robot-server/robot_server/robot/calibration/tip_length/user_flow.py | Axel-Jacobsen/opentrons | c543d95c25003f2e784560efaa6a91f051d4cd33 | [
"Apache-2.0"
] | null | null | null | robot-server/robot_server/robot/calibration/tip_length/user_flow.py | Axel-Jacobsen/opentrons | c543d95c25003f2e784560efaa6a91f051d4cd33 | [
"Apache-2.0"
] | null | null | null | import logging
from typing import (
Dict, Awaitable, Callable, Any, Set, List, Optional,
TYPE_CHECKING)
from opentrons.types import Mount, Point, Location
from opentrons.config import feature_flags as ff
from opentrons.hardware_control import ThreadManager, CriticalPoint
from opentrons.protocol_api import labware
from opentrons.protocols.geometry import deck
from robot_server.robot.calibration import util
from robot_server.service.errors import RobotServerError
from robot_server.service.session.models.command import CalibrationCommand
from ..errors import CalibrationError
from ..helper_classes import RequiredLabware, AttachedPipette
from ..constants import (
TIP_RACK_LOOKUP_BY_MAX_VOL,
SHORT_TRASH_DECK,
STANDARD_DECK,
CAL_BLOCK_SETUP_BY_MOUNT,
MOVE_TO_TIP_RACK_SAFETY_BUFFER,
)
from .constants import TipCalibrationState as State, TIP_RACK_SLOT
from .state_machine import TipCalibrationStateMachine
if TYPE_CHECKING:
from opentrons_shared_data.labware import LabwareDefinition
MODULE_LOG = logging.getLogger(__name__)
"""
A collection of functions that allow a consumer to prepare and update
calibration data associated with the combination of a pipette tip type and a
unique (by serial number) physical pipette.
"""
# TODO: BC 2020-07-08: type all command logic here with actual Model type
COMMAND_HANDLER = Callable[..., Awaitable]
COMMAND_MAP = Dict[str, COMMAND_HANDLER]
class TipCalibrationUserFlow:
    """Session state for a tip-length calibration flow on one mount.

    Dispatches CalibrationCommand messages to handler coroutines, advancing
    a TipCalibrationStateMachine, and saves the measured tip length as the
    difference between the nozzle and tip reference-point heights.
    """
    def __init__(self,
                 hardware: ThreadManager,
                 mount: Mount,
                 has_calibration_block: bool,
                 tip_rack: 'LabwareDefinition'):
        self._hardware = hardware
        self._mount = mount
        self._has_calibration_block = has_calibration_block
        self._hw_pipette = self._hardware._attached_instruments[mount]
        if not self._hw_pipette:
            raise RobotServerError(
                definition=CalibrationError.NO_PIPETTE_ON_MOUNT,
                mount=mount)
        # x/y the user jogged to when picking up the tip; reused when
        # returning the tip to the rack.
        self._tip_origin_pt: Optional[Point] = None
        self._nozzle_height_at_reference: Optional[float] = None
        deck_load_name = SHORT_TRASH_DECK if ff.short_fixed_trash() \
            else STANDARD_DECK
        self._deck = deck.Deck(load_name=deck_load_name)
        self._tip_rack = self._get_tip_rack_lw(tip_rack)
        self._initialize_deck()
        self._current_state = State.sessionStarted
        self._state_machine = TipCalibrationStateMachine()
        # Maps incoming command names to their handler coroutines.
        self._command_map: COMMAND_MAP = {
            CalibrationCommand.load_labware: self.load_labware,
            CalibrationCommand.jog: self.jog,
            CalibrationCommand.pick_up_tip: self.pick_up_tip,
            CalibrationCommand.invalidate_tip: self.invalidate_tip,
            CalibrationCommand.save_offset: self.save_offset,
            CalibrationCommand.move_to_reference_point: self.move_to_reference_point, # noqa: E501
            CalibrationCommand.move_to_tip_rack: self.move_to_tip_rack, # noqa: E501
            CalibrationCommand.exit: self.exit_session,
        }
    def _set_current_state(self, to_state: State):
        self._current_state = to_state
    @property
    def current_state(self) -> State:
        return self._current_state
    def get_pipette(self) -> AttachedPipette:
        """Serializable summary of the pipette attached to this mount."""
        # TODO(mc, 2020-09-17): s/tip_length/tipLength
        return AttachedPipette( # type: ignore[call-arg]
            model=self._hw_pipette.model,
            name=self._hw_pipette.name,
            tip_length=self._hw_pipette.config.tip_length,
            mount=str(self._mount),
            serial=self._hw_pipette.pipette_id
        )
    def get_required_labware(self) -> List[RequiredLabware]:
        """Labware currently loaded on the deck, keyed by occupied slot."""
        slots = self._deck.get_non_fixture_slots()
        lw_by_slot = {s: self._deck[s] for s in slots if self._deck[s]}
        return [
            RequiredLabware.from_lw(lw, s) # type: ignore
            for s, lw in lw_by_slot.items()]
    async def handle_command(self,
                             name: Any,
                             data: Dict[Any, Any]):
        """
        Handle a client command
        :param name: Name of the command
        :param data: Data supplied in command
        :return: None
        """
        # Compute the transition first so an invalid command fails before
        # any hardware action runs.
        next_state = self._state_machine.get_next_state(self._current_state,
                                                        name)
        handler = self._command_map.get(name)
        if handler is not None:
            await handler(**data)
        self._set_current_state(next_state)
        MODULE_LOG.debug(f'TipCalUserFlow handled command {name}, transitioned'
                         f'from {self._current_state} to {next_state}')
    async def load_labware(self):
        # Deck is populated in __init__; nothing to do on this command.
        pass
    async def move_to_tip_rack(self):
        """Move above the target tip well, reusing jogged x/y if available."""
        # point safely above target tip well in tip rack
        pt_above_well = self._tip_rack.wells()[0].top().point + \
            MOVE_TO_TIP_RACK_SAFETY_BUFFER
        if self._tip_origin_pt is not None:
            # use jogged to x and y offsets only if returning tip to rack
            await self._move(Location(Point(self._tip_origin_pt.x,
                                            self._tip_origin_pt.y,
                                            pt_above_well.z),
                                      None))
        else:
            await self._move(Location(pt_above_well, None))
    async def save_offset(self):
        """Record the reference z: nozzle height first, then the tip height,
        persisting tip length as the difference between the two."""
        if self._current_state == State.measuringNozzleOffset:
            # critical point would default to nozzle for z height
            cur_pt = await self._get_current_point(
                critical_point=None)
            self._nozzle_height_at_reference = cur_pt.z
        elif self._current_state == State.measuringTipOffset:
            assert self._hw_pipette.has_tip
            assert self._nozzle_height_at_reference is not None
            # set critical point explicitly to nozzle
            cur_pt = await self._get_current_point(
                critical_point=CriticalPoint.NOZZLE)
            util.save_tip_length_calibration(
                pipette_id=self._hw_pipette.pipette_id,
                tip_length_offset=cur_pt.z - self._nozzle_height_at_reference,
                tip_rack=self._tip_rack)
    def _get_default_tip_length(self) -> float:
        """Nominal tip length: full rack tip length minus pipette overlap."""
        tiprack: labware.Labware = self._deck[TIP_RACK_SLOT] # type: ignore
        full_length = tiprack.tip_length
        overlap_dict: Dict = \
            self._hw_pipette.config.tip_overlap
        default = overlap_dict['default']
        overlap = overlap_dict.get(tiprack.uri, default)
        return full_length - overlap
    def _get_critical_point_override(self) -> Optional[CriticalPoint]:
        # 8-channel pipettes calibrate off the front nozzle.
        return (CriticalPoint.FRONT_NOZZLE if
                self._hw_pipette.config.channels == 8 else None)
    async def _get_current_point(
            self,
            critical_point: CriticalPoint = None) -> Point:
        return await self._hardware.gantry_position(self._mount,
                                                    critical_point)
    async def jog(self, vector):
        """Move the gantry by a relative (x, y, z) vector."""
        await self._hardware.move_rel(mount=self._mount,
                                      delta=Point(*vector))
    async def move_to_reference_point(self):
        await self._move(util.get_reference_location(
            mount=self._mount,
            deck=self._deck,
            has_calibration_block=self._has_calibration_block)) if False else None # noqa placeholder
    async def pick_up_tip(self):
        await util.pick_up_tip(self, tip_length=self._get_default_tip_length())
    async def invalidate_tip(self):
        await util.invalidate_tip(self)
    async def exit_session(self):
        """Return the tip to the rack before ending the session."""
        await self.move_to_tip_rack()
        await self._return_tip()
    def _get_tip_rack_lw(self,
                         tip_rack_def: 'LabwareDefinition') -> labware.Labware:
        """Load the tip-rack labware; any load failure maps to BAD_LABWARE_DEF."""
        try:
            return labware.load_from_definition(
                tip_rack_def,
                self._deck.position_for(TIP_RACK_SLOT))
        except Exception:
            raise RobotServerError(definition=CalibrationError.BAD_LABWARE_DEF)
    def _get_alt_tip_racks(self) -> Set[str]:
        """Alternative tip-rack load names valid for this pipette volume."""
        pip_vol = self._hw_pipette.config.max_volume
        return set(TIP_RACK_LOOKUP_BY_MAX_VOL[str(pip_vol)].alternatives)
    def _initialize_deck(self):
        """Place the tip rack (and optional calibration block) on the deck."""
        self._deck[TIP_RACK_SLOT] = self._tip_rack
        if self._has_calibration_block:
            cb_setup = CAL_BLOCK_SETUP_BY_MOUNT[self._mount]
            self._deck[cb_setup.slot] = labware.load(
                cb_setup.load_name,
                self._deck.position_for(cb_setup.slot))
    async def _return_tip(self):
        await util.return_tip(self, tip_length=self._get_default_tip_length())
    async def _move(self, to_loc: Location):
        await util.move(self, to_loc)
| 38.867841 | 99 | 0.656693 |
012aa2038cdc99acbfdd28f90d56ceb7c6e6b261 | 1,572 | py | Python | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/pbuffer.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/pbuffer.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/pbuffer.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.WGL import _types as _cs
# End users want this...
from OpenGL.raw.WGL._types import *
from OpenGL.raw.WGL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'WGL_ARB_pbuffer'
def _f( function ):
    # Bind *function* as a WGL_ARB_pbuffer extension entry point, attaching
    # the standard OpenGL error checker.
    return _p.createFunction( function,_p.PLATFORM.WGL,'WGL_ARB_pbuffer',error_checker=_errors._error_checker)
WGL_DRAW_TO_PBUFFER_ARB=_C('WGL_DRAW_TO_PBUFFER_ARB',0x202D)
WGL_MAX_PBUFFER_HEIGHT_ARB=_C('WGL_MAX_PBUFFER_HEIGHT_ARB',0x2030)
WGL_MAX_PBUFFER_PIXELS_ARB=_C('WGL_MAX_PBUFFER_PIXELS_ARB',0x202E)
WGL_MAX_PBUFFER_WIDTH_ARB=_C('WGL_MAX_PBUFFER_WIDTH_ARB',0x202F)
WGL_PBUFFER_HEIGHT_ARB=_C('WGL_PBUFFER_HEIGHT_ARB',0x2035)
WGL_PBUFFER_LARGEST_ARB=_C('WGL_PBUFFER_LARGEST_ARB',0x2033)
WGL_PBUFFER_LOST_ARB=_C('WGL_PBUFFER_LOST_ARB',0x2036)
WGL_PBUFFER_WIDTH_ARB=_C('WGL_PBUFFER_WIDTH_ARB',0x2034)
@_f
@_p.types(_cs.HPBUFFERARB,_cs.HDC,_cs.c_int,_cs.c_int,_cs.c_int,ctypes.POINTER(_cs.c_int))
def wglCreatePbufferARB(hDC,iPixelFormat,iWidth,iHeight,piAttribList):pass
@_f
@_p.types(_cs.BOOL,_cs.HPBUFFERARB)
def wglDestroyPbufferARB(hPbuffer):pass
@_f
@_p.types(_cs.HDC,_cs.HPBUFFERARB)
def wglGetPbufferDCARB(hPbuffer):pass
@_f
@_p.types(_cs.BOOL,_cs.HPBUFFERARB,_cs.c_int,ctypes.POINTER(_cs.c_int))
def wglQueryPbufferARB(hPbuffer,iAttribute,piValue):pass
@_f
@_p.types(_cs.c_int,_cs.HPBUFFERARB,_cs.HDC)
def wglReleasePbufferDCARB(hPbuffer,hDC):pass
| 42.486486 | 111 | 0.811705 |
012c3b0eb2f715797ab316942c7962c44dea54d6 | 1,885 | py | Python | dj_warning_forms/forms.py | dnmellen/dj-warning-forms | 25213821f41ad6864cb7eda7bd2f6640d4418561 | [
"BSD-3-Clause"
] | 3 | 2022-03-15T09:09:08.000Z | 2022-03-23T12:30:47.000Z | dj_warning_forms/forms.py | dnmellen/dj-warning-forms | 25213821f41ad6864cb7eda7bd2f6640d4418561 | [
"BSD-3-Clause"
] | 1 | 2022-03-16T08:04:07.000Z | 2022-03-18T21:18:38.000Z | dj_warning_forms/forms.py | dnmellen/dj-warning-forms | 25213821f41ad6864cb7eda7bd2f6640d4418561 | [
"BSD-3-Clause"
] | null | null | null | from collections import namedtuple
import inspect
from django import forms
FormFieldWarning = namedtuple("FormFieldWarning", ["message", "description"])
class WarningFormMixin:
    """Classes using WarningFormMixin should implement methods to catch warnings
    >>> def warning_mailboxes(self) -> List[FormFieldWarning]:
            if some condition:
                return FormFieldWarning(message, description)
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Hidden flag the client echoes back to acknowledge warnings; when
        # true, get_warnings() short-circuits to "no warnings".
        self.fields["ignore_warnings"] = forms.BooleanField(
            required=False, initial=False, widget=forms.HiddenInput()
        )
        self.warnings = []
    def get_warnings(self):
        """Collect warnings from every `warning_*` method on the form.

        Returns a list of dicts with field/message/description.  Side effect:
        sets `ignore_warnings` to True in self.data so an unchanged
        resubmission of the same form then validates.
        """
        self.warnings = []
        if self.cleaned_data.get("ignore_warnings"):
            return []
        else:
            # Discover warning producers by naming convention.
            warning_methods = [
                (method_name, method)
                for method_name, method in inspect.getmembers(
                    self, predicate=inspect.ismethod
                )
                if method_name.startswith("warning_")
            ]
            for method_name, method in warning_methods:
                warnings = method()
                if warnings:
                    self.warnings.extend(
                        [
                            {
                                "field": method_name.split("warning_")[1],
                                "message": warning.message,
                                "description": warning.description,
                            }
                            for warning in warnings
                        ]
                    )
            # NOTE(review): assumes self.data is a Django QueryDict (exposes
            # _mutable); toggling that private attribute is a known workaround.
            self.data._mutable = True
            self.data["ignore_warnings"] = True
            self.data._mutable = False
            return self.warnings
    def is_valid(self):
        # Valid only when base validation passes AND no warnings remain;
        # get_warnings() is skipped entirely if base validation fails.
        return super().is_valid() and not self.get_warnings()
| 33.660714 | 80 | 0.523607 |
012f17bafc339e27fe0149bdbf1a7b12a681ef93 | 29 | py | Python | demo2022.py | finaleo83/demo01 | 579782f564ab0f5cc95f6b5e63644c5f930c0019 | [
"Unlicense"
] | null | null | null | demo2022.py | finaleo83/demo01 | 579782f564ab0f5cc95f6b5e63644c5f930c0019 | [
"Unlicense"
] | null | null | null | demo2022.py | finaleo83/demo01 | 579782f564ab0f5cc95f6b5e63644c5f930c0019 | [
"Unlicense"
] | null | null | null | print("Hello, World! Again!") | 29 | 29 | 0.689655 |
01314db002cc9b5ea847e74d9af1164332434719 | 8,791 | py | Python | ai/ai.py | TED-996/pro-evolution-foosball | ced46dd7340664d7c7ca7679c6582c7636e2c2a8 | [
"MIT"
] | null | null | null | ai/ai.py | TED-996/pro-evolution-foosball | ced46dd7340664d7c7ca7679c6582c7636e2c2a8 | [
"MIT"
] | null | null | null | ai/ai.py | TED-996/pro-evolution-foosball | ced46dd7340664d7c7ca7679c6582c7636e2c2a8 | [
"MIT"
] | null | null | null | from numpy import array, arange, argmax
from numpy.random import choice
from itertools import product
from ai.NN import NN
import pickle
from random import random, randrange, randint
from collections import deque
from math import floor
class AI:
MEMORY_DIMENSION = 90000
def __init__(self, load: bool = False,
state_size: int = 0,
rods_number: int = 0,
offset=None,
angle_velocity=None,
hidden_layers=(100, 100),
log_size=10,
nn_file: str = "save.model",
actions_file: str = "save.actions"):
"""
:param load: specify if model should be load
:param state_size: how many attributes has a state
:param rods_number: number of rods of a player
:param offset: represents a vector with scalars that will move rod to left/right
:param angle_velocity: represents a vector with rates of change of a rodsman angle
:param hidden_layers: a vector of values that represents how many units have each layer
:param nn_file: file to save neural network
:param actions_file: file to save actions_file
"""
self.actions = None
self.model = None
self.last_predictions = deque(maxlen=2 * log_size)
self.last_states = deque(maxlen=2 * log_size)
self.last_actions_index = deque(maxlen=2 * log_size)
self.last_reward_sums = deque([0, 0], maxlen=2 * log_size + 2)
self.log_size = log_size
self.lamda = 0.6
self.alpha = 0.9
self.epsilon = 0.5 # greedy policy
self.__epsilon_backup = self.epsilon
# decreasing_rate will decrease epsilon such that in the future, when nn learned something
# to not make anymore random choices
self.__decreasing_rate = 0.99997
# memory replay
self.memory_state = deque(maxlen=AI.MEMORY_DIMENSION)
self.memory_target = deque(maxlen=AI.MEMORY_DIMENSION)
# with save_probability save a memory with consist of a state and a target
self.save_probability = 0.3
if load:
self.__load(nn_file, actions_file)
return
self.rods_number = rods_number
self.actions = array(
[action for action in product(arange(rods_number),
offset,
angle_velocity)]
)
self.batch_size = int(floor((2 * log_size) ** 0.5))
self.model = NN(input_dim=state_size,
hidden_layers=hidden_layers,
output_dim=len(self.actions),
batch_size=self.batch_size)
self.model.compile()
def __load(self, nn_file, actions_file):
self.model = NN(load_file=nn_file)
fd = open(actions_file, "rb")
self.rods_number, self.actions, self.epsilon, self.lamda, self.batch_size = pickle.load(fd)
self.model.batch_size = self.batch_size
fd.close()
def save(self, nn_file: str="save.model", actions_file: str="save.actions"):
self.model.save(nn_file)
print("saving ai...")
fd = open(actions_file, "wb")
to_save = (self.rods_number, self.actions, self.epsilon, self.lamda, self.batch_size)
pickle.dump(to_save, fd, protocol=0) # protocol 0 for compatibility
fd.close()
def __compute_and_backup(self, state):
self.last_predictions.append(self.model.predict_action(state))
self.last_states.append(state)
# noinspection PyMethodMayBeStatic
def one_action(self, q_values):
return [argmax(q_values)]
def multiple_actions(self, q_values):
actions_idxs = []
slice_size = int(len(self.actions) // self.rods_number)
for i in range(self.rods_number):
actions_idxs.append(i * slice_size + argmax(q_values[i * slice_size:(i + 1) * slice_size]))
return actions_idxs
def get_action(self, state, action_selector):
"""
:param state: a state of the current game
:param action_selector: may be one of the following functions: ane_action, multiple_actions
:return:
"""
self.__compute_and_backup(state)
self.last_actions_index.append(action_selector(self.last_predictions[-1]))
return [self.actions[i] for i in self.last_actions_index[-1]]
def one_action_off_policy(self, rand, q_values):
if rand:
return [randrange(0, len(self.actions))]
else:
return self.one_action(q_values)
def multiple_actions_off_policy(self, rand, q_values):
slice_size = int(len(self.actions) // self.rods_number)
if rand:
return [i * slice_size + randrange(0, slice_size)
for i
in range(self.rods_number)]
else:
return self.multiple_actions(q_values)
def get_action_off_policy(self, state, action_selector):
self.__compute_and_backup(state)
if random() < self.epsilon: # should choose an action random
self.epsilon *= self.__decreasing_rate
self.last_actions_index.append(action_selector(True, None))
return [self.actions[i] for i in self.last_actions_index[-1]]
self.last_actions_index.append(action_selector(False, self.last_predictions[-1]))
return [self.actions[i] for i in self.last_actions_index[-1]]
def update(self, action_based_reward, new_states):
assert len(action_based_reward) == len(new_states), "must have reward for each new_state"
assert len(action_based_reward) == 2, "exactly 2 players supported ATM"
for idx in range(0, len(self.last_reward_sums), 2):
self.last_reward_sums[idx] += action_based_reward[0]
self.last_reward_sums[idx + 1] += action_based_reward[1]
self.last_reward_sums.extend(action_based_reward)
q_values = [[self.last_predictions[i][j]
for j in self.last_actions_index[i]]
for i in range(len(self.last_actions_index))]
action_selector = self.one_action if len(q_values[0]) == 1 else self.multiple_actions
for i in range(len(q_values) - 2):
next_q_values = q_values[i + 2]
q_values_updated = [
# TODO: not sure about self.last_rewards[i]
(1 - self.alpha) * q + self.alpha * (self.last_reward_sums[i] / ((len(q_values) + 1 - i) // 2)
+ (self.lamda * next_q))
for q, next_q
in zip(q_values[i], next_q_values)]
for j, update in zip(self.last_actions_index[i], q_values_updated):
self.last_predictions[i][j] = update
for i in range(len(new_states)):
next_max_q_values = action_selector(self.model.predict_action(new_states[i]))
q_values_updated = [
(1 - self.alpha) * q + self.alpha * (action_based_reward[i] + self.lamda * next_q)
for q, next_q
in zip(q_values[-2 + i], next_max_q_values)]
for j, update in zip(self.last_actions_index[-2 + i], q_values_updated):
self.last_predictions[-2 + i][j] = update
if random() < self.save_probability:
self.memory_state.append(self.last_states[-1])
self.memory_target.appendleft(self.last_predictions[-1])
if random() <= 0.5:
self.model.update(self.last_states, self.last_predictions)
else:
self.from_memory_update()
# we trust more in next move when network learn more
self.lamda += self.lamda * 1.e-7
if self.lamda > 1:
print("lambda = {}".format(self.lamda))
self.lamda = 1
def predict_action(self, state, action_selector):
actions_idxs = action_selector(self.model.predict_action(state))
return [self.actions[i] for i in actions_idxs]
def flush_last_actions(self):
self.last_states.clear()
self.last_actions_index.clear()
self.last_predictions.clear()
def switch_random_action(self, activate):
if not activate:
self.__epsilon_backup = self.epsilon
self.epsilon = 0
else:
self.epsilon = self.__epsilon_backup
def from_memory_update(self):
if len(self.memory_state) < 1000:
return
idxs = arange(len(self.memory_state))
size = randint(200, 1000)
sample = choice(idxs, size, replace=False)
self.model.update(array([self.memory_state[i] for i in sample]),
array([self.memory_target[i] for i in sample]),
False, 3)
| 41.079439 | 110 | 0.615402 |
013309a59e2c92190292c61529ffae1f691b50cb | 243 | py | Python | Desafio49.py | VictorCastao/Curso-em-Video-Python | aeee8baaa73c04b839a27ae37ba24ecc0b863075 | [
"MIT"
] | null | null | null | Desafio49.py | VictorCastao/Curso-em-Video-Python | aeee8baaa73c04b839a27ae37ba24ecc0b863075 | [
"MIT"
] | null | null | null | Desafio49.py | VictorCastao/Curso-em-Video-Python | aeee8baaa73c04b839a27ae37ba24ecc0b863075 | [
"MIT"
] | null | null | null | print('=' * 12 + 'Desafio 49' + '=' * 12)
# Multiplication-table ("tabuada") printer for a user-chosen number.
numero = int(input('Digite o número para a tabuada: '))
separador = '=' * 13
print(separador)
print(f'Tabuada do {numero}')
print(separador)
for fator in range(1, 11):
    print(f'{numero} x {fator:2} = {numero * fator}')
print(separador)
01343ef465fcba8903301425b1e7414924d1fd27 | 1,686 | py | Python | Shared_Files/Music_Pallete.py | EricCacciavillani/LyreBird | 858657faef39d1adcba19ff0213210ba490b4afa | [
"MIT"
] | 1 | 2019-05-04T02:34:20.000Z | 2019-05-04T02:34:20.000Z | Shared_Files/Music_Pallete.py | EricCacciavillani/LyreBird | 858657faef39d1adcba19ff0213210ba490b4afa | [
"MIT"
] | null | null | null | Shared_Files/Music_Pallete.py | EricCacciavillani/LyreBird | 858657faef39d1adcba19ff0213210ba490b4afa | [
"MIT"
] | 1 | 2019-04-04T19:14:09.000Z | 2019-04-04T19:14:09.000Z | import pretty_midi
import sys
import numpy as np
from tqdm import tqdm
from collections import Counter
sys.path.append('..')
from Pre_Production.Midi_Pre_Processor import *
from Shared_Files.Global_Util import *
class MusicPallete:
def __init__(self, pre_processor_obj):
self.__all_possible_instr_note_pairs = pre_processor_obj.return_all_possible_instr_note_pairs()
self.__centroid_instr_note = None
self.__instr_note_pair_attributes = dict()
set_test_eval = set()
full_matrix = []
count = 0
for instr_note_pair_str in (self.__all_possible_instr_note_pairs):
if "True" in instr_note_pair_str:
count += 1
instr_note_pair_obj = convert_string_to_instr_note_pair(instr_note_pair_str)
# Evaluate instr/note pair
fluid_synth, synth, note_hz = extract_instr_note_pair_attributes(instr_note_pair_obj)
# print(fluid_synth)
# ---
print(fluid_synth)
if self.__centroid_instr_note is None:
self.__centroid_instr_note = fluid_synth
self.__instr_note_pair_attributes[
instr_note_pair_str] = float(calculate_wave_mse(self.__centroid_instr_note, fluid_synth))
set_test_eval.add(calculate_wave_mse(self.__centroid_instr_note, fluid_synth))
print(len(set_test_eval))
print(count)
cnt = Counter([v for k,v in self.__instr_note_pair_attributes.items()])
print(cnt.most_common()[:20])
print()
print([k for k,v in self.__instr_note_pair_attributes.items() if v in set([test[0] for test in cnt.most_common()[:20]])]) | 33.058824 | 129 | 0.682681 |
01347244fcb03a5c18ef357b962ba9072723419d | 1,408 | py | Python | test/SIT/Bootstrap/TestBootstrap.py | antonelloceravola/ToolBOSCore | b03414a867a9f0585e06bb8e4f299c4be1357f3a | [
"BSD-3-Clause"
] | null | null | null | test/SIT/Bootstrap/TestBootstrap.py | antonelloceravola/ToolBOSCore | b03414a867a9f0585e06bb8e4f299c4be1357f3a | [
"BSD-3-Clause"
] | null | null | null | test/SIT/Bootstrap/TestBootstrap.py | antonelloceravola/ToolBOSCore | b03414a867a9f0585e06bb8e4f299c4be1357f3a | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# launches the unit testing
#
# Copyright (C)
# Honda Research Institute Europe GmbH
# Carl-Legien-Str. 30
# 63073 Offenbach/Main
# Germany
#
# UNPUBLISHED PROPRIETARY MATERIAL.
# ALL RIGHTS RESERVED.
#
#
import os
import tempfile
import unittest
from ToolBOSCore.Storage import SIT
from ToolBOSCore.Storage import CopyTreeFilter
from ToolBOSCore.Util import FastScript
from ToolBOSCore.Util import Any
class TestBootstrap( unittest.TestCase ):
    """System test: bootstrap a minimal Software Installation Tree (SIT)."""

    def setUp( self ):
        # Keep output quiet unless VERBOSE=TRUE is exported.
        if not FastScript.getEnv( 'VERBOSE' ) == 'TRUE':
            Any.setDebugLevel( 1 )

    def test_bootstrapSIT( self ):
        """Bootstrap a SIT into a temp dir and verify its essential content."""
        basePkgList = SIT.getMinRequirements()
        outputDir = tempfile.mkdtemp( prefix='test-' )
        # create test SIT
        copyFilter = CopyTreeFilter.CopyTreeFilter( [] ) # skip copying binaries
        copyFilter.filterDocu = True
        SIT.bootstrap( outputDir, True, True, copyFilter.callback, False )
        # check if all essential packages are available
        for package in basePkgList:
            Any.requireIsFile( os.path.join( outputDir, package, 'packageVar.cmake' ) )
        # check for Module-Index directory
        Any.requireIsDir( os.path.join( outputDir, 'Modules', 'Index' ) )
        # clean-up
        FastScript.remove( outputDir )


if __name__ == '__main__':
    unittest.main()
# EOF
| 22.349206 | 87 | 0.665483 |
0139cdb76dae20ff11cf54b33120c3d867395d2f | 9,566 | py | Python | GoogleCalendar.py | InRong/Glance | cc15659436bba2b4bee396b4a3e595a157f31401 | [
"Apache-2.0"
] | null | null | null | GoogleCalendar.py | InRong/Glance | cc15659436bba2b4bee396b4a3e595a157f31401 | [
"Apache-2.0"
] | null | null | null | GoogleCalendar.py | InRong/Glance | cc15659436bba2b4bee396b4a3e595a157f31401 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# Module for adding Google Calendar Functionality.
#
# by Peter Juett
# References:https://developers.google.com/calendar/quickstart/python
#
# Copyright 2018
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
import logging
from logging.handlers import RotatingFileHandler
import paho.mqtt.client as mqtt
import DB
import datetime
import time
import pytz
import os
from dateutil import parser
SLEEP_TIME = 60  # seconds between calendar polls (see Calendar.process_loop)

# oauth2client's tools.run_flow() consumes argparse flags when available;
# fall back to None so tools.run() is used instead.
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None

# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Calendar API Python Quickstart'
class Calendar(object):
    """Polls the user's primary Google Calendar and publishes upcoming events
    over MQTT (broker, topic and device name come from the DB settings).

    NOTE: the constructor never returns — it enters process_loop().
    """

    def __init__(self, main_app_log):
        # Use the caller's logger when given, otherwise set up our own
        # rotating file logger.
        if main_app_log is None:
            self.log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(funcName)s(%(lineno)d) %(message)s')
            self.logFile = 'logs/GoogleCalendar.log'
            self.my_handler = RotatingFileHandler(self.logFile, mode='a', maxBytes=5*1024*1024, backupCount=1, encoding=None, delay=0)
            self.my_handler.setFormatter(self.log_formatter)
            self.my_handler.setLevel(logging.INFO)
            self.app_log = logging.getLogger('root')
            self.app_log.setLevel(logging.INFO)
            self.app_log.addHandler(self.my_handler)
        else:
            self.app_log = main_app_log
        self._db = DB.DB()
        self._db.load_settings()
        self.start_mosquito()
        self.process_loop()  # blocks forever

    def on_connect(self, mosclient, userdata, flags, rc):
        """MQTT callback: (re)subscribe to the configured topic."""
        self.app_log.info("Subscribing to topic: " + self._db.get_value("mostopic"))
        mosclient.subscribe(self._db.get_value("mostopic"))

    def on_disconnect(self, client, userdata, rc):
        """MQTT callback: log unexpected (rc != 0) disconnections."""
        if rc != 0:
            self.app_log.info("Unexpected disconnection")

    def on_publish(self, client, userdata, mid):
        """MQTT callback: log each successfully published message id."""
        self.app_log.info("on_publish - published " + str(mid))

    def start_mosquito(self):
        """Create the MQTT client, connect to the broker and start its loop."""
        self.mos_client = mqtt.Client()
        self.mos_client.on_connect = self.on_connect
        self.mos_client.on_disconnect = self.on_disconnect
        self.mos_client.on_publish = self.on_publish
        # Credentials are optional: only set when a password is configured.
        if len(self._db.get_value("mospassword"))>0:
            self.mos_client.username_pw_set(self._db.get_value("mosusername"),self._db.get_value("mospassword"))
        mos_broker_address = self._db.get_value("mosbrokeraddress")
        self.app_log.info("Connecting to: " + mos_broker_address)
        self.mos_client.connect(mos_broker_address, int(self._db.get_value("mosbrokerport")), 60)
        self.app_log.info("Connected")
        self.mos_client.loop_start()

    def broadcast_send(self, data_item, value):
        """Publish '<name>/<data_item>/<value>' on the configured topic.

        Silently returns when either argument is None; publish failures are
        logged, never raised.
        """
        result = 0
        mid = 0
        if data_item is None:
            self.app_log.info("data_item is None")
            return
        if value is None:
            self.app_log.info("value is None")
            return
        self.app_log.info("publishing: " + data_item + " " + value)
        try:
            message = self._db.get_value("name") + "/" + data_item + "/" + value
            result, mid = self.mos_client.publish(self._db.get_value("mostopic"), message)
            if result == mqtt.MQTT_ERR_SUCCESS:
                self.app_log.info("published OK, Message ID = " + str(mid))
            elif result == mqtt.MQTT_ERR_NO_CONN:
                self.app_log.info("publish failed, no connection")
            else:
                self.app_log.info("publish failed, result code = " + str(result))
        except Exception as e:
            self.app_log.exception('Exception: %s', e)

    def process_loop(self):
        """Poll the calendar forever, sleeping SLEEP_TIME between polls."""
        while(1):
            try:
                self.get()
            except Exception as e:
                self.app_log.exception('Exception: %s', e)
            finally:
                time.sleep(SLEEP_TIME)

    def get_credentials(self):
        """Gets valid user credentials from storage.

        If nothing has been stored, or if the stored credentials are invalid,
        the OAuth2 flow is completed to obtain the new credentials.

        Returns:
            Credentials, the obtained credential.
        """
        home_dir = os.path.expanduser('~')
        credential_dir = os.path.join(home_dir, '.credentials')
        if not os.path.exists(credential_dir):
            os.makedirs(credential_dir)
        credential_path = os.path.join(credential_dir,
                                       'calendar-python-quickstart.json')
        store = Storage(credential_path)
        credentials = store.get()
        if not credentials or credentials.invalid:
            flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
            flow.user_agent = APPLICATION_NAME
            if flags:
                credentials = tools.run_flow(flow, store, flags)
            else:  # Needed only for compatibility with Python 2.6
                credentials = tools.run(flow, store)
            print('Storing credentials to ' + credential_path)
        return credentials

    def get(self):
        """Fetch the next 5 calendar events and publish them over MQTT.

        Publishes one 'CalendarN' message per event plus a combined
        'Calendar1and2' message for the first two.
        """
        credentials = self.get_credentials()
        http = credentials.authorize(httplib2.Http())
        service = discovery.build('calendar', 'v3', http=http)
        now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
        self.app_log.info('Getting the upcoming 5 events')
        eventsResult = service.events().list(
            calendarId='primary', timeMin=now, maxResults=5, singleEvents=True,
            orderBy='startTime').execute()
        events = eventsResult.get('items', [])
        x = 1
        if not events:
            self.app_log.info('No upcoming events found.')
        for event in events:
            # All-day events carry only 'date'; timed events carry 'dateTime'.
            start = event['start'].get('dateTime', event['start'].get('date'))
            next_event = self.process_text(start, event['summary'])
            self.broadcast_send('Calendar' + str(x), next_event)
            if x == 1:
                next_two_events = next_event
            elif x == 2:
                next_two_events = next_two_events + ", " + next_event
            # NOTE(review): publishing the combined message inside the loop
            # re-sends it for every event — confirm against the original layout.
            self.broadcast_send('Calendar1and2', next_two_events)
            x = x + 1

    def process_text(self, start_time, summary):
        """Format an ISO start time + summary into a short display string.

        'MM-DD HH:MM' is replaced by '' for today / 'Tomorrow' for tomorrow,
        and by the weekday name when the event is less than a week away.
        """
        self.app_log.info(start_time)
        this_time = parser.parse(start_time)
        self.app_log.info(this_time.strftime("%a %b %d %Y %H:%M:%S --- %z"))
        tz_len = len(this_time.strftime("%z"))
        # Slice [5:16] of 'YYYY-MM-DDTHH:MM:SS' yields 'MM-DDTHH:MM'.
        out_text = start_time[5:16].replace("T"," ")
        if tz_len > 0:
            tz = pytz.timezone(self._db.get_value("pytztimezone"))
            now = datetime.datetime.now(tz)
        else:
            now = datetime.datetime.now()  # daily events can have no timezone info
        self.app_log.info (now.strftime("%a %b %d %Y %H:%M:%S --- %z"))
        today_date = now.strftime("%m-%d")
        tomorrow_date = (datetime.date.today() + datetime.timedelta(days=1)).strftime("%m-%d")
        delta = this_time - now
        # Replace with today or tomorrow, as applicable
        out_text = out_text.replace(today_date,"").replace(tomorrow_date,"Tomorrow")
        if (delta.days < 7):  # if it is soon, use the day of the week instead of the date
            out_text = out_text.replace(this_time.strftime("%m-%d"),this_time.strftime("%a"))
        out_text = out_text + " " + summary
        return out_text
def run_program(main_app_log):
    """Start the calendar service (never returns: Calendar() loops forever)."""
    Calendar(main_app_log)


if __name__ == '__main__':
    run_program(None)
| 37.810277 | 146 | 0.569935 |
013c4fb306411e4299bc53f9a104c56ba9d36105 | 1,705 | py | Python | pypub/scrapers/jneuroscience.py | ScholarTools/pypub | 1fcdf895d4777aea7882a1812fef307255702a80 | [
"MIT"
] | 1 | 2016-07-03T17:53:54.000Z | 2016-07-03T17:53:54.000Z | pypub/scrapers/jneuroscience.py | ScholarTools/pypub | 1fcdf895d4777aea7882a1812fef307255702a80 | [
"MIT"
] | 8 | 2015-12-28T19:53:36.000Z | 2021-12-13T19:41:39.000Z | pypub/scrapers/jneuroscience.py | ScholarTools/pypub | 1fcdf895d4777aea7882a1812fef307255702a80 | [
"MIT"
] | 1 | 2016-06-21T15:08:46.000Z | 2016-06-21T15:08:46.000Z | # -*- coding: utf-8 -*-
"""
For an example page see:
http://www.jneurosci.org/content/23/10/4355.long#ref-list-1
Note that JNeuroscience seems to be part of a consortium with J Physiology,
J Neurophysiology, PNS, etc
perhaps check out HighWire Press?
"""
import requests
from bs4 import BeautifulSoup
class JNeuroscienceRef(object):
    """Placeholder for a parsed J. Neuroscience reference entry (empty stub)."""

    def __init__(self):
        pass
"""
<li><span class="ref-label ref-label-empty"></span>
<a class="rev-xref-ref" href="#xref-ref-40-1" id="ref-40" title="View reference in text">↵</a>
<div class="cit ref-cit ref-other" id="cit-23.10.4355.40">
<div class="cit-metadata"><cite>Yoshimura N, Seki S,
de Groat WC (<span class="cit-pub-date">2001b</span>)
Nitric oxide modulates Ca<sup>2</sup><sup>+</sup> channels in
dorsal root ganglion neurons innervating rat urinary bladder.
<span class="cit-source">J Neurophysiol</span> <span class="cit-vol">86
</span>: <span class="cit-fpage">304</span>–311.</cite></div>
<div class="cit-extra"><a class="cit-ref-sprinkles cit-ref-sprinkles-ijlinks"
href="/cgi/ijlink?linkType=ABST&journalCode=jn&resid=86/1/304">
<span class="cit-reflinks-abstract">Abstract</span><span class="cit-sep
cit-reflinks-variant-name-sep">/</span><span class="cit-reflinks-full-text">
<span class="free-full-text">FREE </span>Full Text</span></a></div>
</div>
</li>
"""
def get_references(url):
    """Download *url* and return its reference entries.

    Fetches the page, locates the reference-list section and returns the
    list of ``<li>`` tags (bs4 Tag objects), one per cited reference.
    Raises AttributeError when the page has no reference-list div.
    """
    r = requests.get(url)
    # Name the parser explicitly: avoids bs4's "no parser specified" warning
    # and makes parsing reproducible across installations.
    soup = BeautifulSoup(r.text, 'html.parser')
    ref_section = soup.find('div', {'class': 'section ref-list'})
    ref_entries = ref_section.find_all('li')
    # Removed a leftover `import pdb; pdb.set_trace()` debugger breakpoint
    # that halted every call; the parsed entries are now actually returned.
    return ref_entries
013c7675c37f149bc9d81184f6cde81bed1535d6 | 1,089 | py | Python | fastapi-master-api/app/api/models/create_metric.py | SionAbes/fullstack-porfolio | 6ca74da425a0f6e2d9b65b2aeb8d5452ff1565a9 | [
"MIT"
] | 1 | 2021-12-25T09:19:25.000Z | 2021-12-25T09:19:25.000Z | fastapi-master-api/app/api/models/create_metric.py | SionAbes/fullstack-porfolio | 6ca74da425a0f6e2d9b65b2aeb8d5452ff1565a9 | [
"MIT"
] | null | null | null | fastapi-master-api/app/api/models/create_metric.py | SionAbes/fullstack-porfolio | 6ca74da425a0f6e2d9b65b2aeb8d5452ff1565a9 | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import annotations
import re # noqa: F401
from datetime import date, datetime # noqa: F401
from typing import Any, Dict, List, Optional # noqa: F401
from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401
class CreateMetric(BaseModel):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    CreateMetric - a model defined in OpenAPI

    processed_at: The processed_at of this CreateMetric [Optional].
    event_at: The event_at of this CreateMetric [Optional].
    machine_id: The machine_id of this CreateMetric.
    metric: The metric of this CreateMetric [Optional].
    value: The value of this CreateMetric [Optional].
    unit: The unit of this CreateMetric [Optional].
    """

    processed_at: Optional[datetime] = None
    event_at: Optional[datetime] = None
    machine_id: int  # required: no default, so pydantic enforces presence
    metric: Optional[str] = None
    value: Optional[str] = None
    unit: Optional[str] = None


# Resolve postponed/forward type references in the annotations above.
CreateMetric.update_forward_refs()
| 30.25 | 96 | 0.708907 |
013c77d6a4350f96399efe1ca86c27a469b9fa59 | 32 | py | Python | src/logic_analyzer_bfms/__init__.py | pybfms/pybfms_logic_analyzer | 7696e16c53a7248a0660ba1cc8f108cda03c1e08 | [
"Apache-2.0"
] | null | null | null | src/logic_analyzer_bfms/__init__.py | pybfms/pybfms_logic_analyzer | 7696e16c53a7248a0660ba1cc8f108cda03c1e08 | [
"Apache-2.0"
] | null | null | null | src/logic_analyzer_bfms/__init__.py | pybfms/pybfms_logic_analyzer | 7696e16c53a7248a0660ba1cc8f108cda03c1e08 | [
"Apache-2.0"
] | 1 | 2020-11-22T08:37:39.000Z | 2020-11-22T08:37:39.000Z |
from .la_initiator_bfm import * | 16 | 31 | 0.8125 |
013c7f4681cad6f22cf85afe8bfe0932af367f65 | 6,940 | py | Python | postprocessing/science/compute_diff_seissol_data.py | jrekoske/SeisSol | 63087cf5fabc6e1b09a4d6b1e0ac46aaee2a1dfe | [
"BSD-3-Clause"
] | null | null | null | postprocessing/science/compute_diff_seissol_data.py | jrekoske/SeisSol | 63087cf5fabc6e1b09a4d6b1e0ac46aaee2a1dfe | [
"BSD-3-Clause"
] | null | null | null | postprocessing/science/compute_diff_seissol_data.py | jrekoske/SeisSol | 63087cf5fabc6e1b09a4d6b1e0ac46aaee2a1dfe | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
import h5py
import numpy as np
import argparse
import os
import seissolxdmf as sx
import seissolxdmfwriter as sw
# These 2 latter modules are on pypi (e.g. pip install seissolxdmf)
def read_reshape2d(sx, dataname):
    """Read a SeisSol dataset and guarantee a 2D (ntime, ncell) array.

    A dataset holding a single time stamp comes back 1D; promote it to a
    one-row 2D array so callers can always index [time, cell].
    """
    data = sx.ReadData(dataname)
    if len(data.shape) == 1:
        return data.reshape((1, data.shape[0]))
    return data
def fuzzysort(arr, idx, dim=0, tol=1e-6):
    """
    return indexes of sorted points robust to small perturbations of individual components.
    https://stackoverflow.com/questions/19072110/numpy-np-lexsort-with-fuzzy-tolerant-comparisons
    note that I added dim<arr.shape[0]-1 in some if statement (else it will crash sometimes)
    """
    # Sort the indices by the current component (arr is (ncomponents, npoints)).
    arrd = arr[dim]
    srtdidx = sorted(idx, key=arrd.__getitem__)
    # Walk the sorted order; whenever a run of values lies within `tol`,
    # recursively order that run by the next component.
    i, ix = 0, srtdidx[0]
    for j, jx in enumerate(srtdidx[1:], start=1):
        if arrd[jx] - arrd[ix] >= tol:
            if j - i > 1 and dim < arr.shape[0] - 1:
                srtdidx[i:j] = fuzzysort(arr, srtdidx[i:j], dim + 1, tol)
            i, ix = j, jx
    # Handle the trailing run of near-equal values.
    # NOTE(review): assumes len(idx) >= 2 — with a single index, `j` is unbound here.
    if i != j and dim < arr.shape[0] - 1:
        srtdidx[i:] = fuzzysort(arr, srtdidx[i:], dim + 1, tol)
    return srtdidx
def lookup_sorted_geom(geom):
    """Return indices sorting *geom* by (x, then y, then z) plus the inverse
    permutation, using a tolerance-aware comparison."""
    nvert = geom.shape[0]
    ind = fuzzysort(geom.T, list(range(0, nvert)), tol=1e-4)
    # Invert the permutation: sorted position k holds original vertex ind[k].
    ind_inv = np.zeros_like(ind)
    for new_pos, old_pos in enumerate(ind):
        ind_inv[old_pos] = new_pos
    return ind, ind_inv
def read_geom_connect(sx):
    """Fetch the vertex and connectivity arrays from a seissolxdmf reader."""
    geometry = sx.ReadGeometry()
    connectivity = sx.ReadConnect()
    return geometry, connectivity
def return_sorted_geom_connect(sx):
    """sort geom array and reindex connect array to match the new geom array

    Duplicated vertices (within 1e-4) are merged first — via pymesh when
    available, else trimesh — so the two files become comparable even when
    their partitioning differs.
    """
    geom, connect = read_geom_connect(sx)
    nv = geom.shape[0]
    try:
        import pymesh

        geom, connect, inf = pymesh.remove_duplicated_vertices_raw(
            geom, connect, tol=1e-4
        )
        print(f"removed {inf['num_vertex_merged']} duplicates out of {nv}")
    except ModuleNotFoundError:
        print("pymesh not found, trying trimesh...")
        import trimesh

        trimesh.tol.merge = 1e-4
        mesh = trimesh.Trimesh(geom, connect)
        mesh.merge_vertices()
        geom = mesh.vertices
        connect = mesh.faces
        print(f"removed {nv-geom.shape[0]} duplicates out of {nv}")
    # Sort the vertices spatially and rewrite the connectivity accordingly.
    ind, ind_inv = lookup_sorted_geom(geom)
    geom = geom[ind, :]
    connect = np.array([ind_inv[x] for x in connect.flatten()]).reshape(connect.shape)
    # sort along line (then we can use multidim_intersect)
    connect = np.sort(connect, axis=1)
    return geom, connect
def multidim_intersect(arr1, arr2):
    """Return row indices of the triangles common to both connectivity arrays.

    Each row is viewed as one structured scalar so numpy's 1D intersect1d can
    match whole triangles at once (see
    https://stackoverflow.com/questions/9269681/intersection-of-2d-numpy-ndarrays).
    Both arrays must index the same geometry with row-sorted vertex ids.
    """
    view1 = arr1.view([("", arr1.dtype)] * arr1.shape[1])
    view2 = arr2.view([("", arr2.dtype)] * arr2.shape[1])
    common, ind1, ind2 = np.intersect1d(view1, view2, return_indices=True)
    ni, n1, n2 = common.shape[0], arr1.shape[0], arr2.shape[0]
    print(
        f"{ni} faces in common, n faces connect 1:{n1}, 2:{n2} (diff: {n1-ni}, {n2-ni})"
    )
    return ind1, ind2
def same_geometry(sx1, sx2):
    """True when both files share the same vertex array (within tolerance)."""
    geom1 = sx1.ReadGeometry()
    geom2 = sx2.ReadGeometry()
    # Different vertex counts cannot match; otherwise compare element-wise.
    if geom1.shape[0] == geom2.shape[0]:
        return np.all(np.isclose(geom1, geom2, rtol=1e-3, atol=1e-4))
    return False
# ---- command line -----------------------------------------------------------
parser = argparse.ArgumentParser(
    description="make difference between 2 (paraview) output files: f2-f1. \
The output must be from the same mesh, but the partionning may differ."
)
parser.add_argument("xdmf_filename1", help="filename1")
parser.add_argument("xdmf_filename2", help="filename2")
parser.add_argument(
    "--idt",
    nargs="+",
    required=True,
    help="list of time step to differenciate (1st = 0); -1 = all",
    type=int,
)
parser.add_argument(
    "--Data",
    nargs="+",
    required=True,
    metavar=("variable"),
    help="Data to differenciate (example SRs); all for all stored quantities",
)
parser.add_argument(
    "--ratio",
    dest="ratio",
    default=False,
    action="store_true",
    help="compute relative ratio (f1-f2)/f1 instead of f2-f1",
)
args = parser.parse_args()

sx1 = sx.seissolxdmf(args.xdmf_filename1)
sx2 = sx.seissolxdmf(args.xdmf_filename2)

# ---- align the two meshes ---------------------------------------------------
# When the two files index their vertices identically we can subtract the
# arrays directly; otherwise sort/merge both meshes and keep the common faces.
same_geom = same_geometry(sx1, sx2)
if same_geom:
    print("same indexing detected, no need to reindex arrays")
    geom1, connect1 = read_geom_connect(sx1)
    geom2, connect2 = read_geom_connect(sx2)
else:
    geom1, connect1 = return_sorted_geom_connect(sx1)
    geom2, connect2 = return_sorted_geom_connect(sx2)
    if not np.all(np.isclose(geom1, geom2, rtol=1e-3, atol=1e-4)):
        raise ValueError("geometry arrays differ")
    ind1, ind2 = multidim_intersect(connect1, connect2)
    connect1 = connect1[ind1, :]

# --idt -1 means: process every time step of file 1.
if args.idt[0] == -1:
    args.idt = list(range(0, sx1.ndt))

aData = []

# ---- choose the variables ---------------------------------------------------
# '--Data all': take every Attribute present in BOTH xdmf files, minus
# bookkeeping arrays.
if args.Data == ["all"]:
    variable_names = set()
    for elem in sx1.tree.iter():
        if elem.tag == "Attribute":
            variable_names.add(elem.get("Name"))
    variable_names2 = set()
    for elem in sx2.tree.iter():
        if elem.tag == "Attribute":
            variable_names2.add(elem.get("Name"))
    # keep only variables in common
    variable_names = variable_names.intersection(variable_names2)
    for to_remove in ["partition", "locationFlag"]:
        if to_remove in variable_names:
            variable_names.remove(to_remove)
else:
    variable_names = args.Data

# ---- compute the difference (or relative ratio) per variable ---------------
# NOTE(review): the help text says f2-f1 but the code computes f1-f2 — confirm
# which is intended.
for dataname in variable_names:
    print(dataname)
    myData1 = read_reshape2d(sx1, dataname)
    myData2 = read_reshape2d(sx2, dataname)
    ndt = min(myData1.shape[0], myData2.shape[0])
    if same_geom:
        myData = myData1[0:ndt, :] - myData2[0:ndt, :]
        if args.ratio:
            myData = myData / myData1[0:ndt, :]
    else:
        # Reindex through the common-face lookup computed above.
        myData = myData1[0:ndt, ind1] - myData2[0:ndt, ind2]
        if args.ratio:
            myData = myData / myData1[0:ndt, ind1]
    # Print min/max per requested time step; drop steps beyond the data.
    for idt in args.idt:
        if idt < ndt:
            print(idt, np.amin(myData[idt, :]), np.amax(myData[idt, :]))
        else:
            print(f"removing idt={idt}>{ndt} from args.idt")
            args.idt.pop(idt)
    aData.append(myData)

# ---- write the output file --------------------------------------------------
prefix, ext = os.path.splitext(args.xdmf_filename1)
add2prefix = "ratio" if args.ratio else "diff"
fname = f"{add2prefix}_{os.path.basename(prefix)}"
try:
    dt = sx1.ReadTimeStep()
except NameError:
    dt = 0.0
out_names = ["diff_" + name for name in variable_names]
sw.write_seissol_output(fname, geom1, connect1, out_names, aData, dt, args.idt)
013d0a444d8fcc0b669cdc39b7c00090f97916cd | 110 | py | Python | iFarm/iFarmapp/urls.py | vmakar0v/smart-farm | 47bd7be4b40bbca57492ae5b8da09cc0635bfa2a | [
"Apache-2.0"
] | null | null | null | iFarm/iFarmapp/urls.py | vmakar0v/smart-farm | 47bd7be4b40bbca57492ae5b8da09cc0635bfa2a | [
"Apache-2.0"
] | null | null | null | iFarm/iFarmapp/urls.py | vmakar0v/smart-farm | 47bd7be4b40bbca57492ae5b8da09cc0635bfa2a | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.home_P, name='home_P')
]
| 15.714286 | 41 | 0.681818 |
013dad2ac33defe55487634726ea099424ed06bd | 538 | py | Python | yandex/yandex2016_b_b.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | 1 | 2018-11-12T15:18:55.000Z | 2018-11-12T15:18:55.000Z | yandex/yandex2016_b_b.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | null | null | null | yandex/yandex2016_b_b.py | knuu/competitive-programming | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | [
"MIT"
] | null | null | null | def count_bit_sum(N):
bit = [0] * max_bitlen
if N <= 0:
return bit[:]
for i in range(N.bit_length() - 1):
bit[i] = 1 << (N.bit_length() - 2)
bit[N.bit_length()-1] = N - (1 << (N.bit_length() - 1)) + 1
return [bit[i] + b for i, b in enumerate(count_bit_sum(N - (1 << (N.bit_length() - 1))))]
max_bitlen = int(1<<31).bit_length()
L, R = map(int, input().split())
ans = [(r - l) * 2 > (R - L + 1) for l, r in zip(count_bit_sum(L-1), count_bit_sum(R))]
print(sum(x * (1 << i) for i, x in enumerate(ans)))
| 38.428571 | 93 | 0.539033 |
013ea56ed2c4a516c4e4235a0ba8239ea63b5e56 | 2,639 | py | Python | gnuradio-3.7.13.4/gr-filter/python/filter/design/api_object.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | 1 | 2021-03-09T07:32:37.000Z | 2021-03-09T07:32:37.000Z | gnuradio-3.7.13.4/gr-filter/python/filter/design/api_object.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | null | null | null | gnuradio-3.7.13.4/gr-filter/python/filter/design/api_object.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
class ApiObject():
    """Container for filter-design results.

    Holds the response type, design parameters and taps for one or more
    filters (e.g. cascaded filters in the future), addressed by a 1-based
    filter number. Out-of-range filter numbers are silently ignored on
    update and yield None on read.
    """

    def __init__(self, filtcount=1):
        # Number of filters this object tracks; the lists below are
        # indexed by (filter number - 1).
        self.filtercount = filtcount
        self.restype = [''] * self.filtercount
        self.params = [''] * self.filtercount
        self.taps = [''] * self.filtercount

    def update_params(self, params, filtno):
        """Update the params dictionary for the given filter number."""
        if (filtno <= self.filtercount):
            self.params[filtno - 1] = params

    def update_filttype(self, filttype, filtno):
        """Update the filter (response) type for the given filter number.

        Fixed: the original assigned to ``self.filttype``, an attribute
        that never exists, raising AttributeError. The type is stored in
        ``self.restype`` (the attribute read back by ``get_restype``).
        """
        if (filtno <= self.filtercount):
            self.restype[filtno - 1] = filttype

    def update_taps(self, taps, filtno):
        """Update taps for the given filter number.

        taps is a list of coefficients for FIR designs and a (b, a)
        tuple for IIR designs.
        """
        if (filtno <= self.filtercount):
            self.taps[filtno - 1] = taps

    def update_all(self, filttype, params, taps, filtno):
        """Update type, params and taps in a single call."""
        if (filtno <= self.filtercount):
            self.taps[filtno - 1] = taps
            self.params[filtno - 1] = params
            self.restype[filtno - 1] = filttype

    def get_filtercount(self):
        return self.filtercount

    def get_restype(self, filtno=1):
        if (filtno <= self.filtercount):
            return self.restype[filtno - 1]

    def get_params(self, filtno=1):
        if (filtno <= self.filtercount):
            return self.params[filtno - 1]

    def get_taps(self, filtno=1):
        if (filtno <= self.filtercount):
            return self.taps[filtno - 1]
| 32.9875 | 70 | 0.635847 |
013f03299525978dd3d212a41f6f91fa29b63b14 | 1,262 | py | Python | classes/announcement.py | jamflcjamflc/TopRace | 5c02941c8787884302a91f33f6b26bbdc13d79ce | [
"Apache-2.0"
] | null | null | null | classes/announcement.py | jamflcjamflc/TopRace | 5c02941c8787884302a91f33f6b26bbdc13d79ce | [
"Apache-2.0"
] | null | null | null | classes/announcement.py | jamflcjamflc/TopRace | 5c02941c8787884302a91f33f6b26bbdc13d79ce | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf8 -*-
# announcement
# helper class for toprace
# Alfredo Martin 2021
import pygame
version = 'announcement.v.1.0.0'
class Announcement:
    """A short-lived text banner rendered at the centre of a pygame screen."""

    def __init__(self, screen=None, text='', time=5, color=(255, 0, 0)):
        """Render the text and compute its centred position.

        screen: pygame canvas instance the banner belongs to
        text: message to display
        time: number of frames during which the announcement stays active
        color: RGB colour of the rendered text
        """
        self.t = 0  # frames elapsed so far
        self.color = color
        self.time = time
        self.active = True
        font = pygame.font.SysFont('comicsans', 70)
        self.text = font.render(text, False, self.color)
        _, _, screen_w, screen_h = screen.get_rect()
        _, _, text_w, text_h = self.text.get_rect()
        # Centre the rendered text on the screen.
        self.pos = ((screen_w / 2) - (text_w / 2), (screen_h / 2) - (text_h / 2))

    def update(self, screen):
        """Advance the frame counter, refresh the active flag and blit the
        announcement onto the given canvas.

        screen: pygame canvas instance
        returns: the canvas after blitting
        """
        self.t = self.t + 1
        self.active = (self.t <= self.time)
        screen.blit(self.text, self.pos)
        return screen
if __name__ == '__main__':
print(version)
| 28.681818 | 80 | 0.594295 |
013f53227578c72e73628747ce0b65e0ad1aa92b | 3,633 | py | Python | policies/regularizers.py | IST-DASLab/ACDC | ac53210b6adc1f2506ff909de08172ed9cad25d5 | [
"Apache-2.0"
] | 6 | 2021-11-26T01:21:03.000Z | 2022-01-10T15:41:50.000Z | policies/regularizers.py | IST-DASLab/ACDC | ac53210b6adc1f2506ff909de08172ed9cad25d5 | [
"Apache-2.0"
] | 1 | 2021-11-28T10:51:08.000Z | 2021-11-30T01:30:29.000Z | policies/regularizers.py | IST-DASLab/ACDC | ac53210b6adc1f2506ff909de08172ed9cad25d5 | [
"Apache-2.0"
] | 1 | 2021-12-21T13:25:43.000Z | 2021-12-21T13:25:43.000Z | """
Implement regularization policies here
"""
import torch
import torch.nn as nn
from policies.policy import PolicyBase
import logging
def build_reg_from_config(model, reg_config):
    """
    Instantiate a single regularizer from its configuration dict.

    The 'class' entry names a regularizer class defined in this module;
    every other entry is forwarded as a keyword argument to its constructor.
    """
    ctor_kwargs = {key: val for key, val in reg_config.items() if key != 'class'}
    reg_cls = globals()[reg_config['class']]
    return reg_cls(model, **ctor_kwargs)
def build_regs_from_config(model, config):
    """
    Build every regularizer listed in the run configuration.

    ``config['regularizers']`` maps arbitrary names (reg1, reg2, ...) to
    per-regularizer configs consumed by :func:`build_reg_from_config`, e.g.::

        regularizers:
          reg1:
            class: Hoyer         # regularization method to use
            lambda: 1e-6         # regularization coefficient
            modules: [net.0]     # modules to apply regularization
            weight_only: True    # apply only to weights (no bias)
          reg2:
            class: SquaredHoyer
            lambda: 1e-6
            modules: [net.2]
            weight_only: True

    Returns an empty list when the config has no 'regularizers' section.
    """
    if 'regularizers' not in config:
        return []
    return [build_reg_from_config(model, reg_cfg)
            for reg_cfg in config['regularizers'].values()]
class Regularizer(PolicyBase):
    """Base policy that adds a weighted penalty term for selected modules.

    Expected configuration keys (via **kwargs): 'lambda' (penalty
    coefficient), 'modules' (names of sub-modules to regularize) and
    'weight_only' (whether biases are excluded from the penalty).
    """

    def __init__(self, model, **kwargs):
        self._model = model
        if not isinstance(self._model, nn.Module):
            raise ValueError('model should be an instance of nn.Module')
        name_to_module = dict(self._model.named_modules())
        self._weight_only = kwargs['weight_only']
        self._lambda = float(kwargs['lambda'])
        # DataParallel wraps the model, so every module name gains a
        # 'module.' prefix that must be prepended to the configured names.
        prefix = 'module.' if isinstance(self._model, torch.nn.DataParallel) else ''
        self._module_names = [prefix + _name for _name in kwargs['modules']]
        self._modules = [name_to_module[module_name] for module_name in self._module_names]
        logging.debug(f'Constructed {self.__class__.__name__} with config:')
        logging.debug('\n'.join([f'  -{k}:{v}' for k,v in kwargs.items()]) + '\n')

    def on_minibatch_begin(self, **kwargs):
        """Return the scaled penalty to be added to the minibatch loss."""
        reg_loss = self._lambda * self._compute_penalty()
        return reg_loss

    def _compute_penalty(self):
        """Compute the raw (unscaled) penalty; subclasses must override."""
        raise ValueError('Implement in a child class')
class Hoyer(Regularizer):
    """Hoyer sparsity regularizer.

    Per module, penalizes (sqrt(d) - ||v||_1 / ||v||_2) / (sqrt(d) - 1),
    a measure in [0, 1] that is 0 for a one-hot vector and 1 for a
    uniform one, where v is the flattened weight (plus bias unless
    weight_only) and d its dimension.
    """

    def __init__(self, model, **kwargs):
        super(Hoyer, self).__init__(model, **kwargs)

    def _compute_penalty(self):
        """Return the summed Hoyer penalty over the configured modules."""
        penalty = 0.
        for module in self._modules:
            dim = module.weight.numel()
            vector = module.weight.view(-1)
            # Fixed two bugs: the original referenced the non-existent
            # ``self.module`` (AttributeError) and used ``module.bias`` as
            # a boolean, which raises for multi-element bias tensors; test
            # for presence with ``is not None`` instead.
            if not self._weight_only and module.bias is not None:
                dim += module.bias.numel()
                vector = torch.cat([vector, module.bias])
            penalty += (dim ** 0.5 - vector.abs().sum() / vector.norm()) / (dim ** 0.5 - 1)
        return penalty
class SquaredHoyer(Regularizer):
    """Squared-Hoyer regularizer.

    Per module, penalizes ||v||_1^2 / ||v||_2^2, where v is the flattened
    weight (plus bias unless weight_only).
    """

    def __init__(self, model, **kwargs):
        super(SquaredHoyer, self).__init__(model, **kwargs)

    def _compute_penalty(self):
        """Return the summed squared-Hoyer penalty over the configured modules."""
        penalty = 0.
        for module in self._modules:
            vector = module.weight.view(-1)
            # Fixed: using ``module.bias`` as a boolean raises for
            # multi-element bias tensors; test with ``is not None``.
            if not self._weight_only and module.bias is not None:
                vector = torch.cat([vector, module.bias])
            penalty += vector.abs().sum().pow(2) / vector.norm().pow(2)
        return penalty
if __name__ == '__main__':
pass | 33.330275 | 93 | 0.624002 |
013fd9370c82568c8a3976632bf333b9c95292c6 | 938 | py | Python | coupons/models.py | sLeeNguyen/sales-support | 3f0a6977c8c26743373a70b4296516b7a71ccf4a | [
"Apache-2.0"
] | 1 | 2021-03-22T14:07:30.000Z | 2021-03-22T14:07:30.000Z | coupons/models.py | sLeeNguyen/sales-support | 3f0a6977c8c26743373a70b4296516b7a71ccf4a | [
"Apache-2.0"
] | null | null | null | coupons/models.py | sLeeNguyen/sales-support | 3f0a6977c8c26743373a70b4296516b7a71ccf4a | [
"Apache-2.0"
] | null | null | null | from django.db import models
from django.utils import timezone
from stores.models import Store
class Coupon(models.Model):
    """A discount coupon, applicable to a whole invoice or to products."""

    SCOPE_CHOICES = [
        ('I', 'Toàn bộ hoá đơn'),  # whole invoice
        ('P', 'Sản phẩm'),  # product
    ]

    BENEFICIARY_CHOICES = [
        ('E', 'Mọi khách hàng'),  # every customer
        ('N', 'Khách hàng hệ thống'),  # registered customer
        ('V', 'Khách hàng VIP')  # VIP customer
    ]

    title = models.CharField(max_length=100, blank=False, null=False)
    scope = models.CharField(max_length=1, choices=SCOPE_CHOICES, blank=False, null=False)
    # Fixed: the beneficiary choices and default are one-character strings,
    # so the field must be a CharField; the original
    # IntegerField(choices=..., default='E') could never validate or store
    # these values.
    beneficiary = models.CharField(max_length=1, choices=BENEFICIARY_CHOICES, default='E')
    discount_percent = models.FloatField(blank=True)
    max_discount_money = models.PositiveIntegerField(blank=True)
    app_date = models.DateTimeField(default=timezone.now)  # activation date
    exp_date = models.DateTimeField(blank=False, null=False)  # expiry date
    note = models.TextField(default='')
    store = models.ForeignKey(to=Store, on_delete=models.CASCADE)
014128990469de76bed3ac346da105ea98a96243 | 5,527 | py | Python | web_api/bearings/resources/inventory.py | zhanghe06/flask_restful | 6ef54f3f7efbbaff6169e963dcf45ab25e11e593 | [
"MIT"
] | 1 | 2020-12-04T03:15:47.000Z | 2020-12-04T03:15:47.000Z | web_api/bearings/resources/inventory.py | zhanghe06/flask_restful | 6ef54f3f7efbbaff6169e963dcf45ab25e11e593 | [
"MIT"
] | 1 | 2021-06-01T22:24:27.000Z | 2021-06-01T22:24:27.000Z | web_api/bearings/resources/inventory.py | zhanghe06/flask_restful | 6ef54f3f7efbbaff6169e963dcf45ab25e11e593 | [
"MIT"
] | 2 | 2020-12-04T03:16:18.000Z | 2021-09-04T14:10:12.000Z | #!/usr/bin/env python
# encoding: utf-8
"""
@author: zhanghe
@software: PyCharm
@file: inventory.py
@time: 2018-06-28 00:22
"""
from __future__ import unicode_literals
from flask import jsonify, make_response
from flask_restful import Resource, marshal, reqparse
from web_api.bearings.outputs.inventory import fields_item_inventory
from web_api.bearings.reqparsers.inventory import (
request_parser,
request_parser_item_post,
request_parser_item_put,
structure_key_item,
structure_key_items,
)
from web_api.commons.exceptions import BadRequest, NotFound
from web_api.bearings.apis.inventory import (
get_inventory_row_by_id,
edit_inventory,
delete_inventory,
get_inventory_limit_rows_by_last_id,
add_inventory,
get_inventory_pagination,
)
from web_api.commons.http_token_auth import token_auth
from web_api import app
SUCCESS_MSG = app.config['SUCCESS_MSG']
FAILURE_MSG = app.config['FAILURE_MSG']
class InventoryResource(Resource):
    """
    REST resource for a single inventory record, addressed by primary key
    (GET / PUT / DELETE). Every handler requires token authentication.
    """
    decorators = [token_auth.login_required]

    def get(self, pk):
        """
        Fetch one inventory record.

        Example:
            curl http://0.0.0.0:5000/bearings/inventory/1
        :param pk: primary key of the inventory record
        :return: marshalled record wrapped in the single-item envelope
        """
        data = get_inventory_row_by_id(pk)
        if not data:
            # Unknown pk -> 404 via the common exception handler.
            raise NotFound
        result = marshal(data, fields_item_inventory, envelope=structure_key_item)
        return jsonify(result)

    def delete(self, pk):
        """
        Delete one inventory record.

        Example:
            curl http://0.0.0.0:5000/bearings/inventory/1 -X DELETE
        :param pk: primary key of the inventory record
        :return: 204 on success, 400 on failure
        """
        result = delete_inventory(pk)
        if result:
            # Copy the shared message template so the module-level dict is
            # never mutated by a request.
            success_msg = SUCCESS_MSG.copy()
            return make_response(jsonify(success_msg), 204)
        else:
            failure_msg = FAILURE_MSG.copy()
            return make_response(jsonify(failure_msg), 400)

    def put(self, pk):
        """
        Update one inventory record from a JSON body.

        Example:
            curl http://0.0.0.0:5000/bearings/inventory/1 -H "Content-Type: application/json" -X PUT -d '
            {
                "inventory": {
                    "product_id": 1,
                    "warehouse_id": 1,
                    "rack_id": 1,
                    "stock_qty": 101
                }
            }'
        :param pk: primary key of the inventory record
        :return: 200 on success, 400 on failure
        """
        # Two-stage parsing: the outer parser extracts the "inventory"
        # envelope, the item parser validates the fields inside it.
        request_args = request_parser.parse_args()
        request_item_args = request_parser_item_put.parse_args(req=request_args)
        if not request_item_args:
            raise BadRequest('Bad request.')
        request_data = request_item_args
        result = edit_inventory(pk, request_data)
        if result:
            success_msg = SUCCESS_MSG.copy()
            return make_response(jsonify(success_msg), 200)
        else:
            failure_msg = FAILURE_MSG.copy()
            return make_response(jsonify(failure_msg), 400)
class InventoryListResource(Resource):
    """
    Collection resource for inventory records: cursor-style listing (GET)
    and creation (POST). Every handler requires token authentication.
    """
    decorators = [token_auth.login_required]

    def get(self):
        """
        List inventory records after a cursor (``last_pk``).

        Example:
            curl http://0.0.0.0:5000/bearings/inventories
            curl http://0.0.0.0:5000/bearings/inventories?last_pk=2&limit_num=2
        :return: marshalled records wrapped in the list envelope
        """
        # Cursor-pagination arguments from the query string.
        filter_parser = reqparse.RequestParser(bundle_errors=True)
        filter_parser.add_argument('last_pk', type=int, default=0, location='args')
        filter_parser.add_argument('limit_num', type=int, default=20, location='args')
        filter_parser_args = filter_parser.parse_args()
        data = get_inventory_limit_rows_by_last_id(**filter_parser_args)
        result = marshal(data, fields_item_inventory, envelope=structure_key_items)
        return jsonify(result)

    def post(self):
        """
        Create an inventory record from a JSON body.

        Example:
            curl http://0.0.0.0:5000/bearings/inventories -H "Content-Type: application/json" -X POST -d '
            {
                "inventory": {
                    "product_id": 1,
                    "warehouse_id": 1,
                    "rack_id": "1",
                    "stock_qty": 100,
                    "note": "SNFA"
                }
            }'
        :return: 201 with the new record id on success
        """
        request_args = request_parser.parse_args()
        request_item_args = request_parser_item_post.parse_args(req=request_args)
        if not request_item_args:
            raise BadRequest('Bad request.')
        request_data = request_item_args
        result = add_inventory(request_data)
        if result:
            # Fixed: the original wrote the new id into the module-level
            # SUCCESS_MSG dict ("SUCCESS_MSG['id'] = result"), permanently
            # polluting the shared template for every later request. Work
            # on a copy, as the other handlers in this module do.
            success_msg = SUCCESS_MSG.copy()
            success_msg['id'] = result
            return make_response(jsonify(success_msg), 201)
        raise NotFound
class InventoryPaginationResource(Resource):
    """
    Page-based listing of inventory records, with a ``total`` count in the
    response. Requires token authentication.
    """
    decorators = [token_auth.login_required]

    def get(self):
        """
        Example:
            curl http://0.0.0.0:5000/bearings/inventories/pagination
            curl http://0.0.0.0:5000/bearings/inventories/pagination?page=2&per_page=2
        :return: marshalled page of records plus the total record count
        """
        # Pagination arguments from the query string.
        args_parser = reqparse.RequestParser(bundle_errors=True)
        args_parser.add_argument('page', type=int, default=1, location='args')
        args_parser.add_argument('per_page', type=int, default=20, location='args')
        parsed_args = args_parser.parse_args()

        pagination = get_inventory_pagination(**parsed_args)
        payload = marshal(pagination.items, fields_item_inventory, envelope=structure_key_items)
        payload['total'] = pagination.total
        return jsonify(payload)
| 30.368132 | 106 | 0.616971 |
01417896f15a60c73ebefea3a8b55b1d8117b7c4 | 2,141 | py | Python | bob/db/swan/query_bio.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | bob/db/swan/query_bio.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | bob/db/swan/query_bio.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# from bob.bio.spear.database import AudioBioFile
import bob.bio.base
import bob.io.base
import bob.io.video
from bob.extension import rc
from .common import SwanVideoFile, SwanAudioFile, SwanVideoDatabase
# class SwanAudioBioFile(SwanAudioFile, AudioBioFile):
# """SwanAudioBioFile are video files actually"""
class SwanVideoBioFile(SwanVideoFile):
    """Biometric-recognition file wrapper; SWAN samples are video files."""
class Database(bob.bio.base.database.FileListBioDatabase, SwanVideoDatabase):
    """Wrapper class for the SWAN database for speaker recognition
    (http://www.idiap.ch/dataset/swan). This class defines a simple protocol
    for training, dev and eval by splitting the audio files of the database
    in three main parts.
    """

    def __init__(self, original_directory=rc['bob.db.swan.directory'],
                 bio_file_class=SwanVideoBioFile,
                 annotation_directory=rc['bob.db.swan.annotation_dir'],
                 annotation_extension='.json',
                 annotation_type='json',
                 name='swan', **kwargs):
        # The file lists defining the protocol splits ship inside this
        # package, next to this module.
        from pkg_resources import resource_filename
        folder = resource_filename(__name__, 'lists')
        # call base class constructor with the packaged list folder
        super(Database, self).__init__(
            folder, name=name, bio_file_class=bio_file_class,
            annotation_directory=annotation_directory,
            annotation_extension=annotation_extension,
            annotation_type=annotation_type,
            original_directory=original_directory,
            training_depends_on_protocol=False, models_depend_on_protocol=True,
            **kwargs
        )

    def objects(self, groups=None, protocol=None, purposes=None,
                model_ids=None, classes=None, filter_samples=None, **kwargs):
        """
        Return the database files matching the query.

        :param filter_samples: optional predicate applied to each file;
            only files for which it returns truthy are kept.
        """
        files = super(Database, self).objects(
            groups=groups, protocol=protocol, purposes=purposes,
            model_ids=model_ids, classes=classes, **kwargs)
        # Post-process the returned files — presumably path/metadata fixes;
        # see SwanVideoDatabase.update_files (TODO confirm).
        files = self.update_files(files)
        if filter_samples is None:
            return files
        files = list(filter(filter_samples, files))
        return files
| 38.927273 | 79 | 0.683793 |
0145f0c317a0b7c0a803c27a724e95a60c8be847 | 226 | py | Python | example.py | austitech/Tokenizer | 005bd6772ef3298b222c05e3357bf22961978a57 | [
"MIT"
] | null | null | null | example.py | austitech/Tokenizer | 005bd6772ef3298b222c05e3357bf22961978a57 | [
"MIT"
] | null | null | null | example.py | austitech/Tokenizer | 005bd6772ef3298b222c05e3357bf22961978a57 | [
"MIT"
] | null | null | null | from tokenizer import StringTokenizer
from token import tokentype
# Read the sample source file; use a context manager so the file handle is
# closed deterministically (the original leaked the object from open()).
with open('test.ex', 'r') as source_file:
    text = source_file.read()
t = StringTokenizer(text=text, tokentype=tokentype)
token_generator = t.create_token_generator()
print(token_generator)
| 18.833333 | 51 | 0.783186 |
014639d9ec97c6ecdda8d9da8e1c9b7abc9b6d28 | 2,008 | py | Python | scripts/smilx_pv_visualisation.py | aneube/smili-spine | 3cd8f95077d4bc1f5ac6146bc5356c3131f22e4b | [
"BSD-2-Clause"
] | 17 | 2015-03-09T19:22:07.000Z | 2021-05-24T20:25:08.000Z | scripts/smilx_pv_visualisation.py | oddway/smili | 4205b08e5fdcf5ae4fa94747a6dbeac0bb5e0cf0 | [
"BSD-2-Clause"
] | 16 | 2015-08-20T03:30:15.000Z | 2019-10-22T12:21:14.000Z | scripts/smilx_pv_visualisation.py | oddway/smili | 4205b08e5fdcf5ae4fa94747a6dbeac0bb5e0cf0 | [
"BSD-2-Clause"
] | 11 | 2015-06-22T00:11:01.000Z | 2021-12-26T21:29:52.000Z | #!/usr/bin/smilx
'''=========================================================================
The Software is copyright (c) Commonwealth Scientific and Industrial Research Organisation (CSIRO)
ABN 41 687 119 230.
All rights reserved.
Licensed under the CSIRO BSD 3-Clause License
You may not use this file except in compliance with the License.
You may obtain a copy of the License in the file LICENSE.md or at
https://stash.csiro.au/projects/SMILI/repos/smili/browse/license.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
========================================================================='''
'''
This script opens a pv map and its corresponding image, thresholds and overlays the two, loads camera view, links windows and disables interpolation.
To use it, run smilx, in python console type "execfile('smilx_pv_visualisation.py')"
'''
# Names of the input images; set them to empty strings to get popup dialogs.
imageName = "PAVEL_FLAWS_INV2.nii.gz"
pvImageName = "PAVEL_BiExp_Combined_PV_GM.nii.gz"
tmpOutName = "thresholded_pvimage.nii.gz"
MainWindow.loadFile(imageName) # load the anatomical image
image = MainWindow.activeImage() # pointer to the window just opened
MainWindow.loadFile(pvImageName) # load the partial-volume (PV) map
pvImage = MainWindow.activeImage() # pointer to the PV image window
# Process the PV map for display: threshold on [belowLevel, aboveLevel]
# (presumably mapping in-range voxels to 255 — confirm against the
# milxQtImage API) and save the result so it can be overlaid below.
belowLevel = 0.01
aboveLevel = 0.25
pvImage.binaryThreshold(255, belowLevel, aboveLevel)
milxQtFile.saveImage(tmpOutName, pvImage) # save thresholded result
# Overlay the processed PV result on the anatomical image; both windows get
# the same saved camera view and have display interpolation toggled
# (disabled, per this script's header docstring).
pvImage.viewToSagittal()
pvImage.loadView("camera.cam")
pvImage.interpolateDisplay()
image.overlay(tmpOutName)
image.viewToSagittal()
image.loadView("camera.cam")
image.interpolateDisplay()
MainWindow.tileTab() # tile the open windows
MainWindow.link() #link all the windows for synchronised interaction
| 38.615385 | 149 | 0.728586 |
014640d43ebc7a1a3b58c97712655fc4e51f491b | 6,887 | py | Python | src/genie/libs/parser/iosxe/tests/ShowPlatformSoftwareFed/cli/equal/golden_output3_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowPlatformSoftwareFed/cli/equal/golden_output3_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowPlatformSoftwareFed/cli/equal/golden_output3_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output ={
"lentry_label": {
24: {
"aal": {
"deagg_vrf_id": 0,
"eos0": {"adj_hdl": "0xf9000002", "hw_hdl": "0x7f02737e2ca8"},
"eos1": {"adj_hdl": "0xf9000002", "hw_hdl": "0x7f02737e2a98"},
"id": 1996488716,
"lbl": 24,
"lspa_handle": "0",
},
"adj": {
109: {
"adj": "0xdf000026",
"ifnum": "0x33",
"link_type": "MPLS",
"si": "0x7f0273423ab8",
},
139: {
"adj": "0x5c000037",
"ifnum": "0x36",
"link_type": "MPLS",
"si": "0x7f02737a2348",
},
},
"backwalk_cnt": 0,
"label": {
31: {
"adj_handle": "0x62000061",
"bwalk_cnt": 0,
"collapsed_oce": 0,
"flags": {"0x1": ["REAL"]},
"label_aal": {
1644167265: {
"adj_flags": "0",
"di_id": "0x526d",
"dmac": "0027.90bf.2ee7",
"label_type": 2,
"lbl": 0,
"link_type": 2,
"phdl": "0xab000447",
"ref_cnt": 1,
"rewrite_type": "PSH1(119)",
"ri": "0x7f02737e8a98",
"ri_id": "0x4e",
"si": "0x7f02737c1b08",
"si_id": "0x4034",
"smac": "00a7.42d6.c41f",
"sub_type": 0,
"vlan_id": 0,
"vrf_id": 0,
}
},
"link_type": "MPLS",
"local_adj": 0,
"local_label": 24,
"modify_cnt": 0,
"olbl_changed": 0,
"outlabel": "(34, 0)",
"pdflags": {"0": ["INSTALL_HW_OK"]},
"subwalk_cnt": 0,
"unsupported_recursion": 0,
},
32: {
"adj_handle": "0x89000062",
"bwalk_cnt": 0,
"collapsed_oce": 0,
"flags": {"0x1": ["REAL"]},
"label_aal": {
2298478690: {
"adj_flags": "0",
"di_id": "0x5268",
"dmac": "00a7.42ce.f69f",
"label_type": 2,
"lbl": 0,
"link_type": 2,
"phdl": "0x7c000442",
"ref_cnt": 1,
"rewrite_type": "PSH1(119)",
"ri": "0x7f027379b138",
"ri_id": "0x24",
"si": "0x7f02737a4d58",
"si_id": "0x4035",
"smac": "00a7.42d6.c41f",
"sub_type": 0,
"vlan_id": 0,
"vrf_id": 0,
}
},
"link_type": "MPLS",
"local_adj": 0,
"local_label": 24,
"modify_cnt": 0,
"olbl_changed": 0,
"outlabel": "(29, 0)",
"pdflags": {"0": ["INSTALL_HW_OK"]},
"subwalk_cnt": 0,
"unsupported_recursion": 0,
},
},
"lb": {
38: {
"aal": {
"af": 0,
"ecr_id": 4177526786,
"ecr_type": "0",
"ecrh": "0x7f02737e49f8(28:2)",
"hwhdl": ":1937656312 "
"::0x7f02737e11c8,0x7f02737e2728,0x7f02737e11c8,0x7f02737e2728",
"ref": 3,
},
"bwalk": {"in_prog": 0, "nested": 0, "req": 0},
"bwalk_cnt": 0,
"ecr_map_objid": 0,
"ecrh": "0xf9000002",
"finish_cnt": 0,
"flags": "0",
"link_type": "IP",
"local_label": 24,
"modify_cnt": 0,
"mpls_ecr": 1,
"num_choices": 2,
"old_ecrh": "0",
"path_inhw": 2,
"subwalk_cnt": 0,
}
},
"lentry_hdl": "0x7700000c",
"lspa_handle": "0",
"modify_cnt": 8,
"nobj": ["LB", " 38"],
"sw_enh_ecr_scale": {
38: {
"adjs": 2,
"ecr_adj": {
1644167265: {
"adj_lentry": "[eos0:0x7f02734123b8 "
"eos1:0x7f02737ec5e8]",
"di_id": 20499,
"is_mpls_adj": 1,
"l3adj_flags": "0x100000",
"recirc_adj_id": 3120562239,
"rih": "0x7f02737e0bf8(74)",
"sih": "0x7f02737e11c8(182)",
},
2298478690: {
"adj_lentry": "[eos0:0x7f02737e6dd8 "
"eos1:0x7f02737b21d8]",
"di_id": 20499,
"is_mpls_adj": 1,
"l3adj_flags": "0x100000",
"recirc_adj_id": 1442840640,
"rih": "0x7f02737dcbe8(75)",
"sih": "0x7f02737e2728(183)",
},
2483028067: {
"di_id": 20499,
"rih": "0x7f02737eaa18(52)",
"sih": "0x7f02737e4c08(170)",
},
},
"ecr_hwhdl": "0x7f02737e49f8",
"ecrhdl": "0xf9000002",
"eos": 1,
"llabel": 24,
"mixed_adj": "0",
"mod_cnt": 0,
"pmismatch": 0,
"pordermatch": 0,
"prev_npath": 0,
"reprogram_hw": "0",
}
},
}
}
}
| 38.909605 | 88 | 0.283287 |
0146de801330617849875f5bc5ee04e2b90625fa | 3,610 | py | Python | tests/test_simulator_utility.py | tancheng/cache-sim | adfcbec961543a8424988cbadacb575c551f3cc3 | [
"MIT"
] | 27 | 2016-02-06T20:49:19.000Z | 2021-11-02T03:11:26.000Z | tests/test_simulator_utility.py | tancheng/cache-sim | adfcbec961543a8424988cbadacb575c551f3cc3 | [
"MIT"
] | 5 | 2020-02-24T18:57:11.000Z | 2021-09-01T00:27:18.000Z | tests/test_simulator_utility.py | tancheng/cache-sim | adfcbec961543a8424988cbadacb575c551f3cc3 | [
"MIT"
] | 16 | 2016-02-06T20:49:06.000Z | 2022-01-14T02:49:14.000Z | #!/usr/bin/env python3
import nose.tools as nose
from cachesimulator.bin_addr import BinaryAddress
from cachesimulator.word_addr import WordAddress
def test_get_bin_addr_unpadded():
    """get_bin_addr should return unpadded binary address of word address"""
    # 180 decimal == 0b10110100; with no num_addr_bits there is no padding.
    nose.assert_equal(
        BinaryAddress(word_addr=WordAddress(180)),
        '10110100')
def test_get_bin_addr_padded():
    """get_bin_addr should return padded binary address of word address"""
    # 44 decimal == 0b101100; num_addr_bits=8 zero-pads it to 8 digits.
    nose.assert_equal(
        BinaryAddress(word_addr=WordAddress(44), num_addr_bits=8),
        '00101100')
def test_prettify_bin_addr_16_bit():
    """prettify_bin_addr should prettify 16-bit string into groups of 4"""
    # Docstring fixed: the input is 16 bits and, per the expected output,
    # it is split into four groups of 4 (min group size is 3).
    nose.assert_equal(
        BinaryAddress.prettify('1010101110101011', min_bits_per_group=3),
        '1010 1011 1010 1011')
def test_prettify_bin_addr_8_bit():
    """prettify_bin_addr should prettify 8-bit string into groups of 4"""
    # Docstring fixed: 8 bits cannot split into equal groups of 3, so the
    # expected output uses two groups of 4.
    nose.assert_equal(
        BinaryAddress.prettify('10101011', min_bits_per_group=3),
        '1010 1011')
def test_prettify_bin_addr_7_bit():
    """prettify_bin_addr should prettify 7-bit string into groups of 3-4"""
    # 7 bits split into a group of 3 and a group of 4 (min group size 3).
    nose.assert_equal(
        BinaryAddress.prettify('1011010', min_bits_per_group=3),
        '101 1010')
def test_prettify_bin_addr_6_bit():
    """prettify_bin_addr should prettify 6-bit string into groups of 3"""
    # 6 bits split evenly into two groups of exactly min_bits_per_group.
    nose.assert_equal(
        BinaryAddress.prettify('101011', min_bits_per_group=3),
        '101 011')
def test_prettify_bin_addr_5_bit():
    """prettify_bin_addr should leave a 5-bit string unsplit"""
    # Docstring fixed: 5 bits are too few to form two groups of at least 3,
    # so the expected output is the input unchanged.
    nose.assert_equal(
        BinaryAddress.prettify('10110', min_bits_per_group=3),
        '10110')
def test_get_tag_5_bit():
    """get_tag should return correct 5 tag bits for an address"""
    # The tag is the 5 most-significant bits of '10110100'.
    nose.assert_equal(
        BinaryAddress('10110100').get_tag(num_tag_bits=5),
        '10110')
def test_get_tag_0_bit():
    """get_tag should return None if no bits are allocated to a tag"""
    # A zero-width tag is represented as None, not an empty string.
    nose.assert_is_none(
        BinaryAddress('10110100').get_tag(num_tag_bits=0))
def test_get_index_2_bit():
    """get_index should return correct 2 index bits for an address"""
    # The index bits sit between the tag and the single offset bit.
    nose.assert_equal(
        BinaryAddress('11111101').get_index(
            num_offset_bits=1, num_index_bits=2), '10')
def test_get_index_0_bit():
    """get_index should return None if no bits are allocated to an index"""
    # Zero index bits yields None, not an empty string.
    nose.assert_is_none(
        BinaryAddress('11111111').get_index(
            num_offset_bits=1, num_index_bits=0))
def test_get_offset_2_bit():
    """get_offset should return correct 2 offset bits for an address"""
    # The offset is the 2 least-significant bits of '11111101'.
    nose.assert_equal(
        BinaryAddress('11111101').get_offset(num_offset_bits=2), '01')
def test_get_offset_0_bit():
    """get_offset should return None if no bits are allocated to an offset"""
    # Zero offset bits yields None, not an empty string.
    nose.assert_is_none(
        BinaryAddress('10110100').get_offset(num_offset_bits=0))
def test_get_consecutive_words_1_word():
    """get_consecutive_words should return same word for 1-word blocks"""
    # A 1-word block contains only the word itself.
    nose.assert_equal(
        WordAddress(23).get_consecutive_words(num_words_per_block=1),
        [23])
def test_get_consecutive_words_2_word():
    """get_consecutive_words should return correct words for 2-word blocks"""
    # 22 is aligned to a 2-word boundary, so its block is [22, 23].
    nose.assert_equal(
        WordAddress(22).get_consecutive_words(num_words_per_block=2),
        [22, 23])
def test_get_consecutive_words_4_word():
    """get_consecutive_words should return correct words for 4-word blocks"""
    # 21 falls inside the 4-word-aligned block starting at 20.
    nose.assert_equal(
        WordAddress(21).get_consecutive_words(num_words_per_block=4),
        [20, 21, 22, 23])
| 31.12069 | 77 | 0.713573 |
0146ed0b2f2154ab28c5b238a6e0624fb4e5747e | 10,347 | py | Python | stubs.min/System/Windows/__init___parts/Vector.py | denfromufa/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | 1 | 2017-07-07T11:15:45.000Z | 2017-07-07T11:15:45.000Z | stubs.min/System/Windows/__init___parts/Vector.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | stubs.min/System/Windows/__init___parts/Vector.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | class Vector(object,IFormattable):
"""
Represents a displacement in 2-D space.
Vector(x: float,y: float)
"""
@staticmethod
def Add(*__args):
"""
Add(vector: Vector,point: Point) -> Point
Translates the specified point by the specified vector and returns the
resulting point.
vector: The amount to translate the specified point.
point: The point to translate.
Returns: The result of translating point by vector.
Add(vector1: Vector,vector2: Vector) -> Vector
Adds two vectors and returns the result as a System.Windows.Vector structure.
vector1: The first vector to add.
vector2: The second vector to add.
Returns: The sum of vector1 and vector2.
"""
pass
@staticmethod
def AngleBetween(vector1,vector2):
"""
AngleBetween(vector1: Vector,vector2: Vector) -> float
Retrieves the angle,expressed in degrees,between the two specified vectors.
vector1: The first vector to evaluate.
vector2: The second vector to evaluate.
Returns: The angle,in degrees,between vector1 and vector2.
"""
pass
@staticmethod
def CrossProduct(vector1,vector2):
"""
CrossProduct(vector1: Vector,vector2: Vector) -> float
Calculates the cross product of two vectors.
vector1: The first vector to evaluate.
vector2: The second vector to evaluate.
Returns: The cross product of vector1 and vector2. The following formula is used to
calculate the cross product: (Vector1.X * Vector2.Y) - (Vector1.Y * Vector2.X)
"""
pass
@staticmethod
def Determinant(vector1,vector2):
"""
Determinant(vector1: Vector,vector2: Vector) -> float
Calculates the determinant of two vectors.
vector1: The first vector to evaluate.
vector2: The second vector to evaluate.
Returns: The determinant of vector1 and vector2.
"""
pass
@staticmethod
def Divide(vector,scalar):
"""
Divide(vector: Vector,scalar: float) -> Vector
Divides the specified vector by the specified scalar and returns the result as
a System.Windows.Vector.
vector: The vector structure to divide.
scalar: The amount by which vector is divided.
Returns: The result of dividing vector by scalar.
"""
pass
@staticmethod
def Equals(*__args):
"""
Equals(self: Vector,value: Vector) -> bool
Compares two vectors for equality.
value: The vector to compare with this vector.
Returns: true if value has the same System.Windows.Vector.X and System.Windows.Vector.Y
values as this vector; otherwise,false.
Equals(self: Vector,o: object) -> bool
Determines whether the specified System.Object is a System.Windows.Vector
structure and,if it is,whether it has the same System.Windows.Vector.X and
System.Windows.Vector.Y values as this vector.
o: The vector to compare.
Returns: true if o is a System.Windows.Vector and has the same System.Windows.Vector.X
and System.Windows.Vector.Y values as this vector; otherwise,false.
Equals(vector1: Vector,vector2: Vector) -> bool
Compares the two specified vectors for equality.
vector1: The first vector to compare.
vector2: The second vector to compare.
Returns: true if t he System.Windows.Vector.X and System.Windows.Vector.Y components of
vector1 and vector2 are equal; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: Vector) -> int
Returns the hash code for this vector.
Returns: The hash code for this instance.
"""
pass
@staticmethod
def Multiply(*__args):
"""
Multiply(vector: Vector,matrix: Matrix) -> Vector
Transforms the coordinate space of the specified vector using the specified
System.Windows.Media.Matrix.
vector: The vector structure to transform.
matrix: The transformation to apply to vector.
Returns: The result of transforming vector by matrix.
Multiply(vector1: Vector,vector2: Vector) -> float
Calculates the dot product of the two specified vectors and returns the result
as a System.Double.
vector1: The first vector to multiply.
vector2: The second vector structure to multiply.
Returns: A System.Double containing the scalar dot product of vector1 and vector2,which
is calculated using the following formula: (vector1.X * vector2.X) + (vector1.Y
* vector2.Y)
Multiply(vector: Vector,scalar: float) -> Vector
Multiplies the specified vector by the specified scalar and returns the
resulting System.Windows.Vector.
vector: The vector to multiply.
scalar: The scalar to multiply.
Returns: The result of multiplying vector and scalar.
Multiply(scalar: float,vector: Vector) -> Vector
Multiplies the specified scalar by the specified vector and returns the
resulting System.Windows.Vector.
scalar: The scalar to multiply.
vector: The vector to multiply.
Returns: The result of multiplying scalar and vector.
"""
pass
def Negate(self):
"""
Negate(self: Vector)
Negates this vector. The vector has the same magnitude as before,but its
direction is now opposite.
"""
pass
def Normalize(self):
"""
Normalize(self: Vector)
Normalizes this vector.
"""
pass
@staticmethod
def Parse(source):
"""
Parse(source: str) -> Vector
Converts a string representation of a vector into the equivalent
System.Windows.Vector structure.
source: The string representation of the vector.
Returns: The equivalent System.Windows.Vector structure.
"""
pass
@staticmethod
def Subtract(vector1,vector2):
"""
Subtract(vector1: Vector,vector2: Vector) -> Vector
Subtracts the specified vector from another specified vector.
vector1: The vector from which vector2 is subtracted.
vector2: The vector to subtract from vector1.
Returns: The difference between vector1 and vector2.
"""
pass
def ToString(self,provider=None):
"""
ToString(self: Vector,provider: IFormatProvider) -> str
Returns the string representation of this System.Windows.Vector structure with
the specified formatting information.
provider: The culture-specific formatting information.
Returns: A string that represents the System.Windows.Vector.X and
System.Windows.Vector.Y values of this System.Windows.Vector.
ToString(self: Vector) -> str
Returns the string representation of this System.Windows.Vector structure.
Returns: A string that represents the System.Windows.Vector.X and
System.Windows.Vector.Y values of this System.Windows.Vector.
"""
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+yx.__add__(y) <==> x+y """
pass
def __div__(self,*args):
""" x.__div__(y) <==> x/y """
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __mul__(self,*args):
""" x.__mul__(y) <==> x*yx.__mul__(y) <==> x*yx.__mul__(y) <==> x*y """
pass
def __neg__(self,*args):
""" x.__neg__() <==> -x """
pass
@staticmethod
def __new__(self,x,y):
"""
__new__(cls: type,x: float,y: float)
__new__[Vector]() -> Vector
"""
pass
def __ne__(self,*args):
pass
def __radd__(self,*args):
"""
__radd__(vector1: Vector,vector2: Vector) -> Vector
Adds two vectors and returns the result as a vector.
vector1: The first vector to add.
vector2: The second vector to add.
Returns: The sum of vector1 and vector2.
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
def __rmul__(self,*args):
"""
__rmul__(vector1: Vector,vector2: Vector) -> float
Calculates the dot product of the two specified vector structures and returns
the result as a System.Double.
vector1: The first vector to multiply.
vector2: The second vector to multiply.
Returns: Returns a System.Double containing the scalar dot product of vector1 and
vector2,which is calculated using the following formula:vector1.X * vector2.X
+ vector1.Y * vector2.Y
__rmul__(scalar: float,vector: Vector) -> Vector
Multiplies the specified scalar by the specified vector and returns the
resulting vector.
scalar: The scalar to multiply.
vector: The vector to multiply.
Returns: The result of multiplying scalar and vector.
"""
pass
def __rsub__(self,*args):
"""
__rsub__(vector1: Vector,vector2: Vector) -> Vector
Subtracts one specified vector from another.
vector1: The vector from which vector2 is subtracted.
vector2: The vector to subtract from vector1.
Returns: The difference between vector1 and vector2.
"""
pass
def __str__(self,*args):
pass
def __sub__(self,*args):
""" x.__sub__(y) <==> x-y """
pass
Length=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the length of this vector.
Get: Length(self: Vector) -> float
"""
LengthSquared=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the square of the length of this vector.
Get: LengthSquared(self: Vector) -> float
"""
X=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Vector.X component of this vector.
Get: X(self: Vector) -> float
Set: X(self: Vector)=value
"""
Y=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Vector.Y component of this vector.
Get: Y(self: Vector) -> float
Set: Y(self: Vector)=value
"""
| 30.612426 | 215 | 0.667053 |
0148eac89948dbf7e99bb94cb77565537c9937b2 | 6,869 | py | Python | binary_eight_queens_enhanced_num.py | MiniMarvin/oito-rainhas | 67be60d798a99ac218b2bbc53eabf47a563dce7b | [
"MIT"
] | null | null | null | binary_eight_queens_enhanced_num.py | MiniMarvin/oito-rainhas | 67be60d798a99ac218b2bbc53eabf47a563dce7b | [
"MIT"
] | null | null | null | binary_eight_queens_enhanced_num.py | MiniMarvin/oito-rainhas | 67be60d798a99ac218b2bbc53eabf47a563dce7b | [
"MIT"
] | null | null | null | import random
import math
from collections import OrderedDict
class BinaryEightQueensEnhancedNum:
def __init__(self, populationSize, crossOverMethod=1, selectMethod=1, mutationMethod=1, entireFit=False):
'''
inicializa a classe.
@params populationSize o tamanho da população.
'''
self.populationSize = populationSize
self.population = []
self.crossOverMethod = self.crossOver
if crossOverMethod == 2:
self.crossOverMethod = self.crossOver2
self.selectionMethod = self.selectParents
if selectMethod == 2:
self.selectionMethod = self.selectRoulette
self.mutate = self.mutate1
if mutationMethod == 2:
self.mutate = self.mutate2
while len(self.population) < populationSize:
gen = list(range(0,8))
random.shuffle(gen)
if gen not in self.population:
self.population.append(gen)
self.history = [self.population]
if not entireFit:
self.checkSolution = self.checkSolution1
else:
self.checkSolution = self.checkSolution2
def buildFenotype(self, gen):
'''
Constroi o fenotipo a partir do genoma (posição das rainhas).
@params gen string o gene a ser calculado.
'''
return gen
def mutate1(self, gen):
# swap
# todo: verificar se vale trocar o fitness
mutationProbability = 0.4
for i in range(len(gen)):
if random.random() < mutationProbability:
a = random.randrange(len(gen))
gen[i] = a
output = gen
return output
def mutate2(self, gen):
# swap
# todo: verificar se vale trocar o fitness
mutationProbability = 0.4
if random.random() < mutationProbability:
a = random.randrange(len(gen))
b = random.randrange(len(gen))
gen[a], gen[b] = gen[b], gen[a]
output = gen
return output
def crossOver(self, gen1, gen2):
pos = random.randrange(len(gen1))
child1 = gen1[:pos] + gen2[pos:]
child2 = gen2[:pos] + gen1[pos:]
return child1, child2
def crossOver2(self, gen1, gen2):
pos = random.randrange(len(gen1))
child1 = gen1[:pos] + gen2[pos:] + gen2[:pos]
child2 = gen2[:pos] + gen1[pos:] + gen1[:pos]
# remove repetitions
child1 = list(OrderedDict.fromkeys(child1))
child2 = list(OrderedDict.fromkeys(child2))
return child1, child2
def selectRoulette(self, population):
fitnesses = [self.fitness(gen) for gen in population]
total = sum(fitnesses)
roulette = []
prevProb = 0
for fit in fitnesses:
roulette.append(prevProb + fit/total)
prevProb += fit/total
prob1 = random.random()
prob2 = random.random()
prev = 0
parents = []
# print(prob1, prob2)
for i in range(len(roulette)):
if prev <= prob1 and prob1 <= roulette[i]:
parents.append(population[i])
if prev <= prob2 and prob2 <= roulette[i]:
parents.append(population[i])
prev = roulette[i]
# print(parents)
return parents[:2]
def selectParents(self, population):
gens = random.sample(population, 5)
fenotypes = [(self.buildFenotype(gen), gen) for gen in gens]
fitness = [(self.fitness(fen), gen) for fen, gen in fenotypes]
fitness.sort(reverse=True)
selectedGens = [gen for fitness, gen in fitness]
return selectedGens[:2]
def checkSolution1(self, population):
found = False
ans = []
for gen in population:
if self.fitness(gen) == 28:
found = True
ans = gen
break
return found, ans
def checkSolution2(self, population):
found = True
ans = []
for gen in population:
if self.fitness(gen) != 28:
found = False
ans = gen
break
return found, ans
def fitness(self, gen):
maxHits = 8*(8-1)//2
fenotypes = self.buildFenotype(gen)
hits = 0
for i in range(len(fenotypes)):
for j in range(i):
# print(fenotypes[i], fenotypes[j])
if fenotypes[i] == fenotypes[j]:
hits += 1
elif abs(fenotypes[i] - fenotypes[j]) == abs(i - j):
hits += 1
return maxHits - hits
def fit(self):
didFinish = False
countGenerations = 0
populationFitness = []
convergentNumber = 0
for i in range(10000):
found, gen = self.checkSolution(self.population)
if found:
print('alcançou a solução com ' + str(i) + ' iterações')
# print(self.population)
values = [(self.fitness(gen), self.buildFenotype(gen)) for gen in self.population if self.fitness(gen) == 28]
# print(values)
##############################
## measurement of metrics
##############################
countGenerations = i
didFinish = True
populationFitness = [self.fitness(gen) for gen in self.population]
convergentNumber = len(values)
##############################
break
fitElements = [(self.fitness(gen), gen) for gen in self.population]
fitElements.sort(reverse=True)
if i%100 == 0:
print('passo', i)
# print(fitElements[0][0], self.buildFenotype(fitElements[0][1]))
limit = math.floor(0.9*len(fitElements))
newPopulation = []
for i in range(limit, -1, -2):
parents = self.selectionMethod(self.population)
# aaa|bbbbbbb
# bbb|aaaaaaa
# print(parents)
gens = self.crossOverMethod(parents[0], parents[1])
m1, m2 = self.mutate(gens[0]), self.mutate(gens[1])
newPopulation.append(m1)
newPopulation.append(m2)
population = [gen for fitness,gen in fitElements]
population = population[:len(population) - limit] + newPopulation
self.population = population
return didFinish, countGenerations, populationFitness, convergentNumber
# print('finalizou com:', [self.buildFenotype(gen) for gen in self.population])
| 32.709524 | 125 | 0.526569 |
014a513ce67d34f43fbb3d224139ad39fdd0d756 | 639 | py | Python | problems/07/solution_07.py | r1cc4rdo/daily_coding_problem | 6ac85309fad2f64231ac7ab94aa4158e18bdec40 | [
"Unlicense"
] | 158 | 2018-01-25T06:33:30.000Z | 2022-03-14T23:18:05.000Z | problems/07/solution_07.py | r1cc4rdo/daily_coding_problem | 6ac85309fad2f64231ac7ab94aa4158e18bdec40 | [
"Unlicense"
] | 9 | 2018-07-04T00:31:57.000Z | 2020-05-16T21:02:30.000Z | problems/07/solution_07.py | r1cc4rdo/daily_coding_problem | 6ac85309fad2f64231ac7ab94aa4158e18bdec40 | [
"Unlicense"
] | 50 | 2018-06-22T16:48:44.000Z | 2022-01-11T16:45:48.000Z | def coding_problem_07(s):
"""
Given the mapping a = 1, b = 2, ... z = 26, and an encoded message, count the number of ways it can be decoded.
Examples:
>>> coding_problem_07('111') # possible interpretations: 'aaa', 'ka', 'ak'
3
>>> coding_problem_07('2626') # 'zz', 'zbf', 'bfz', 'bfbf'
4
"""
symbols = map(str, range(1, 27))
if not s:
return 1
matches = filter(lambda symbol: s.startswith(symbol), symbols)
encodings = [coding_problem_07(s[len(m):]) for m in matches]
return sum(encodings)
if __name__ == '__main__':
import doctest
doctest.testmod(verbose=True)
| 26.625 | 115 | 0.610329 |
014ad83f5b068ba0e043359ccb1ae8ec77fba56c | 1,201 | py | Python | lfs/manage/views/criteria.py | michael-hahn/django-lfs | 26c3471a8f8d88269c84f714f507b952dfdb6397 | [
"BSD-3-Clause"
] | 345 | 2015-01-03T19:19:27.000Z | 2022-03-20T11:00:50.000Z | lfs/manage/views/criteria.py | mxins/django-lfs | bf42ed80ce0e1ec96db6ab985adcc614ea79dfc8 | [
"BSD-3-Clause"
] | 73 | 2015-01-06T14:54:02.000Z | 2022-03-11T23:11:34.000Z | lfs/manage/views/criteria.py | mxins/django-lfs | bf42ed80ce0e1ec96db6ab985adcc614ea79dfc8 | [
"BSD-3-Clause"
] | 148 | 2015-01-07T16:30:08.000Z | 2022-03-25T21:20:58.000Z | # django imports
from django.conf import settings
from django.contrib.auth.decorators import permission_required
from django.http import HttpResponse
# lfs imports
from lfs.core.utils import import_symbol
@permission_required("core.manage_shop")
def add_criterion(request):
"""
Adds a new criterion form.
"""
try:
default_criterion = settings.LFS_CRITERIA[0]
default_criterion = import_symbol(default_criterion[0])
result = default_criterion().render(request, 10)
except:
result = ""
default_criterion = settings.LFS_CRITERIA[0]
default_criterion = import_symbol(default_criterion[0])
result = default_criterion().render(request, 10)
return HttpResponse(result)
@permission_required("core.manage_shop")
def change_criterion_form(request):
"""
Changes the changed criterion form to the given type (via request body)
form.
This is called via an AJAX request. The result is injected into the right
DOM node.
"""
type = request.POST.get("type", "price")
criterion = import_symbol(type)
# create dummy criterion
result = criterion(pk=1).render(request, 10)
return HttpResponse(result)
| 27.295455 | 77 | 0.717735 |
014b1316bbc807d3a6c4e323a2ddb1f35250a147 | 428 | py | Python | components/studio/apps/migrations/0055_appinstance_access.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 25 | 2020-05-08T22:24:54.000Z | 2022-03-11T18:16:58.000Z | components/studio/apps/migrations/0055_appinstance_access.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 75 | 2020-05-08T22:15:59.000Z | 2021-11-22T10:00:04.000Z | components/studio/apps/migrations/0055_appinstance_access.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 12 | 2020-11-04T13:09:46.000Z | 2022-03-14T16:22:40.000Z | # Generated by Django 3.1.7 on 2021-05-26 12:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('apps', '0054_auto_20210525_1114'),
]
operations = [
migrations.AddField(
model_name='appinstance',
name='access',
field=models.CharField(blank=True, default='private', max_length=20, null=True),
),
]
| 22.526316 | 92 | 0.61215 |
014c661b4ee9f1765fe9a71fd14a56e7f1881ebd | 1,512 | py | Python | modulo-3/aulas/funcao_2.py | Luis-Felipe-N/curso-em-video-python | 09ff58ae31ae0360ebec74de609011d527956065 | [
"MIT"
] | null | null | null | modulo-3/aulas/funcao_2.py | Luis-Felipe-N/curso-em-video-python | 09ff58ae31ae0360ebec74de609011d527956065 | [
"MIT"
] | null | null | null | modulo-3/aulas/funcao_2.py | Luis-Felipe-N/curso-em-video-python | 09ff58ae31ae0360ebec74de609011d527956065 | [
"MIT"
] | null | null | null | # COMO TRABALHAMOS MUITO COM ROTINAS, CRIAR UMA FUNÇÃO E CHAMA-LA NO CÓDIGO FACILITA MUITO NÃO TER QUE ESCREVER A MESMA LINHA VÁRIAS VEZES
# USAMOS O COMANDO DEF PARA DECLARAR UMA FUNCAO, DEPOIS UM NOME PARA A FUNÇÃO EX:
# def nomeDaFuncao():
# # BLOCO COM O CÓDIGO QUE DESEJA QUE SEJÁ EXECULTADO NA CHAMADA DA FUNÇÃO
# print('Funcionando')
# nomeDaFuncao()# O RESULTADO FUI ("Funcionando")
# SEMPRE QUE PRESCISAMOS MOSTRAR UMA LINHA ESCREVEMOS TODA VEZ O MESMO CÓDIGO
# PARA FACILITAR PODEMOS CRIAR UMA FUNÇÃO QUE MOSTRE ESSA LINHA SEM PRESCISAR ESCREVER O CÓDIGO PRINT VÁRIAS VEZES
def caixa(tipo, msg):
    """Print *msg* framed by horizontal rules made of *tipo* characters.

    Each rule is twice as long as the message.
    """
    linha = tipo * (len(msg) * 2)
    print(linha)
    print(msg)
    print(linha)
caixa('#', 'Python')# O RESULTADO É
'''
############
Python
############'''
# MAS PODE VARIAER DE ACORDO COM O TEXTO SOLICITADO
def soma(a, b):
    """Print the sum of the two required arguments ``a`` and ``b``."""
    print(a + b)
soma(1, 4)# RESULTADO É 5
soma(b=3, a=1)# RESULTADO É 4, RECLARAMOS QUE O B=3 E A=1
def contador(*n):
    """Print how many positional arguments were received.

    The ``*n`` parameter packs every argument into a single tuple.
    """
    print(len(n))
contador(1, 5, 7, 7)
def dobrar(valores):
    """Double every element of *valores* in place.

    The list is mutated, and a shallow copy of the doubled list is also
    returned (preserving the original contract).

    Fixes: the parameter used to be named ``list``, shadowing the builtin,
    and the manual while/index loop is replaced by ``enumerate``.
    """
    for indice, valor in enumerate(valores):
        valores[indice] = valor * 2
    return valores[:]
valores = [1, 2, 9, 7, 4]
print(valores)
dobrar(valores)
print(valores)
| 22.909091 | 138 | 0.671958 |
014ca2e3d1790bdd33dbe16cd9e24261132907eb | 5,160 | py | Python | ph5/utilities/tests/test_seg2toph5.py | iris-edu-int/PH5 | 2056191ec3db1dbcbbd18facba56750d3c5cc5b4 | [
"MIT"
] | 21 | 2016-12-07T20:09:31.000Z | 2022-03-07T22:23:57.000Z | ph5/utilities/tests/test_seg2toph5.py | iris-edu-int/PH5 | 2056191ec3db1dbcbbd18facba56750d3c5cc5b4 | [
"MIT"
] | 395 | 2016-11-03T03:43:55.000Z | 2022-03-08T20:54:22.000Z | ph5/utilities/tests/test_seg2toph5.py | iris-edu-int/PH5 | 2056191ec3db1dbcbbd18facba56750d3c5cc5b4 | [
"MIT"
] | 6 | 2016-10-25T22:22:38.000Z | 2021-05-10T18:19:45.000Z | '''
Tests for seg2toph5
'''
import os
import sys
import unittest
import logging
from mock import patch
from testfixtures import OutputCapture, LogCapture
from ph5.utilities import seg2toph5, initialize_ph5
from ph5.core.tests.test_base import LogTestCase, TempDirTestCase,\
initialize_ex
from ph5.core import ph5api
class TestSeg2toPH5_main(TempDirTestCase, LogTestCase):
# Integration tests for seg2toph5: conversion of SEG-2 data into a PH5
# archive, and the creation/update of HDF5 external links.
def tearDown(self):
# Close the PH5 file handle before the temp dir is removed.
self.ph5object.ph5close()
super(TestSeg2toPH5_main, self).tearDown()
def test_main(self):
# check external links created
# Expected per-trace node names produced by converting 15001.dat.
data_nodes = ['Das_g_0000SV01', 'Das_g_0000SV02', 'Das_g_0000SV03',
'Das_g_0000SV04', 'Das_g_0000SV05', 'Das_g_0000SV06',
'Das_g_0000SV07', 'Das_g_0000SV08', 'Das_g_0000SV09',
'Das_g_0000SV10', 'Das_g_0000SV11', 'Das_g_0000SV12',
'Das_g_0000SV13', 'Das_g_0000SV14', 'Das_g_0000SV15',
'Das_g_0000SV16', 'Das_g_0000SV17', 'Das_g_0000SV18',
'Das_g_0000SV19', 'Das_g_0000SV20', 'Das_g_0000SV21',
'Das_g_0000SV22', 'Das_g_0000SV23', 'Das_g_0000SV24',
'Das_g_0000SV25', 'Das_g_0000SV26', 'Das_g_0000SV27',
'Das_g_0000SV28', 'Das_g_0000SV29', 'Das_g_0000SV30',
'Das_g_0000SV31', 'Das_g_0000SV32', 'Das_g_0000SV33',
'Das_g_0000SV34', 'Das_g_0000SV35', 'Das_g_0000SV36',
'Das_g_0000SV37', 'Das_g_0000SV38', 'Das_g_0000SV39',
'Das_g_0000SV40', 'Das_g_0000SV41', 'Das_g_0000SV42',
'Das_g_0000SV43', 'Das_g_0000SV44', 'Das_g_0000SV45',
'Das_g_0000SV46', 'Das_g_0000SV47', 'Das_g_0000SV48',
'Das_g_0000SV49', 'Das_g_0000SV50', 'Das_g_0000SV51',
'Das_g_0000SV52', 'Das_g_0000SV53', 'Das_g_0000SV54',
'Das_g_0000SV55', 'Das_g_0000SV56', 'Das_g_0000SV57',
'Das_g_0000SV58', 'Das_g_0000SV59', 'Das_g_0000SV60']
# Create an empty master.ph5 by invoking initialize_ph5's CLI.
testargs = ['initialize_ph5', '-n', 'master.ph5']
with patch.object(sys, 'argv', testargs):
initialize_ph5.main()
# add seg2 to ph5
testargs = ['seg2toph5', '-n', 'master.ph5', '-r',
os.path.join(self.home, "ph5/test_data/seg2/15001.dat")]
with patch.object(sys, 'argv', testargs):
with OutputCapture():
with LogCapture() as log:
log.setLevel(logging.ERROR)
seg2toph5.main()
# before commit caf6978, there would be 1 error log if run this in
# environment that uses Obspy 1.2.2
self.assertEqual(len(log.records), 0)
# Re-open the archive and verify every expected external link exists
# under both Maps_g and Receivers_g, in order.
self.ph5object = ph5api.PH5(path=self.tmpdir, nickname='master.ph5')
target_p1 = 'miniPH5_00001.ph5:/Experiment_g/Maps_g/'
targets = [target_p1 + n for n in data_nodes]
node = self.ph5object.ph5.get_node("/Experiment_g/Maps_g/")
i = 0
ret_targets = []
for n in self.ph5object.ph5.list_nodes(node):
if hasattr(n, 'target'):
ret_targets.append(n.target)
i += 1
self.assertEqual(ret_targets, targets)
target_p1 = 'miniPH5_00001.ph5:/Experiment_g/Receivers_g/'
targets = [target_p1 + n for n in data_nodes]
node = self.ph5object.ph5.get_node("/Experiment_g/Receivers_g/")
i = 0
ret_targets = []
for n in self.ph5object.ph5.list_nodes(node):
if hasattr(n, 'target'):
ret_targets.append(n.target)
i += 1
self.assertEqual(ret_targets, targets)
def test_update_external_references(self):
# Exercise update_external_references() directly with hand-built
# INDEX_T_DAS / INDEX_T_MAP rows instead of a full conversion run.
self.ph5object = seg2toph5.EX = \
initialize_ex('master.ph5', '.', True)
keys = ['external_file_name_s', 'hdf5_path_s', 'serial_number_s']
INDEX_T_DAS_rows = \
[{'external_file_name_s': './miniPH5_00001.ph5',
'hdf5_path_s': '/Experiment_g/Receivers_g/Das_g_0000SV01'}]
seg2toph5.INDEX_T_DAS = seg2toph5.Rows_Keys(INDEX_T_DAS_rows, keys)
INDEX_T_MAP_rows = \
[{'external_file_name_s': './miniPH5_00001.ph5',
'hdf5_path_s': '/Experiment_g/Maps_g/Das_g_0000SV01'}]
seg2toph5.INDEX_T_MAP = seg2toph5.Rows_Keys(INDEX_T_MAP_rows, keys)
seg2toph5.update_external_references()
# check if external links are created
node = self.ph5object.ph5.get_node("/Experiment_g/Receivers_g/")
target = 'miniPH5_00001.ph5:/Experiment_g/Receivers_g/Das_g_0000SV01'
for n in self.ph5object.ph5.list_nodes(node):
if hasattr(n, 'target'):
self.assertEqual(n.target, target)
break
node = self.ph5object.ph5.get_node("/Experiment_g/Maps_g/")
target = 'miniPH5_00001.ph5:/Experiment_g/Maps_g/Das_g_0000SV01'
for n in self.ph5object.ph5.list_nodes(node):
if hasattr(n, 'target'):
self.assertEqual(n.target, target)
break
if __name__ == "__main__":
unittest.main()
| 43 | 77 | 0.605426 |
014d029371edfc926a3b46e79008ce4486f7ec74 | 29 | py | Python | pydreamer/models/__init__.py | rogerscristo/pydreamer | e44fdf8b35fe48662ed619100fdd5d9d6858f6db | [
"MIT"
] | 75 | 2021-10-12T13:17:48.000Z | 2022-03-04T14:43:30.000Z | pydreamer/models/__init__.py | LvZut/pydreamer | e3a522e13319d3667b526abb5f5747ab68e3c04e | [
"MIT"
] | 2 | 2022-01-17T06:49:50.000Z | 2022-02-17T19:45:24.000Z | pydreamer/models/__init__.py | LvZut/pydreamer | e3a522e13319d3667b526abb5f5747ab68e3c04e | [
"MIT"
] | 10 | 2021-11-27T18:20:26.000Z | 2022-03-14T09:06:52.000Z | from .dreamer import Dreamer
| 14.5 | 28 | 0.827586 |
014d7f5347c2db899cb4f07a82d8e90ce6a5c1f4 | 1,251 | py | Python | zcash/test_node.py | gwynethallwright/cs291d_project | 7d9bbb32acec855e777b93b88153869393d458d3 | [
"Apache-2.0"
] | null | null | null | zcash/test_node.py | gwynethallwright/cs291d_project | 7d9bbb32acec855e777b93b88153869393d458d3 | [
"Apache-2.0"
] | null | null | null | zcash/test_node.py | gwynethallwright/cs291d_project | 7d9bbb32acec855e777b93b88153869393d458d3 | [
"Apache-2.0"
] | null | null | null | import time
from zcash.node import *
node1 = Node(8002, "node 1")
node1.start()
# wait for string
time.sleep(2)
node1.print_blockchain()
time.sleep(2)
node2 = Node(8003, "node 2")
node2.start()
# wait for string
time.sleep(2)
node2.print_blockchain()
node1.get_balance()
node2.get_balance()
# node1 mint and pour to send to node2
tx1 = node1.mint_coin(1)
node1.broadcast_new_transaction(tx1)
# waiting for tx broadcast
time.sleep(2)
node1.show_coin()
tx2 = node1.mint_coin(1)
node1.broadcast_new_transaction(tx2)
# waiting for tx broadcast
time.sleep(2)
node1.show_coin()
coin_old_1 = list(node1.coin_set)[0]
coin_old_2 = list(node1.coin_set)[1]
tx3 = node1.pour_coin(coin_old_1, coin_old_2, node1.addr_sk, node1.addr_sk, 1, 1, node2.addr_pk, node2.addr_pk, 0, "")
sn_list = [tx3.tx_pour[1], tx3.tx_pour[2]]
# tx = Transaction(sender=node1.wallet.address, receiver=node2.wallet.address, amount=0.3)
# sig = node1.wallet.sign(str(tx))
# tx.set_sign(node1.wallet.pubkey, sig)
node1.broadcast_new_transaction(tx3)
# waiting for tx broadcast
time.sleep(2)
node1.show_coin()
node2.show_coin()
node2.receive_coin(node2.addr_pk, node2.addr_sk)
node2.print_blockchain()
node2.get_balance()
node1.get_balance()
node1.print_blockchain()
| 18.397059 | 118 | 0.751399 |
014e3e728d1d9378b4810966f0de08c19709bfa8 | 822 | py | Python | archived_lectures/Fall_2019/common_python/common_python/tests/tellurium/test_util.py | ModelEngineering/topics-course | cd0d73e4056663d170465669ecd699e8e74e35a0 | [
"MIT"
] | 2 | 2018-10-24T21:31:30.000Z | 2019-10-23T20:29:22.000Z | archived_lectures/Fall_2019/common_python/common_python/tests/tellurium/test_util.py | ModelEngineering/topics-course | cd0d73e4056663d170465669ecd699e8e74e35a0 | [
"MIT"
] | 1 | 2019-05-31T21:59:30.000Z | 2019-05-31T21:59:30.000Z | archived_lectures/Fall_2019/common_python/common_python/tests/tellurium/test_util.py | ModelEngineering/topics-course | cd0d73e4056663d170465669ecd699e8e74e35a0 | [
"MIT"
] | 9 | 2018-10-31T20:48:42.000Z | 2019-11-20T21:47:43.000Z | from common_python.tellurium import util
import pandas as pd
import numpy as np
import unittest
class TestFunctions(unittest.TestCase):
    """Unit tests for the common_python.tellurium ``util`` helpers."""

    # Fix: testDfToSer was defined twice with identical bodies; the second
    # definition silently shadowed the first. The duplicate is removed.
    def testDfToSer(self):
        """dfToSer flattens a DataFrame into a Series of length rows*cols."""
        data = range(5)
        df = pd.DataFrame({'a': data, 'b': [2*d for d in data]})
        ser = util.dfToSer(df)
        assert(len(ser) == len(df.columns)*len(df))

    def testInterpolateTime(self):
        """interpolateTime interpolates within range and clamps outside it."""
        MAX = 10
        SER = pd.Series(range(MAX), index=range(MAX))
        self.assertEqual(util.interpolateTime(SER, 0.4), 0.4)
        self.assertEqual(util.interpolateTime(SER, -1), 0)
        self.assertEqual(util.interpolateTime(SER, MAX), MAX-1)
if __name__ == '__main__':
unittest.main()
| 25.6875 | 60 | 0.656934 |
014f313e34df031bf86f179460a6096cdeb0e1a1 | 535 | py | Python | hr_timesheet_invoice_release/account_invoice.py | sunflowerit/odoo-modules | 77e11c4868c3f94c031542b48e5d83797cf4a28d | [
"MIT"
] | null | null | null | hr_timesheet_invoice_release/account_invoice.py | sunflowerit/odoo-modules | 77e11c4868c3f94c031542b48e5d83797cf4a28d | [
"MIT"
] | 4 | 2016-10-19T17:01:04.000Z | 2018-01-12T18:34:58.000Z | hr_timesheet_invoice_release/account_invoice.py | sunflowerit/odoo-modules | 77e11c4868c3f94c031542b48e5d83797cf4a28d | [
"MIT"
] | 1 | 2018-03-08T16:23:52.000Z | 2018-03-08T16:23:52.000Z | # -*- coding: utf-8 -*-
# © 2017 Sunflower IT (http://sunflowerweb.nl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import fields, models, api, _
from openerp.exceptions import Warning
class AccountInvoice(models.Model):
# Extends account.invoice so that deleting/releasing an invoice also
# detaches the timesheet lines that were billed on it.
_inherit = 'account.invoice'
@api.multi
def release_timesheet_lines(self):
# Unlink all analytic timesheet lines from this single invoice so
# they become billable again. override_check bypasses the write
# guard on invoiced lines (presumably set by a related module —
# TODO confirm).
self.ensure_one()
self.env['hr.analytic.timesheet'].search([
('invoice_id', '=', self.id)
]).with_context(override_check=True).write({'invoice_id': None})
| 29.722222 | 72 | 0.665421 |
01507b0361d9bced76109c780b90eccee027d206 | 4,205 | py | Python | blocklenium/selenium_worker.py | jpunkt/blocklenium | dbe81b900d9c9781443d2cac2920815cb5f0a779 | [
"MIT"
] | null | null | null | blocklenium/selenium_worker.py | jpunkt/blocklenium | dbe81b900d9c9781443d2cac2920815cb5f0a779 | [
"MIT"
] | 1 | 2020-07-17T10:11:42.000Z | 2020-07-17T14:44:59.000Z | blocklenium/selenium_worker.py | jpunkt/blocklenium | dbe81b900d9c9781443d2cac2920815cb5f0a779 | [
"MIT"
] | null | null | null | import configparser
import logging
import threading
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium import webdriver
logger = logging.getLogger(__name__)
class SeleniumWorker(threading.Thread):
    """Worker thread that drives a Chrome browser through Selenium.

    Items read from ``queue`` control the worker:

    * truthy  -- make sure a browser is open, load the desk URL, log in
      if configured, then execute the bookmarklet JavaScript
    * ``None`` -- close the browser and keep waiting
    * ``False`` -- close the browser and terminate the thread
    """

    def __init__(self, queue, config):
        """Read settings from *config* and load the bookmarklet.

        :param queue: queue delivering True/None/False commands
        :param config: mapping providing CHROMEDRIVER_PATH, DESK_URL,
            DESK_LOGIN_REQUIRED, BROWSER_INSECURE_CERTS, BOOKMARKLET_PATH
            and, when login is required, DESK_USERNAME / DESK_PASSWORD.
        :raises ValueError: if a '.url' bookmarklet file is not a valid
            Internet shortcut.
        """
        threading.Thread.__init__(self)
        self.chromedriver_path = config['CHROMEDRIVER_PATH']
        self.desk_url = config['DESK_URL']
        self.login_required = config['DESK_LOGIN_REQUIRED']
        # Bugfix: desk_login() reads these attributes, but they were never
        # assigned anywhere, so the first login attempt raised
        # AttributeError. They are optional when login is not required.
        self.desk_username = config.get('DESK_USERNAME')
        self.desk_password = config.get('DESK_PASSWORD')
        self.queue = queue
        self.chromedriver = None

        # Set options for chromedriver
        self.chromecaps = webdriver.DesiredCapabilities.CHROME.copy()
        # Accept insecure connections
        self.chromecaps['acceptInsecureCerts'] = \
            config['BROWSER_INSECURE_CERTS']

        # Load the bookmarklet. A '.url' file is an INI-style Internet
        # shortcut; anything else is treated as a plain JavaScript file.
        bookmarklet_path = config['BOOKMARKLET_PATH']
        if bookmarklet_path.endswith('.url'):
            # parse with config parser
            parser = configparser.ConfigParser()
            parser.read(bookmarklet_path)
            if 'InternetShortcut' in parser:
                self.js = parser['InternetShortcut']['URL']
            else:
                raise ValueError('Bookmarklet file must be a web link!')
        else:
            with open(bookmarklet_path, "r") as f:
                self.js = f.read()

    def run(self):
        """Runs in an endless loop until False was put on the queue.

        If True is on the queue, opens a browser and runs bookmarklet.
        If None is on the queue, closes the browser."""
        logger.debug('Thread running.')
        while True:
            q = self.queue.get()
            if q:
                logger.info('Starting browser...')
                # Instantiate driver (opens browser) only when needed
                if self.chromedriver is None:
                    logger.debug('No browser running. Starting browser...')
                    self.chromedriver = webdriver.Chrome(
                        self.chromedriver_path,
                        desired_capabilities=self.chromecaps)
                # Open the desk website
                logger.debug('Calling url')
                self.chromedriver.get(self.desk_url)
                # Log in if needed
                if self.login_required:
                    self.desk_login()
                # Execute JavaScript bookmarklet
                if self.js is not None:
                    logger.info('Executing JavaScript...')
                    self.chromedriver.execute_script(self.js)
            else:
                logger.info('Closing browser...')
                # Close browser on both None and False commands
                if self.chromedriver is not None:
                    self.chromedriver.quit()
                    self.chromedriver = None
                if q is False:
                    logger.info('Exiting worker loop...')
                    break

    def desk_login(self):
        """Fill in the desk login form, submit it and wait for the page.

        The user-input fields have generated ids, so they are located by
        ids *containing* 'Username' / 'Password'. If no login fields are
        found the step is skipped with an informational log message.
        """
        logger.info('attempting login to desk...')
        # the user-input fields have weird ids, so we need to select
        # them by searching for ids containing 'Username' or 'Password'
        userfields = self.chromedriver.find_elements_by_css_selector(
            "input[id*='Username']")
        pwdfields = self.chromedriver.find_elements_by_css_selector(
            "input[id*='Password']")
        if (len(userfields) > 0) and (len(pwdfields) > 0):
            userfields[0].send_keys(self.desk_username)
            pwdfields[0].send_keys(self.desk_password)
            loginbtn = self.chromedriver.find_element_by_xpath(
                "//button[@type='submit']")
            loginbtn.click()
            # Wait for the new page to be fully loaded
            WebDriverWait(self.chromedriver, 10).until(
                EC.presence_of_element_located((By.CLASS_NAME,
                                                "timeline-header"))
            )
        else:
            logger.info(
                'Expected Login page but found no login fields. Ignored')
| 36.885965 | 75 | 0.572889 |
0150a51530726ef3c2430cedd19b6ce0d322142d | 1,595 | py | Python | setup.py | LaudateCorpus1/evohome-async | 333223df05b7881d6d9b831eb41d209846dd9a98 | [
"Apache-2.0"
] | 2 | 2020-11-18T14:33:49.000Z | 2021-12-27T14:52:54.000Z | setup.py | LaudateCorpus1/evohome-async | 333223df05b7881d6d9b831eb41d209846dd9a98 | [
"Apache-2.0"
] | 4 | 2021-03-10T16:54:31.000Z | 2022-01-21T10:16:33.000Z | setup.py | LaudateCorpus1/evohome-async | 333223df05b7881d6d9b831eb41d209846dd9a98 | [
"Apache-2.0"
] | 9 | 2020-12-06T08:07:45.000Z | 2022-02-08T07:03:53.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
"""The setup.py file."""
import os
import sys
from setuptools import find_packages, setup
from setuptools.command.install import install
VERSION = "0.3.15"
URL = "https://github.com/zxdavb/evohome-async"
with open("README.md", "r") as fh:
LONG_DESCRIPTION = fh.read()
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our VERSION."""
def run(self):
# Compare the CircleCI tag against the package version; abort the
# release (non-zero exit) on mismatch instead of installing.
tag = os.getenv("CIRCLE_TAG")
if tag != VERSION:
info = f"The git tag: '{tag}' does not match the package ver: '{VERSION}'"
sys.exit(info)
# Package metadata and build configuration for the evohome-async client.
setup(
name="evohome-async",
description="An async client for connecting to Honeywell's TCC RESTful API.",
keywords=["evohome", "total connect comfort", "round thermostat"],
author="Andrew Stock & David Bonnes",
author_email="zxdavb@gmail.com",
url=URL,
download_url=f"{URL}/archive/{VERSION}.tar.gz",
# Runtime dependencies come straight from requirements.txt.
install_requires=[val.strip() for val in open("requirements.txt")],
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
packages=find_packages(exclude=["test", "docs"]),
version=VERSION,
license="Apache 2",
python_requires=">=3.7",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.7",
"Topic :: Home Automation",
],
# `python setup.py verify` checks the git tag against VERSION in CI.
cmdclass={
"verify": VerifyVersionCommand,
},
)
| 28.482143 | 86 | 0.648276 |
0153baca680ffab8056f0a843270c4080b86ff69 | 74,403 | py | Python | tests/test_model_factory.py | TForce1/pcg_gazebo | 9ff88016b7b6903236484958ca7c6ed9f8ffb346 | [
"ECL-2.0",
"Apache-2.0"
] | 40 | 2020-02-04T18:16:49.000Z | 2022-02-22T11:36:34.000Z | tests/test_model_factory.py | awesomebytes/pcg_gazebo | 4f335dd460ef7c771f1df78b46a92fad4a62cedc | [
"ECL-2.0",
"Apache-2.0"
] | 75 | 2020-01-23T13:40:50.000Z | 2022-02-09T07:26:01.000Z | tests/test_model_factory.py | GimpelZhang/gazebo_world_generator | eb7215499d0ddc972d804c988fadab1969579b1b | [
"ECL-2.0",
"Apache-2.0"
] | 18 | 2020-09-10T06:35:41.000Z | 2022-02-20T19:08:17.000Z | #!/usr/bin/env python
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import os
import shutil
from pcg_gazebo import random
from pcg_gazebo.utils import generate_random_string
from pcg_gazebo.generators.creators import create_models_from_config, extrude
from pcg_gazebo.simulation.properties import Material, Pose
from pcg_gazebo.simulation import SimulationModel
from pcg_gazebo.path import Path
from shapely.geometry import Polygon, MultiPoint, LineString
def _get_colors():
    """Return a sample of color specifications to exercise the factory.

    The sample covers: no color, the 'xkcd' and 'random' keywords, two
    randomly picked XKCD color names and two random RGBA vectors.
    """
    names = list(Material.get_xkcd_colors_list().keys())
    colors = [None, 'xkcd', 'random']
    for _ in range(2):
        colors.append(names[random.choice(range(len(names)))])
    for _ in range(2):
        colors.append([random.rand() for _ in range(4)])
    return colors
def _delete_generated_meshes(sdf):
    """Delete from disk every mesh file referenced by the SDF's links.

    Scans all collision and visual geometries of every link in *sdf* and
    removes the mesh file behind each mesh geometry's URI, if it exists.
    Non-mesh geometries are ignored.
    """
    def _remove_mesh_file(geometry):
        # Only mesh geometries carry a URI pointing at a generated file
        if geometry.mesh is not None:
            uri = Path(geometry.mesh.uri.value)
            if os.path.isfile(uri.absolute_uri):
                os.remove(uri.absolute_uri)

    for link in sdf.links:
        for collision in link.collisions:
            _remove_mesh_file(collision.geometry)
        for visual in link.visuals:
            _remove_mesh_file(visual.geometry)
class TestModelFactory(unittest.TestCase):
def test_box_after_random_seed(self):
box_name = generate_random_string(5)
seed = random.randint(0, 10000)
random.init_random_state(seed)
random_box = dict(
type='box',
args=dict(
size="__import__('pcg_gazebo').random.rand(3)",
mass="__import__('pcg_gazebo').random.rand()",
name=box_name
)
)
ref = create_models_from_config([random_box])[0]
for _ in range(3):
random.init_random_state(seed)
model = create_models_from_config([random_box])[0]
self.assertEqual(ref.to_sdf(), model.to_sdf())
random.init_random_state(seed)
refs = create_models_from_config(
[random_box for _ in range(3)])
for _ in range(3):
random.init_random_state(seed)
models = create_models_from_config(
[random_box for _ in range(3)])
for r, m in zip(refs, models):
self.assertEqual(r.to_sdf(), m.to_sdf())
def test_sphere_after_random_seed(self):
sphere_name = generate_random_string(5)
seed = random.randint(0, 10000)
random.init_random_state(seed)
random_sphere = dict(
type='sphere',
args=dict(
radius="__import__('pcg_gazebo').random.rand()",
mass="__import__('pcg_gazebo').random.rand()",
name=sphere_name
)
)
ref = create_models_from_config([random_sphere])[0]
for _ in range(3):
random.init_random_state(seed)
model = create_models_from_config([random_sphere])[0]
self.assertEqual(ref.to_sdf(), model.to_sdf())
random.init_random_state(seed)
refs = create_models_from_config(
[random_sphere for _ in range(3)])
for _ in range(3):
random.init_random_state(seed)
models = create_models_from_config(
[random_sphere for _ in range(3)])
for r, m in zip(refs, models):
self.assertEqual(r.to_sdf(), m.to_sdf())
def test_cylinder_after_random_seed(self):
cyl_name = generate_random_string(5)
seed = random.randint(0, 10000)
random.init_random_state(seed)
random_cyl = dict(
type='cylinder',
args=dict(
radius="__import__('pcg_gazebo').random.rand()",
length="__import__('pcg_gazebo').random.rand()",
mass="__import__('pcg_gazebo').random.rand()",
name=cyl_name
)
)
ref = create_models_from_config([random_cyl])[0]
for _ in range(3):
random.init_random_state(seed)
model = create_models_from_config([random_cyl])[0]
self.assertEqual(ref.to_sdf(), model.to_sdf())
random.init_random_state(seed)
refs = create_models_from_config(
[random_cyl for _ in range(3)])
for _ in range(3):
random.init_random_state(seed)
models = create_models_from_config(
[random_cyl for _ in range(3)])
for r, m in zip(refs, models):
self.assertEqual(r.to_sdf(), m.to_sdf())
def test_static_box_model(self):
for color in _get_colors():
name = generate_random_string(3)
pose = [random.rand() for _ in range(6)]
size = [random.rand() for _ in range(3)]
model_config = [
dict(
type='box',
args=dict(
size=size,
name=name,
pose=pose,
color=color,
collision_parameters=dict(
mu=random.uniform(0, 10),
mu2=random.uniform(0, 10),
friction=random.uniform(0, 10),
friction2=random.uniform(0, 10),
slip1=random.uniform(0, 1),
slip2=random.uniform(0, 1),
rolling_friction=random.uniform(0, 1),
fdir1=[0, 0, 0],
max_contacts=1,
soft_cfm=random.uniform(0, 10),
soft_erp=random.uniform(0, 10),
kp=random.uniform(0, 100000),
kd=random.uniform(0, 10),
max_vel=random.uniform(0, 0.1),
min_depth=random.uniform(0, 0.1),
split_impulse=False,
split_impulse_penetration_threshold=-0.01,
restitution_coefficient=random.uniform(0, 1),
threshold=random.uniform(0, 1)
)
))
]
models = create_models_from_config(model_config)
self.assertEqual(len(models), 1)
self.assertIsInstance(models[0], SimulationModel)
# Test pose of the model
self.assertEqual(models[0].pose.position.tolist(), pose[0:3])
q = Pose.rpy2quat(*pose[3::])
diff = Pose.get_transform(models[0].pose.quat, q)
# Test model properties
self.assertAlmostEqual(np.sum(diff[0:3]), 0)
self.assertTrue(models[0].static)
self.assertEqual(len(models[0].links), 1)
link_name = models[0].link_names[0]
# Test visual element
self.assertEqual(len(models[0].links[link_name].visuals), 1)
geometry = models[0].links[link_name].visuals[0].geometry
self.assertEqual(geometry.get_type(), 'box')
self.assertEqual(geometry.get_param('size'), size)
# Test collision element
self.assertEqual(len(models[0].links[link_name].collisions), 1)
collision = models[0].links[link_name].get_collision_by_name(
'collision')
tags = ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']
for tag in tags:
self.assertEqual(
collision.get_ode_friction_param(tag),
model_config[0]['args']['collision_parameters'][tag])
tags = ['friction', 'friction2', 'rolling_friction', 'fdir1']
for tag in tags:
self.assertEqual(
collision.get_bullet_friction_param(tag),
model_config[0]['args']['collision_parameters'][tag])
tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']
for tag in tags:
self.assertEqual(
collision.get_ode_contact_param(tag),
model_config[0]['args']['collision_parameters'][tag])
tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
'split_impulse_penetration_threshold']
for tag in tags:
self.assertEqual(
collision.get_bullet_contact_param(tag),
model_config[0]['args']['collision_parameters'][tag])
tags = ['restitution_coefficient', 'threshold']
print(collision.sdf)
for tag in tags:
self.assertEqual(
collision.get_bounce_param(tag),
model_config[0]['args']['collision_parameters'][tag])
geometry = collision.geometry
self.assertEqual(geometry.get_type(), 'box')
self.assertEqual(geometry.get_param('size'), size)
# Test color exists
material = models[0].links[link_name].visuals[0].material
self.assertIsNotNone(material)
self.assertIsNotNone(material.ambient)
self.assertIsNotNone(material.diffuse)
if not isinstance(
color, str) and isinstance(
color, list) and color is not None:
self.assertEqual(material.ambient.value, color)
self.assertEqual(material.diffuse.value, color)
    def test_dynamic_box_model(self):
        """Dynamic box: check pose, inertia, geometry and surface parameters.

        Like the static box test, but a mass is provided, so the model must
        be non-static and the link inertia must match the analytical box
        inertia tensor (solid cuboid of uniform density).
        """
        for color in _get_colors():
            name = generate_random_string(3)
            pose = [random.rand() for _ in range(6)]
            size = [random.rand() for _ in range(3)]
            mass = random.rand()
            model_config = [
                dict(
                    type='box',
                    args=dict(
                        size=size,
                        name=name,
                        pose=pose,
                        mass=mass,
                        color=color,
                        collision_parameters=dict(
                            mu=random.uniform(0, 10),
                            mu2=random.uniform(0, 10),
                            friction=random.uniform(0, 10),
                            friction2=random.uniform(0, 10),
                            slip1=random.uniform(0, 1),
                            slip2=random.uniform(0, 1),
                            rolling_friction=random.uniform(0, 1),
                            fdir1=[0, 0, 0],
                            max_contacts=1,
                            soft_cfm=random.uniform(0, 10),
                            soft_erp=random.uniform(0, 10),
                            kp=random.uniform(0, 100000),
                            kd=random.uniform(0, 10),
                            max_vel=random.uniform(0, 0.1),
                            min_depth=random.uniform(0, 0.1),
                            split_impulse=False,
                            split_impulse_penetration_threshold=-0.01,
                            restitution_coefficient=random.uniform(0, 1),
                            threshold=random.uniform(0, 1)
                        )
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), 1)
            self.assertIsInstance(models[0], SimulationModel)
            # Test pose of the model
            self.assertEqual(models[0].pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(models[0].pose.quat, q)
            # Test model properties
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertFalse(models[0].static)
            self.assertEqual(len(models[0].links), 1)

            link_name = models[0].link_names[0]

            # Test link properties: analytical inertia of a solid box
            link = models[0].links[link_name]
            self.assertEqual(link.inertial.mass, mass)
            self.assertAlmostEqual(link.inertial.ixx,
                                   1. / 12 * mass * (size[1]**2 + size[2]**2))
            self.assertAlmostEqual(link.inertial.iyy,
                                   1. / 12 * mass * (size[0]**2 + size[2]**2))
            self.assertAlmostEqual(link.inertial.izz,
                                   1. / 12 * mass * (size[0]**2 + size[1]**2))
            self.assertEqual(link.inertial.ixy, 0)
            self.assertEqual(link.inertial.ixz, 0)
            self.assertEqual(link.inertial.iyz, 0)

            # Test visual element
            self.assertEqual(len(link.visuals), 1)
            geometry = link.visuals[0].geometry
            self.assertEqual(geometry.get_type(), 'box')
            self.assertEqual(geometry.get_param('size'), size)

            # Test collision element
            self.assertEqual(len(link.collisions), 1)
            collision = link.get_collision_by_name('collision')

            tags = ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['friction', 'friction2', 'rolling_friction', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
                    'split_impulse_penetration_threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['restitution_coefficient', 'threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bounce_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            geometry = collision.geometry
            self.assertEqual(geometry.get_type(), 'box')
            self.assertEqual(geometry.get_param('size'), size)

            # Test color exists
            material = models[0].links[link_name].visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)

            if not isinstance(
                    color, str) and isinstance(
                    color, list) and color is not None:
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
    def test_box_factory_fixed_args_with_permutation(self):
        """Box factory with explicit size/mass lists and permutation enabled.

        With ``use_permutation=True`` the factory must produce one model per
        (size, mass) combination, i.e. ``n_sizes * n_masses`` models, each
        with an auto-generated ``<name>_<counter>`` name.
        """
        for color in _get_colors():
            n_sizes = random.randint(2, 5)
            n_masses = random.randint(2, 5)
            name = generate_random_string(3)
            sizes = [[random.rand() for _ in range(3)]
                     for _ in range(n_sizes)]
            pose = [random.rand() for _ in range(6)]
            masses = [random.rand() for _ in range(n_masses)]
            model_config = [
                dict(
                    type='box_factory',
                    args=dict(
                        name=name,
                        size=sizes,
                        mass=masses,
                        pose=pose,
                        use_permutation=True,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_sizes * n_masses)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                self.assertIn(models[i].links[link_name].inertial.mass, masses)

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'box')
                self.assertIn(geometry.get_param('size'), sizes)

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'box')
                self.assertIn(geometry.get_param('size'), sizes)

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
    def test_box_factory_fixed_args_no_permutation(self):
        """Box factory with equal-length size/mass lists and no permutation.

        With ``use_permutation=False`` and matching list lengths, the
        factory must pair arguments element-wise and produce exactly
        ``n_models`` models.
        """
        for color in _get_colors():
            n_models = random.randint(2, 5)
            name = generate_random_string(3)
            sizes = [[random.rand() for _ in range(3)]
                     for _ in range(n_models)]
            pose = [random.rand() for _ in range(6)]
            masses = [random.rand() for _ in range(n_models)]
            model_config = [
                dict(
                    type='box_factory',
                    args=dict(
                        name=name,
                        size=sizes,
                        mass=masses,
                        pose=pose,
                        use_permutation=False,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_models)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                self.assertIn(models[i].links[link_name].inertial.mass, masses)

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'box')
                self.assertIn(geometry.get_param('size'), sizes)

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'box')
                self.assertIn(geometry.get_param('size'), sizes)

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
    def test_box_factory_lambda_args_with_permutation(self):
        """Box factory with string (lambda-style) args and permutation enabled.

        ``size`` and ``mass`` are given as evaluatable strings; the factory
        must still produce the full ``n_sizes * n_masses`` permutation.
        Exact geometry values are not checked since they are generated at
        evaluation time inside the factory.
        """
        for color in _get_colors():
            n_sizes = random.randint(2, 5)
            n_masses = random.randint(2, 5)
            name = generate_random_string(3)
            sizes = "__import__('numpy').random.random(({}, 3))".format(
                n_sizes)
            masses = "__import__('numpy').linspace(1, 10, {})".format(n_masses)
            pose = [random.rand() for _ in range(6)]
            model_config = [
                dict(
                    type='box_factory',
                    args=dict(
                        name=name,
                        size=sizes,
                        mass=masses,
                        pose=pose,
                        use_permutation=True,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_sizes * n_masses)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'box')

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'box')

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
def text_box_forced_permutation(self):
n_sizes = 2
n_masses = 3
name = generate_random_string(3)
# Test with lambda functions
sizes = "__import__('numpy').random.random(({}, 3))".format(n_sizes)
masses = "__import__('numpy').linspace(1, 10, {})".format(n_masses)
pose = [random.rand() for _ in range(6)]
model_config = [
dict(
type='box_factory',
args=dict(
name=name,
size=sizes,
mass=masses,
pose=pose,
use_permutation=False,
color=None
))
]
models = create_models_from_config(model_config)
self.assertEqual(len(models), n_sizes * n_masses)
# Test with fixed arguments
sizes = [[random.rand() for _ in range(3)] for _ in range(n_sizes)]
masses = [random.rand() for _ in range(n_masses)]
pose = [random.rand() for _ in range(6)]
model_config = [
dict(
type='box_factory',
args=dict(
name=name,
size=sizes,
mass=masses,
pose=pose,
use_permutation=False,
color=None
))
]
models = create_models_from_config(model_config)
self.assertEqual(len(models), n_sizes * n_masses)
    def test_static_cylinder_model(self):
        """Static cylinder: check pose, geometry, surface params and material.

        Builds one static cylinder (no mass given) per sampled color and
        verifies pose, visual/collision geometry and that every
        ODE/Bullet/bounce collision parameter round-trips through the
        factory.
        """
        for color in _get_colors():
            name = generate_random_string(3)
            pose = [random.rand() for _ in range(6)]
            radius = random.rand()
            length = random.rand()
            model_config = [
                dict(
                    type='cylinder',
                    args=dict(
                        radius=radius,
                        length=length,
                        name=name,
                        pose=pose,
                        color=color,
                        collision_parameters=dict(
                            mu=random.uniform(0, 10),
                            mu2=random.uniform(0, 10),
                            friction=random.uniform(0, 10),
                            friction2=random.uniform(0, 10),
                            slip1=random.uniform(0, 1),
                            slip2=random.uniform(0, 1),
                            rolling_friction=random.uniform(0, 1),
                            fdir1=[0, 0, 0],
                            max_contacts=1,
                            soft_cfm=random.uniform(0, 10),
                            soft_erp=random.uniform(0, 10),
                            kp=random.uniform(0, 100000),
                            kd=random.uniform(0, 10),
                            max_vel=random.uniform(0, 0.1),
                            min_depth=random.uniform(0, 0.1),
                            split_impulse=False,
                            split_impulse_penetration_threshold=-0.01,
                            restitution_coefficient=random.uniform(0, 1),
                            threshold=random.uniform(0, 1)
                        )
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), 1)
            self.assertIsInstance(models[0], SimulationModel)
            # Test pose of the model
            self.assertEqual(models[0].pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(models[0].pose.quat, q)
            # Test model properties
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertTrue(models[0].static)
            self.assertEqual(len(models[0].links), 1)

            link_name = models[0].link_names[0]

            # Test visual element
            self.assertEqual(len(models[0].links[link_name].visuals), 1)
            geometry = models[0].links[link_name].visuals[0].geometry
            self.assertEqual(geometry.get_type(), 'cylinder')
            self.assertEqual(geometry.get_param('radius'), radius)
            self.assertEqual(geometry.get_param('length'), length)

            # Test collision element
            self.assertEqual(len(models[0].links[link_name].collisions), 1)
            collision = models[0].links[link_name].get_collision_by_name(
                'collision')

            tags = ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['friction', 'friction2', 'rolling_friction', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
                    'split_impulse_penetration_threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['restitution_coefficient', 'threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bounce_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            geometry = models[0].links[link_name].collisions[0].geometry
            self.assertEqual(geometry.get_type(), 'cylinder')
            self.assertEqual(geometry.get_param('radius'), radius)
            self.assertEqual(geometry.get_param('length'), length)

            # Test color exists
            material = models[0].links[link_name].visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)

            if not isinstance(
                    color, str) and isinstance(
                    color, list) and color is not None:
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
    def test_dynamic_cylinder_model(self):
        """Dynamic cylinder: check pose, inertia, geometry and surface params.

        Like the static cylinder test, but a mass is provided, so the model
        must be non-static and the link inertia must match the analytical
        inertia tensor of a solid cylinder of uniform density.
        """
        for color in _get_colors():
            name = generate_random_string(3)
            pose = [random.rand() for _ in range(6)]
            radius = random.rand()
            length = random.rand()
            mass = random.rand()
            model_config = [
                dict(
                    type='cylinder',
                    args=dict(
                        radius=radius,
                        length=length,
                        mass=mass,
                        name=name,
                        pose=pose,
                        color=color,
                        collision_parameters=dict(
                            mu=random.uniform(0, 10),
                            mu2=random.uniform(0, 10),
                            friction=random.uniform(0, 10),
                            friction2=random.uniform(0, 10),
                            slip1=random.uniform(0, 1),
                            slip2=random.uniform(0, 1),
                            rolling_friction=random.uniform(0, 1),
                            fdir1=[0, 0, 0],
                            max_contacts=1,
                            soft_cfm=random.uniform(0, 10),
                            soft_erp=random.uniform(0, 10),
                            kp=random.uniform(0, 100000),
                            kd=random.uniform(0, 10),
                            max_vel=random.uniform(0, 0.1),
                            min_depth=random.uniform(0, 0.1),
                            split_impulse=False,
                            split_impulse_penetration_threshold=-0.01,
                            restitution_coefficient=random.uniform(0, 1),
                            threshold=random.uniform(0, 1)
                        )
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), 1)
            self.assertIsInstance(models[0], SimulationModel)
            # Test pose of the model
            self.assertEqual(models[0].pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(models[0].pose.quat, q)
            # Test model properties
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertFalse(models[0].static)
            self.assertEqual(len(models[0].links), 1)

            link_name = models[0].link_names[0]

            # Test link properties: analytical inertia of a solid cylinder
            link = models[0].links[link_name]
            self.assertEqual(link.inertial.mass, mass)
            self.assertAlmostEqual(
                link.inertial.ixx,
                1. / 12 * mass * (3 * radius**2 + length**2))
            self.assertAlmostEqual(
                link.inertial.iyy,
                1. / 12 * mass * (3 * radius**2 + length**2))
            self.assertAlmostEqual(link.inertial.izz, 0.5 * mass * radius**2)
            self.assertEqual(link.inertial.ixy, 0)
            self.assertEqual(link.inertial.ixz, 0)
            self.assertEqual(link.inertial.iyz, 0)

            # Test visual element
            self.assertEqual(len(models[0].links[link_name].visuals), 1)
            geometry = models[0].links[link_name].visuals[0].geometry
            self.assertEqual(geometry.get_type(), 'cylinder')
            self.assertEqual(geometry.get_param('radius'), radius)
            self.assertEqual(geometry.get_param('length'), length)

            # Test collision element
            self.assertEqual(len(models[0].links[link_name].collisions), 1)
            collision = models[0].links[link_name].get_collision_by_name(
                'collision')

            tags = ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['friction', 'friction2', 'rolling_friction', 'fdir1']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_friction_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']
            for tag in tags:
                self.assertEqual(
                    collision.get_ode_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
                    'split_impulse_penetration_threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bullet_contact_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            tags = ['restitution_coefficient', 'threshold']
            for tag in tags:
                self.assertEqual(
                    collision.get_bounce_param(tag),
                    model_config[0]['args']['collision_parameters'][tag])

            geometry = models[0].links[link_name].collisions[0].geometry
            self.assertEqual(geometry.get_type(), 'cylinder')
            self.assertEqual(geometry.get_param('radius'), radius)
            self.assertEqual(geometry.get_param('length'), length)

            # Test color exists
            material = models[0].links[link_name].visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)

            if not isinstance(
                    color, str) and isinstance(
                    color, list) and color is not None:
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
    def test_cylinder_factory_fixed_args_with_permutation(self):
        """Cylinder factory with explicit lists and permutation enabled.

        With ``use_permutation=True`` the factory must produce one model per
        (radius, length, mass) combination, i.e.
        ``n_radius * n_length * n_masses`` models with counter-suffixed names.
        """
        for color in _get_colors():
            n_radius = random.randint(2, 4)
            n_length = random.randint(2, 4)
            n_masses = random.randint(2, 4)
            name = generate_random_string(3)
            radius = [random.rand() for _ in range(n_radius)]
            length = [random.rand() for _ in range(n_length)]
            pose = [random.rand() for _ in range(6)]
            masses = [random.rand() for _ in range(n_masses)]
            model_config = [
                dict(
                    type='cylinder_factory',
                    args=dict(
                        name=name,
                        radius=radius,
                        length=length,
                        mass=masses,
                        pose=pose,
                        use_permutation=True,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_radius * n_length * n_masses)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                self.assertIn(models[i].links[link_name].inertial.mass, masses)

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')
                self.assertIn(geometry.get_param('radius'), radius)
                self.assertIn(geometry.get_param('length'), length)

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')
                self.assertIn(geometry.get_param('radius'), radius)
                self.assertIn(geometry.get_param('length'), length)

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
    def test_cylinder_factory_fixed_args_no_permutation(self):
        """Cylinder factory with equal-length lists and no permutation.

        With ``use_permutation=False`` and matching list lengths, the
        factory must pair radius/length/mass element-wise and produce
        exactly ``n_models`` models.
        """
        for color in _get_colors():
            n_models = random.randint(2, 4)
            name = generate_random_string(3)
            radius = [random.rand() for _ in range(n_models)]
            length = [random.rand() for _ in range(n_models)]
            pose = [random.rand() for _ in range(6)]
            masses = [random.rand() for _ in range(n_models)]
            model_config = [
                dict(
                    type='cylinder_factory',
                    args=dict(
                        name=name,
                        radius=radius,
                        length=length,
                        mass=masses,
                        pose=pose,
                        use_permutation=False,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_models)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                self.assertIn(models[i].links[link_name].inertial.mass, masses)

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')
                self.assertIn(geometry.get_param('radius'), radius)
                self.assertIn(geometry.get_param('length'), length)

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')
                self.assertIn(geometry.get_param('radius'), radius)
                self.assertIn(geometry.get_param('length'), length)

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
    def test_cylinder_factory_lambda_args_with_permutation(self):
        """Cylinder factory with string (lambda-style) args and permutation.

        Radius, length and mass are given as evaluatable strings; the
        factory must still produce the full
        ``n_radius * n_length * n_masses`` permutation. Exact geometry
        values are not checked since they are generated at evaluation time
        inside the factory.
        """
        for color in _get_colors():
            n_radius = random.randint(2, 4)
            n_length = random.randint(2, 4)
            n_masses = random.randint(2, 4)
            name = generate_random_string(3)
            radius = "__import__('numpy').random.random({})".format(n_radius)
            length = "__import__('numpy').linspace(1, 10, {})".format(n_length)
            masses = "__import__('numpy').linspace(1, 10, {})".format(n_masses)
            pose = [random.rand() for _ in range(6)]
            model_config = [
                dict(
                    type='cylinder_factory',
                    args=dict(
                        name=name,
                        radius=radius,
                        length=length,
                        mass=masses,
                        pose=pose,
                        use_permutation=True,
                        color=color
                    ))
            ]

            models = create_models_from_config(model_config)
            self.assertEqual(len(models), n_radius * n_length * n_masses)

            for i in range(len(models)):
                # Check the name generator with counter
                self.assertIn(name + '_', models[i].name)
                self.assertTrue(models[i].name.split('_')[-1].isdigit())
                self.assertIsInstance(models[i], SimulationModel)
                # Test pose of the model
                self.assertEqual(models[i].pose.position.tolist(), pose[0:3])
                q = Pose.rpy2quat(*pose[3::])
                diff = Pose.get_transform(models[i].pose.quat, q)
                # Test model properties
                self.assertAlmostEqual(np.sum(diff[0:3]), 0)
                self.assertFalse(models[i].static)
                self.assertEqual(len(models[i].links), 1)

                link_name = models[i].link_names[0]

                # Test visual element
                self.assertEqual(len(models[i].links[link_name].visuals), 1)
                geometry = models[i].links[link_name].visuals[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')

                # Test collision element
                self.assertEqual(len(models[i].links[link_name].collisions), 1)
                geometry = models[i].links[link_name].collisions[0].geometry
                self.assertEqual(geometry.get_type(), 'cylinder')

                # Test color exists
                material = models[i].links[link_name].visuals[0].material
                self.assertIsNotNone(material)
                self.assertIsNotNone(material.ambient)
                self.assertIsNotNone(material.diffuse)

                if not isinstance(
                        color, str) and isinstance(
                        color, list) and color is not None:
                    self.assertEqual(material.ambient.value, color)
                    self.assertEqual(material.diffuse.value, color)
    def test_cylinder_forced_permutation(self):
        """Mismatched radius/length/mass list lengths must force permutation.

        Even with ``use_permutation=False``, when the argument lists have
        different lengths the factory cannot pair them element-wise and is
        expected to produce the full ``n_radius * n_length * n_masses``
        permutation, for both lambda-style (string) and fixed-list inputs.
        """
        n_radius = 1
        n_masses = 2
        n_length = 3
        name = generate_random_string(3)

        # Test with lambda functions
        radius = "__import__('numpy').random.random({})".format(n_radius)
        length = "__import__('numpy').random.random({})".format(n_length)
        masses = "__import__('numpy').linspace(1, 10, {})".format(n_masses)
        pose = [random.rand() for _ in range(6)]
        model_config = [
            dict(
                type='cylinder_factory',
                args=dict(
                    name=name,
                    radius=radius,
                    length=length,
                    mass=masses,
                    pose=pose,
                    use_permutation=False,
                    color=None
                ))
        ]

        models = create_models_from_config(model_config)
        self.assertEqual(len(models), n_radius * n_length * n_masses)

        # Test with fixed arguments
        radius = [random.rand() for _ in range(n_radius)]
        length = [random.rand() for _ in range(n_length)]
        masses = [random.rand() for _ in range(n_masses)]
        pose = [random.rand() for _ in range(6)]
        model_config = [
            dict(
                type='cylinder_factory',
                args=dict(
                    name=name,
                    radius=radius,
                    length=length,
                    mass=masses,
                    pose=pose,
                    use_permutation=False,
                    color=None
                ))
        ]

        models = create_models_from_config(model_config)
        self.assertEqual(len(models), n_radius * n_length * n_masses)
def test_static_sphere_model(self):
    """A sphere configured without a mass must yield one static model with
    matching pose, geometry, physics parameters and material."""
    for color in _get_colors():
        name = generate_random_string(3)
        pose = [random.rand() for _ in range(6)]
        radius = random.rand()
        collision_params = dict(
            mu=random.uniform(0, 10),
            mu2=random.uniform(0, 10),
            friction=random.uniform(0, 10),
            friction2=random.uniform(0, 10),
            slip1=random.uniform(0, 1),
            slip2=random.uniform(0, 1),
            rolling_friction=random.uniform(0, 1),
            fdir1=[0, 0, 0],
            max_contacts=1,
            soft_cfm=random.uniform(0, 10),
            soft_erp=random.uniform(0, 10),
            kp=random.uniform(0, 100000),
            kd=random.uniform(0, 10),
            max_vel=random.uniform(0, 0.1),
            min_depth=random.uniform(0, 0.1),
            split_impulse=False,
            split_impulse_penetration_threshold=-0.01,
            restitution_coefficient=random.uniform(0, 1),
            threshold=random.uniform(0, 1))
        model_config = [dict(
            type='sphere',
            args=dict(
                radius=radius,
                name=name,
                pose=pose,
                color=color,
                collision_parameters=collision_params))]

        models = create_models_from_config(model_config)
        self.assertEqual(len(models), 1)
        model = models[0]
        self.assertIsInstance(model, SimulationModel)

        # Pose: position compares directly, orientation via the
        # quaternion transform between stored and expected rotations.
        self.assertEqual(model.pose.position.tolist(), pose[0:3])
        q = Pose.rpy2quat(*pose[3::])
        diff = Pose.get_transform(model.pose.quat, q)
        self.assertAlmostEqual(np.sum(diff[0:3]), 0)

        # No mass provided, so the model must be static with one link.
        self.assertTrue(model.static)
        self.assertEqual(len(model.links), 1)
        link = model.links[model.link_names[0]]

        # Visual geometry
        self.assertEqual(len(link.visuals), 1)
        visual_geom = link.visuals[0].geometry
        self.assertEqual(visual_geom.get_type(), 'sphere')
        self.assertEqual(visual_geom.get_param('radius'), radius)

        # Collision element: every physics-engine parameter group must
        # round-trip through the generated model.
        self.assertEqual(len(link.collisions), 1)
        collision = link.get_collision_by_name('collision')
        param_checks = [
            (collision.get_ode_friction_param,
             ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']),
            (collision.get_bullet_friction_param,
             ['friction', 'friction2', 'rolling_friction', 'fdir1']),
            (collision.get_ode_contact_param,
             ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']),
            (collision.get_bullet_contact_param,
             ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
              'split_impulse_penetration_threshold']),
            (collision.get_bounce_param,
             ['restitution_coefficient', 'threshold']),
        ]
        for getter, tags in param_checks:
            for tag in tags:
                self.assertEqual(getter(tag), collision_params[tag])
        collision_geom = link.collisions[0].geometry
        self.assertEqual(collision_geom.get_type(), 'sphere')
        self.assertEqual(collision_geom.get_param('radius'), radius)

        # Material must exist; RGBA list colors must be stored verbatim.
        material = link.visuals[0].material
        self.assertIsNotNone(material)
        self.assertIsNotNone(material.ambient)
        self.assertIsNotNone(material.diffuse)
        if isinstance(color, list):
            self.assertEqual(material.ambient.value, color)
            self.assertEqual(material.diffuse.value, color)
def test_dynamic_sphere_model(self):
    """A sphere configured with a mass must yield a non-static model whose
    link carries the solid-sphere inertia 2/5 * m * r^2."""
    for color in _get_colors():
        name = generate_random_string(3)
        pose = [random.rand() for _ in range(6)]
        radius = random.rand()
        mass = random.rand()
        collision_params = dict(
            mu=random.uniform(0, 10),
            mu2=random.uniform(0, 10),
            friction=random.uniform(0, 10),
            friction2=random.uniform(0, 10),
            slip1=random.uniform(0, 1),
            slip2=random.uniform(0, 1),
            rolling_friction=random.uniform(0, 1),
            fdir1=[0, 0, 0],
            max_contacts=1,
            soft_cfm=random.uniform(0, 10),
            soft_erp=random.uniform(0, 10),
            kp=random.uniform(0, 100000),
            kd=random.uniform(0, 10),
            max_vel=random.uniform(0, 0.1),
            min_depth=random.uniform(0, 0.1),
            split_impulse=True,
            split_impulse_penetration_threshold=-0.01,
            restitution_coefficient=random.uniform(0, 1),
            threshold=random.uniform(0, 1))
        model_config = [dict(
            type='sphere',
            args=dict(
                radius=radius,
                mass=mass,
                name=name,
                pose=pose,
                color=color,
                collision_parameters=collision_params))]

        models = create_models_from_config(model_config)
        self.assertEqual(len(models), 1)
        model = models[0]
        self.assertIsInstance(model, SimulationModel)

        # Pose: position compares directly, orientation via the
        # quaternion transform between stored and expected rotations.
        self.assertEqual(model.pose.position.tolist(), pose[0:3])
        q = Pose.rpy2quat(*pose[3::])
        diff = Pose.get_transform(model.pose.quat, q)
        self.assertAlmostEqual(np.sum(diff[0:3]), 0)

        # A massive sphere must be dynamic.
        self.assertFalse(model.static)
        self.assertEqual(len(model.links), 1)
        link = model.links[model.link_names[0]]

        # Inertia of a solid sphere: diagonal 2/5*m*r^2, zero products.
        self.assertEqual(link.inertial.mass, mass)
        inertia = 2. / 5 * mass * radius**2
        for moment in (link.inertial.ixx, link.inertial.iyy,
                       link.inertial.izz):
            self.assertAlmostEqual(moment, inertia)
        for product in (link.inertial.ixy, link.inertial.ixz,
                        link.inertial.iyz):
            self.assertEqual(product, 0)

        # Visual geometry
        self.assertEqual(len(link.visuals), 1)
        visual_geom = link.visuals[0].geometry
        self.assertEqual(visual_geom.get_type(), 'sphere')
        self.assertEqual(visual_geom.get_param('radius'), radius)

        # Collision element: every physics-engine parameter group must
        # round-trip through the generated model.
        self.assertEqual(len(link.collisions), 1)
        collision = link.get_collision_by_name('collision')
        param_checks = [
            (collision.get_ode_friction_param,
             ['mu', 'mu2', 'slip1', 'slip2', 'fdir1']),
            (collision.get_bullet_friction_param,
             ['friction', 'friction2', 'rolling_friction', 'fdir1']),
            (collision.get_ode_contact_param,
             ['soft_cfm', 'soft_erp', 'kp', 'kd', 'max_vel', 'min_depth']),
            (collision.get_bullet_contact_param,
             ['soft_cfm', 'soft_erp', 'kp', 'kd', 'split_impulse',
              'split_impulse_penetration_threshold']),
            (collision.get_bounce_param,
             ['restitution_coefficient', 'threshold']),
        ]
        for getter, tags in param_checks:
            for tag in tags:
                self.assertEqual(getter(tag), collision_params[tag])
        collision_geom = link.collisions[0].geometry
        self.assertEqual(collision_geom.get_type(), 'sphere')
        self.assertEqual(collision_geom.get_param('radius'), radius)

        # Material must exist; RGBA list colors must be stored verbatim.
        material = link.visuals[0].material
        self.assertIsNotNone(material)
        self.assertIsNotNone(material.ambient)
        self.assertIsNotNone(material.diffuse)
        if isinstance(color, list):
            self.assertEqual(material.ambient.value, color)
            self.assertEqual(material.diffuse.value, color)
def test_sphere_factory_fixed_args_with_permutation(self):
    """sphere_factory with fixed value lists and use_permutation=True must
    produce one model per (radius, mass) combination."""
    for color in _get_colors():
        n_radius = random.randint(2, 4)
        n_masses = random.randint(2, 4)
        name = generate_random_string(3)
        radius = [random.rand() for _ in range(n_radius)]
        pose = [random.rand() for _ in range(6)]
        masses = [random.rand() for _ in range(n_masses)]
        model_config = [dict(
            type='sphere_factory',
            args=dict(
                name=name,
                radius=radius,
                mass=masses,
                pose=pose,
                use_permutation=True,
                color=color))]
        models = create_models_from_config(model_config)
        self.assertEqual(len(models), n_radius * n_masses)
        for model in models:
            # Generated names are the base name plus a counter suffix.
            self.assertIn(name + '_', model.name)
            self.assertTrue(model.name.split('_')[-1].isdigit())
            self.assertIsInstance(model, SimulationModel)
            # Pose: position directly, orientation via quaternion diff.
            self.assertEqual(model.pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(model.pose.quat, q)
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertFalse(model.static)
            self.assertEqual(len(model.links), 1)
            link = model.links[model.link_names[0]]
            # Mass and radius must come from the supplied value lists.
            self.assertIn(link.inertial.mass, masses)
            self.assertEqual(len(link.visuals), 1)
            visual_geom = link.visuals[0].geometry
            self.assertEqual(visual_geom.get_type(), 'sphere')
            self.assertIn(visual_geom.get_param('radius'), radius)
            self.assertEqual(len(link.collisions), 1)
            collision_geom = link.collisions[0].geometry
            self.assertEqual(collision_geom.get_type(), 'sphere')
            self.assertIn(collision_geom.get_param('radius'), radius)
            # Material must exist; list colors are stored verbatim.
            material = link.visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)
            if isinstance(color, list):
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
def test_sphere_factory_fixed_args_no_permutation(self):
    """sphere_factory with equally sized value lists and
    use_permutation=False must pair the lists element-wise, yielding
    exactly one model per list entry."""
    for color in _get_colors():
        n_models = random.randint(2, 4)
        name = generate_random_string(3)
        radius = [random.rand() for _ in range(n_models)]
        pose = [random.rand() for _ in range(6)]
        masses = [random.rand() for _ in range(n_models)]
        model_config = [dict(
            type='sphere_factory',
            args=dict(
                name=name,
                radius=radius,
                mass=masses,
                pose=pose,
                use_permutation=False,
                color=color))]
        models = create_models_from_config(model_config)
        self.assertEqual(len(models), n_models)
        for model in models:
            # Generated names are the base name plus a counter suffix.
            self.assertIn(name + '_', model.name)
            self.assertTrue(model.name.split('_')[-1].isdigit())
            self.assertIsInstance(model, SimulationModel)
            # Pose: position directly, orientation via quaternion diff.
            self.assertEqual(model.pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(model.pose.quat, q)
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertFalse(model.static)
            self.assertEqual(len(model.links), 1)
            link = model.links[model.link_names[0]]
            # Mass and radius must come from the supplied value lists.
            self.assertIn(link.inertial.mass, masses)
            self.assertEqual(len(link.visuals), 1)
            visual_geom = link.visuals[0].geometry
            self.assertEqual(visual_geom.get_type(), 'sphere')
            self.assertIn(visual_geom.get_param('radius'), radius)
            self.assertEqual(len(link.collisions), 1)
            collision_geom = link.collisions[0].geometry
            self.assertEqual(collision_geom.get_type(), 'sphere')
            self.assertIn(collision_geom.get_param('radius'), radius)
            # Material must exist; list colors are stored verbatim.
            material = link.visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)
            if isinstance(color, list):
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
def test_sphere_factory_lambda_args_with_permutation(self):
    """sphere_factory with lambda-function string arguments and
    use_permutation=True must produce one model per (radius, mass) pair."""
    for color in _get_colors():
        n_radius = random.randint(2, 4)
        n_masses = random.randint(2, 4)
        name = generate_random_string(3)
        # Arguments given as strings evaluated by the factory.
        radius = "__import__('numpy').random.random({})".format(n_radius)
        masses = "__import__('numpy').linspace(1, 10, {})".format(n_masses)
        pose = [random.rand() for _ in range(6)]
        model_config = [dict(
            type='sphere_factory',
            args=dict(
                name=name,
                radius=radius,
                mass=masses,
                pose=pose,
                use_permutation=True,
                color=color))]
        models = create_models_from_config(model_config)
        self.assertEqual(len(models), n_radius * n_masses)
        for model in models:
            # Generated names are the base name plus a counter suffix.
            self.assertIn(name + '_', model.name)
            self.assertTrue(model.name.split('_')[-1].isdigit())
            self.assertIsInstance(model, SimulationModel)
            # Pose: position directly, orientation via quaternion diff.
            self.assertEqual(model.pose.position.tolist(), pose[0:3])
            q = Pose.rpy2quat(*pose[3::])
            diff = Pose.get_transform(model.pose.quat, q)
            self.assertAlmostEqual(np.sum(diff[0:3]), 0)
            self.assertFalse(model.static)
            self.assertEqual(len(model.links), 1)
            link = model.links[model.link_names[0]]
            # Radius/mass values are random here, so only the geometry
            # type is verified, not the numeric values.
            self.assertEqual(len(link.visuals), 1)
            self.assertEqual(link.visuals[0].geometry.get_type(), 'sphere')
            self.assertEqual(len(link.collisions), 1)
            self.assertEqual(
                link.collisions[0].geometry.get_type(), 'sphere')
            # Material must exist; list colors are stored verbatim.
            material = link.visuals[0].material
            self.assertIsNotNone(material)
            self.assertIsNotNone(material.ambient)
            self.assertIsNotNone(material.diffuse)
            if isinstance(color, list):
                self.assertEqual(material.ambient.value, color)
                self.assertEqual(material.diffuse.value, color)
def test_sphere_forced_permutation(self):
    """Argument lists of different lengths force a permutation of all
    (radius, mass) combinations even when use_permutation=False."""
    n_radius = random.randint(2, 3)
    n_masses = 2 * n_radius
    name = generate_random_string(3)
    arg_sets = [
        # First pass: arguments given as lambda-function strings.
        dict(
            radius="__import__('numpy').random.random({})".format(n_radius),
            mass="__import__('numpy').linspace(1, 10, {})".format(n_masses)),
        # Second pass: arguments given as fixed value lists.
        dict(
            radius=[random.rand() for _ in range(n_radius)],
            mass=[random.rand() for _ in range(n_masses)]),
    ]
    for factory_args in arg_sets:
        pose = [random.rand() for _ in range(6)]
        model_config = [dict(
            type='sphere_factory',
            args=dict(
                name=name,
                pose=pose,
                use_permutation=False,
                color=None,
                **factory_args))]
        models = create_models_from_config(model_config)
        # One model per (radius, mass) combination.
        self.assertEqual(len(models), n_radius * n_masses)
def test_extrude_polygon(self):
    """Meshes can be generated by extruding polygons, dilating points and
    dilating polylines, for every cap/join style combination."""

    def run_config(model_config):
        # Each configuration must yield exactly one model; generated
        # mesh files are removed afterwards to keep the workspace clean.
        models = create_models_from_config(model_config)
        self.assertEqual(len(models), 1)
        for model in models:
            _delete_generated_meshes(model.to_sdf())

    cap_style = ['round', 'flat', 'square']
    join_style = ['round', 'mitre', 'bevel']

    # 1) Extrude a closed square polygon.
    square = Polygon([(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)])
    name = generate_random_string(3)
    pose = [random.rand() for _ in range(6)]
    mass = random.rand()
    height = random.rand()
    run_config([dict(
        type='extrude',
        args=dict(
            polygon=square,
            name=name,
            mass=mass,
            height=height,
            pose=pose,
            color=None))])

    # 2) Extrude only the boundaries, across all cap/join styles.
    for cs in cap_style:
        for js in join_style:
            run_config([dict(
                type='extrude',
                args=dict(
                    polygon=square,
                    name=name,
                    mass=mass,
                    height=height,
                    pose=pose,
                    color=None,
                    extrude_boundaries=True,
                    thickness=random.rand(),
                    cap_style=cs,
                    join_style=js))])

    # 3) Dilate a single point into a mesh.
    point = MultiPoint([(random.rand() * 5, random.rand() * 5)])
    name = generate_random_string(3)
    pose = [random.rand() for _ in range(6)]
    mass = random.rand()
    height = random.rand()
    run_config([dict(
        type='extrude',
        args=dict(
            polygon=point,
            name=name,
            mass=mass,
            height=height,
            pose=pose,
            color=None,
            thickness=random.rand()))])

    # 4) Dilate a polyline, again across all cap/join styles.
    line = LineString([(random.rand() * 5, random.rand() * 5)
                       for _ in range(5)])
    name = generate_random_string(3)
    pose = [random.rand() for _ in range(6)]
    mass = random.rand()
    height = random.rand()
    for cs in cap_style:
        for js in join_style:
            run_config([dict(
                type='extrude',
                args=dict(
                    polygon=line,
                    name=name,
                    mass=mass,
                    height=height,
                    pose=pose,
                    color=None,
                    cap_style=cs,
                    join_style=js,
                    thickness=random.rand()))])
def test_invalid_polygon_extrude_inputs(self):
    """A zero thickness must be rejected when dilating a point."""
    point = MultiPoint([(random.rand() * 5, random.rand() * 5)])
    bad_config = [dict(
        type='extrude',
        args=dict(
            polygon=point,
            thickness=0,
            height=random.rand()))]
    with self.assertRaises(AssertionError):
        create_models_from_config(bad_config)
def test_export_to_gazebo_model(self):
    """A model produced by extrude() can be exported as a Gazebo model
    directory on disk."""
    square = Polygon([(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)])
    model = extrude(
        polygon=square,
        name=generate_random_string(3),
        mass=random.rand(),
        height=10 * random.rand(),
        pose=[random.rand() for _ in range(6)],
        color=None)
    model_dir = model.to_gazebo_model()
    self.assertTrue(os.path.isdir(model_dir))
    # Clean up the exported model folder.
    shutil.rmtree(model_dir)
# Allow running this test module directly (e.g. ``python <file>.py``).
if __name__ == '__main__':
    unittest.main()
| 41.705717 | 79 | 0.51342 |
015472fb9c391cf927e9fc40a91f9c791db806b8 | 3,646 | py | Python | scripts/server2ros.py | wjvanderlip/radbot_nuke | 7ea4bf049f4249ddbe033bd6453d80a4d6a604e2 | [
"MIT"
] | null | null | null | scripts/server2ros.py | wjvanderlip/radbot_nuke | 7ea4bf049f4249ddbe033bd6453d80a4d6a604e2 | [
"MIT"
] | null | null | null | scripts/server2ros.py | wjvanderlip/radbot_nuke | 7ea4bf049f4249ddbe033bd6453d80a4d6a604e2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import numpy as np
import pickle
import time
import socket
from rospy.numpy_msg import numpy_msg
from radbot_nuke.msg import detector_msg
rospy.init_node('detServer', anonymous=False)
pub = rospy.Publisher('/detector_data', detector_msg, queue_size=10)
def send_ros_msg(ts, adc, det_sn):
'''
takes output from client, packages it, and publishes as message
'''
msg = detector_msg()
now = time.time()
rnow = rospy.Time.now()
msg.header.stamp = rnow
msg.det_sn.data = det_sn
msg.detid = assign_det_num(det_sn)
msg.ts_sys = now
msg.ts_det = ts
msg.channel = adc
msg.event_rate = len(adc)*1.0
#process rate data if needed
pub.publish(msg)
print len(ts), len(adc), det_sn, " Published!!"
def assign_det_num(ser_no):
    '''Map an eMorpho detector serial number to its fixed detector id.'''
    serial_to_id = {
        'eRC4129': 1,
        'eRC4131': 2,
        'eRC4216': 3,
        'eRC4130': 4,
        'eRC4214': 5,
    }
    return serial_to_id[ser_no]
def process_rates(rates):
    '''
    If we start to hit the list mode buffer throughput, this parses
    the "rate data" flag returned with each eMorpho buffer readout. Using
    the live and dead time values you can assess higher count rate envs.
    https://www.bridgeportinstruments.com/products/mds/mds_doc/read_rates.php

    ``rates`` is ``[acq_time_ticks, n_events, n_triggers, dead_time_ticks]``,
    all cumulative counters.  Deltas against the previous call are kept in
    ``process_rates._prev`` (the original referenced undefined ``old_*``
    globals and an undefined ``msg`` and would crash with NameError).

    Returns ``(event_rate, trig_rate, true_rate)`` in events per second.
    '''
    prev = process_rates._prev
    # Prevent the dead-time fraction from going to inf (as in the
    # original guard); note this mutates the caller's list.
    if rates[3] <= 0:
        rates[3] = 1
    acq_tme = rates[0] - prev['acq_tme']
    events = rates[1] - prev['events']
    trigs = rates[2] - prev['trigs']
    ded_tme = rates[3] - prev['ded_tme']
    # Guard against a zero acquisition delta (repeated readout).
    if acq_tme <= 0:
        acq_tme = 1
    # 16-bit encoding at an 80 MHz clock: ticks -> seconds.
    active_time = acq_tme * (65536.0 / 80000000.0)
    event_rate = events / active_time        # event rate in seconds
    trig_rate = trigs / active_time          # trigger rate in seconds
    # True incident pulse rate, corrected for the dead-time fraction.
    true_rate = (trigs / active_time) * (1.0 - float(ded_tme) / acq_tme)
    # Remember the cumulative counters for the next delta.
    prev['acq_tme'] = rates[0]
    prev['events'] = rates[1]
    prev['trigs'] = rates[2]
    prev['ded_tme'] = rates[3]
    return event_rate, trig_rate, true_rate

# Persistent previous-readout counters (replaces the undefined globals).
process_rates._prev = {'acq_tme': 0, 'events': 0, 'trigs': 0, 'ded_tme': 0}
def start_server(port):
    '''
    Starts a server to constantly monitor some port for incoming data from
    one of the multiprocessing subprocesses. The client sends a pickled
    list consisting of the detector serial number, an array of event time
    stamps and an array of event ADC (channel) values, terminated by the
    '<-o->' sentinel. Each complete payload is published on the
    /detector_data topic using the detector_msg message via send_ros_msg().
    '''
    try:
        serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        serv.bind(('0.0.0.0', port))
        serv.listen(5)
        buf = None
        print("Waiting to receive connection...")
        while True:
            conn, addr = serv.accept()
            while True:
                data = conn.recv(4096)
                if not data:
                    # Peer closed the connection before sending the
                    # sentinel: drop any partial payload instead of
                    # spinning forever on empty reads.
                    conn.close()
                    buf = None
                    break
                if buf is None:
                    buf = data
                else:
                    buf += data
                if '<-o->' in buf:
                    message = buf[:-len('<-o->')]
                    # PROCESS MESSAGE HERE
                    incoming_data = pickle.loads(message)
                    # incoming_data is [serial_number, ts_array, adc_array]
                    send_ros_msg(incoming_data[1], incoming_data[2],
                                 incoming_data[0])  # ts, adc, sn
                    buf = None
                    conn.close()
                    break
    except socket.error as exc:
        print("Caught exception socket.error : %s" % exc)
def main():
    '''Entry point: serve incoming detector data on port 8082 forever.'''
    start_server(port=8082)
# Start the TCP-to-ROS bridge when executed as a script (the ROS node and
# publisher were already set up at import time above).
if __name__ == '__main__':
    main()
| 33.145455 | 159 | 0.626166 |
01584c384f8f8eea463542d1db5fde7c51f316cc | 3,166 | py | Python | yubin/__init__.py | mritu301/NLP-Labeling | 403b06c25984646be9ed8f37c5777d32acc3dec1 | [
"MIT"
] | 3 | 2019-10-28T00:07:25.000Z | 2020-01-17T05:25:08.000Z | yubin/__init__.py | alvations/yubin | 403b06c25984646be9ed8f37c5777d32acc3dec1 | [
"MIT"
] | null | null | null | yubin/__init__.py | alvations/yubin | 403b06c25984646be9ed8f37c5777d32acc3dec1 | [
"MIT"
] | null | null | null | import re
from itertools import chain
from collections import defaultdict, namedtuple
import pandas as pd
from tqdm import tqdm
# Parsed components of a Japanese address, as captured by Postman's regex
# passes: prefecture (to, 都), floor (kai, 階/F), ward (ku, 区), building
# (mune, 棟), city block (chome, 丁目), block number (ban, 番), house/unit
# number (go, 号), postal code, trailing bare number (endgo), plus the full
# token list with captures substituted back in.
Address = namedtuple('Address', ['to', 'kai', 'ku', 'mune', 'chome', 'ban',
                                 'go', 'postal', 'endgo', 'tokens'])
class Postman:
    """
    Converts Japanese address in raw string to structured object.

    Tokenization works by successively replacing regex matches with
    placeholder tokens (``#TO#``, ``#KU#``, ...) and afterwards
    substituting the captured text back into the token list.
    """
    # Prefecture, e.g. 東京都 (only Tokyo is listed at the moment).
    to_regex = r'(({})都)'.format('|'.join(['東京']))
    # Ward (区); anchored on the #TO# placeholder produced by to_regex.
    ku_regex = r'#TO# (.*区(佃)?)'
    # Postal code: optional 〒, digit groups joined by dash-like characters.
    postal_regex = r'((〒?[0-90-9]+([ー-‐-])){1,}([0-90-9]+))'
    chome_ban_go = r'([0-90-9一⼀⼆三四五六七⼋九⼗ー]+([{}]+))' # Generic pattern for chome, ban and go.
    chome_regex = chome_ban_go.format('丁目')
    ban_regex = chome_ban_go.format('番街目')
    go_regex = chome_ban_go.format('号棟室館')
    endgo_regex = r'(([0-90-9一⼀⼆三四五六七⼋九⼗]+$))' # Number at the end.
    ban_go_regex = ban_regex + go_regex + '?' # Not useful?
    # Building designation (棟 / 号棟), e.g. A棟.
    mune_regex = r'([0-90-9A-Z一⼀⼆三四五六七⼋九⼗]+([号]?[棟]))'
    # Floor, e.g. 3階 or 3F.
    kai_regex = r'(([0-90-9ー]+|[一⼀⼆三四五六七⼋九⼗]+)[階F])'

    # NOTE: order matters — patterns are applied top to bottom, and
    # ku_regex depends on the #TO# placeholder inserted by to_regex.
    JAPANESE_ADDRESS_REGEXES = [
        (to_regex, ' #TO# '),
        (kai_regex, ' #KAI# '),
        (ku_regex, ' #KU# '),
        (mune_regex, ' #MUNE# '),
        (chome_regex, ' #CHOME# '), # Catch chome / ban / go individually.
        (ban_regex, ' #BAN# '),
        (go_regex, ' #GO# '),
        (postal_regex, ' #POSTAL# '),
        (endgo_regex, ' #ENDGO# ')
    ]

    def __init__(self):
        # Stateless: all patterns are class attributes.
        pass

    def normalize(self, pattern, substitution, text):
        """
        Replace every regex match of *pattern* in *text* with
        *substitution* and collapse whitespace.

        Returns ``(matches, normalized_text)`` where *matches* are the
        outermost-group captures.
        NOTE(review): ``str.replace`` substitutes every occurrence of the
        matched substring, not only the regex match position — confirm
        this is intended for repeated fragments.
        """
        matches = [match[0] for match in re.findall(pattern, text)]
        for match in matches:
            text = text.replace(match, substitution)
        return matches, ' '.join(text.split()).strip()

    def tokenize(self, addr, use_char=False, return_str=False):
        """
        Returns an address object.

        :param addr: raw Japanese address string.
        :param use_char: split non-placeholder tokens into characters.
        :param return_str: return the space-joined tokens instead of the
            Address namedtuple.
        """
        # Go through the regex and keep the group matches.
        regex_matches = defaultdict(list)
        for pattern, substitute in self.JAPANESE_ADDRESS_REGEXES:
            matches, addr = self.normalize(pattern, substitute, addr)
            regex_matches[substitute.strip()] = matches
        # Options to return different tokens.
        if use_char: # Split the non-captured groups by characters.
            tokens = list(chain(*[[token] if token.strip().startswith('#')
                                  and token.strip().endswith('#')
                                  else list(token)
                                  for token in addr.split()]))
        else: # Simply return the tokens split by spaces.
            tokens = addr.split()
        # Replace the placeholders back.
        # NOTE(review): tokens.index(k) replaces the first occurrence of
        # each placeholder per capture — relies on capture order matching
        # placeholder order in the token list.
        for k in regex_matches:
            for v in regex_matches[k]:
                tokens[tokens.index(k)] = v
        address = Address(to=regex_matches['#TO#'], kai=regex_matches['#KAI#'],
                          ku=regex_matches['#KU#'], mune=regex_matches['#MUNE#'],
                          chome=regex_matches['#CHOME#'], ban=regex_matches['#BAN#'],
                          go=regex_matches['#GO#'], postal=regex_matches['#POSTAL#'],
                          endgo=regex_matches['#ENDGO#'], tokens=tokens)
        return ' '.join(address.tokens) if return_str else address
| 40.075949 | 93 | 0.549905 |
015973c0cde41c6d39371ea08957e73b3b7deff2 | 476 | py | Python | setup.py | vmraid/vitalpbx | 3debc302763e53393ccb9610cb117a7d4872d59a | [
"MIT"
] | null | null | null | setup.py | vmraid/vitalpbx | 3debc302763e53393ccb9610cb117a7d4872d59a | [
"MIT"
] | null | null | null | setup.py | vmraid/vitalpbx | 3debc302763e53393ccb9610cb117a7d4872d59a | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
# Runtime dependencies are read from requirements.txt, one per line.
with open('requirements.txt') as f:
    install_requires = f.read().strip().split('\n')

# get version from __version__ variable in vitalpbx/__init__.py
from vitalpbx import __version__ as version

setup(
    name='vitalpbx',
    version=version,
    description='Something',  # NOTE(review): placeholder metadata below — fill in before publishing.
    author='Someone',
    author_email='someone@somewhere.com',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    install_requires=install_requires
)
| 23.8 | 63 | 0.781513 |
015a6e13245e57ce78b8a86522e4aae0bd1d03bc | 381 | py | Python | 1030.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | 6 | 2021-04-13T00:33:43.000Z | 2022-02-10T10:23:59.000Z | 1030.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | null | null | null | 1030.py | heltonricardo/URI | 160cca22d94aa667177c9ebf2a1c9864c5e55b41 | [
"MIT"
] | 3 | 2021-03-23T18:42:24.000Z | 2022-02-10T10:24:07.000Z | for g in range(int(input())):
n, k = [int(x) for x in input().split()]
v = [1 for x in range(1, n+1)]
m = 0
i = p = 1
while (m < n-1):
if v[i] == 1: p += 1
if p == k:
v[i] = 0
m += 1
p = 0
i += 1
if i == n: i = 0
i = 0
while v[i] == 0: i += 1
print('Case {}: {}'.format(g+1, i+1))
| 22.411765 | 44 | 0.333333 |
015a892c046052f40a511917cfd969000101e5c9 | 8,215 | py | Python | onionstudio.py | jarret/onionstudio | 5ebf0a75cf1e7960822c96a987668be5ed82aa41 | [
"MIT"
] | 11 | 2020-01-09T19:48:20.000Z | 2020-11-21T19:59:36.000Z | onionstudio.py | jarret/onionstudio | 5ebf0a75cf1e7960822c96a987668be5ed82aa41 | [
"MIT"
] | null | null | null | onionstudio.py | jarret/onionstudio | 5ebf0a75cf1e7960822c96a987668be5ed82aa41 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2020 Jarret Dyrbye
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php
import time
import sys
import json
import argparse
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from autobahn.twisted.websocket import WebSocketServerProtocol
from autobahn.twisted.websocket import WebSocketServerFactory
from txzmq import ZmqEndpoint, ZmqEndpointType
from txzmq import ZmqFactory
from txzmq import ZmqSubConnection
from bolt.util import h2b
from onionstudio.art_db import ArtDb
from onionstudio.compressor import compressor
from onionstudio.extension import Extension, PIXEL_TLV_TYPE
# Unpaid-HTLC bookkeeping: scan every 60 s, drop entries older than 120 s.
UNPAID_PRUNE_CHECK = 60
UNPAID_PRUNE_SECONDS = 120
###############################################################################
class AppClient(WebSocketServerProtocol):
    """Websocket protocol for one viewer connection.

    On open the client registers itself with the owning AppServer and
    receives the full compressed canvas; afterwards it only receives the
    incremental pixel updates broadcast by AppServer.echo_to_clients().
    """

    def onConnect(self, request):
        print("Client connecting: {0}".format(request.peer))

    def onOpen(self):
        print("WebSocket client connection open.")
        # self.server is attached by AppServer (protocol.server = self).
        self.server.clients.append(self)
        # Push the complete current canvas so the client can render it.
        art_bin = self.server.app.art_db.to_bin()
        compressed_bin = compressor(art_bin)
        print("compressed art bin len: %d" % len(compressed_bin))
        self.sendMessage(compressed_bin, isBinary=True)

    def onMessage(self, payload, isBinary):
        # The websocket is one-way (server -> client); client input is
        # ignored.
        print("got message?")

    def onClose(self, wasClean, code, reason):
        print("WebSocket connection closed: {0}".format(reason))
        # Deregister so echo_to_clients() stops sending to a dead
        # connection (onOpen may never have run, hence the membership
        # check) — previously closed clients were never removed.
        if self in self.server.clients:
            self.server.clients.remove(self)
###############################################################################
class AppServer(WebSocketServerFactory):
    """Websocket factory that owns the set of connected viewer clients
    and can broadcast pixel updates to all of them as JSON."""

    def __init__(self, port, app):
        ws_url = u"ws://0.0.0.0:%d" % port
        # NOTE(review): ws_url is only used for the log line below and is
        # not passed to the WebSocketServerFactory constructor — confirm
        # the factory's default URL is acceptable.
        super().__init__()
        self.setProtocolOptions(openHandshakeTimeout=15, autoPingInterval=30,
                                autoPingTimeout=5)
        self.protocol = AppClient
        # Each AppClient instance reaches back to this factory through
        # its class-level ``server`` attribute.
        self.protocol.server = self
        self.clients = []
        print("listening on websocket %s" % ws_url)
        reactor.listenTCP(port, self)
        self.app = app

    def echo_to_clients(self, pixels):
        # Serialize the pixel batch once, then fan it out to all clients.
        pixels = [{'x': p.x, 'y': p.y, 'rgb': p.rgb} for p in pixels]
        message = {'pixels': pixels}
        message = json.dumps(message)
        print("echoing to clients: %s" % message)
        for c in self.clients:
            c.sendMessage(message.encode("utf8"))
###############################################################################
# ZMQ subscription topic tags, matching the c-lightning notification names
# relayed by the cl-zmq plugin (see the argparse help below).
HTLC_ACCEPTED_TAG = "htlc_accepted".encode("utf8")
FORWARD_EVENT_TAG = "forward_event".encode("utf8")
class App(object):
def __init__(self, endpoint, mock_endpoint, port, art_db_dir):
self.endpoint = endpoint
self.mock_endpoint = mock_endpoint
self.port = port
self.art_db_dir = art_db_dir
self.unpaid_htlcs = {}
self.prune_loop = LoopingCall(self.prune_unpaid)
self.prune_loop.start(interval=UNPAID_PRUNE_CHECK, now=False)
###########################################################################
def setup_art_db(self):
self.art_db = ArtDb(self.art_db_dir)
###########################################################################
def setup_websocket(self):
self.ws_server = AppServer(self.port, self)
###########################################################################
def setup_zmq(self):
zmq_factory = ZmqFactory()
print("subscribing on: %s" % self.endpoint)
sub_endpoint = ZmqEndpoint(ZmqEndpointType.connect, self.endpoint)
sub_connection = ZmqSubConnection(zmq_factory, sub_endpoint)
sub_connection.gotMessage = self.zmq_message
sub_connection.subscribe(FORWARD_EVENT_TAG)
sub_connection.subscribe(HTLC_ACCEPTED_TAG)
print("subscribing on: %s" % self.mock_endpoint)
sub_mock_endpoint = ZmqEndpoint(ZmqEndpointType.connect,
self.mock_endpoint)
sub_mock_connection = ZmqSubConnection(zmq_factory, sub_mock_endpoint)
sub_mock_connection.gotMessage = self.zmq_message
sub_mock_connection.subscribe(FORWARD_EVENT_TAG)
sub_mock_connection.subscribe(HTLC_ACCEPTED_TAG)
def forward_event_message(self, message):
d = json.loads(message.decode('utf8'))['forward_event']
print("got %s" % json.dumps(d, indent=1))
if d['status'] != 'settled':
print("invoice is not settled")
return
if d['payment_hash'] in self.unpaid_htlcs.keys():
self.finish_htlc(d['payment_hash'])
def htlc_accepted_message(self, message):
d = json.loads(message.decode('utf8'))
payment_hash = d['htlc']['payment_hash']
amount = int(d['htlc']['amount'][:-4])
forward_amount = int(d['onion']['forward_amount'][:-4])
payload_hex = d['onion']['payload']
payload = h2b(payload_hex)
paid = amount - forward_amount
parsed_payload, err = Extension.parse(payload)
if err:
print("could not parse payload: %s" % err)
return
print("parsed payload %s" % parsed_payload)
pixels = parsed_payload['tlvs'][PIXEL_TLV_TYPE]['pixels']
if (amount - forward_amount) < len(pixels) * 1000:
print("forward fee not enough")
return
self.unpaid_htlcs[payment_hash] = {'payload_hex': payload_hex,
'recv_time': time.time()}
def finish_htlc(self, payment_hash):
payload_hex = self.unpaid_htlcs.pop(payment_hash)['payload_hex']
payload = h2b(payload_hex)
parsed_payload, err = Extension.parse(payload)
assert err is None, "could not parse the second time?"
pixels = parsed_payload['tlvs'][PIXEL_TLV_TYPE]['pixels']
self.art_db.record_pixels(payload_hex, pixels)
self.ws_server.echo_to_clients( pixels)
def zmq_message(self, message, tag):
if tag == FORWARD_EVENT_TAG:
self.forward_event_message(message)
elif tag == HTLC_ACCEPTED_TAG:
self.htlc_accepted_message(message)
else:
sys.exit("unknown tag: %s" % tag)
###########################################################################
def prune_unpaid(self):
now = time.time()
new = {k: v for k, v in self.unpaid_htlcs.items() if
(now - v['recv_time']) < UNPAID_PRUNE_SECONDS}
#print("pruning: %d to %d" % (len(self.unpaid_htlcs), len(new)))
self.unpaid_htlcs = new
###########################################################################
def run(self):
self.setup_websocket()
self.setup_zmq()
self.setup_art_db()
    def stop(self):
        """Shutdown hook: release the art database's mapped file
        (unmap_art_bin)."""
        self.art_db.unmap_art_bin()
###############################################################################
# Defaults for the command-line options below.
DEFAULT_WEBSOCKET_PORT = 9000
DEFAULT_ZMQ_SUBSCRIBE_ENDPOINT = "tcp://127.0.0.1:6666"
DEFAULT_MOCK_ZMQ_SUBSCRIBE_ENDPOINT = "tcp://127.0.0.1:5557"
DEFAULT_ART_DB_DIR = "/tmp/onionstudio/"

# Command-line interface for the daemon.
parser = argparse.ArgumentParser(prog="onionstudio.py")
parser.add_argument("-e", "--endpoint", type=str,
                    default=DEFAULT_ZMQ_SUBSCRIBE_ENDPOINT,
                    help="endpoint to subscribe to for zmq notifications from "
                         "c-lightning via cl-zmq.py plugin")
parser.add_argument("-m", "--mock-endpoint", type=str,
                    default=DEFAULT_MOCK_ZMQ_SUBSCRIBE_ENDPOINT,
                    # typo fix: "notifcations" -> "notifications"
                    help="endpoint to subscribe to zmq notifications from a "
                         "test script such as mock-png.py")
parser.add_argument("-w", "--websocket-port", type=int,
                    default=DEFAULT_WEBSOCKET_PORT,
                    help="port to listen for incoming websocket connections")
parser.add_argument("-a", "--art-db-dir", type=str, default=DEFAULT_ART_DB_DIR,
                    help="directory to save the image state and logs")
settings = parser.parse_args()

# Instantiate the app, start it, and run the reactor; stop() is
# registered to run before the reactor shuts down so the art db is
# released cleanly.
a = App(settings.endpoint, settings.mock_endpoint, settings.websocket_port,
        settings.art_db_dir)
a.run()
reactor.addSystemEventTrigger("before", "shutdown", a.stop)
reactor.run()
| 37.340909 | 79 | 0.597687 |
015a901776e9ca3956396eb5890b4a755eb6a932 | 4,078 | py | Python | mavsim_holodeck/rosflight_holodeck.py | sethmnielsen/mavsim_template_files | 453ec4f7d38fc2d1162198b554834b5bdb7de96f | [
"MIT"
] | null | null | null | mavsim_holodeck/rosflight_holodeck.py | sethmnielsen/mavsim_template_files | 453ec4f7d38fc2d1162198b554834b5bdb7de96f | [
"MIT"
] | null | null | null | mavsim_holodeck/rosflight_holodeck.py | sethmnielsen/mavsim_template_files | 453ec4f7d38fc2d1162198b554834b5bdb7de96f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from __future__ import print_function
from rosflight_holodeck_interface import ROSflightHolodeck
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
import time
import holodeck
from holodeck import agents
from holodeck.environments import *
from holodeck import sensors
from IPython.core.debugger import Pdb
import os
import cv2
print( '\nHOLODECK PATH: {}\n'.format( os.path.dirname( holodeck.__file__ ) ) )
np.set_printoptions(precision=3, suppress=True, sign=' ', floatmode='fixed')
if __name__ == '__main__':
env = holodeck.make("Ocean")
RF = ROSflightHolodeck()
env.reset()
env.tick()
# wave intensity: 1-13(int), wave size: 1-8(int), wave direction: 0-360 degreese (float)
env.set_ocean_state(3, 3, 90)
env.set_aruco_code(False)
x0 = np.array([0, 0, 0, # position [0-2]
1, 0, 0, 0, # attitude [3-6]
0, 0, 0, # velocity [7-9]
0, 0, 0, # omega [10-12]
0, 0, 0], dtype=np.float64) # acc [13-15]
h0 = 41 # initial altitude [m]
x0[2] = -h0
# x0[0] = -34.5
RF.init()
RF.setState(x0)
RF.setTime(10)
uav_cmd = np.array([0, 0, 0, 0])
boat_cmd = 0
env.act("uav0", uav_cmd)
env.act("boat0", boat_cmd)
pos0 = x0[:3] * [100, 100, -100] # convert to cm, negate z
env.teleport("uav0", location=pos0, rotation=[0,0,0])
env.teleport("boat0", location=[-2000,0,0], rotation=[0,0,0])
frate = 30 # frame rate of camera/rendering [Hz]
simrate = 800 # rate of simulated dynamics [Hz]
n = simrate//frate # Number of sim iterations between frames
dt = 1.0/simrate
#'F' order because eigen matrices are column-major while numpy are row-major
# x_arr = np.zeros((16, n), order='F')
x = np.zeros(16)
t = np.zeros(n)
rf_outputs = np.zeros(4)
state = np.array([])
pos = np.zeros(3)
att = np.zeros(3)
ground = -0.1
collision = False
count = 0
while 1: # Main loop: 1 iteration = 1 rendered frame
if not collision:
for i in range(n): # Loop between frames (dynamics/control)
RF.run(dt)
time.sleep(dt)
RF.getState(x)
# Make sure mav doesn't fall through ground
if x[2] > ground and x[9] > 0:
# at ground level and not gaining altitude
x_ground = np.copy(x0)
x_ground[:3] = [x[0], x[1], ground]
att_eul = Quaternion2Euler(x[3:7])
ground_eul = np.array([0, 0, att_eul[2]])
x_ground[3:7] = Euler2Quaternion(ground_eul)
pos = x_ground[:3] * [100,100,-100]
att = ground_eul * 180/np.pi
RF.setState(x_ground)
state = env.set_state("uav0", pos, att, [0,0,0], [0,0,0])["uav0"]
else:
# format state for holodeck
pos = x[:3] * [100,100,-100] # cm, negate z to convert to LH frame
att = Quaternion2Euler(x[3:7]) * 180/np.pi
vel = x[7:10] * [100,100,-100]
angvel = np.copy(x[10:13])
state = env.set_state("uav0", pos, att, vel, angvel)["uav0"]
collision = state['CollisionSensor']
elif collision:
# Use position given by holodeck
state = env.tick()["uav0"]
x = np.copy(x0)
x[:3] = state['LocationSensor'] * [1,1,-1]
x[7:10] = state['VelocitySensor'] * [1,1,-1]
RF.setState(x)
for k in range(10):
RF.run(dt)
time.sleep(dt*(n/10))
RF.getState(x)
RF.getOutputs(rf_outputs)
if x[9] < 0:
# gaining altitude, switch back to RF dynamics
collision = False
# Show UAV's camera feed
# frame = state['RGBCamera']
# cv2.imshow('Camera', frame)
# cv2.waitKey(1)
# For debugging
RF.getState(x) | 34.268908 | 92 | 0.540216 |
015b648e126b7003dc4ab0b7712ea0d4e285061d | 6,762 | py | Python | projects/Graphing_and_DataEntry/graphing_calculator.py | DavidsonNext/WWW | cf486e641e19d0b8c3823cceafc5389f0b3d6bb7 | [
"Apache-2.0"
] | null | null | null | projects/Graphing_and_DataEntry/graphing_calculator.py | DavidsonNext/WWW | cf486e641e19d0b8c3823cceafc5389f0b3d6bb7 | [
"Apache-2.0"
] | null | null | null | projects/Graphing_and_DataEntry/graphing_calculator.py | DavidsonNext/WWW | cf486e641e19d0b8c3823cceafc5389f0b3d6bb7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from IPython.display import HTML
# <markdowncell>
# #Graphing Calculator - inspired by AP Calculus
# Graphing calculators are permitted for use during "calculator active" problems on AP exams. Here we attempt to provide open access to those functionalities required for the AP exam.
#
# https://apstudent.collegeboard.org/apcourse/ap-calculus-ab/calculator-policy
#
# * Plot the graph of a function within an arbitrary viewing window
# * Find the zeros of functions (solve equations numerically)
# * Numerically calculate the derivative of a function
# * Numerically calculate the value of a definite integral
# <codecell>
%%HTML
<!DOCTYPE html>
<html>
<head>
<style>
</style>
</head>
<body>
<div id="menu" style="width: 270px; float: left;">
<ul>
<input class="txtbox" type="text" id="inputF" placeholder="Type a function f(x)" value="x^3+x^2-6*x" font-size: "10px">
<input class="btn" type="button" value="Plot" onClick="plotter()">
<!--
<input class="btn" type="button" value="add tangent" onClick="addTangent()">
-->
<input class="btn" type="button" value="Add Derivative" onClick="plotDerivative()">
<input class="btn" type="button" value="Clear All" onClick="clearAll()">
</ul>
<br></br>
<ul>
<input class="txtbox" type="text" id="inputZstart" placeholder="Start value for zero search" font-size: "6">
<input class="btn" type="button" value="find Zero" onClick="findZeroes()">
</ul>
</div>
<div id='jxgbox' class='jxgbox' style='width:450px; height:350px; margin-left: 180px; border: solid #1f628d 1px;'></div>
<script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jsxgraph/0.98/jsxgraphcore.js"></script>
<script type='text/javascript'>
var initBoundingBox = [-11,11,11,-11];
var board = JXG.JSXGraph.initBoard('jxgbox', {boundingbox:initBoundingBox, axis:true, showCopyright:false});
var f, curve; // global objects
function plotter() {
var txtraw = document.getElementById('inputF').value;
f = board.jc.snippet(txtraw, true, 'x', true);
curve = board.create('functiongraph',[f,
function(){
var c = new JXG.Coords(JXG.COORDS_BY_SCREEN,[0,0],board);
return c.usrCoords[1];
},
function(){
var c = new JXG.Coords(JXG.COORDS_BY_SCREEN,[board.canvasWidth,0],board);
return c.usrCoords[1];
}
],
{name:txtraw, withLabel:false});
var q = board.create('glider', [2, 3, curve], {withLabel:false});
var t = board.create('text', [
function(){ return q.X()+0.2; },
function(){ return q.Y()+0.1; },
//function(){ return "The slope of the function f(x)=" + txtraw + "<br>at x=" + q.X().toFixed(2) + " is equal to " + (JXG.Math.Numerics.D(f))(q.X()).toFixed(2); }
//function(){ return "f(x)=" + txtraw + "<br>f(x=" + q.X().toFixed(2) + ") = " + f(q.X()).toFixed(2)}
function(){ return "f(x=" + q.X().toFixed(2) + ") = " + f(q.X()).toFixed(2)}
],
{fontSize:15});
}
function clearAll() {
JXG.JSXGraph.freeBoard(board);
board = JXG.JSXGraph.initBoard('jxgbox', {boundingbox:initBoundingBox, axis:true, showCopyright:false});
f = null;
curve = null;
}
function addTangent() {
if (JXG.isFunction(f)) {
board.suspendUpdate();
var p = board.create('glider',[1,0,curve], {name:'drag me'});
board.create('tangent',[p], {name:'drag me'});
board.unsuspendUpdate();
}
}
function plotDerivative() {
if (JXG.isFunction(f)) {
board.create('functiongraph',[JXG.Math.Numerics.D(f),
function(){
var c = new JXG.Coords(JXG.COORDS_BY_SCREEN,[0,0],board);
return c.usrCoords[1];
},
function(){
var c = new JXG.Coords(JXG.COORDS_BY_SCREEN,[board.canvasWidth,0],board);
return c.usrCoords[1];
}], {dash:2});
}
}
function isNumeric(num){
return !isNaN(num)
}
function findZeroes() {
var zeroraw = document.getElementById('inputZstart').value;
if (JXG.isFunction(f) && isNumeric(zeroraw)) {
board.suspendUpdate();
var zero = JXG.Math.Numerics.fzero(f,parseFloat(zeroraw));
var f_zero = f(zero);
var p = board.create('point',[zero,f_zero], {name:'f(x='+zero.toFixed(2)+')=0.0',
strokeColor:'gray', face:'<>',fixed:true});
//board.create('tangent',[p], {name:'drag me'});
board.unsuspendUpdate();
}
}
function findNumDerivative() {
var zeroraw = document.getElementById('inputNumDerivative').value;
if (JXG.isFunction(f) && isNumeric(zeroraw)) {
board.suspendUpdate();
var zero = JXG.Math.Numerics.fzero(f,parseFloat(zeroraw));
var f_zero = f(zero);
var p = board.create('point',[zero,f_zero], {name:'a zero of the function',
strokeColor:'gray', face:'<>'});
//board.create('tangent',[p], {name:'drag me'});
board.unsuspendUpdate();
}
}
</script>
</body>
</html>
# <markdowncell>
# ### No Grading setup at this time.
# <codecell>
# <codecell>
| 40.981818 | 184 | 0.484028 |
015c37dadd907a823b8a57e0e66db4c591cdd0d4 | 3,294 | py | Python | pyinsar/processing/discovery/coherence.py | MITeaps/pyinsar | 4d22e3ef90ef842d6b390074a8b5deedc7658a2b | [
"MIT"
] | 8 | 2019-03-15T19:51:27.000Z | 2022-02-16T07:27:36.000Z | pyinsar/processing/discovery/coherence.py | MITeaps/pyinsar | 4d22e3ef90ef842d6b390074a8b5deedc7658a2b | [
"MIT"
] | 1 | 2022-02-08T03:48:56.000Z | 2022-02-09T01:33:27.000Z | pyinsar/processing/discovery/coherence.py | MITeaps/pyinsar | 4d22e3ef90ef842d6b390074a8b5deedc7658a2b | [
"MIT"
] | 2 | 2021-01-12T05:32:21.000Z | 2021-01-13T08:35:26.000Z | # The MIT License (MIT)
# Copyright (c) 2017 Massachusetts Institute of Technology
#
# Authors: Cody Rude
# This software is part of the NSF DIBBS Project "An Infrastructure for
# Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
# NASA AIST Project "Computer-Aided Discovery of Earth Surface
# Deformation Phenomena" (PI: V. Pankratius)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Standard library imports
from collections import OrderedDict
# scikit discovery imports
from skdiscovery.data_structure.framework.base import PipelineItem
# Pyinsar imports
from pyinsar.processing.utilities.generic import coherence
# Scikit data access imports
from skdaccess.utilities.support import progress_bar
class Coherence(PipelineItem):
    ''' Calculate coherence between single-look complex SAR images '''

    def __init__(self, str_description, window, pairing='neighbor', use_progress_bar = False):
        '''
        Initialize coherence pipeline item

        @param str_description: Short string describing item
        @param window: Tuple indicating the y and x window size used for
                       the coherence estimate
        @param pairing: How to pair slc images. "neighbor" computes
                        coherence between consecutive images; no other
                        scheme is currently implemented
        @param use_progress_bar: Display progress using a progress bar
        '''
        self.window = window
        self.pairing = pairing
        self.use_progress_bar = use_progress_bar

        super(Coherence, self).__init__(str_description,[])

    def process(self, obj_data):
        '''
        Compute the coherence between consecutive image pairs and store
        the results on the data wrapper.

        Results are keyed by the label of the *second* image of each
        pair.  If pairing is not "neighbor", an empty result set is
        stored.

        @param obj_data: Data wrapper
        '''
        results_dict = OrderedDict()

        if self.pairing == 'neighbor':
            # Two iterators over the same sequence, the second advanced
            # by one element, so zip() yields consecutive
            # (previous, current) pairs; zip stops at the shorter one.
            first_image_it = obj_data.getIterator()
            second_image_it = obj_data.getIterator()
            next(second_image_it)

            for (label1, image1), (label2, image2) in progress_bar(zip(first_image_it, second_image_it),
                                                                   total = len(obj_data)-1,
                                                                   enabled = self.use_progress_bar):
                results_dict[label2] = coherence(image1, image2, self.window)

        obj_data.addResult(self.str_description, results_dict)
| 38.302326 | 104 | 0.697936 |
015dfc2575ba43d461c6f8e236abfb9df3bf731f | 997 | py | Python | conf.py | Kurento/doc-fiware | 053537edec34fff65e7044f0310ac8c539e011a7 | [
"Apache-2.0"
] | null | null | null | conf.py | Kurento/doc-fiware | 053537edec34fff65e7044f0310ac8c539e011a7 | [
"Apache-2.0"
] | 1 | 2018-11-22T12:48:37.000Z | 2018-11-22T12:48:37.000Z | conf.py | Kurento/doc-fiware | 053537edec34fff65e7044f0310ac8c539e011a7 | [
"Apache-2.0"
] | 3 | 2018-05-13T09:46:50.000Z | 2018-12-31T13:06:48.000Z | # -*- coding: utf-8 -*-
#
# Sphinx configuration for the FIWARE Stream Oriented GE documentation.
# on_rtd is whether we are on readthedocs.org

import os
import sys
# Make the conf directory and the local extensions directory importable.
sys.path.insert(0, os.path.abspath('.'))
sys.path.append(os.path.abspath('extensions'))

# Sphinx extensions: graphviz/todo from sphinx itself, plus the local
# 'wikipedia' and 'examplecode' extensions from ./extensions.
extensions = [
    'sphinx.ext.graphviz',
    'sphinx.ext.todo',
    'wikipedia',
    'examplecode'
]

on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    html_theme_path = ['doc/themes']
    html_theme = 'sphinx_rtd_theme'
# otherwise, readthedocs.org uses their theme by default, so no need to specify it

# Using downloaded sphinx_rtd_theme
# import sphinx_rtd_theme
# html_theme = "sphinx_rtd_theme"
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

project = 'FIWARE-Stream-Oriented-GE'
master_doc = 'index'

# Extra CSS injected into every page (FIWARE branding on top of the theme).
html_context = {
    'css_files': [
        '_static/css/theme.css',
        'https://fiware.org/style/fiware_readthedocs.css',
        'https://fiware.org/style/fiware_readthedocs_media_streams.css'
    ]
}
| 24.925 | 82 | 0.706118 |
015e1ccb959f266ccad863b921198255b5930975 | 2,372 | py | Python | Day21/dice.py | squidbot/aoc2021 | aae5cf2017562150ec01d6cce5200ebc2b02f19d | [
"MIT"
] | null | null | null | Day21/dice.py | squidbot/aoc2021 | aae5cf2017562150ec01d6cce5200ebc2b02f19d | [
"MIT"
] | null | null | null | Day21/dice.py | squidbot/aoc2021 | aae5cf2017562150ec01d6cce5200ebc2b02f19d | [
"MIT"
] | null | null | null | test_input = [4, 8]
puzzle_input = [4, 9]
def part1(start=None):
    """AoC 2021 day 21 part 1: play with the deterministic 1..100 die.

    start -- optional (pos1, pos2) starting positions; defaults to the
             module-level puzzle_input.  The input is copied, never
             mutated (the original aliased and mutated puzzle_input).
    Prints and returns num_rolls * losing_score (the puzzle answer).
    """
    position = list(puzzle_input if start is None else start)
    score = [0, 0]
    die_roll = 0
    num_rolls = 0
    cur_player = 0
    while True:
        # Roll the deterministic die three times, wrapping 100 -> 1.
        die_total = 0
        for _ in range(3):
            num_rolls += 1
            die_roll += 1
            if die_roll > 100:
                die_roll = 1
            die_total += die_roll
        # Advance on the circular 1..10 board with a single modular step
        # instead of walking one square at a time.
        position[cur_player] = (position[cur_player] + die_total - 1) % 10 + 1
        score[cur_player] += position[cur_player]
        if score[cur_player] >= 1000:
            answer = num_rolls * min(score)
            print("Game over, p1 score", score[0], "p2 score", score[1],
                  "magic number", answer)
            return answer
        cur_player = 1 - cur_player
#part1()
import collections
def part2(start=None):
    """AoC 2021 day 21 part 2: count universes each player wins with the
    Dirac die.

    Tracks a multiset of game states (pos1, score1, pos2, score2) and
    advances all of them one turn at a time, weighting each 3-roll total
    by how many of the 27 universes produce it.  The duplicated
    per-player turn code of the original is folded into one loop that
    alternates players.

    start -- optional (pos1, pos2) starting positions; defaults to the
             module-level puzzle_input (copied, never mutated).
    Prints and returns (p1_wins, p2_wins).
    """
    position = list(puzzle_input if start is None else start)
    # Distribution of the sum of three rolls of a 1..3 die (27 universes).
    roll_dist = collections.Counter(i + j + k
                                    for i in range(1, 4)
                                    for j in range(1, 4)
                                    for k in range(1, 4))
    wins = [0, 0]
    game_states = collections.defaultdict(int)
    game_states[(position[0], 0, position[1], 0)] = 1
    player = 0
    while game_states:
        next_states = collections.defaultdict(int)
        for (pos1, score1, pos2, score2), count in game_states.items():
            for roll, freq in roll_dist.items():
                universes = count * freq
                if player == 0:
                    new_pos = (pos1 + roll - 1) % 10 + 1
                    new_score = score1 + new_pos
                    if new_score >= 21:
                        wins[0] += universes
                    else:
                        next_states[(new_pos, new_score, pos2, score2)] += universes
                else:
                    new_pos = (pos2 + roll - 1) % 10 + 1
                    new_score = score2 + new_pos
                    if new_score >= 21:
                        wins[1] += universes
                    else:
                        next_states[(pos1, score1, new_pos, new_score)] += universes
        game_states = next_states
        player = 1 - player
    print("p1 wins", wins[0], "p2 wins", wins[1])
    return tuple(wins)
part2()
| 32.493151 | 124 | 0.544266 |
015f0c33210496779ba5007e68d3fe09693fe4a9 | 1,666 | py | Python | visualizeSIM.py | Jhko725/ProteinStructureReconstruction.jl | 18ec2f5a63e3c07d4498da363a8befc86e7ad68c | [
"MIT"
] | null | null | null | visualizeSIM.py | Jhko725/ProteinStructureReconstruction.jl | 18ec2f5a63e3c07d4498da363a8befc86e7ad68c | [
"MIT"
] | null | null | null | visualizeSIM.py | Jhko725/ProteinStructureReconstruction.jl | 18ec2f5a63e3c07d4498da363a8befc86e7ad68c | [
"MIT"
] | null | null | null | from typing import Optional
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.axis import Axis
from matplotlib.patches import Rectangle
from superresolution import SIM_3D_Data
def make_axis_if_none(ax: Optional[Axis]) -> Axis:
    """Return *ax* unchanged, or a fresh single-axes 10x10-inch figure's
    axis when None was passed."""
    if ax is not None:
        return ax
    _, new_ax = plt.subplots(1, 1, figsize=(10, 10))
    return new_ax
def plot_overlay(SIM_image: SIM_3D_Data, plt_axis: Optional[Axis] = None, projection_dim: int = 0, **imshow_kwargs) -> Axis:
    """Show the mean-intensity projection of a SIM stack along
    *projection_dim* on *plt_axis* (a new axis is created when None);
    returns the axis used."""
    axis = make_axis_if_none(plt_axis)
    projection = np.mean(SIM_image.data, axis=projection_dim)
    axis.imshow(projection, **imshow_kwargs)
    axis.set_xlabel('x (pixels)')
    axis.set_ylabel('y (pixels)')
    axis.get_figure().tight_layout()
    return axis
def plot_selection_box(x_range, y_range, ax, **rectangle_kwargs):
    """Draw a rectangle covering x_range x y_range on *ax* and return
    the axis."""
    x_lo, x_hi = x_range[0], x_range[1]
    y_lo, y_hi = y_range[0], y_range[1]
    box = Rectangle((x_lo, y_lo), x_hi - x_lo, y_hi - y_lo,
                    **rectangle_kwargs)
    ax.add_patch(box)
    return ax
# TODO: add support for plotting slices along x and y axes as well.
# Will need to use transpose to swap that dimension with zero and proceed with the rest of the logic
def plot_slices(SIM_image: SIM_3D_Data, ncols: int, nrows: int, slice_dim: int = 0, **imshow_kwargs):
    """Plot ncols*nrows evenly spaced slices of a SIM stack in a grid.

    NOTE(review): slice_dim only affects num_slices; the data is always
    indexed along dimension 0 (see TODO above), so only slice_dim == 0
    behaves as intended.  Also, the returned `ax` is the *last* axis
    drawn -- presumably the full `axes` array was intended; confirm with
    callers before changing the return value.
    """
    fig, axes = plt.subplots(nrows, ncols, figsize = (10, 10))
    num_slices = SIM_image.shape[slice_dim]
    # Evenly spaced slice indices over [0, num_slices), floored to ints.
    plot_inds = np.linspace(0, num_slices, ncols*nrows, endpoint = False)
    plot_inds = np.int_(np.floor(plot_inds))
    for i, ax in zip(plot_inds, axes.flat):
        ax.imshow(SIM_image.data[i], **imshow_kwargs)
        ax.set_title(f'Slice #{i}/{num_slices}')
    return fig, ax
| 36.217391 | 124 | 0.696279 |
01628e3852b55e9865865ff86cdf3e6ad3323fe8 | 1,097 | py | Python | misc/disablepasscomplexity.py | brianfinley/confluent | 6458eac93b1e3c6d45e26a7ddb434d692b5cdff2 | [
"Apache-2.0"
] | 27 | 2015-02-11T13:56:46.000Z | 2021-12-28T14:17:20.000Z | misc/disablepasscomplexity.py | brianfinley/confluent | 6458eac93b1e3c6d45e26a7ddb434d692b5cdff2 | [
"Apache-2.0"
] | 32 | 2015-09-23T13:19:04.000Z | 2022-03-15T13:50:45.000Z | misc/disablepasscomplexity.py | brianfinley/confluent | 6458eac93b1e3c6d45e26a7ddb434d692b5cdff2 | [
"Apache-2.0"
] | 24 | 2015-07-14T20:41:55.000Z | 2021-07-15T04:18:51.000Z | #!/usr/bin/python2
import pyghmi.util.webclient as webclient
import json
import os
import sys
# Script: log in to a Lenovo XCC BMC (host given as argv[1]) and disable
# the global password-complexity requirement via the /api/dataset call.
# Credentials come from the XCCUSER / XCCPASS environment variables.
missingargs = False
if 'XCCUSER' not in os.environ:
    print('Must set XCCUSER environment variable')
    missingargs = True
if 'XCCPASS' not in os.environ:
    print('Must set XCCPASS environment variable')
    missingargs = True
if missingargs:
    sys.exit(1)
# NOTE(review): verifycallback always returns True, i.e. the server's TLS
# certificate is NOT verified -- acceptable only for trusted networks.
w = webclient.SecureHTTPConnection(sys.argv[1], 443, verifycallback=lambda x: True)
w.connect()
adata = json.dumps({'username': os.environ['XCCUSER'], 'password': os.environ['XCCPASS']})
headers = {'Connection': 'keep-alive', 'Content-Type': 'application/json'}
w.request('POST', '/api/login', adata, headers)
rsp = w.getresponse()
if rsp.status == 200:
    # Logged in: carry the bearer token (and CSRF token if present) on
    # subsequent requests, then flip the complexity setting off ('0').
    rspdata = json.loads(rsp.read())
    w.set_header('Content-Type', 'application/json')
    w.set_header('Authorization', 'Bearer ' + rspdata['access_token'])
    if '_csrf_token' in w.cookies:
        w.set_header('X-XSRF-TOKEN', w.cookies['_csrf_token'])
    print(repr(w.grab_json_response('/api/dataset', {
        'USER_GlobalPassComplexRequired': '0',
    })))
| 33.242424 | 90 | 0.688241 |
0162a9072742aec24078e0ec6e07600eec4b5259 | 193 | py | Python | get_topics.py | FoamyGuy/CircuitPython_Repo_Topics | 9a606e9549bcd663d6290c0648466022c1b964db | [
"MIT"
] | null | null | null | get_topics.py | FoamyGuy/CircuitPython_Repo_Topics | 9a606e9549bcd663d6290c0648466022c1b964db | [
"MIT"
] | null | null | null | get_topics.py | FoamyGuy/CircuitPython_Repo_Topics | 9a606e9549bcd663d6290c0648466022c1b964db | [
"MIT"
] | null | null | null | from github import Github
from my_token import token
# Fetch and print the GitHub topic list of one repository, using a
# client authenticated with the token imported from my_token.
g = Github(token)
repo = g.get_repo("adafruit/Adafruit_CircuitPython_Display_Text")
repo_topics = repo.get_topics()
print(repo_topics)
| 16.083333 | 65 | 0.797927 |
0163e63b0f0a4b6a54cef6dce6ac42cdbc68fb82 | 1,200 | py | Python | tests/test_write_simple.py | ZELLMECHANIK-DRESDEN/fcswrite | 3b696a0fd4a34f7d3999d4e28bd7981fe38494d2 | [
"BSD-3-Clause"
] | 8 | 2018-03-15T00:04:47.000Z | 2021-11-15T09:32:18.000Z | tests/test_write_simple.py | ZELLMECHANIK-DRESDEN/fcswrite | 3b696a0fd4a34f7d3999d4e28bd7981fe38494d2 | [
"BSD-3-Clause"
] | 6 | 2017-05-03T10:19:55.000Z | 2021-11-17T01:57:56.000Z | tests/test_write_simple.py | ZELLMECHANIK-DRESDEN/fcswrite | 3b696a0fd4a34f7d3999d4e28bd7981fe38494d2 | [
"BSD-3-Clause"
] | 2 | 2018-06-28T19:18:01.000Z | 2018-11-05T15:20:04.000Z | import hashlib
import os
import tempfile
import numpy as np
import fcswrite
def test_write_fcs():
    """Round-trip check: write a known dataset with fcswrite and compare
    the MD5 of the produced file against a golden hash."""
    fname = tempfile.mktemp(suffix=".fcs", prefix="write_test")
    data = 1.0*np.arange(400).reshape((100, 4))
    chn_names = ['cha', 'chb', 'ch3', 'ch4']
    # monkey-patch fcswrite version to have reproducible result.
    # Save the attribute that is actually overwritten below, so the
    # restore puts back the right value (the original saved
    # fcswrite.__version__ but restored into fcswrite.fcswrite.version).
    oldver = fcswrite.fcswrite.version
    fcswrite.fcswrite.version = "0.5.0"
    try:
        fcswrite.write_fcs(filename=fname,
                           chn_names=chn_names,
                           data=data
                           )
    finally:
        # write back correct version even if write_fcs raised
        fcswrite.fcswrite.version = oldver
    try:
        with open(fname, "rb") as fd:
            data = fd.read()
        data = np.frombuffer(data, dtype=np.uint8)
        # remove empty lines
        # NOTE(review): with uint8 elements the comparison to 8224
        # (0x2020) is always True, so nothing is actually filtered; the
        # golden hash below was generated with this behaviour, so do not
        # "fix" the dtype without regenerating the hash.
        data = data[data != 8224]
        # .tostring() was removed from modern NumPy; tobytes() is the
        # supported, byte-identical equivalent.
        data = data.tobytes()
        hasher = hashlib.md5()
        hasher.update(data)
        hexval = hasher.hexdigest()
        assert hexval == "2b4fdb7012b0693285c31aa91c606216"
    finally:
        # Always clean up the temp file, even on assertion failure.
        if os.path.exists(fname):
            os.remove(fname)
if __name__ == "__main__":
# Run all tests
loc = locals()
for key in list(loc.keys()):
if key.startswith("test_") and hasattr(loc[key], "__call__"):
loc[key]()
| 27.906977 | 69 | 0.615833 |
0165ca0a608b2f11c5571565ecd2b89540a7f4ec | 1,810 | py | Python | adam_visual_perception/preprocessor.py | isi-vista/adam-visual-perception | 8ad6ed883b184b5407a1bf793617b226c78b3a13 | [
"MIT"
] | 1 | 2020-07-21T10:52:26.000Z | 2020-07-21T10:52:26.000Z | adam_visual_perception/preprocessor.py | isi-vista/adam-visual-perception | 8ad6ed883b184b5407a1bf793617b226c78b3a13 | [
"MIT"
] | null | null | null | adam_visual_perception/preprocessor.py | isi-vista/adam-visual-perception | 8ad6ed883b184b5407a1bf793617b226c78b3a13 | [
"MIT"
] | 2 | 2020-07-21T15:30:42.000Z | 2021-01-20T21:54:09.000Z | from moviepy.editor import VideoFileClip
from datetime import datetime
import numpy as np
import time
import os
def manage_time(timestamp):
    """
    Convert a "minutes:seconds[:sub-seconds]" timestamp string into a
    number of seconds.

    NOTE(review): the optional third field is divided by 60 (i.e. treated
    as 1/60ths of a second, e.g. frames at 60 fps) even though the
    original docstring called it milliseconds -- confirm the intended
    unit before relying on it.
    """
    parts = timestamp.split(":")
    total = int(parts[0]) * 60 + int(parts[1])
    if len(parts) == 3:
        total += int(parts[2]) / 60
    return total
def preprocess_video(filename, start, end, target_name, audio, codec=None):
    """
    Cut *filename* between the *start* and *end* timestamps (in
    "min:sec[:sub]" form) and write the result to *target_name*,
    optionally keeping the audio track and re-encoding with *codec*.
    """
    source = VideoFileClip(filename)
    begin_seconds = manage_time(start)
    end_seconds = manage_time(end)
    cut = source.subclip(begin_seconds, end_seconds)
    cut.write_videofile(target_name, audio=audio, codec=codec)
def concatenate_videos(base="", ext="mp4"):
stringa = 'ffmpeg -i "concat:'
elenco_video = glob.glob(base + "*." + ext)
elenco_file_temp = []
for f in elenco_video:
file = "temp" + str(elenco_video.index(f) + 1) + ".ts"
os.system(
"ffmpeg -i " + f + " -c copy -bsf:v h264_mp4toannexb -f mpegts " + file
)
elenco_file_temp.append(file)
print(elenco_file_temp)
for f in elenco_file_temp:
stringa += f
if elenco_file_temp.index(f) != len(elenco_file_temp) - 1:
stringa += "|"
else:
stringa += '" -c copy -bsf:a aac_adtstoasc output.mp4'
print(stringa)
os.system(stringa)
| 29.672131 | 83 | 0.654696 |
0165f80525bcd690617df14c805c36b82363c9f9 | 119 | py | Python | experiments/localization.py | seba-1511/cervix.kaggle | 5bf956a85481a961fb9af237aba2d2254cf6921a | [
"Apache-2.0"
] | null | null | null | experiments/localization.py | seba-1511/cervix.kaggle | 5bf956a85481a961fb9af237aba2d2254cf6921a | [
"Apache-2.0"
] | null | null | null | experiments/localization.py | seba-1511/cervix.kaggle | 5bf956a85481a961fb9af237aba2d2254cf6921a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
def get_localization(args):
    """Placeholder: fit/compute the localization centers (not yet
    implemented)."""
    # TODO: Implement the localization fitting the centers
    pass
| 19.833333 | 58 | 0.731092 |
0166496ddadc3242ff31aa5f6df7726900b884d0 | 505 | py | Python | test/demo-random.py | ruth-ann/snap-python | fe98de7b5697b3d60eb3497893e24801ae1916f9 | [
"BSD-3-Clause"
] | 242 | 2015-01-01T08:40:28.000Z | 2022-03-18T05:22:09.000Z | test/demo-random.py | ruth-ann/snap-python | fe98de7b5697b3d60eb3497893e24801ae1916f9 | [
"BSD-3-Clause"
] | 99 | 2015-01-24T07:55:27.000Z | 2021-10-30T18:20:13.000Z | test/demo-random.py | ruth-ann/snap-python | fe98de7b5697b3d60eb3497893e24801ae1916f9 | [
"BSD-3-Clause"
] | 105 | 2015-03-03T06:45:17.000Z | 2022-02-24T15:52:40.000Z | import snap
# Demo: draw random node ids from a 100-node graph using an explicitly
# seeded SNAP random generator.
G = snap.GenFull(snap.PNEANet, 100)
# get a new random generator, provide the seed value
Rnd = snap.TRnd(42)
# randomize the generator, every execution will produce a different sequence.
# Comment out the line to get the same sequence on every execution.
Rnd.Randomize()
for i in range(0,10):
    # provide the random generator as a parameter to the function
    NId = G.GetRndNId(Rnd)
    print(NId)
# result is not well formed, the following statement fails
#print(NI.GetId())
| 25.25 | 77 | 0.724752 |
01685654bd4a20e989dd7468c19a080303cdaf20 | 1,846 | py | Python | learn-python/basics/basicCommands.py | pradeep-charism/python-projects | 5933bbbc309e6e271701ac2643a657e0928e7090 | [
"MIT"
] | null | null | null | learn-python/basics/basicCommands.py | pradeep-charism/python-projects | 5933bbbc309e6e271701ac2643a657e0928e7090 | [
"MIT"
] | null | null | null | learn-python/basics/basicCommands.py | pradeep-charism/python-projects | 5933bbbc309e6e271701ac2643a657e0928e7090 | [
"MIT"
] | null | null | null | print("Thank you Jesus")
# Read a value from standard input a value
# input("Thank you")
# Evaluate expression
x = 1
print(x)
x += 3
print(x)
# loops
if x > 1:
print("great than 1")
else:
print("less than 1")
n = 3
while n > 1:
print(n)
n -= 1
# Arithmetic operator
print({100 % 3}, {100 / 3})
z = 1
print(z, type(z))
z = "dsf"
print(z, type(z))
print(17 // 3) # floor division discards the fractional part
print(5 ** 2) # 5 squared
# use raw strings by adding an r before the first quote
print(r'C:\some\name')
# String literals can span multiple lines. One way is using triple-quotes: """...""" or '''...'''
print("""\
dsfdsfds
ddsf
dfdf
dfdfds
""")
w = 'thanks'
print(w[:3] + w[3:])
# All slice operations return a new list containing the requested elements.
# This means that the following slice returns a new (shallow) copy of the list:
squares = [1, 4, 9, 16, 25]
for n in squares:
print(n, end='-')
if n == 16:
squares.insert(n, 100)
print('\n')
print(squares[:])
squares.append(10)
print(squares[:])
squares = []
print(squares[:])
# Fibonacci
a, b = 0, 1
while b < 25:
print(b)
a, b = b, a + b
# Fibonacci
a, b = 0, 1
while b < 1000:
print(b, end=',')
a, b = b, a + b
print('\n')
# Loops
for i in range(3, 15, 4):
print(i)
a = ['Mary', 'had', 'a', 'little', 'lamb']
for i in range(len(a)):
print(i, a[i])
print(list(range(5)))
def f(ab, l=[]):
    # NOTE: deliberate demonstration of the mutable-default-argument
    # pitfall -- the default list is created once at definition time and
    # shared across calls, so each call below appends to the same list.
    l.append(ab)
    return l
print(f(1))  # [1]
print(f(2))  # [1, 2]
print(f(3))  # [1, 2, 3]
# if __name__ == "__main__":
# import sys
#
# print(int(sys.argv[1]))
import sys
print(dir(sys))
print('12'.zfill(5))
print('We are the {} who say "{}!"'.format('knights', 'Ni'))
print('{0} and {1}'.format('spam', 'eggs'))
# Formatting: https://docs.python.org/3.6/tutorial/inputoutput.html
quit(1)
| 15.644068 | 97 | 0.583965 |